Dataset columns (name, type, and length / size statistics):

query            stringlengths    7 to 3.85k
document         stringlengths    11 to 430k
metadata         dict
negatives        sequencelengths  0 to 101
negative_scores  sequencelengths  0 to 101
document_score   stringlengths    3 to 10
document_rank    stringclasses    102 values
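A record in this dataset can be sketched as a Go struct whose fields mirror the columns above. This is a minimal, hedged sketch: it assumes the rows are exported as JSON lines with keys named exactly like the columns, and the element type of negative_scores is assumed to be float64 (the column summary gives only lengths, not element types). The first example record is reproduced after this sketch.

package main

import (
	"bufio"
	"encoding/json"
	"fmt"
	"os"
)

// Record mirrors the column list above; the types chosen for the score and
// rank fields are assumptions, not taken from a published schema.
type Record struct {
	Query          string         `json:"query"`           // doc comment used as the search query
	Document       string         `json:"document"`        // the matching (positive) function body
	Metadata       map[string]any `json:"metadata"`        // free-form dict, e.g. the training objective
	Negatives      []string       `json:"negatives"`       // up to 101 non-matching function bodies
	NegativeScores []float64      `json:"negative_scores"` // scores aligned with Negatives (element type assumed)
	DocumentScore  string         `json:"document_score"`  // score of the positive document (string column)
	DocumentRank   string         `json:"document_rank"`   // rank class of the positive document
}

func main() {
	sc := bufio.NewScanner(os.Stdin)
	sc.Buffer(make([]byte, 0, 1<<20), 1<<29) // documents can run to ~430k characters
	for sc.Scan() {
		var r Record
		if err := json.Unmarshal(sc.Bytes(), &r); err != nil {
			fmt.Fprintln(os.Stderr, "skipping malformed row:", err)
			continue
		}
		fmt.Printf("query=%q negatives=%d\n", r.Query, len(r.Negatives))
	}
}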
query:

Print dumps the histogram (and counter) to the provided writer. Also calculates the percentile.
document:

func (e *HistogramData) Print(out io.Writer, msg string) {
	if len(e.Data) == 0 {
		_, _ = fmt.Fprintf(out, "%s : no data\n", msg) // nolint: gas
		return
	}
	// the base counter part:
	_, _ = fmt.Fprintf(out, "%s : count %d avg %.8g +/- %.4g min %g max %g sum %.9g\n",
		msg, e.Count, e.Avg, e.StdDev, e.Min, e.Max, e.Sum)
	_, _ = fmt.Fprintln(out, "# range, mid point, percentile, count")
	sep := ">="
	for i, b := range e.Data {
		if i > 0 {
			sep = ">" // last interval is inclusive (of max value)
		}
		_, _ = fmt.Fprintf(out, "%s %.6g <= %.6g , %.6g , %.2f, %d\n",
			sep, b.Start, b.End, (b.Start+b.End)/2., b.Percent, b.Count)
	}
	// print the information of target percentiles
	for _, p := range e.Percentiles {
		_, _ = fmt.Fprintf(out, "# target %g%% %.6g\n", p.Percentile, p.Value) // nolint: gas
	}
}
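For context, here is a minimal usage sketch for the document above. It leans on the wrapper shown first in the negatives list further below (Histogram.Print exports the data, computes the requested percentiles, and then calls HistogramData.Print), and it assumes this code comes from fortio's stats package; the import path, the NewHistogram constructor, and the parameter values are assumptions to be checked against the actual library.

package main

import (
	"os"

	"fortio.org/fortio/stats" // assumed import path
)

func main() {
	// Assumed constructor: offset 0, divider 10 controls how values are bucketed.
	h := stats.NewHistogram(0, 10)
	for _, v := range []float64{5, 7, 12, 18, 25, 40, 41, 60, 75, 90} {
		h.Record(v)
	}
	// Export, compute the target percentiles, then print via the method above.
	h.Print(os.Stdout, "latency ms", []float64{50, 90, 99})
}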
metadata:

{
  "objective": {
    "self": [],
    "paired": [],
    "triplet": [["query", "document", "negatives"]]
  }
}
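The metadata declares a triplet objective pairing query (anchor), document (positive), and each entry of negatives (negative). Below is a minimal, self-contained sketch of expanding one record into such triplets; the Triplet type and toTriplets helper are illustrative names, not part of the dataset. The truncated list that closes this record is the negatives column: other histogram- and printing-related Go functions serving as hard negatives.

package main

import "fmt"

// Triplet is one (anchor, positive, negative) training example built per the
// declared objective.
type Triplet struct {
	Anchor, Positive, Negative string
}

// toTriplets pairs the query with the positive document once per negative.
func toTriplets(query, document string, negatives []string) []Triplet {
	out := make([]Triplet, 0, len(negatives))
	for _, neg := range negatives {
		out = append(out, Triplet{Anchor: query, Positive: document, Negative: neg})
	}
	return out
}

func main() {
	ts := toTriplets(
		"Print dumps the histogram (and counter) to the provided writer.",
		"func (e *HistogramData) Print(out io.Writer, msg string) { /* ... */ }",
		[]string{"func (h *Histogram) Print(out io.Writer, msg string, percentiles []float64) { /* ... */ }"},
	)
	fmt.Println(len(ts), "triplet(s)")
}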
[ "func (h *Histogram) Print(out io.Writer, msg string, percentiles []float64) {\n\th.Export().CalcPercentiles(percentiles).Print(out, msg)\n}", "func (v HistogramValue) Print(w io.Writer) {\n\tavg := float64(v.Sum) / float64(v.Count)\n\tfmt.Fprintf(w, \"Count: %d Min: %d Max: %d Avg: %.2f\\n\", v.Count, v.Min, v.Max, avg)\n\tfmt.Fprintf(w, \"%s\\n\", strings.Repeat(\"-\", 60))\n\tif v.Count <= 0 {\n\t\treturn\n\t}\n\n\tmaxBucketDigitLen := len(strconv.FormatFloat(v.Buckets[len(v.Buckets)-1].LowBound, 'f', 6, 64))\n\tif maxBucketDigitLen < 3 {\n\t\t// For \"inf\".\n\t\tmaxBucketDigitLen = 3\n\t}\n\tmaxCountDigitLen := len(strconv.FormatInt(v.Count, 10))\n\tpercentMulti := 100 / float64(v.Count)\n\n\taccCount := int64(0)\n\tfor i, b := range v.Buckets {\n\t\tfmt.Fprintf(w, \"[%*f, \", maxBucketDigitLen, b.LowBound)\n\t\tif i+1 < len(v.Buckets) {\n\t\t\tfmt.Fprintf(w, \"%*f)\", maxBucketDigitLen, v.Buckets[i+1].LowBound)\n\t\t} else {\n\t\t\tfmt.Fprintf(w, \"%*s)\", maxBucketDigitLen, \"inf\")\n\t\t}\n\n\t\taccCount += b.Count\n\t\tfmt.Fprintf(w, \" %*d %5.1f%% %5.1f%%\", maxCountDigitLen, b.Count, float64(b.Count)*percentMulti, float64(accCount)*percentMulti)\n\n\t\tconst barScale = 0.1\n\t\tbarLength := int(float64(b.Count)*percentMulti*barScale + 0.5)\n\t\tfmt.Fprintf(w, \" %s\\n\", strings.Repeat(\"#\", barLength))\n\t}\n}", "func (m *Measurement) PrintStats(w io.Writer) {\n\ttype Hist struct {\n\t\t*Result\n\t\t*hrtime.Histogram\n\t}\n\n\thists := []Hist{}\n\tfor _, result := range m.Results {\n\t\thists = append(hists, Hist{\n\t\t\tResult: result,\n\t\t\tHistogram: hrtime.NewDurationHistogram(result.Durations, &hrtime.HistogramOptions{\n\t\t\t\tBinCount: 10,\n\t\t\t\tNiceRange: true,\n\t\t\t\tClampMaximum: 0,\n\t\t\t\tClampPercentile: 0.999,\n\t\t\t}),\n\t\t})\n\t}\n\n\tmsec := func(ns float64) string {\n\t\treturn fmt.Sprintf(\"%.2f\", ns/1e6)\n\t}\n\n\tfor _, hist := range hists {\n\t\tfmt.Fprintf(w, \"%v\\t%v\\t%v\\t%v\\t%v\\t%v\\t%v\\t%v\\n\",\n\t\t\tm.Parts, m.Segments, hist.Name,\n\t\t\tmsec(hist.Average),\n\t\t\tmsec(hist.Maximum),\n\t\t\tmsec(hist.P50),\n\t\t\tmsec(hist.P90),\n\t\t\tmsec(hist.P99),\n\t\t)\n\t}\n}", "func (hist *Histogram) WriteTo(w io.Writer) (int64, error) {\n\twritten, err := hist.WriteStatsTo(w)\n\tif err != nil {\n\t\treturn written, err\n\t}\n\n\t// TODO: use consistently single unit instead of multiple\n\tmaxCountLength := 3\n\tfor i := range hist.Bins {\n\t\tx := (int)(math.Ceil(math.Log10(float64(hist.Bins[i].Count + 1))))\n\t\tif x > maxCountLength {\n\t\t\tmaxCountLength = x\n\t\t}\n\t}\n\n\tvar n int\n\tfor _, bin := range hist.Bins {\n\t\tif bin.andAbove {\n\t\t\tn, err = fmt.Fprintf(w, \" %10v+[%[2]*[3]v] \", time.Duration(round(bin.Start, 3)), maxCountLength, bin.Count)\n\t\t} else {\n\t\t\tn, err = fmt.Fprintf(w, \" %10v [%[2]*[3]v] \", time.Duration(round(bin.Start, 3)), maxCountLength, bin.Count)\n\t\t}\n\n\t\twritten += int64(n)\n\t\tif err != nil {\n\t\t\treturn written, err\n\t\t}\n\n\t\twidth := float64(hist.Width) * bin.Width\n\t\tfrac := width - math.Trunc(width)\n\n\t\tn, err = io.WriteString(w, strings.Repeat(\"█\", int(width)))\n\t\twritten += int64(n)\n\t\tif err != nil {\n\t\t\treturn written, err\n\t\t}\n\n\t\tif frac > 0.5 {\n\t\t\tn, err = io.WriteString(w, `▌`)\n\t\t\twritten += int64(n)\n\t\t\tif err != nil {\n\t\t\t\treturn written, err\n\t\t\t}\n\t\t}\n\n\t\tn, err = fmt.Fprintf(w, \"\\n\")\n\t\twritten += int64(n)\n\t\tif err != nil {\n\t\t\treturn written, err\n\t\t}\n\t}\n\treturn written, nil\n}", "func printStats(stats 
[]statisic, hash string) {\n\tw := tabwriter.NewWriter(os.Stdout, 0, 0, 1, ' ', tabwriter.AlignRight|tabwriter.Debug)\n\n\tfmt.Fprintf(w, \"%s(w=%d):\\n\", hash, sketchWidth)\n\tfmt.Fprintf(w, \"data set\\tmax. abs.\\tavg. abs.\\tmax. rel.\\tavg. rel.\\t# exact\\n\")\n\n\tfor i := 0; i < len(stats); i++ {\n\t\tstat := stats[i]\n\t\tfmt.Fprintf(w, \"%s\\t%d\\t%d\\t%.2f\\t%.2f\\t%d\\n\", filePaths[i], stat.maxAbs, stat.avgAbs, stat.maxRel, stat.avgRel, 100-stat.misses)\n\t}\n\tfmt.Fprintln(w)\n\tw.Flush()\n}", "func WriteMetrics(r metrics.Registry, w io.Writer) {\n\tvar namedMetrics namedMetricSlice\n\tr.Each(func(name string, i interface{}) {\n\t\tnamedMetrics = append(namedMetrics, namedMetric{name, i})\n\t})\n\n\tsort.Sort(namedMetrics)\n\tfor _, namedMetric := range namedMetrics {\n\t\tswitch metric := namedMetric.m.(type) {\n\t\tcase metrics.Counter:\n\t\t\tfmt.Fprintf(w, \"counter %s\\n\", namedMetric.name)\n\t\t\tfmt.Fprintf(w, \" count: %9d\\n\", metric.Count())\n\t\tcase metrics.Gauge:\n\t\t\tfmt.Fprintf(w, \"gauge %s\\n\", namedMetric.name)\n\t\t\tfmt.Fprintf(w, \" value: %9d\\n\", metric.Value())\n\t\tcase metrics.GaugeFloat64:\n\t\t\tfmt.Fprintf(w, \"gauge %s\\n\", namedMetric.name)\n\t\t\tfmt.Fprintf(w, \" value: %f\\n\", metric.Value())\n\t\tcase metrics.Healthcheck:\n\t\t\tmetric.Check()\n\t\t\tfmt.Fprintf(w, \"healthcheck %s\\n\", namedMetric.name)\n\t\t\tfmt.Fprintf(w, \" error: %v\\n\", metric.Error())\n\t\tcase metrics.Histogram:\n\t\t\th := metric.Snapshot()\n\t\t\tps := h.Percentiles([]float64{0.5, 0.75, 0.95, 0.99, 0.999})\n\t\t\tfmt.Fprintf(w, \"histogram %s\\n\", namedMetric.name)\n\t\t\tfmt.Fprintf(w, \" count=%d, mean=%.2f, stddef=%.2f\\n\", h.Count(), h.Mean(), h.StdDev())\n\t\t\tfmt.Fprintf(w, \" min=%.2fms median=%.2fms max=%.2fms\\n\",\n\t\t\t\tfloat64(h.Min())/float64(time.Millisecond),\n\t\t\t\tps[0]/float64(time.Millisecond),\n\t\t\t\tfloat64(h.Max())/float64(time.Millisecond))\n\t\t\tfmt.Fprintf(w, \" %%iles (ms): 75=%.2f 95=%.2f 99=%.2f 99.9=%.2f\\n\",\n\t\t\t\tps[1]/float64(time.Millisecond),\n\t\t\t\tps[2]/float64(time.Millisecond),\n\t\t\t\tps[3]/float64(time.Millisecond),\n\t\t\t\tps[4]/float64(time.Millisecond))\n\t\tcase metrics.Meter:\n\t\t\tm := metric.Snapshot()\n\t\t\tfmt.Fprintf(w, \"meter %s\\n\", namedMetric.name)\n\t\t\tfmt.Fprintf(w, \" count: %d\\n\", m.Count())\n\t\t\tfmt.Fprintf(w, \" rates: 1m=%.2f 5m=%.2f 15m=%.2f mean=%.2f\\n\", m.Rate1(), m.Rate5(), m.Rate15(), m.RateMean())\n\t\tcase metrics.Timer:\n\t\t\tt := metric.Snapshot()\n\t\t\tps := t.Percentiles([]float64{0.5, 0.75, 0.95, 0.99, 0.999})\n\t\t\tfmt.Fprintf(w, \"timer %s\\n\", namedMetric.name)\n\t\t\tfmt.Fprintf(w, \" count=%d, mean=%.2fms, stddev=%.2fms\\n\",\n\t\t\t\tt.Count(), t.Mean()/float64(time.Millisecond), t.StdDev()/float64(time.Millisecond))\n\t\t\tfmt.Fprintf(w, \" min=%.2fms median=%.2fms max=%.2fms\\n\",\n\t\t\t\tfloat64(t.Min())/float64(time.Millisecond),\n\t\t\t\tps[0]/float64(time.Millisecond),\n\t\t\t\tfloat64(t.Max())/float64(time.Millisecond))\n\t\t\tfmt.Fprintf(w, \" %%iles (ms): 75=%.2f 95=%.2f 99=%.2f 99.9=%.2f\\n\",\n\t\t\t\tps[1]/float64(time.Millisecond),\n\t\t\t\tps[2]/float64(time.Millisecond),\n\t\t\t\tps[3]/float64(time.Millisecond),\n\t\t\t\tps[4]/float64(time.Millisecond))\n\t\t\tfmt.Fprintf(w, \" rates: 1m=%.2f 5m=%.2f 15m=%.2f mean=%.2f\\n\", t.Rate1(), t.Rate5(), t.Rate15(), t.RateMean())\n\t\t}\n\t}\n}", "func (te *TelemetryEmitter) emitHistogram(metric Metric, timestamp time.Time) error {\n\thist, ok := metric.value.(*dto.Histogram)\n\tif 
!ok {\n\t\treturn fmt.Errorf(\"unknown histogram metric type for %q: %T\", metric.name, metric.value)\n\t}\n\n\tif m, ok := te.deltaCalculator.CountMetric(metric.name+\".sum\", metric.attributes, hist.GetSampleSum(), timestamp); ok {\n\t\tte.harvester.RecordMetric(m)\n\t}\n\n\tmetricName := metric.name + \".buckets\"\n\tbuckets := make(histogram.Buckets, 0, len(hist.Bucket))\n\tfor _, b := range hist.GetBucket() {\n\t\tupperBound := b.GetUpperBound()\n\t\tcount := float64(b.GetCumulativeCount())\n\t\tif !math.IsInf(upperBound, 1) {\n\t\t\tbucketAttrs := copyAttrs(metric.attributes)\n\t\t\tbucketAttrs[\"histogram.bucket.upperBound\"] = upperBound\n\t\t\tif m, ok := te.deltaCalculator.CountMetric(metricName, bucketAttrs, count, timestamp); ok {\n\t\t\t\tte.harvester.RecordMetric(m)\n\t\t\t}\n\t\t}\n\t\tbuckets = append(\n\t\t\tbuckets,\n\t\t\thistogram.Bucket{\n\t\t\t\tUpperBound: upperBound,\n\t\t\t\tCount: count,\n\t\t\t},\n\t\t)\n\t}\n\n\tvar results error\n\tmetricName = metric.name + \".percentiles\"\n\tfor _, p := range te.percentiles {\n\t\tv, err := histogram.Percentile(p, buckets)\n\t\tif err != nil {\n\t\t\tif results == nil {\n\t\t\t\tresults = err\n\t\t\t} else {\n\t\t\t\tresults = fmt.Errorf(\"%v: %w\", err, results)\n\t\t\t}\n\t\t\tcontinue\n\t\t}\n\n\t\tpercentileAttrs := copyAttrs(metric.attributes)\n\t\tpercentileAttrs[\"percentile\"] = p\n\t\tte.harvester.RecordMetric(telemetry.Gauge{\n\t\t\tName: metricName,\n\t\t\tAttributes: percentileAttrs,\n\t\t\tValue: v,\n\t\t\tTimestamp: timestamp,\n\t\t})\n\t}\n\n\treturn results\n}", "func (w *progressWriter) Write(p []byte) (n int, e error) {\n\tn = len(p)\n\tw.current += int64(n)\n\tif !w.silent {\n\t\tpercent := float64(w.current) * 100 / float64(w.total)\n\t\tfmt.Printf(\"\\rReceived %d bytes in %d (%d%%)\", w.current, w.total, int(percent))\n\t}\n\treturn\n}", "func (s Set) PrintBasicHistogram() {\r\n\tsorted := make(Set, len(s))\r\n\tcopy(sorted, s)\r\n\tsorted.Sort()\r\n\r\n\tstd := s.GetStd()\r\n\tmean := s.GetMean()\r\n\r\n\tfor s := -3.; s < 3; s++ {\r\n\t\tmin, max := mean+s*std, mean+(s+1)*std\r\n\t\tgap := 100.0 * (float64(sorted.IndicesBetween(min, max)) / float64(len(sorted)))\r\n\t\tfmt.Printf(\"Between %v and %v stds: %v%% of the data\\n\", s, s+1, gap)\r\n\t}\r\n}", "func (h *PCPHistogram) Percentile(p float64) int64 { return h.h.ValueAtQuantile(p) }", "func (hist *Histogram) WriteStatsTo(w io.Writer) (int64, error) {\n\tn, err := fmt.Fprintf(w, \" avg %v; min %v; p50 %v; max %v;\\n p90 %v; p99 %v; p999 %v; p9999 %v;\\n\",\n\t\ttime.Duration(truncate(hist.Average, 3)),\n\t\ttime.Duration(truncate(hist.Minimum, 3)),\n\t\ttime.Duration(truncate(hist.P50, 3)),\n\t\ttime.Duration(truncate(hist.Maximum, 3)),\n\n\t\ttime.Duration(truncate(hist.P90, 3)),\n\t\ttime.Duration(truncate(hist.P99, 3)),\n\t\ttime.Duration(truncate(hist.P999, 3)),\n\t\ttime.Duration(truncate(hist.P9999, 3)),\n\t)\n\treturn int64(n), err\n}", "func (pw *ProgressWriter) Write(b []byte) (int, error) {\n\tif pw.done == 0 {\n\t\tpw.start = time.Now()\n\t}\n\tpw.done += int64(len(b))\n\tpercent := int((pw.done * 100) / pw.total)\n\twidth := 10\n\tprogress := (width * percent) / 100\n\tbps := float64(pw.done) / time.Now().Sub(pw.start).Seconds()\n\tbar := fmt.Sprintf(\"%-*s\", width, strings.Repeat(\"#\", int(progress)))\n\tspeed := fmt.Sprintf(\"%s/s %3.3s%%\", human.ByteSize(bps), strconv.Itoa(percent))\n\tpw.pm.Working(pw.key, bar, speed)\n\t// time.Sleep(2.4e7)\n\treturn pw.w.Write(b)\n}", "func printReport(r reports.Report, w io.Writer, 
minPriority int8, pal *palette) {\n\n\tc.Fprint(w, \"\\n\")\n\tc.Fprint(w, c.FHeader(r.Title))\n\tc.Fprint(w, \"\\n\")\n\n\tfor _, ch := range r.Chunks {\n\t\tif ch.Priority >= minPriority {\n\t\t\tprintChunk(ch, w, minPriority, pal)\n\t\t}\n\t}\n\n\tc.Fprint(w, \"\\n\")\n}", "func writeStats(to *os.File, final bool, s, t stats.Stats) {\n\tp := fmt.Fprintf\n\tpn := prettyNumber\n\tpb := prettyNumBytes\n\tpl := prettyLatency\n\tpt := prettyTimeStamp\n\tif final {\n\t\twriteStatsHeader(to)\n\t\tp(to, statsPrintHeader, pt(), \"Put\",\n\t\t\tpn(t.TotalPuts()),\n\t\t\tpb(t.TotalPutBytes()),\n\t\t\tpl(t.MinPutLatency(), t.AvgPutLatency(), t.MaxPutLatency()),\n\t\t\tpb(t.PutThroughput(time.Now())),\n\t\t\tpn(t.TotalErrPuts()))\n\t\tp(to, statsPrintHeader, pt(), \"Get\",\n\t\t\tpn(t.TotalGets()),\n\t\t\tpb(t.TotalGetBytes()),\n\t\t\tpl(t.MinGetLatency(), t.AvgGetLatency(), t.MaxGetLatency()),\n\t\t\tpb(t.GetThroughput(time.Now())),\n\t\t\tpn(t.TotalErrGets()))\n\t} else {\n\t\t// show interval stats; some fields are shown of both interval and total, for example, gets, puts, etc\n\t\tif s.TotalPuts() != 0 {\n\t\t\tp(to, statsPrintHeader, pt(), \"Put\",\n\t\t\t\tpn(s.TotalPuts())+\"(\"+pn(t.TotalPuts())+\")\",\n\t\t\t\tpb(s.TotalPutBytes())+\"(\"+pb(t.TotalPutBytes())+\")\",\n\t\t\t\tpl(s.MinPutLatency(), s.AvgPutLatency(), s.MaxPutLatency()),\n\t\t\t\tpb(s.PutThroughput(time.Now()))+\"(\"+pb(t.PutThroughput(time.Now()))+\")\",\n\t\t\t\tpn(s.TotalErrPuts())+\"(\"+pn(t.TotalErrPuts())+\")\")\n\t\t}\n\t\tif s.TotalGets() != 0 {\n\t\t\tp(to, statsPrintHeader, pt(), \"Get\",\n\t\t\t\tpn(s.TotalGets())+\"(\"+pn(t.TotalGets())+\")\",\n\t\t\t\tpb(s.TotalGetBytes())+\"(\"+pb(t.TotalGetBytes())+\")\",\n\t\t\t\tpl(s.MinGetLatency(), s.AvgGetLatency(), s.MaxGetLatency()),\n\t\t\t\tpb(s.GetThroughput(time.Now()))+\"(\"+pb(t.GetThroughput(time.Now()))+\")\",\n\t\t\t\tpn(s.TotalErrGets())+\"(\"+pn(t.TotalErrGets())+\")\")\n\t\t}\n\t}\n}", "func DownloadReport(w io.Writer, pct, count, total int64) int64 {\n\tpct_ := count * 100 / total\n\tif pct_ > pct {\n\t\tfmt.Fprintf(w, \" ...%d MB written (%d%%)\\n\", count/1e6, pct_)\n\t}\n\treturn pct_\n}", "func (w *StatsDWriter) Write(results Summary) error {\n\tfor k, v := range results {\n\t\t_, err := fmt.Fprintf(w.writer, \"%s:%d|s\\n\", k, v)\n\t\tif err != nil {\n\t\t\treturn err\n\t\t}\n\t}\n\treturn nil\n}", "func WithProgress(writer io.Writer, reader io.Reader, prefix string) error {\n\tdefer color.Unset()\n\n\tvar printed bool\n\n\tdefer func() {\n\t\tif printed {\n\t\t\tfmt.Println()\n\t\t}\n\t}()\n\n\twsz, _ := term.GetWinsize(0)\n\n\trd := bufio.NewReaderSize(reader, readerSize)\n\n\tcount := float64(0)\n\tbuf := make([]byte, readerSize)\n\tt := time.Now()\n\tfor {\n\t\trn, err := rd.Read(buf)\n\t\tcount += float64(rn)\n\n\t\tif err == io.EOF {\n\t\t\tif rn > 0 {\n\t\t\t\tgoto write\n\t\t\t} else {\n\t\t\t\treturn nil\n\t\t\t}\n\t\t}\n\t\tif err != nil {\n\t\t\treturn err\n\t\t}\n\n\t\tif NoOut {\n\t\t\tgoto write\n\t\t}\n\n\t\tif time.Since(t) > 100*time.Millisecond && !NoTTY && wsz.Width != 0 {\n\t\t\tprinted = true\n\t\t\tfmt.Print(\"\\r\")\n\n\t\t\tmbs := fmt.Sprintf(\"%.02fMB\", count/megaByte)\n\n\t\t\tcolor.New(color.FgWhite, color.Bold).Printf(\"+++ \")\n\n\t\t\tjustifiedWidth := int(wsz.Width) - len(mbs) - 9\n\t\t\tif justifiedWidth < 0 {\n\t\t\t\tgoto write\n\t\t\t}\n\n\t\t\tif len(prefix) > int(justifiedWidth) {\n\t\t\t\tprefix = prefix[:int(justifiedWidth)] + \"...\"\n\t\t\t}\n\n\t\t\tcolor.New(color.FgRed, color.Bold).Printf(\"%s: \", 
prefix)\n\t\t\tcolor.New(color.FgWhite).Print(mbs)\n\n\t\t\tt = time.Now()\n\t\t}\n\n\twrite:\n\t\t_, werr := writer.Write(buf[:rn])\n\t\tif werr != nil {\n\t\t\treturn werr\n\t\t}\n\n\t\tif err == io.EOF {\n\t\t\treturn nil\n\t\t}\n\t}\n}", "func fprintStats(w io.Writer, q *QueryBenchmarker) {\n\tmaxKeyLength := 0\n\tkeys := make([]string, 0, len(q.statMapping))\n\tfor k := range q.statMapping {\n\t\tif len(k) > maxKeyLength {\n\t\t\tmaxKeyLength = len(k)\n\t\t}\n\t\tkeys = append(keys, k)\n\t}\n\tsort.Strings(keys)\n\tfor _, k := range keys {\n\t\tv := q.statMapping[k]\n\t\tminRate := 1e3 / v.Min\n\t\tmeanRate := 1e3 / v.Mean\n\t\tmaxRate := 1e3 / v.Max\n\t\tpaddedKey := fmt.Sprintf(\"%s\", k)\n\t\tfor len(paddedKey) < maxKeyLength {\n\t\t\tpaddedKey += \" \"\n\t\t}\n\t\tkStats := make(map[string]interface{})\n\t\tkStats[\"min\"] = v.Min\n\t\tkStats[\"minRate\"] = minRate\n\t\tkStats[\"mean\"] = v.Mean\n\t\tkStats[\"meanRate\"] = meanRate\n\t\tkStats[\"max\"] = v.Max\n\t\tkStats[\"maxRate\"] = maxRate\n\t\tkStats[\"count\"] = v.Count\n\t\tkStats[\"sum\"] = v.Sum / 1e3\n\t\tq.json[k] = kStats\n\t\tif !q.doJson {\n\t\t\t_, err := fmt.Fprintf(w, \"%s : min: %8.2fms (%7.2f/sec), mean: %8.2fms (%7.2f/sec), max: %7.2fms (%6.2f/sec), count: %8d, sum: %5.1fsec \\n\", paddedKey, v.Min, minRate, v.Mean, meanRate, v.Max, maxRate, v.Count, v.Sum/1e3)\n\t\t\tif err != nil {\n\t\t\t\tlog.Fatal(err)\n\t\t\t}\n\t\t}\n\t}\n\tq.json[\"totalQueries\"] = q.totalQueries\n\tq.json[\"wallClockTime\"] = q.wallTook.Seconds()\n\tq.json[\"queryRate\"] = float64(q.totalQueries) / float64(q.wallTook.Seconds())\n\tq.json[\"workers\"] = q.workers\n\tq.json[\"batchSize\"] = q.batchSize\n\tif q.doJson {\n\t\tfor k, v := range q.json {\n\t\t\tif _, err := json.Marshal(v); err != nil {\n\t\t\t\tq.json[k] = \"\"\n\t\t\t}\n\t\t}\n\t\tb, err := json.Marshal(q.json)\n\t\tif err != nil {\n\t\t\tlog.Println(\"error:\", err)\n\t\t}\n\t\tos.Stdout.Write(b)\n\t}\n}", "func WriteHistogramSummary(scope *Scope, writer tf.Output, step tf.Output, tag tf.Output, values tf.Output) (o *tf.Operation) {\n\tif scope.Err() != nil {\n\t\treturn\n\t}\n\topspec := tf.OpSpec{\n\t\tType: \"WriteHistogramSummary\",\n\t\tInput: []tf.Input{\n\t\t\twriter, step, tag, values,\n\t\t},\n\t}\n\treturn scope.AddOperation(opspec)\n}", "func (b *bar) writer() {\n\tb.update()\n\tfor {\n\t\tselect {\n\t\tcase <-b.finishChan:\n\t\t\treturn\n\t\tcase <-time.After(b.opts.RefreshRate):\n\t\t\tb.update()\n\t\t}\n\t}\n}", "func writeStatsToFile(posts []model.RedditPost) error {\n\n\tgroupedPosts := groupBySubreddit(posts)\n\n\tcountList := [][]model.RedditPost{}\n\n\t// convert to list\n\tfor _, v := range groupedPosts {\n\t\tcountList = append(countList, v)\n\t}\n\n\t// sort by post count\n\tsort.Slice(countList, func(i, j int) bool {\n\t\treturn len(countList[i]) > len(countList[j])\n\t})\n\n\tdata := [][]string{}\n\n\tfor _, v := range countList {\n\t\tescapedTitle := strings.ReplaceAll(v[0].Title, \"|\", \"\\\\|\")\n\t\ttitle := \"[\" + escapedTitle + \"]\" + \"(https://www.reddit.com\" + v[0].Permalink + \")\"\n\t\tdata = append(data, []string{v[0].Subreddit, strconv.Itoa(len(v)), title, strconv.Itoa(v[0].Score)})\n\t}\n\n\tfile, _ := os.Create(\"README.md\")\n\n\ttable := tablewriter.NewWriter(file)\n\ttable.SetAutoWrapText(false)\n\ttable.SetHeader([]string{\"Subreddit\", \"Total\", \"Top Post\", \"Score\"})\n\ttable.SetBorders(tablewriter.Border{Left: true, Top: false, Right: true, Bottom: false})\n\ttable.SetCenterSeparator(\"|\")\n\ttable.AppendBulk(data) 
// Add Bulk Data\n\ttable.Render()\n\n\treturn nil\n}", "func (wc WriteCounter) PrintProgress() {\n\t// Clear the line by using a character return to go back to the start and remove\n\t// the remaining characters by filling it with spaces\n\tfmt.Printf(\"\\r%s\", strings.Repeat(\" \", 50))\n\n\t// Return again and print current status of download\n\t// We use the humanize package to print the bytes in a meaningful way (e.g. 10 MB)\n\tfmt.Printf(\"\\rDownloading... %s complete\", humanize.Bytes(wc.Total))\n}", "func (wc writeCounter) PrintProgress() {\n\t// Clear the line by using a character return to go back to the start and remove\n\t// the remaining characters by filling it with spaces\n\tfmt.Printf(\"\\r%s\", strings.Repeat(\" \", 100))\n\n\t// Return again and print current status of download\n\t// We use the humanize package to print the bytes in a meaningful way (e.g. 10 MB)\n\tfmt.Printf(\"\\rDownloading (%s) %s complete\", wc.Name, humanize.Bytes(wc.Total))\n}", "func ( crawler *SingleCrawler ) Print() error {\n\n if err1 := IsOk(crawler); err1 != nil{\n return err1\n }\n\n stdout := os.Stdout\n outfile := stdout\n duped := true\n\n outfile, err := os.OpenFile( crawler.Filename, os.O_WRONLY | os.O_CREATE, 0644 )\n if err != nil {\n glog.Error(\"Unable to open requested file for writing. Defaulting to std out.\")\n duped = false\n } else{\n os.Stdout = outfile\n }\n\n fmt.Printf(\"SiteMap from starting URL %s, total pages found %d.\\n\\n\\n\", crawler.Site.String(), crawler.NumPages )\n for i := 0; i < crawler.NumPages; i++ {\n crawler.Sitemap[i].Print(crawler.PRINT_LIMIT)\n }\n\n if duped == true {\n outfile.Close()\n os.Stdout = stdout\n }\n\n return nil\n\n}", "func (c *LoggerClient) Histogram(name string, value float64) {\n\tc.print(\"Histogram\", name, value, value)\n}", "func Fprint(w io.Writer, a ...interface{}) (n int, err error) { return fmt.Fprint(w, a...) 
}", "func AnalyzeAllResponses(a *Analyzer, ar []*Response) {\n f, err := os.Create(\"swing-data.csv\")\n if err != nil { panic(err) }\n defer f.Close()\n w := csv.NewWriter(f)\n defer w.Flush()\n Range := histogram.Range(-1.0, 200, .01)\n h, err := histogram.NewHistogram(Range)\n if err != nil {\n panic(err)\n }\n for _, resp := range ar {\n for _, oi := range a.AnalyzeStock(resp) {\n var toWrite = []string{\n strconv.FormatFloat(oi.Swing, 'f', 4, 64),\n strconv.FormatFloat(oi.Ret, 'f', 4, 64),\n }\n w.Write(toWrite)\n h.Add(oi.Ret)\n }\n }\n fmt.Println(\"MEAN: \", h.Mean())\n fmt.Println(\"SIGMA \", h.Sigma())\n}", "func printResults() {\n\n\t// collect stats\n\ttotalLines := 0\n\ttotalCode := 0\n\ttotalComments := 0\n\tfor _, file := range filesStats {\n\t\ttotalLines = totalLines + file.TotalLines\n\t\ttotalCode = totalCode + file.CodeLines\n\t\ttotalComments = totalComments + file.CommentLines\n\t}\n\tfmt.Println(\"Overall stats:\")\n\tfmt.Printf(\" Number of files: %v\\n\", len(filesStats))\n\tfmt.Printf(\" Total lines: %v\\n\", totalLines)\n\tfmt.Printf(\" Code lines: %v\\n\", totalCode)\n\tfmt.Printf(\" Comment lines: %v\\n\", totalComments)\n\n\t// statistics for extensions\n\tif printExts == true {\n\t\t// NOTE: Sadly colprint accepts only slice, not map\n\t\t// thus conversion is needed.\n\t\ts := []*models.ExtensionStats{}\n\t\tfor _, e := range extensionStats {\n\t\t\ts = append(s, e)\n\t\t}\n\t\tfmt.Println(\"\\nStats by extensions\")\n\t\tcolprint.Print(s)\n\t\tfmt.Println()\n\t}\n\n\t// statistics for individual files\n\tif printFiles == true {\n\t\tfmt.Println(\"\\nStats by files:\")\n\t\tcolprint.Print(filesStats)\n\t}\n\tfmt.Println()\n}", "func PrintProgressBar(progress int64, complete int64) {\n amount := int((int64(progressBarLength-1) * progress) / complete)\n rest := (progressBarLength - 1) - amount\n bar := strings.Repeat(\"=\", amount) + \">\" + strings.Repeat(\".\", rest)\n _, err := os.Stdout.Write([]byte(\"Progress: [\" + bar + \"]\\r\"))\n Checkerr(err)\n}", "func (pt *ProgressWriter) Write(p []byte) (int, error) {\n\t// Do normal writer tasks\n\tn, err := pt.WriteCloser.Write(p)\n\n\t// Do the actual progress tracking\n\tif pt.Tracker != nil {\n\t\tpt.Tracker.total += int64(n)\n\t\tpt.Tracker.update(n)\n\t}\n\n\treturn n, err\n}", "func writeStatsHeader(to *os.File) {\n\tfmt.Fprintln(to)\n\tfmt.Fprintf(to, statsPrintHeader,\n\t\t\"Time\", \"OP\", \"Count\", \"Total Bytes\", \"Latency(min, avg, max)\", \"Throughput\", \"Error\")\n}", "func printDistribution(dmap map[string]int) {\n\tfor {\n\t\ttime.Sleep(time.Second * 5)\n\t\tvar total float64\n\t\tlock.Lock()\n\t\tkeys := []string{}\n\t\tfor k, n := range dmap {\n\t\t\ttotal = total + float64(n)\n\t\t\tkeys = append(keys, k)\n\t\t}\n\t\tsort.Strings(keys)\n\t\tfor _, k := range keys {\n\t\t\tn := dmap[k]\n\t\t\tvar nf float64 = float64(n)\n\t\t\tfmt.Printf(\"%s : %3.2f%% (%d/%d)\\n\", k, float64(nf*100.0)/total, n, int(total))\n\t\t\tdelete(dmap, k) // clear on read\n\t\t}\n\t\tlock.Unlock()\n\t\tfmt.Printf(\"------------------------\\n\")\n\t}\n}", "func (t *TimerSnapshot) Percentile(p float64) float64 {\n\treturn t.histogram.Percentile(p)\n}", "func (c *Collector) String() string {\n\tc.Lock()\n\tdefer c.Unlock()\n\n\tif len(c.Buckets) == 0 {\n\t\treturn \"\"\n\t}\n\n\tnLen := printfLen(\"%.2f\", c.Min)\n\tif maxLen := printfLen(\"%.2f\", c.Max); maxLen > nLen {\n\t\tnLen = maxLen\n\t}\n\t// if c.Max is +Inf, the second-largest element can be the longest.\n\tif maxLen := printfLen(\"%.2f\", 
c.Buckets[len(c.Buckets)-1].Min); maxLen > nLen {\n\t\tnLen = maxLen\n\t}\n\n\tcLen := printfLen(\"%d\", c.Count)\n\tsLen := 0\n\n\tvar res strings.Builder\n\n\tfmt.Fprintf(&res, \"[%*s %*s] %*s total%%\", nLen, \"min\", nLen, \"max\", cLen, \"cnt\")\n\n\tif c.PrintSum {\n\t\tsLen = printfLen(\"%.2f\", c.Sum)\n\t\tfmt.Fprintf(&res, \" %*s\", sLen, \"sum\")\n\t}\n\n\tfmt.Fprintf(&res, \" (%d events)\\n\", c.Count)\n\n\tfor _, b := range c.Buckets {\n\t\tpercent := float64(100*b.Count) / float64(c.Count)\n\n\t\tfmt.Fprintf(&res, \"[%*.2f %*.2f] %*d %5.2f%%\", nLen, b.Min, nLen, b.Max, cLen, b.Count, percent)\n\n\t\tif c.PrintSum {\n\t\t\tfmt.Fprintf(&res, \" %*.2f\", sLen, b.Sum)\n\t\t}\n\n\t\tif dots := strings.Repeat(\".\", int(percent)); len(dots) > 0 {\n\t\t\tfmt.Fprint(&res, \" \", dots)\n\t\t}\n\n\t\tfmt.Fprintln(&res)\n\t}\n\n\treturn res.String()\n}", "func (benchmark *BenchmarkStat) PrintStats() {\n\tprintable := \"\"\n\tprintable += \"\\nSTATISTICS\\n\"\n\tprintable += \"===============\\n\"\n\tfor _, val := range benchmark.options {\n\t\tswitch val {\n\t\tcase Mean:\n\t\t\tif st, err := benchmark.GetStat(val); err == nil {\n\t\t\t\tprintable += st\n\t\t\t}\n\t\tcase Mode:\n\t\t\tif st, err := benchmark.GetStat(val); err == nil {\n\t\t\t\tprintable += st\n\t\t\t}\n\t\tcase Highest:\n\t\t\tif st, err := benchmark.GetStat(val); err == nil {\n\t\t\t\tprintable += st\n\t\t\t}\n\t\tcase Lowest:\n\t\t\tif st, err := benchmark.GetStat(val); err == nil {\n\t\t\t\tprintable += st\n\t\t\t}\n\t\tcase Sum:\n\t\t\tif st, err := benchmark.GetStat(val); err == nil {\n\t\t\t\tprintable += st\n\t\t\t}\n\t\tcase Range:\n\t\t\tif st, err := benchmark.GetStat(val); err == nil {\n\t\t\t\tprintable += st\n\t\t\t}\n\t\tcase All:\n\t\t\tif st, err := benchmark.GetStat(val); err == nil {\n\t\t\t\tprintable += st\n\t\t\t}\n\t\tdefault:\n\t\t\tif _, err := benchmark.GetStat(val); err != nil {\n\t\t\t\tprintable += err.Error()\n\t\t\t}\n\n\t\t}\n\t}\n\n\ts := printable\n\tfmt.Println(s)\n}", "func (s *counts) Report(title string, opts *options) {\n\tif opts.ShowLines {\n\t\tfmt.Printf(\"%8v\", s.lines)\n\t}\n\tif opts.ShowWords {\n\t\tfmt.Printf(\"%8v\", s.words)\n\t}\n\tif opts.ShowChars {\n\t\tfmt.Printf(\"%8v\", s.chars)\n\t}\n\tfmt.Printf(\" %8v\\n\", title)\n}", "func (c *Contents) PrintSummary(head int) {\n\tglog.Infof(\"Loaded from %s\\n\", c.filePath)\n\tglog.Infof(\" %s\\n\", &c.Fingerprint)\n\n\tfor k, v := range c.Metrics {\n\t\tglog.Infof(\" metrics for weight_name: %s\", k)\n\t\tglog.Infof(\" exclude metrics meta %d count %d\\n\", v.Meta, len(v.CellMetrics))\n\t\tfor i := 0; i < len(v.CellMetrics); i++ {\n\t\t\tglog.Infof(\" metrics[%d] weights meta %d count %d\\n\", i, v.CellMetrics[i].WeightsMeta, len(v.CellMetrics[i].Weights))\n\t\t\tfor j := 0; j < head && j < len(v.CellMetrics[i].Weights); j++ {\n\t\t\t\tglog.Infof(\" metrics[%d] weigths[%d] %+v\", i, j, v.CellMetrics[i].Weights[j])\n\t\t\t}\n\n\t\t\tglog.Infof(\" metrics[%d] durations meta %d count %d\\n\", i, v.CellMetrics[i].DurationsMeta, len(v.CellMetrics[i].Durations))\n\t\t\tfor j := 0; j < head && j < len(v.CellMetrics[i].Durations); j++ {\n\t\t\t\tglog.Infof(\" metrics[%d] durations[%d] %+v\", i, j, v.CellMetrics[i].Durations[j])\n\t\t\t}\n\n\t\t\tglog.Infof(\" metrics[%d] distances meta %d count %d\\n\", i, v.CellMetrics[i].DistancesMeta, len(v.CellMetrics[i].Distances))\n\t\t\tfor j := 0; j < head && j < len(v.CellMetrics[i].Distances); j++ {\n\t\t\t\tglog.Infof(\" metrics[%d] distances[%d] %+v\", i, j, 
v.CellMetrics[i].Distances[j])\n\t\t\t}\n\t\t}\n\t}\n}", "func (m *Main) printStats() *time.Ticker {\n\tt := time.NewTicker(time.Second * 10)\n\tstart := time.Now()\n\tgo func() {\n\t\tfor range t.C {\n\t\t\tduration := time.Since(start)\n\t\t\tbytes := m.BytesProcessed()\n\t\t\tlog.Printf(\"Bytes: %s, Records: %v, Duration: %v, Rate: %v/s, %v rec/s\", pdk.Bytes(bytes), m.totalRecs.Get(), duration, pdk.Bytes(float64(bytes)/duration.Seconds()), float64(m.totalRecs.Get())/duration.Seconds())\n\t\t}\n\t}()\n\treturn t\n}", "func DrawProbabilitiesHistogramScore(p map[int]int) {\n\tvar keys []int\n\tmaxY := 0\n\tmaxX := 0\n\tfor k, freq := range p {\n\t\tkeys = append(keys, k)\n\t\tif maxY < freq {\n\t\t\tmaxY = freq\n\t\t}\n\t\tif maxX < k {\n\t\t\tmaxX = k\n\t\t}\n\t}\n\tsort.Ints(keys)\n\tfmt.Println()\n\tdigits := int(math.Log10(float64(maxX))) + 1\n\tfor i := maxY; i > 0; i-- {\n\t\t// Print Y axes.\n\t\tfmt.Printf(\"%v%%\", indentInt(i, 3))\n\t\tfor _, k := range keys {\n\t\t\tfmt.Print(\" \")\n\t\t\tif p[k] == i {\n\t\t\t\tfor j := 0; j < digits; j++ {\n\t\t\t\t\tfmt.Print(\"▄\")\n\t\t\t\t}\n\t\t\t} else if p[k] > i {\n\t\t\t\tfor j := 0; j < digits; j++ {\n\t\t\t\t\tfmt.Print(\"█\")\n\t\t\t\t}\n\t\t\t} else {\n\t\t\t\tfor j := 0; j < digits; j++ {\n\t\t\t\t\tfmt.Print(\" \")\n\t\t\t\t}\n\t\t\t}\n\t\t}\n\t\tfmt.Print(\"\\n\")\n\t}\n\t// Print X axes.\n\tfmt.Print(\" \")\n\tfor _, k := range keys {\n\t\tfmt.Printf(\"%v\", indentInt(k, digits+1))\n\t}\n\tfmt.Println()\n}", "func (it *emptyIterator) AtHistogram() (int64, *histogram.Histogram) { return 0, nil }", "func (c *Client) Histogram(stat string, value int, rate float64) error {\n\treturn c.send(stat, rate, \"%d|ms\", value)\n}", "func Flush() error {\n\tif printer.Quiet {\n\t\treturn nil\n\t}\n\n\topts := printOpts{\n\t\tformat: printer.Format,\n\t\tsingle: printer.Single,\n\t\tnoNewline: printer.NoNewline,\n\t}\n\n\tcmd := printer.cmd\n\tif cmd != nil {\n\t\tshortStat, err := printer.cmd.Flags().GetBool(\"short-stat\")\n\t\tif err == nil && printer.cmd.Name() == \"list\" && printer.cmd.Parent().Name() != \"auth\" {\n\t\t\topts.shortStat = shortStat\n\t\t}\n\t}\n\n\tb, err := printer.linesToBytes(opts)\n\tif err != nil {\n\t\treturn err\n\t}\n\tlines := lineCount(b)\n\n\tisTTY := checkInteractiveTerminal() == nil\n\tvar enablePager bool\n\ttermHeight, err := termHeight(os.Stdout)\n\tif err == nil {\n\t\tenablePager = isTTY && (termHeight < lines) // calculate if we should enable paging\n\t}\n\n\tpager := os.Getenv(\"PAGER\")\n\tif enablePager {\n\t\tenablePager = pager != \"\"\n\t}\n\n\topts.usePager = enablePager && printer.pager\n\topts.pagerPath = pager\n\n\terr = printer.printBytes(b, opts)\n\tif err != nil {\n\t\treturn err\n\t}\n\n\t// after all, print errors\n\tprinter.printErrors()\n\n\tdefer func() {\n\t\tprinter.Lines = []interface{}{}\n\t\tprinter.ErrorLines = []interface{}{}\n\t}()\n\n\tif cmd == nil || cmd.Name() != \"list\" || printer.cmd.Parent().Name() == \"auth\" {\n\t\treturn nil\n\t}\n\n\t// the command is a list command, we may want to\n\t// take care of the stat flags\n\tnoStat, err := cmd.Flags().GetBool(\"no-stat\")\n\tif err != nil {\n\t\treturn err\n\t}\n\n\t// print stats\n\tswitch {\n\tcase noStat:\n\t\t// do nothing\n\tcase !opts.shortStat:\n\t\t// should not go to pager\n\t\tif isTTY && !enablePager {\n\t\t\tfmt.Fprintf(printer.eWriter, \"\\n\") // add a one line space before statistical data\n\t\t}\n\t\tfallthrough\n\tcase len(printer.Lines) > 0:\n\t\tentity := cmd.Parent().Name()\n\t\tcontainer := 
strings.TrimSuffix(printer.serverAddr, \"api/v4\")\n\t\tif container != \"\" {\n\t\t\tcontainer = fmt.Sprintf(\" on %s\", container)\n\t\t}\n\t\tfmt.Fprintf(printer.eWriter, \"There are %d %ss%s\\n\", len(printer.Lines), entity, container)\n\t}\n\n\treturn nil\n}", "func PrintDownloadPercent(done chan int64, path string, total int64) {\n\n\tvar stop bool = false\n\n\tfor {\n\t\tselect {\n\t\tcase <-done:\n\t\t\tstop = true\n\t\tdefault:\n\n\t\t\tfile, err := os.Open(path)\n\t\t\tif err != nil {\n\t\t\t\tlog.Fatal(err)\n\t\t\t}\n\n\t\t\tfi, err := file.Stat()\n\t\t\tif err != nil {\n\t\t\t\tlog.Fatal(err)\n\t\t\t}\n\n\t\t\tsize := fi.Size()\n\n\t\t\tif size == 0 {\n\t\t\t\tsize = 1\n\t\t\t}\n\n\t\t\tvar percent float64 = float64(size) / float64(total) * 100\n\n\t\t\tfmt.Printf(\"%.0f\", percent)\n\t\t\tfmt.Print(\"% ... \")\n\t\t}\n\n\t\tif stop {\n\t\t\tbreak\n\t\t}\n\n\t\ttime.Sleep(time.Second)\n\t}\n}", "func (h *Histogram) Export() *HistogramData {\n\tvar res HistogramData\n\tres.Count = h.Counter.Count\n\tres.Min = h.Counter.Min\n\tres.Max = h.Counter.Max\n\tres.Sum = h.Counter.Sum\n\tres.Avg = h.Counter.Avg()\n\tres.StdDev = h.Counter.StdDev()\n\tmultiplier := h.Divider\n\toffset := h.Offset\n\t// calculate the last bucket index\n\tlastIdx := -1\n\tfor i := numBuckets - 1; i >= 0; i-- {\n\t\tif h.Hdata[i] > 0 {\n\t\t\tlastIdx = i\n\t\t\tbreak\n\t\t}\n\t}\n\tif lastIdx == -1 {\n\t\treturn &res\n\t}\n\n\t// previous bucket value:\n\tprev := histogramBucketValues[0]\n\tvar total int64\n\tctrTotal := float64(h.Count)\n\t// export the data of each bucket of the histogram\n\tfor i := 0; i <= lastIdx; i++ {\n\t\tif h.Hdata[i] == 0 {\n\t\t\t// empty bucket: skip it but update prev which is needed for next iter\n\t\t\tif i < numValues {\n\t\t\t\tprev = histogramBucketValues[i]\n\t\t\t}\n\t\t\tcontinue\n\t\t}\n\t\tvar b Bucket\n\t\ttotal += int64(h.Hdata[i])\n\t\tif len(res.Data) == 0 {\n\t\t\t// First entry, start is min\n\t\t\tb.Start = h.Min\n\t\t} else {\n\t\t\tb.Start = multiplier*float64(prev) + offset\n\t\t}\n\t\tb.Percent = 100. 
* float64(total) / ctrTotal\n\t\tif i < numValues {\n\t\t\tcur := histogramBucketValues[i]\n\t\t\tb.End = multiplier*float64(cur) + offset\n\t\t\tprev = cur\n\t\t} else {\n\t\t\t// Last Entry\n\t\t\tb.Start = multiplier*float64(prev) + offset\n\t\t\tb.End = h.Max\n\t\t}\n\t\tb.Count = int64(h.Hdata[i])\n\t\tres.Data = append(res.Data, b)\n\t}\n\tres.Data[len(res.Data)-1].End = h.Max\n\treturn &res\n}", "func (NilTimer) Percentile(p float64) float64 { return 0.0 }", "func (_m *Reporter) Histogram(name string, value float64, tags ...monitoring.Tag) {\n\t_va := make([]interface{}, len(tags))\n\tfor _i := range tags {\n\t\t_va[_i] = tags[_i]\n\t}\n\tvar _ca []interface{}\n\t_ca = append(_ca, name, value)\n\t_ca = append(_ca, _va...)\n\t_m.Called(_ca...)\n}", "func (t *StandardTimer) Percentile(p float64) float64 {\n\treturn t.histogram.Percentile(p)\n}", "func (pb *Pbar) printValues() {\n\tif pb.printNumbers == 1 {\n\t\tansi.Printf(\"\\u001b[1B\")\n\t\tansi.Printf(\"\\033[2G\\033[0m%d / %d\\u001b[1A\", pb.currentAmount, pb.totalAmount)\n\t} else if pb.printNumbers == 2 {\n\t\tnewPercent := int(100*float32(pb.currentAmount)/float32(pb.totalAmount))\n\t\tif newPercent > pb.currentPercent {\n\t\t\tansi.Printf(\"\\u001b[1B\")\n\t\t\tansi.Printf(\"\\033[2G\\033[0m%d%s\\u001b[1A\", newPercent, \"%\")\n\t\t\tpb.currentPercent = newPercent\n\t\t}\n\t}\n}", "func PrintDependencyCounts(ch chan DependencyCountItem) {\n\tout := getOutputFile()\n\tfor item := range ch {\n\t\t_, err := fmt.Fprintf(out, \"%s: %d\\n\", item.Dependency, item.Count)\n\t\tif err != nil {\n\t\t\tfatalError(err)\n\t\t}\n\t}\n\tcloseOutputFile(out)\n}", "func (as Authors) Write() {\n\tfor _, a := range as {\n\t\tfmt.Println(a.Full_name)\n\t}\n\tnames := func(fullName string) []string { return strings.Split(fullName, \" \") }\n\tfirstName := func(fullName string) string { return names(fullName)[0] }\n\tfuncMap := map[string]interface{}{\n\t\t\"funcName\": func() string { return \"hlsm_authors\" },\n\t\t\"userLogin\": func(fullName string) string {\n\t\t\treturn strings.ToLower(firstName(fullName))\n\t\t},\n\t\t\"firstName\": firstName,\n\t\t\"lastName\": func(fullName string) string { return names(fullName)[1] },\n\t}\n\twriter.Execute(\"authors\", as, funcMap)\n}", "func (m *Printer) Serialize(writer i878a80d2330e89d26896388a3f487eef27b0a0e6c010c493bf80be1452208f91.SerializationWriter)(error) {\n err := m.PrinterBase.Serialize(writer)\n if err != nil {\n return err\n }\n if m.GetConnectors() != nil {\n cast := make([]i878a80d2330e89d26896388a3f487eef27b0a0e6c010c493bf80be1452208f91.Parsable, len(m.GetConnectors()))\n for i, v := range m.GetConnectors() {\n if v != nil {\n cast[i] = v.(i878a80d2330e89d26896388a3f487eef27b0a0e6c010c493bf80be1452208f91.Parsable)\n }\n }\n err = writer.WriteCollectionOfObjectValues(\"connectors\", cast)\n if err != nil {\n return err\n }\n }\n {\n err = writer.WriteBoolValue(\"hasPhysicalDevice\", m.GetHasPhysicalDevice())\n if err != nil {\n return err\n }\n }\n {\n err = writer.WriteBoolValue(\"isShared\", m.GetIsShared())\n if err != nil {\n return err\n }\n }\n {\n err = writer.WriteTimeValue(\"lastSeenDateTime\", m.GetLastSeenDateTime())\n if err != nil {\n return err\n }\n }\n {\n err = writer.WriteTimeValue(\"registeredDateTime\", m.GetRegisteredDateTime())\n if err != nil {\n return err\n }\n }\n if m.GetShares() != nil {\n cast := make([]i878a80d2330e89d26896388a3f487eef27b0a0e6c010c493bf80be1452208f91.Parsable, len(m.GetShares()))\n for i, v := range m.GetShares() {\n if v != nil {\n cast[i] 
= v.(i878a80d2330e89d26896388a3f487eef27b0a0e6c010c493bf80be1452208f91.Parsable)\n }\n }\n err = writer.WriteCollectionOfObjectValues(\"shares\", cast)\n if err != nil {\n return err\n }\n }\n if m.GetTaskTriggers() != nil {\n cast := make([]i878a80d2330e89d26896388a3f487eef27b0a0e6c010c493bf80be1452208f91.Parsable, len(m.GetTaskTriggers()))\n for i, v := range m.GetTaskTriggers() {\n if v != nil {\n cast[i] = v.(i878a80d2330e89d26896388a3f487eef27b0a0e6c010c493bf80be1452208f91.Parsable)\n }\n }\n err = writer.WriteCollectionOfObjectValues(\"taskTriggers\", cast)\n if err != nil {\n return err\n }\n }\n return nil\n}", "func (e errChunkIterator) AtHistogram() (int64, *histogram.Histogram) { panic(\"not implemented\") }", "func funcHistogramQuantile(vals []parser.Value, args parser.Expressions, enh *EvalNodeHelper) Vector {\n\tq := vals[0].(Vector)[0].F\n\tinVec := vals[1].(Vector)\n\n\tif enh.signatureToMetricWithBuckets == nil {\n\t\tenh.signatureToMetricWithBuckets = map[string]*metricWithBuckets{}\n\t} else {\n\t\tfor _, v := range enh.signatureToMetricWithBuckets {\n\t\t\tv.buckets = v.buckets[:0]\n\t\t}\n\t}\n\n\tvar histogramSamples []Sample\n\n\tfor _, sample := range inVec {\n\t\t// We are only looking for conventional buckets here. Remember\n\t\t// the histograms for later treatment.\n\t\tif sample.H != nil {\n\t\t\thistogramSamples = append(histogramSamples, sample)\n\t\t\tcontinue\n\t\t}\n\n\t\tupperBound, err := strconv.ParseFloat(\n\t\t\tsample.Metric.Get(model.BucketLabel), 64,\n\t\t)\n\t\tif err != nil {\n\t\t\t// Oops, no bucket label or malformed label value. Skip.\n\t\t\t// TODO(beorn7): Issue a warning somehow.\n\t\t\tcontinue\n\t\t}\n\t\tenh.lblBuf = sample.Metric.BytesWithoutLabels(enh.lblBuf, labels.BucketLabel)\n\t\tmb, ok := enh.signatureToMetricWithBuckets[string(enh.lblBuf)]\n\t\tif !ok {\n\t\t\tsample.Metric = labels.NewBuilder(sample.Metric).\n\t\t\t\tDel(excludedLabels...).\n\t\t\t\tLabels()\n\n\t\t\tmb = &metricWithBuckets{sample.Metric, nil}\n\t\t\tenh.signatureToMetricWithBuckets[string(enh.lblBuf)] = mb\n\t\t}\n\t\tmb.buckets = append(mb.buckets, bucket{upperBound, sample.F})\n\n\t}\n\n\t// Now deal with the histograms.\n\tfor _, sample := range histogramSamples {\n\t\t// We have to reconstruct the exact same signature as above for\n\t\t// a conventional histogram, just ignoring any le label.\n\t\tenh.lblBuf = sample.Metric.Bytes(enh.lblBuf)\n\t\tif mb, ok := enh.signatureToMetricWithBuckets[string(enh.lblBuf)]; ok && len(mb.buckets) > 0 {\n\t\t\t// At this data point, we have conventional histogram\n\t\t\t// buckets and a native histogram with the same name and\n\t\t\t// labels. 
Do not evaluate anything.\n\t\t\t// TODO(beorn7): Issue a warning somehow.\n\t\t\tdelete(enh.signatureToMetricWithBuckets, string(enh.lblBuf))\n\t\t\tcontinue\n\t\t}\n\n\t\tenh.Out = append(enh.Out, Sample{\n\t\t\tMetric: enh.DropMetricName(sample.Metric),\n\t\t\tF: histogramQuantile(q, sample.H),\n\t\t})\n\t}\n\n\tfor _, mb := range enh.signatureToMetricWithBuckets {\n\t\tif len(mb.buckets) > 0 {\n\t\t\tenh.Out = append(enh.Out, Sample{\n\t\t\t\tMetric: mb.metric,\n\t\t\t\tF: bucketQuantile(q, mb.buckets),\n\t\t\t})\n\t\t}\n\t}\n\n\treturn enh.Out\n}", "func (p *Printer) Write(w io.Writer, v interface{}) (_ int, err error) {\n\tdefer func() {\n\t\tswitch r := recover().(type) {\n\t\tcase panicSentinel:\n\t\t\terr = r.Err\n\t\tdefault:\n\t\t\tpanic(r)\n\t\tcase nil:\n\t\t\t// no error\n\t\t}\n\t}()\n\n\tcfg := p.Config\n\n\tif len(cfg.Indent) == 0 {\n\t\tcfg.Indent = DefaultIndent\n\t}\n\n\tif cfg.ZeroValueMarker == \"\" {\n\t\tcfg.ZeroValueMarker = DefaultZeroValueMarker\n\t}\n\n\tif cfg.RecursionMarker == \"\" {\n\t\tcfg.RecursionMarker = DefaultRecursionMarker\n\t}\n\n\tcounter := &stream.Counter{\n\t\tTarget: w,\n\t}\n\n\tr := &renderer{\n\t\tIndenter: stream.Indenter{\n\t\t\tTarget: counter,\n\t\t\tIndent: []byte(cfg.Indent),\n\t\t},\n\t\tConfiguration: cfg,\n\t\tRecursionSet: map[uintptr]struct{}{},\n\t}\n\n\trv := reflect.ValueOf(v)\n\tvar rt reflect.Type\n\n\tif rv.Kind() != reflect.Invalid {\n\t\trt = rv.Type()\n\t}\n\n\tr.WriteValue(\n\t\tValue{\n\t\t\tValue: rv,\n\t\t\tDynamicType: rt,\n\t\t\tStaticType: typeOf[any](),\n\t\t\tIsAmbiguousDynamicType: true,\n\t\t\tIsAmbiguousStaticType: true,\n\t\t\tIsUnexported: false,\n\t\t},\n\t)\n\n\treturn counter.Count(), nil\n}", "func (h *Histogram) String() string {\n\tvar strs []string\n\tfor _, b := range h.Buckets {\n\t\tstrs = append(strs, fmt.Sprintf(\"[%d,%d):%d\", b.Min, b.Max, b.Count))\n\t}\n\treturn h.Name + \": [\" + strings.Join(strs, \" \") + \"]; \" + fmt.Sprintf(\"sum %d\", h.Sum)\n}", "func WriteStats(path string, hot, cold []int64) error {\n\t// Copy before sort.\n\tcold = append([]int64{}, cold...)\n\thot = append([]int64{}, hot...)\n\n\tsort.Slice(cold, func(i, j int) bool { return cold[i] < cold[j] })\n\tsort.Slice(hot, func(i, j int) bool { return hot[i] < hot[j] })\n\n\tpackedCold, err := Pack(cold)\n\tif err != nil {\n\t\treturn errors.Annotate(err, \"failed to pack uploaded items\").Err()\n\t}\n\n\tpackedHot, err := Pack(hot)\n\tif err != nil {\n\t\treturn errors.Annotate(err, \"failed to pack not uploaded items\").Err()\n\t}\n\n\tstatsJSON, err := json.Marshal(struct {\n\t\tItemsCold []byte `json:\"items_cold\"`\n\t\tItemsHot []byte `json:\"items_hot\"`\n\t}{\n\t\tItemsCold: packedCold,\n\t\tItemsHot: packedHot,\n\t})\n\tif err != nil {\n\t\treturn errors.Annotate(err, \"failed to marshal stats json\").Err()\n\t}\n\tif err := ioutil.WriteFile(path, statsJSON, 0600); err != nil {\n\t\treturn errors.Annotate(err, \"failed to write stats json\").Err()\n\t}\n\n\treturn nil\n}", "func percentile(orderedObservations []float64, l int, p float64) float64 {\n\treturn orderedObservations[int(p*float64(l))]\n}", "func HandlePercentile(w http.ResponseWriter, req *http.Request) {\n\tdecoder := json.NewDecoder(req.Body)\n\n\tvar numbers Numbers\n\n\terr := decoder.Decode(&numbers)\n\n\tif err != nil {\n\t\tpanic(err)\n\t}\n\n\t// Prevent division by zero\n\tperc := Percentile(numbers)\n\n\tw.Header().Set(\"Content-Type\", \"application/json\")\n\tw.WriteHeader(http.StatusOK)\n\n\tjsonRes, _ := json.Marshal(&Response{Result: 
perc})\n\n\tw.Write(jsonRes)\n}", "func PrintGCSummary(w io.Writer) {\r\n\tmemStats := &runtime.MemStats{}\r\n\truntime.ReadMemStats(memStats)\r\n\tgcstats := &debug.GCStats{PauseQuantiles: make([]time.Duration, 100)}\r\n\tdebug.ReadGCStats(gcstats)\r\n\r\n\tprintGC(memStats, gcstats, w)\r\n}", "func (c *Collector) Percentile(percent float64) float64 {\n\tc.Lock()\n\tdefer c.Unlock()\n\ttargetCount := int(percent * float64(c.Count) / 100)\n\n\tcount := 0\n\tfor _, b := range c.Buckets {\n\t\tcount += b.Count\n\t\tif count >= targetCount {\n\t\t\treturn b.Max\n\t\t}\n\t}\n\n\treturn c.Max\n}", "func (cfw *CoverageDataWriter) Write(metaFileHash [16]byte, args map[string]string, visitor CounterVisitor) error {\n\tif err := cfw.writeHeader(metaFileHash); err != nil {\n\t\treturn err\n\t}\n\treturn cfw.AppendSegment(args, visitor)\n}", "func PrintDiskStats() {\n\tfmt.Println(DiskStats)\n}", "func (*PUriHistogram) Descriptor() ([]byte, []int) {\n\treturn file_v1_Stat_proto_rawDescGZIP(), []int{24}\n}", "func ReportPrinter(w io.Writer, minPriority int8, colors bool) func(r reports.Report) error {\n\tvar pal *palette\n\tif colors {\n\t\tpal = &colored\n\t} else {\n\t\tpal = &notcolored\n\t}\n\n\tif w == nil {\n\t\tw = os.Stdout\n\t}\n\n\treturn func(r reports.Report) error {\n\t\tprintReport(r, w, minPriority, pal)\n\t\treturn nil\n\t}\n}", "func (a ASCIITableWriter) Write(out io.Writer, p *api.Project) error {\n\ttable := tablewriter.NewWriter(out)\n\ttable.SetHeader([]string{\"RELEASE\", \"Downloads\"})\n\n\tfor _, r := range p.Releases() {\n\t\ttable.Append([]string{r.Name, strconv.Itoa(r.DownloadCount())})\n\t}\n\ttable.Render()\n\treturn nil\n}", "func (h *Histogram) Log(msg string, percentiles []float64) {\n\tvar b bytes.Buffer\n\tw := bufio.NewWriter(&b)\n\th.Print(w, msg, percentiles)\n\tw.Flush() // nolint: gas,errcheck\n\tlog.Infof(\"%s\", b.Bytes())\n}", "func (s *Scanner) WriteTopN(n int, w io.Writer) {\n\tfor i := 0; i < n && i < len(s.Freqs); i++ {\n\t\tfmt.Fprintln(w, s.Freqs[i])\n\t}\n}", "func WriteScrapeResults(visitedMap map[string]struct{}, repos []GoRepo, filename string) {\n\tfile, err := os.OpenFile(filename, os.O_WRONLY|os.O_APPEND|os.O_RDWR, 0666)\n\tdefer file.Close()\n\tif err != nil {\n\t\tlog.Fatalf(\"could not open file %v\", err)\n\t}\n\toutput := csv.NewWriter(file)\n\tvar results [][]string\n\tfor _, repo := range repos {\n\t\tif _, ok := visitedMap[repo.ID()]; ok {\n\t\t\tcontinue\n\t\t}\n\t\tresults = append(results, []string{\n\t\t\trepo.Owner,\n\t\t\trepo.Repo,\n\t\t\tfmt.Sprintf(\"%s\", repo.CreatedAt.Format(time.RFC3339)),\n\t\t})\n\t\tvisitedMap[repo.ID()] = struct{}{}\n\t}\n\terr = output.WriteAll(results)\n\tif err != nil {\n\t\tlog.Fatalf(\"Could not write to file %s: %v\", filename, err)\n\t}\n}", "func writeProfile(l log.Logger) error {\n\tcpu, err := os.Create(\"cpu.prof\")\n\tif err != nil {\n\t\treturn err\n\t}\n\theap, err := os.Create(\"heap.prof\")\n\tif err != nil {\n\t\treturn err\n\t}\n\n\tpprof.StartCPUProfile(cpu)\n\tgo func() {\n\t\tdefer cpu.Close()\n\t\tdefer heap.Close()\n\n\t\tl.Info().Msg(\"Profiling, will exit in 30 seconds\")\n\t\ttime.Sleep(30 * time.Second)\n\t\tpprof.StopCPUProfile()\n\t\tpprof.WriteHeapProfile(heap)\n\n\t\tos.Exit(0)\n\t}()\n\n\treturn nil\n}", "func (mw *multiWriterHolder) Write(p []byte) (int, error) {\n\ttype data struct {\n\t\tn int\n\t\terr error\n\t}\n\n\tresults := make(chan data, len(mw.writers))\n\tmw.mu.RLock()\n\tdefer mw.mu.RUnlock()\n\tfor x := range mw.writers {\n\t\tgo func(w io.Writer, p []byte, ch 
chan<- data) {\n\t\t\tn, err := w.Write(p)\n\t\t\tif err != nil {\n\t\t\t\tch <- data{n, fmt.Errorf(\"%T %w\", w, err)}\n\t\t\t\treturn\n\t\t\t}\n\t\t\tif n != len(p) {\n\t\t\t\tch <- data{n, fmt.Errorf(\"%T %w\", w, io.ErrShortWrite)}\n\t\t\t\treturn\n\t\t\t}\n\t\t\tch <- data{n, nil}\n\t\t}(mw.writers[x], p, results)\n\t}\n\n\tfor range mw.writers {\n\t\t// NOTE: These results do not necessarily reflect the current io.writer\n\t\t// due to the go scheduler and writer finishing at different times, the\n\t\t// response coming from the channel might not match up with the for loop\n\t\t// writer.\n\t\td := <-results\n\t\tif d.err != nil {\n\t\t\treturn d.n, d.err\n\t\t}\n\t}\n\treturn len(p), nil\n}", "func (d TestSink) Histogram(c *telemetry.Context, stat string, value float64) {\n\td[stat] = TestMetric{\"Histogram\", value, c.Tags()}\n}", "func (sc *SafeCounter) printFileCounts() {\n\tlog.Printf(\"%d total files %.1f MB\\n\", sc.fileCounts[\"total\"], float64(sc.nbytes)/1e6)\n\tfor extension, count := range sc.fileCounts {\n\t\tif extension == \"total\" {\n\t\t\tcontinue\n\t\t}\n\t\tlog.Printf(\"%d %s files, %d Decode Errors\\n\", count, extension, sc.errorCounts[extension])\n\t}\n}", "func printResults(finalCount map[string]int) {\n\tfor index, phrase := range sortedKeys(finalCount) {\n\t\tif index == 100 {\n\t\t\tbreak\n\t\t}\n\t\tfmt.Printf(\"%v: %v\\n\", phrase, finalCount[phrase])\n\t}\n}", "func (p *Percentiles) Flush() ([]processorResult, bool) {\n\n\tsize := len(p.values)\n\tif size == 0 {\n\t\treturn nil, false\n\t}\n\n\tvar results []processorResult\n\tsort.Float64s(p.values)\n\n\tfor fcnName, percent := range p.percents {\n\t\trank := (percent / 100) * (float64(size) + 1)\n\t\tfloor := int(rank)\n\n\t\tif rank < 1 {\n\t\t\tresults = append(results, processorResult{fcnName, p.values[0]})\n\t\t} else if floor >= size {\n\t\t\tresults = append(results, processorResult{fcnName, p.values[size-1]})\n\t\t} else {\n\t\t\tfrac := rank - float64(floor)\n\t\t\tupper := floor + 1\n\t\t\tpercentile := p.values[floor-1] + frac*(p.values[upper-1]-p.values[floor-1])\n\t\t\tresults = append(results, processorResult{fcnName, percentile})\n\t\t}\n\t}\n\n\treturn results, true\n}", "func (c *Printer) PrintBuckets() {\n\tfor {\n\t\tbucket, more := <-c.channel\n\t\tif more {\n\t\t\tfmt.Fprintf(out, \"%s\\n\", bucket)\n\t\t} else {\n\t\t\tc.done <- true\n\t\t\treturn\n\t\t}\n\t}\n}", "func (m *PrinterDefaults) Serialize(writer i878a80d2330e89d26896388a3f487eef27b0a0e6c010c493bf80be1452208f91.SerializationWriter)(error) {\n if m.GetColorMode() != nil {\n cast := (*m.GetColorMode()).String()\n err := writer.WriteStringValue(\"colorMode\", &cast)\n if err != nil {\n return err\n }\n }\n {\n err := writer.WriteStringValue(\"contentType\", m.GetContentType())\n if err != nil {\n return err\n }\n }\n {\n err := writer.WriteInt32Value(\"copiesPerJob\", m.GetCopiesPerJob())\n if err != nil {\n return err\n }\n }\n {\n err := writer.WriteInt32Value(\"dpi\", m.GetDpi())\n if err != nil {\n return err\n }\n }\n if m.GetDuplexMode() != nil {\n cast := (*m.GetDuplexMode()).String()\n err := writer.WriteStringValue(\"duplexMode\", &cast)\n if err != nil {\n return err\n }\n }\n if m.GetFinishings() != nil {\n err := writer.WriteCollectionOfStringValues(\"finishings\", SerializePrintFinishing(m.GetFinishings()))\n if err != nil {\n return err\n }\n }\n {\n err := writer.WriteBoolValue(\"fitPdfToPage\", m.GetFitPdfToPage())\n if err != nil {\n return err\n }\n }\n {\n err := writer.WriteStringValue(\"inputBin\", 
m.GetInputBin())\n if err != nil {\n return err\n }\n }\n {\n err := writer.WriteStringValue(\"mediaColor\", m.GetMediaColor())\n if err != nil {\n return err\n }\n }\n {\n err := writer.WriteStringValue(\"mediaSize\", m.GetMediaSize())\n if err != nil {\n return err\n }\n }\n {\n err := writer.WriteStringValue(\"mediaType\", m.GetMediaType())\n if err != nil {\n return err\n }\n }\n if m.GetMultipageLayout() != nil {\n cast := (*m.GetMultipageLayout()).String()\n err := writer.WriteStringValue(\"multipageLayout\", &cast)\n if err != nil {\n return err\n }\n }\n {\n err := writer.WriteStringValue(\"@odata.type\", m.GetOdataType())\n if err != nil {\n return err\n }\n }\n if m.GetOrientation() != nil {\n cast := (*m.GetOrientation()).String()\n err := writer.WriteStringValue(\"orientation\", &cast)\n if err != nil {\n return err\n }\n }\n {\n err := writer.WriteStringValue(\"outputBin\", m.GetOutputBin())\n if err != nil {\n return err\n }\n }\n {\n err := writer.WriteInt32Value(\"pagesPerSheet\", m.GetPagesPerSheet())\n if err != nil {\n return err\n }\n }\n if m.GetQuality() != nil {\n cast := (*m.GetQuality()).String()\n err := writer.WriteStringValue(\"quality\", &cast)\n if err != nil {\n return err\n }\n }\n if m.GetScaling() != nil {\n cast := (*m.GetScaling()).String()\n err := writer.WriteStringValue(\"scaling\", &cast)\n if err != nil {\n return err\n }\n }\n {\n err := writer.WriteAdditionalData(m.GetAdditionalData())\n if err != nil {\n return err\n }\n }\n return nil\n}", "func writeHeapToFile(h *minHeap, filename string) {\n\tfile, err := os.OpenFile(filename, os.O_WRONLY|os.O_CREATE|os.O_TRUNC, os.ModePerm)\n\tdefer file.Close()\n\tif err != nil {\n\t\tlog.Fatal(err.Error())\n\t}\n\turlCntMap := make(map[string]int)\n\tfor i := 0; i < h.cap; i++ {\n\t\tpair, err := h.getNode(i)\n\t\tif err != nil {\n\t\t\tcontinue\n\t\t}\n\t\turlCntMap[pair.url] = pair.cnt\n\t}\n\tgenResult(filename, urlCntMap, h.cap)\n}", "func New(writer io.Writer, rate time.Duration) func(float64) error {\n\tvar (\n\t\ti int\n\t\tthrottle = time.Tick(rate)\n\t)\n\n\treturn func(pc float64) error {\n\t\t<-throttle\n\t\tif pc > 1 || pc < 0 {\n\t\t\treturn fmt.Errorf(\"percent %f invalid\", pc)\n\t\t}\n\t\t// Render text and padding.\n\t\tpl := 6 - len(strconv.Itoa(int(pc*1e+2)))\n\t\tstr := fmt.Sprintf(\"\\r%.2f%%%s\", pc*1e+2, strings.Repeat(\" \", pl))\n\t\t// Render bar.\n\t\tn := int(pc * 1e+2 / (float64(100) / float64(60)))\n\t\tstr += fmt.Sprintf(\"[%s%s]\", strings.Repeat(\"█\", n), strings.Repeat(\"-\", 60-n))\n\t\t// Render spinner.\n\t\tio.WriteString(writer, str+fmt.Sprintf(\" %c\", `-\\|/`[i%4]))\n\t\ti++\n\t\treturn nil\n\t}\n}", "func (p *BSONPrinter) Print(filename string) error {\n\tvar err error\n\tvar data []byte\n\tvar doc bson.M\n\tvar fd *bufio.Reader\n\tif fd, err = gox.NewFileReader(filename); err != nil {\n\t\treturn err\n\t}\n\tif data, err = ioutil.ReadAll(fd); err != nil {\n\t\treturn err\n\t}\n\tbson.Unmarshal(data, &doc)\n\tif doc[\"keyhole\"] == nil {\n\t\treturn errors.New(\"unsupported, keyhole signature not found\")\n\t}\n\tvar logger gox.Logger\n\tif buf, err := bson.Marshal(doc[\"keyhole\"]); err == nil {\n\t\tif err = bson.Unmarshal(buf, &logger); err != nil {\n\t\t\treturn err\n\t\t}\n\t\tfmt.Println(logger.Print())\n\t} else {\n\t\treturn err\n\t}\n\tif strings.HasSuffix(filename, \"-log.bson.gz\") {\n\t\tli := NewLogInfo(p.version)\n\t\tif err = li.AnalyzeFile(filename); err != nil {\n\t\t\treturn err\n\t\t}\n\t\tli.Print()\n\t\tif err = li.OutputJSON(); err != nil 
{\n\t\t\treturn err\n\t\t}\n\t} else if strings.HasSuffix(filename, \"-index.bson.gz\") {\n\t\tix := NewIndexStats(p.version)\n\t\tif err = ix.SetClusterDetailsFromFile(filename); err != nil {\n\t\t\treturn err\n\t\t}\n\t\tix.Print()\n\t\tif err = ix.OutputJSON(); err != nil {\n\t\t\treturn err\n\t\t}\n\t} else if strings.HasSuffix(filename, \".bson.gz\") {\n\t\tif strings.HasSuffix(filename, \"-perf.bson.gz\") {\n\t\t\ttype Perf struct {\n\t\t\t\tLogger *gox.Logger `bson:\"keyhole\"`\n\t\t\t\tMetrics map[string][]bson.M `bson:\"metrics\"`\n\t\t\t\tResults []string `bson:\"results\"`\n\t\t\t}\n\t\t\tvar perf Perf\n\t\t\tif err = bson.Unmarshal(data, &perf); err != nil {\n\t\t\t\treturn err\n\t\t\t}\n\t\t\tfor _, res := range perf.Results {\n\t\t\t\tfmt.Println(res)\n\t\t\t}\n\t\t\tdelete(doc, \"results\")\n\t\t} else if strings.HasSuffix(filename, \"-stats.bson.gz\") {\n\t\t\tvar cluster ClusterStats\n\t\t\tif err = bson.Unmarshal(data, &cluster); err != nil {\n\t\t\t\treturn err\n\t\t\t}\n\t\t\tcluster.Print()\n\t\t}\n\t\tos.Mkdir(outdir, 0755)\n\t\tofile := filepath.Base(filename)\n\t\tidx := strings.Index(ofile, \".bson\")\n\t\tofile = fmt.Sprintf(`%v/%v.json`, outdir, (ofile)[:idx])\n\t\tif data, err = bson.MarshalExtJSON(doc, false, false); err != nil {\n\t\t\treturn err\n\t\t}\n\t\tif err = ioutil.WriteFile(ofile, data, 0644); err != nil {\n\t\t\treturn err\n\t\t}\n\t\tfmt.Println(\"json data written to\", ofile)\n\t} else {\n\t\treturn errors.New(\"unsupported\")\n\t}\n\treturn err\n}", "func (p *OnuIgmpProfile) Tabwrite() {\r\n\tfmt.Println(\"|| ONU IGMP Profile ||\")\r\n\tl := p.ListEssentialParams()\r\n\ttw := new(tabwriter.Writer).Init(os.Stdout, 0, 8, 2, ' ', 0)\r\n\tfor _, v := range OnuIgmpProfileHeaders {\r\n\t\tfmt.Fprintf(tw, \"%v\\t\", v)\r\n\t}\r\n\tfmt.Fprintf(tw, \"\\n\")\r\n\tfor _, v := range OnuIgmpProfileHeaders {\r\n\t\tfmt.Fprintf(tw, \"%v\\t\", fs(v))\r\n\t}\r\n\tfmt.Fprintf(tw, \"\\n\")\r\n\tfor _, v := range OnuIgmpProfileHeaders {\r\n\t\tfmt.Fprintf(tw, \"%v\\t\", l[v])\r\n\t}\r\n\tfmt.Fprintf(tw, \"\\n\")\r\n\tfor _, v := range OnuIgmpProfileHeaders {\r\n\t\tfmt.Fprintf(tw, \"%v\\t\", fs(v))\r\n\t}\r\n\tfmt.Fprintf(tw, \"\\n\")\r\n\ttw.Flush()\r\n}", "func writer() {\n\n\tdefer WaitGroup.Done()\n\n\tvar opuslen int16\n\tvar err error\n\n\t// 16KB output buffer\n\tstdout := bufio.NewWriterSize(os.Stdout, 16384)\n\tdefer func() {\n\t\terr := stdout.Flush()\n\t\tif err != nil {\n\t\t\tlog.Println(\"error flushing stdout, \", err)\n\t\t}\n\t}()\n\n\tfor {\n\t\topus, ok := <-OutputChan\n\t\tif !ok {\n\t\t\t// if chan closed, exit\n\t\t\treturn\n\t\t}\n\n\t\t// write header\n\t\topuslen = int16(len(opus))\n\t\terr = binary.Write(stdout, binary.LittleEndian, &opuslen)\n\t\tif err != nil {\n\t\t\tfmt.Println(\"error writing output: \", err)\n\t\t\treturn\n\t\t}\n\n\t\t// write opus data to stdout\n\t\terr = binary.Write(stdout, binary.LittleEndian, &opus)\n\t\tif err != nil {\n\t\t\tfmt.Println(\"error writing output: \", err)\n\t\t\treturn\n\t\t}\n\t}\n}", "func critHashWriter(prime int, nextPrimeWG *sync.WaitGroup, critHashEntryChan <-chan critHashEntry, preperiodicChan chan<- []string) {\n\thash := make(map[preP][]int, prime)\n\n\tcounter := preperiodicCounter{0, 0, 0, 1, 1, 1} //initialie counter after c=0 is accounted for\n\tfor i := 0; i < prime-1; i++ {\n\t\ta := <-critHashEntryChan //a is a hash entry from the channel\n\t\tincrementPreperiodicCounter(&counter, a)\n\t\thash[[2]int{a.h, a.n}] = append(hash[[2]int{a.h, a.n}], a.constant)\n\t}\n\tout := 
scorecritHash(prime, hash, counter)\n\tpreperiodicChan <- []string{strconv.Itoa(out.prime), strconv.FormatFloat(out.hAvg, 'f', -1, 64), strconv.Itoa(out.hMax), strconv.FormatFloat(out.nAvg, 'f', -1, 64), strconv.Itoa(out.nMax), strconv.FormatFloat(out.hnAvg, 'f', -1, 64), strconv.Itoa(out.hnMax), strconv.FormatFloat(out.tAvg, 'f', -1, 64), strconv.Itoa(out.tMax), strconv.FormatFloat(out.singletonRatio, 'f', -1, 64), strconv.Itoa(out.nonsingletonClasses)}\n\n\tnextPrimeWG.Done()\n}", "func (writer *testWriter) Write(b []byte) (int, error) {\n\tfmt.Print(\"[OUT] > \")\n\treturn os.Stdout.Write(b)\n}", "func (b *Bench) String() string {\n\tprefix := \" \"\n\tvar buf bytes.Buffer\n\tpercentiles := []float64{5, 50, 70, 90, 95, 99, 99.9, 99.95, 99.99, 100}\n\n\tif b.rps <= 0 {\n\t\tfmt.Fprintf(&buf, \"Duration: %2.2fs, Concurrency: %d, Total runs: %d\\n\", b.timeTaken.Seconds(), b.concurrentRuns, b.calls)\n\t} else {\n\t\tfmt.Fprintf(&buf, \"Rate: %d calls/sec, Duration: %2.2fs, Concurrency: %d, Total runs: %d\\n\", b.rps, b.timeTaken.Seconds(), b.concurrentRuns, b.calls)\n\t}\n\n\tfor n, h := range b.timers {\n\t\tfmt.Fprintf(&buf, \"%s>>Timer: %s \\n\", prefix, n)\n\t\tfor _, p := range percentiles {\n\t\t\tfmt.Fprintf(&buf, \"%s%s%2.2fth percentile: %.2fms\\n\", prefix, prefix, p, float64(h.ValueAtQuantile(p))/1000000)\n\t\t}\n\t\tfmt.Fprintf(&buf, \"%s%sMean: %.2fms\\n\", prefix, prefix, float64(h.Mean())/1000000.0)\n\t}\n\tfor n, count := range b.counters {\n\t\tfmt.Fprintf(&buf, \"%s>>Counter: %s\\n\", prefix, n)\n\t\tfmt.Fprintf(&buf, \"%s%sValue: %d \\n\", prefix, prefix, count)\n\t}\n\treturn buf.String()\n}", "func (h *Hist) SaveImage(f string) {\n\tdata := plotter.Values(h.Counts)\n\n\tif h.Normalize {\n\t\tdata = h.NormCounts()\n\t}\n\n\tp, err := plot.New()\n\tif err != nil {\n\t\tpanic(err)\n\t}\n\n\tp.Title.Text = h.Title\n\tp.Y.Label.Text = \"Count\"\n\tif h.Normalize {\n\t\tp.Y.Label.Text = \"Frequency\"\n\t}\n\n\tbins := make([]plotter.HistogramBin, len(h.BinStart))\n\tfor i, binStart := range h.BinStart {\n\t\tbins[i] = plotter.HistogramBin{binStart, h.BinEnd[i], data[i]}\n\t}\n\n\tph := &plotter.Histogram{\n\t\tBins: bins,\n\t\tWidth: h.DataMax - h.DataMin,\n\t\tFillColor: plotutil.Color(2),\n\t\tLineStyle: plotter.DefaultLineStyle,\n\t}\n\tph.LineStyle.Width = vg.Length(0.5)\n\tph.Color = plotutil.Color(0)\n\n\tp.Add(ph)\n\tdigits := strconv.Itoa(int(h.Precision))\n\tmodeStr := fmt.Sprintf(\" ApproxMode: %.\"+digits+\"f\", h.GetMode())\n\tinfo := strings.TrimRight(h.Info, \"\\n\") + modeStr\n\tp.X.Label.Text = info\n\n\tif err := p.Save(11.69*vg.Inch, 8.27*vg.Inch, f); err != nil {\n\t\tpanic(err)\n\t}\n}", "func (m *MockHistogram) Percentile(percentile float64) float64 {\n\targs := m.Called(percentile)\n\treturn args.Get(0).(float64)\n}", "func (io *Io) Print(a interface{}) {\n\tfmt.Fprint(io.writer, a)\n}", "func printFooter(w io.Writer, info *athena.QueryExecution) {\n\tstats := info.Statistics\n\trunTimeMs := aws.Int64Value(stats.EngineExecutionTimeInMillis)\n\tscannedBytes := aws.Int64Value(stats.DataScannedInBytes)\n\tloc := aws.StringValue(info.ResultConfiguration.OutputLocation)\n\tlog.Printf(\"EngineExecutionTimeInMillis: %d milliseconds\\n\", runTimeMs)\n\tlog.Printf(\"DataScannedInBytes: %d bytes\\n\", scannedBytes)\n\tlog.Printf(\"OutputLocation: %s\\n\", loc)\n\tfmt.Fprintf(w, \"Run time: %.2f seconds | Data scanned: %s\\nLocation: %s\\n\",\n\t\tfloat64(runTimeMs)/1000, FormatBytes(scannedBytes), loc)\n}", "func (buckets HistogramBuckets) Table() string {\n\tif 
len(buckets) == 0 {\n\t\treturn \"\"\n\t}\n\tbuf := bytes.NewBuffer(nil)\n\ttb := tablewriter.NewWriter(buf)\n\ttb.SetAutoWrapText(false)\n\ttb.SetColWidth(1500)\n\ttb.SetCenterSeparator(\"*\")\n\ttb.SetAlignment(tablewriter.ALIGN_CENTER)\n\ttb.SetCaption(true, fmt.Sprintf(\"\t(%q scale)\", buckets[0].Scale))\n\ttb.SetHeader([]string{\"lower bound\", \"upper bound\", \"count\"})\n\tfor _, v := range buckets {\n\t\tlo := fmt.Sprintf(\"%f\", v.LowerBound)\n\t\tif v.Scale == \"milliseconds\" {\n\t\t\tlo = fmt.Sprintf(\"%.3f\", v.LowerBound)\n\t\t}\n\t\thi := fmt.Sprintf(\"%f\", v.UpperBound)\n\t\tif v.Scale == \"milliseconds\" {\n\t\t\thi = fmt.Sprintf(\"%.3f\", v.UpperBound)\n\t\t}\n\t\tif v.UpperBound == math.MaxFloat64 {\n\t\t\thi = \"math.MaxFloat64\"\n\t\t}\n\t\ttb.Append([]string{lo, hi, fmt.Sprintf(\"%d\", v.Count)})\n\t}\n\ttb.Render()\n\treturn buf.String()\n}", "func ProgressBar(before string, after string, hook func(loaded float64, speed float64, remain float64), writer io.Writer) func(received int64, readed int64, total int64, start int64, end int64) {\n\tvar (\n\t\tstartTime = time.Now()\n\t\tlastTime = startTime\n\t\tlastReceived int64\n\t)\n\treturn func(received int64, readed int64, total int64, start int64, end int64) {\n\t\ttickerDuration := time.Since(lastTime).Seconds()\n\t\tif tickerDuration < 1 && received < total {\n\t\t\treturn\n\t\t}\n\t\tduration := time.Since(startTime).Seconds()\n\t\tloaded := float64(start+received) / float64(end) * 100\n\t\tspeed := float64(received) / 1024 / duration\n\t\tcurrspeed := float64(received-lastReceived) / 1024 / tickerDuration\n\t\tremain := float64(total-received) / 1024 / speed\n\t\tif hook != nil {\n\t\t\thook(float64(start+readed)/float64(end)*100, speed, remain)\n\t\t}\n\t\tif writer == nil {\n\t\t\twriter = os.Stdout\n\t\t}\n\t\tfmt.Fprintf(writer, \"\\r\\033[2K\\r%s%s%.1f%% %s/%s/%s %.2fKB/s %.2fKB/s %.1f %.1f%s\", before, Bar(int(loaded), 25), loaded, ByteFormat(uint64(start+readed)), ByteFormat(uint64(start+received)), ByteFormat(uint64(total)), speed, currspeed, duration, remain, after)\n\t\tlastReceived = received\n\t\tlastTime = time.Now()\n\t}\n}", "func (b *B) ReportMetric(n float64, unit string) {}", "func (c *Cuckoo) Print() {\n\tfor ti, t := range c.tables {\n\t\tfor si, s := range t.buckets {\n\t\t\tfmt.Printf(\"[%d][%d]: \", ti, si)\n\t\t\tcnt := 0\n\t\t\tfor _, b := range s {\n\t\t\t\tif b.key != c.emptyKey {\n\t\t\t\t\tcnt++\n\t\t\t\t}\n\t\t\t}\n\t\t\tfmt.Printf(\"%d\\n\", cnt)\n\t\t}\n\t}\n}", "func dumpMetrics(t *testing.T, metrics []map[string]float64, outfile string) {\n\toutFile := getOutFile(outfile)\n\n\tf, err := os.OpenFile(outFile, os.O_CREATE|os.O_WRONLY, 0666)\n\trequire.NoError(t, err, \"Failed opening file\")\n\tdefer f.Close()\n\n\theaderSet := make(map[string]bool)\n\tfor _, metric := range metrics {\n\t\tfor area := range metric {\n\t\t\theaderSet[area] = true\n\t\t}\n\t}\n\n\tvar headers []string\n\tfor area := range headerSet {\n\t\theaders = append(headers, area)\n\t}\n\n\twriter := csv.NewWriter(f)\n\n\terr = writer.Write(headers)\n\trequire.NoError(t, err, \"Failed writting file\")\n\twriter.Flush()\n\n\tfor _, metric := range metrics {\n\t\tvar data []string\n\t\tfor _, header := range headers {\n\t\t\tvalue, isPresent := metric[header]\n\t\t\tif isPresent {\n\t\t\t\tvStr := strconv.FormatFloat(value, 'f', -1, 64)\n\t\t\t\tdata = append(data, vStr)\n\t\t\t} else {\n\t\t\t\tdata = append(data, \"\")\n\t\t\t}\n\t\t}\n\t\terr = writer.Write(data)\n\t\trequire.NoError(t, err, \"Failed writting 
file\")\n\t\twriter.Flush()\n\t}\n}", "func Fprintln(w io.Writer, a ...interface{}) (n int, err error) { return fmt.Fprintln(w, a...) }", "func (oipl *OnuIgmpProfileList) Tabwrite() {\r\n\tfmt.Println(\"|| ONU IGMP Profile List ||\")\r\n\t// create the writer\r\n\ttw := new(tabwriter.Writer).Init(os.Stdout, 0, 8, 2, ' ', 0)\r\n\t// write tab-separated header values to tw buffer\r\n\tfor _, v := range OnuIgmpProfileHeaders {\r\n\t\tfmt.Fprintf(tw, \"%v\\t\", v)\r\n\t}\r\n\tfmt.Fprintf(tw, \"\\n\")\r\n\t// write tab-separated spacers (-) reflecting the length of the headers\r\n\tfor _, v := range OnuIgmpProfileHeaders {\r\n\t\tfmt.Fprintf(tw, \"%v\\t\", fs(v))\r\n\t}\r\n\tfmt.Fprintf(tw, \"\\n\")\r\n\toips := oipl.Separate()\r\n\tfor _, oip := range oips {\r\n\t\t// first get the data as a map\r\n\t\tl := oip.ListEssentialParams()\r\n\t\t// iterate over the map using the header as string key\r\n\t\tfor _, v := range OnuIgmpProfileHeaders {\r\n\t\t\tfmt.Fprintf(tw, \"%v\\t\", l[v])\r\n\t\t}\r\n\t\tfmt.Fprintf(tw, \"\\n\")\r\n\t}\r\n\r\n\t// write tab-separated spacers (-) reflecting the length of the headers\r\n\tfor _, v := range OnuIgmpProfileHeaders {\r\n\t\tfmt.Fprintf(tw, \"%v\\t\", fs(v))\r\n\t}\r\n\tfmt.Fprintf(tw, \"\\n\")\r\n\t// calculate column width and print table from tw buffer\r\n\ttw.Flush()\r\n}", "func (bc *Benchmark) PrintStats() {\n\tbc.stats.Print()\n}", "func (b *Bar) Percent(i int) {\n\tif i != b.percent {\n\t\tb.percent = i\n\t\tb.draw()\n\t}\n}", "func (t *Tree) Print(w io.Writer, f IterateFunc, itemSiz int) {\n\n\tfmt.Fprintf(w, \"treeNode-+-Left \\t / Left High\\n\")\n\tfmt.Fprintf(w, \" | \\t = Equal\\n\")\n\tfmt.Fprintf(w, \" +-Right\\t \\\\ Right High\\n\\n\")\n\n\tmaxHeight := t.Height()\n\n\tif f != nil && t.root != nil {\n\t\td := &printData{0, itemSiz, make([]byte, maxHeight), 0, f, w}\n\t\td.printer(t.root)\n\t}\n}", "func (w *workTally) Print() {\n\ttable := tablewriter.NewWriter(os.Stdout)\n\ttable.SetHeader([]string{\"Issue ID\", \"Summary\", \"Time Spent\"})\n\ttable.SetFooter([]string{\"\", \"Total\", w.total.String()})\n\ttable.SetBorder(false)\n\tfor _, key := range w.sortedKeys() {\n\t\tentry := w.durationMap[key]\n\t\ttable.Append([]string{\n\t\t\tkey,\n\t\t\ttruncateString(entry.summary, 64),\n\t\t\tentry.duration.String(),\n\t\t})\n\t}\n\tfmt.Println(\"\")\n\ttable.Render()\n}", "func printTable(out io.Writer, rows [][]string) {\n\ttw := tablewriter.NewWriter(out)\n\ttw.AppendBulk(rows)\n\ttw.Render()\n}" ]
[ "0.6858314", "0.63657504", "0.59310836", "0.5901898", "0.5371624", "0.53201205", "0.5209276", "0.5158634", "0.5095373", "0.5086567", "0.50616884", "0.5002433", "0.4985743", "0.4984203", "0.49557132", "0.495502", "0.49234828", "0.4906405", "0.48740783", "0.4846003", "0.4841387", "0.48335028", "0.48101175", "0.48039278", "0.47810483", "0.47486505", "0.47395676", "0.47211188", "0.46720114", "0.46543553", "0.4654272", "0.4643518", "0.4625536", "0.46231544", "0.46214628", "0.4608257", "0.46024176", "0.4599497", "0.45986396", "0.45817506", "0.45687315", "0.4548345", "0.45450655", "0.45448446", "0.45408556", "0.4540772", "0.4535075", "0.4530675", "0.45256224", "0.45186812", "0.4503998", "0.44805607", "0.44353718", "0.44310123", "0.44227076", "0.44211537", "0.44206354", "0.441993", "0.44186077", "0.43967512", "0.43894067", "0.43887317", "0.43795803", "0.43716487", "0.4345463", "0.4343729", "0.4338", "0.4336121", "0.43204316", "0.43167895", "0.43033364", "0.42950332", "0.42935064", "0.42882854", "0.4287896", "0.42805758", "0.4275936", "0.42756426", "0.42603436", "0.42593807", "0.42535675", "0.42503113", "0.4240041", "0.42398712", "0.4234334", "0.4232508", "0.42309928", "0.4227829", "0.42253074", "0.4219973", "0.42163655", "0.42094046", "0.42041704", "0.4201806", "0.41987363", "0.4196906", "0.41926342", "0.41903687", "0.41879576", "0.4185962" ]
0.62530786
2
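For orientation, a minimal usage sketch of the histogram Print/Export API that the query/document rows in this dump reference. It is not taken from the dataset itself: the import path, the NewHistogram constructor arguments, and the Record method are assumptions inferred from the snippets, so treat it as an illustrative sketch rather than the library's documented API.

package main

import (
	"os"

	"fortio.org/fortio/stats" // assumed import path for the Histogram shown in the rows
)

func main() {
	// Assumed constructor: offset 0, divider 1 (bucket scaling parameters).
	h := stats.NewHistogram(0, 1)
	for _, v := range []float64{1, 2, 3, 5, 8, 13, 21} {
		h.Record(v) // record one sample per call (method name assumed)
	}
	// Print writes the bucket table and the requested target percentiles,
	// going through a single Export() as the query text above describes.
	h.Print(os.Stdout, "latency (ms)", []float64{50, 90, 99})
}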
Print dumps the histogram (and counter) to the provided writer. Also calculates the percentiles. Use Export() once and Print if you are going to need the Export results too.
func (h *Histogram) Print(out io.Writer, msg string, percentiles []float64) {
	h.Export().CalcPercentiles(percentiles).Print(out, msg)
}
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "func (v HistogramValue) Print(w io.Writer) {\n\tavg := float64(v.Sum) / float64(v.Count)\n\tfmt.Fprintf(w, \"Count: %d Min: %d Max: %d Avg: %.2f\\n\", v.Count, v.Min, v.Max, avg)\n\tfmt.Fprintf(w, \"%s\\n\", strings.Repeat(\"-\", 60))\n\tif v.Count <= 0 {\n\t\treturn\n\t}\n\n\tmaxBucketDigitLen := len(strconv.FormatFloat(v.Buckets[len(v.Buckets)-1].LowBound, 'f', 6, 64))\n\tif maxBucketDigitLen < 3 {\n\t\t// For \"inf\".\n\t\tmaxBucketDigitLen = 3\n\t}\n\tmaxCountDigitLen := len(strconv.FormatInt(v.Count, 10))\n\tpercentMulti := 100 / float64(v.Count)\n\n\taccCount := int64(0)\n\tfor i, b := range v.Buckets {\n\t\tfmt.Fprintf(w, \"[%*f, \", maxBucketDigitLen, b.LowBound)\n\t\tif i+1 < len(v.Buckets) {\n\t\t\tfmt.Fprintf(w, \"%*f)\", maxBucketDigitLen, v.Buckets[i+1].LowBound)\n\t\t} else {\n\t\t\tfmt.Fprintf(w, \"%*s)\", maxBucketDigitLen, \"inf\")\n\t\t}\n\n\t\taccCount += b.Count\n\t\tfmt.Fprintf(w, \" %*d %5.1f%% %5.1f%%\", maxCountDigitLen, b.Count, float64(b.Count)*percentMulti, float64(accCount)*percentMulti)\n\n\t\tconst barScale = 0.1\n\t\tbarLength := int(float64(b.Count)*percentMulti*barScale + 0.5)\n\t\tfmt.Fprintf(w, \" %s\\n\", strings.Repeat(\"#\", barLength))\n\t}\n}", "func (e *HistogramData) Print(out io.Writer, msg string) {\n\tif len(e.Data) == 0 {\n\t\t_, _ = fmt.Fprintf(out, \"%s : no data\\n\", msg) // nolint: gas\n\t\treturn\n\t}\n\t// the base counter part:\n\t_, _ = fmt.Fprintf(out, \"%s : count %d avg %.8g +/- %.4g min %g max %g sum %.9g\\n\",\n\t\tmsg, e.Count, e.Avg, e.StdDev, e.Min, e.Max, e.Sum)\n\t_, _ = fmt.Fprintln(out, \"# range, mid point, percentile, count\")\n\tsep := \">=\"\n\tfor i, b := range e.Data {\n\t\tif i > 0 {\n\t\t\tsep = \">\" // last interval is inclusive (of max value)\n\t\t}\n\t\t_, _ = fmt.Fprintf(out, \"%s %.6g <= %.6g , %.6g , %.2f, %d\\n\", sep, b.Start, b.End, (b.Start+b.End)/2., b.Percent, b.Count)\n\t}\n\n\t// print the information of target percentiles\n\tfor _, p := range e.Percentiles {\n\t\t_, _ = fmt.Fprintf(out, \"# target %g%% %.6g\\n\", p.Percentile, p.Value) // nolint: gas\n\t}\n}", "func (m *Measurement) PrintStats(w io.Writer) {\n\ttype Hist struct {\n\t\t*Result\n\t\t*hrtime.Histogram\n\t}\n\n\thists := []Hist{}\n\tfor _, result := range m.Results {\n\t\thists = append(hists, Hist{\n\t\t\tResult: result,\n\t\t\tHistogram: hrtime.NewDurationHistogram(result.Durations, &hrtime.HistogramOptions{\n\t\t\t\tBinCount: 10,\n\t\t\t\tNiceRange: true,\n\t\t\t\tClampMaximum: 0,\n\t\t\t\tClampPercentile: 0.999,\n\t\t\t}),\n\t\t})\n\t}\n\n\tmsec := func(ns float64) string {\n\t\treturn fmt.Sprintf(\"%.2f\", ns/1e6)\n\t}\n\n\tfor _, hist := range hists {\n\t\tfmt.Fprintf(w, \"%v\\t%v\\t%v\\t%v\\t%v\\t%v\\t%v\\t%v\\n\",\n\t\t\tm.Parts, m.Segments, hist.Name,\n\t\t\tmsec(hist.Average),\n\t\t\tmsec(hist.Maximum),\n\t\t\tmsec(hist.P50),\n\t\t\tmsec(hist.P90),\n\t\t\tmsec(hist.P99),\n\t\t)\n\t}\n}", "func (hist *Histogram) WriteTo(w io.Writer) (int64, error) {\n\twritten, err := hist.WriteStatsTo(w)\n\tif err != nil {\n\t\treturn written, err\n\t}\n\n\t// TODO: use consistently single unit instead of multiple\n\tmaxCountLength := 3\n\tfor i := range hist.Bins {\n\t\tx := (int)(math.Ceil(math.Log10(float64(hist.Bins[i].Count + 1))))\n\t\tif x > maxCountLength {\n\t\t\tmaxCountLength = x\n\t\t}\n\t}\n\n\tvar n int\n\tfor _, bin := range hist.Bins {\n\t\tif bin.andAbove {\n\t\t\tn, err = fmt.Fprintf(w, \" %10v+[%[2]*[3]v] \", time.Duration(round(bin.Start, 3)), maxCountLength, bin.Count)\n\t\t} else {\n\t\t\tn, err = fmt.Fprintf(w, \" %10v 
[%[2]*[3]v] \", time.Duration(round(bin.Start, 3)), maxCountLength, bin.Count)\n\t\t}\n\n\t\twritten += int64(n)\n\t\tif err != nil {\n\t\t\treturn written, err\n\t\t}\n\n\t\twidth := float64(hist.Width) * bin.Width\n\t\tfrac := width - math.Trunc(width)\n\n\t\tn, err = io.WriteString(w, strings.Repeat(\"█\", int(width)))\n\t\twritten += int64(n)\n\t\tif err != nil {\n\t\t\treturn written, err\n\t\t}\n\n\t\tif frac > 0.5 {\n\t\t\tn, err = io.WriteString(w, `▌`)\n\t\t\twritten += int64(n)\n\t\t\tif err != nil {\n\t\t\t\treturn written, err\n\t\t\t}\n\t\t}\n\n\t\tn, err = fmt.Fprintf(w, \"\\n\")\n\t\twritten += int64(n)\n\t\tif err != nil {\n\t\t\treturn written, err\n\t\t}\n\t}\n\treturn written, nil\n}", "func WriteMetrics(r metrics.Registry, w io.Writer) {\n\tvar namedMetrics namedMetricSlice\n\tr.Each(func(name string, i interface{}) {\n\t\tnamedMetrics = append(namedMetrics, namedMetric{name, i})\n\t})\n\n\tsort.Sort(namedMetrics)\n\tfor _, namedMetric := range namedMetrics {\n\t\tswitch metric := namedMetric.m.(type) {\n\t\tcase metrics.Counter:\n\t\t\tfmt.Fprintf(w, \"counter %s\\n\", namedMetric.name)\n\t\t\tfmt.Fprintf(w, \" count: %9d\\n\", metric.Count())\n\t\tcase metrics.Gauge:\n\t\t\tfmt.Fprintf(w, \"gauge %s\\n\", namedMetric.name)\n\t\t\tfmt.Fprintf(w, \" value: %9d\\n\", metric.Value())\n\t\tcase metrics.GaugeFloat64:\n\t\t\tfmt.Fprintf(w, \"gauge %s\\n\", namedMetric.name)\n\t\t\tfmt.Fprintf(w, \" value: %f\\n\", metric.Value())\n\t\tcase metrics.Healthcheck:\n\t\t\tmetric.Check()\n\t\t\tfmt.Fprintf(w, \"healthcheck %s\\n\", namedMetric.name)\n\t\t\tfmt.Fprintf(w, \" error: %v\\n\", metric.Error())\n\t\tcase metrics.Histogram:\n\t\t\th := metric.Snapshot()\n\t\t\tps := h.Percentiles([]float64{0.5, 0.75, 0.95, 0.99, 0.999})\n\t\t\tfmt.Fprintf(w, \"histogram %s\\n\", namedMetric.name)\n\t\t\tfmt.Fprintf(w, \" count=%d, mean=%.2f, stddef=%.2f\\n\", h.Count(), h.Mean(), h.StdDev())\n\t\t\tfmt.Fprintf(w, \" min=%.2fms median=%.2fms max=%.2fms\\n\",\n\t\t\t\tfloat64(h.Min())/float64(time.Millisecond),\n\t\t\t\tps[0]/float64(time.Millisecond),\n\t\t\t\tfloat64(h.Max())/float64(time.Millisecond))\n\t\t\tfmt.Fprintf(w, \" %%iles (ms): 75=%.2f 95=%.2f 99=%.2f 99.9=%.2f\\n\",\n\t\t\t\tps[1]/float64(time.Millisecond),\n\t\t\t\tps[2]/float64(time.Millisecond),\n\t\t\t\tps[3]/float64(time.Millisecond),\n\t\t\t\tps[4]/float64(time.Millisecond))\n\t\tcase metrics.Meter:\n\t\t\tm := metric.Snapshot()\n\t\t\tfmt.Fprintf(w, \"meter %s\\n\", namedMetric.name)\n\t\t\tfmt.Fprintf(w, \" count: %d\\n\", m.Count())\n\t\t\tfmt.Fprintf(w, \" rates: 1m=%.2f 5m=%.2f 15m=%.2f mean=%.2f\\n\", m.Rate1(), m.Rate5(), m.Rate15(), m.RateMean())\n\t\tcase metrics.Timer:\n\t\t\tt := metric.Snapshot()\n\t\t\tps := t.Percentiles([]float64{0.5, 0.75, 0.95, 0.99, 0.999})\n\t\t\tfmt.Fprintf(w, \"timer %s\\n\", namedMetric.name)\n\t\t\tfmt.Fprintf(w, \" count=%d, mean=%.2fms, stddev=%.2fms\\n\",\n\t\t\t\tt.Count(), t.Mean()/float64(time.Millisecond), t.StdDev()/float64(time.Millisecond))\n\t\t\tfmt.Fprintf(w, \" min=%.2fms median=%.2fms max=%.2fms\\n\",\n\t\t\t\tfloat64(t.Min())/float64(time.Millisecond),\n\t\t\t\tps[0]/float64(time.Millisecond),\n\t\t\t\tfloat64(t.Max())/float64(time.Millisecond))\n\t\t\tfmt.Fprintf(w, \" %%iles (ms): 75=%.2f 95=%.2f 99=%.2f 99.9=%.2f\\n\",\n\t\t\t\tps[1]/float64(time.Millisecond),\n\t\t\t\tps[2]/float64(time.Millisecond),\n\t\t\t\tps[3]/float64(time.Millisecond),\n\t\t\t\tps[4]/float64(time.Millisecond))\n\t\t\tfmt.Fprintf(w, \" rates: 1m=%.2f 5m=%.2f 15m=%.2f mean=%.2f\\n\", 
t.Rate1(), t.Rate5(), t.Rate15(), t.RateMean())\n\t\t}\n\t}\n}", "func (hist *Histogram) WriteStatsTo(w io.Writer) (int64, error) {\n\tn, err := fmt.Fprintf(w, \" avg %v; min %v; p50 %v; max %v;\\n p90 %v; p99 %v; p999 %v; p9999 %v;\\n\",\n\t\ttime.Duration(truncate(hist.Average, 3)),\n\t\ttime.Duration(truncate(hist.Minimum, 3)),\n\t\ttime.Duration(truncate(hist.P50, 3)),\n\t\ttime.Duration(truncate(hist.Maximum, 3)),\n\n\t\ttime.Duration(truncate(hist.P90, 3)),\n\t\ttime.Duration(truncate(hist.P99, 3)),\n\t\ttime.Duration(truncate(hist.P999, 3)),\n\t\ttime.Duration(truncate(hist.P9999, 3)),\n\t)\n\treturn int64(n), err\n}", "func printStats(stats []statisic, hash string) {\n\tw := tabwriter.NewWriter(os.Stdout, 0, 0, 1, ' ', tabwriter.AlignRight|tabwriter.Debug)\n\n\tfmt.Fprintf(w, \"%s(w=%d):\\n\", hash, sketchWidth)\n\tfmt.Fprintf(w, \"data set\\tmax. abs.\\tavg. abs.\\tmax. rel.\\tavg. rel.\\t# exact\\n\")\n\n\tfor i := 0; i < len(stats); i++ {\n\t\tstat := stats[i]\n\t\tfmt.Fprintf(w, \"%s\\t%d\\t%d\\t%.2f\\t%.2f\\t%d\\n\", filePaths[i], stat.maxAbs, stat.avgAbs, stat.maxRel, stat.avgRel, 100-stat.misses)\n\t}\n\tfmt.Fprintln(w)\n\tw.Flush()\n}", "func (w *StatsDWriter) Write(results Summary) error {\n\tfor k, v := range results {\n\t\t_, err := fmt.Fprintf(w.writer, \"%s:%d|s\\n\", k, v)\n\t\tif err != nil {\n\t\t\treturn err\n\t\t}\n\t}\n\treturn nil\n}", "func DownloadReport(w io.Writer, pct, count, total int64) int64 {\n\tpct_ := count * 100 / total\n\tif pct_ > pct {\n\t\tfmt.Fprintf(w, \" ...%d MB written (%d%%)\\n\", count/1e6, pct_)\n\t}\n\treturn pct_\n}", "func (te *TelemetryEmitter) emitHistogram(metric Metric, timestamp time.Time) error {\n\thist, ok := metric.value.(*dto.Histogram)\n\tif !ok {\n\t\treturn fmt.Errorf(\"unknown histogram metric type for %q: %T\", metric.name, metric.value)\n\t}\n\n\tif m, ok := te.deltaCalculator.CountMetric(metric.name+\".sum\", metric.attributes, hist.GetSampleSum(), timestamp); ok {\n\t\tte.harvester.RecordMetric(m)\n\t}\n\n\tmetricName := metric.name + \".buckets\"\n\tbuckets := make(histogram.Buckets, 0, len(hist.Bucket))\n\tfor _, b := range hist.GetBucket() {\n\t\tupperBound := b.GetUpperBound()\n\t\tcount := float64(b.GetCumulativeCount())\n\t\tif !math.IsInf(upperBound, 1) {\n\t\t\tbucketAttrs := copyAttrs(metric.attributes)\n\t\t\tbucketAttrs[\"histogram.bucket.upperBound\"] = upperBound\n\t\t\tif m, ok := te.deltaCalculator.CountMetric(metricName, bucketAttrs, count, timestamp); ok {\n\t\t\t\tte.harvester.RecordMetric(m)\n\t\t\t}\n\t\t}\n\t\tbuckets = append(\n\t\t\tbuckets,\n\t\t\thistogram.Bucket{\n\t\t\t\tUpperBound: upperBound,\n\t\t\t\tCount: count,\n\t\t\t},\n\t\t)\n\t}\n\n\tvar results error\n\tmetricName = metric.name + \".percentiles\"\n\tfor _, p := range te.percentiles {\n\t\tv, err := histogram.Percentile(p, buckets)\n\t\tif err != nil {\n\t\t\tif results == nil {\n\t\t\t\tresults = err\n\t\t\t} else {\n\t\t\t\tresults = fmt.Errorf(\"%v: %w\", err, results)\n\t\t\t}\n\t\t\tcontinue\n\t\t}\n\n\t\tpercentileAttrs := copyAttrs(metric.attributes)\n\t\tpercentileAttrs[\"percentile\"] = p\n\t\tte.harvester.RecordMetric(telemetry.Gauge{\n\t\t\tName: metricName,\n\t\t\tAttributes: percentileAttrs,\n\t\t\tValue: v,\n\t\t\tTimestamp: timestamp,\n\t\t})\n\t}\n\n\treturn results\n}", "func writeStatsToFile(posts []model.RedditPost) error {\n\n\tgroupedPosts := groupBySubreddit(posts)\n\n\tcountList := [][]model.RedditPost{}\n\n\t// convert to list\n\tfor _, v := range groupedPosts {\n\t\tcountList = append(countList, 
v)\n\t}\n\n\t// sort by post count\n\tsort.Slice(countList, func(i, j int) bool {\n\t\treturn len(countList[i]) > len(countList[j])\n\t})\n\n\tdata := [][]string{}\n\n\tfor _, v := range countList {\n\t\tescapedTitle := strings.ReplaceAll(v[0].Title, \"|\", \"\\\\|\")\n\t\ttitle := \"[\" + escapedTitle + \"]\" + \"(https://www.reddit.com\" + v[0].Permalink + \")\"\n\t\tdata = append(data, []string{v[0].Subreddit, strconv.Itoa(len(v)), title, strconv.Itoa(v[0].Score)})\n\t}\n\n\tfile, _ := os.Create(\"README.md\")\n\n\ttable := tablewriter.NewWriter(file)\n\ttable.SetAutoWrapText(false)\n\ttable.SetHeader([]string{\"Subreddit\", \"Total\", \"Top Post\", \"Score\"})\n\ttable.SetBorders(tablewriter.Border{Left: true, Top: false, Right: true, Bottom: false})\n\ttable.SetCenterSeparator(\"|\")\n\ttable.AppendBulk(data) // Add Bulk Data\n\ttable.Render()\n\n\treturn nil\n}", "func writeStats(to *os.File, final bool, s, t stats.Stats) {\n\tp := fmt.Fprintf\n\tpn := prettyNumber\n\tpb := prettyNumBytes\n\tpl := prettyLatency\n\tpt := prettyTimeStamp\n\tif final {\n\t\twriteStatsHeader(to)\n\t\tp(to, statsPrintHeader, pt(), \"Put\",\n\t\t\tpn(t.TotalPuts()),\n\t\t\tpb(t.TotalPutBytes()),\n\t\t\tpl(t.MinPutLatency(), t.AvgPutLatency(), t.MaxPutLatency()),\n\t\t\tpb(t.PutThroughput(time.Now())),\n\t\t\tpn(t.TotalErrPuts()))\n\t\tp(to, statsPrintHeader, pt(), \"Get\",\n\t\t\tpn(t.TotalGets()),\n\t\t\tpb(t.TotalGetBytes()),\n\t\t\tpl(t.MinGetLatency(), t.AvgGetLatency(), t.MaxGetLatency()),\n\t\t\tpb(t.GetThroughput(time.Now())),\n\t\t\tpn(t.TotalErrGets()))\n\t} else {\n\t\t// show interval stats; some fields are shown of both interval and total, for example, gets, puts, etc\n\t\tif s.TotalPuts() != 0 {\n\t\t\tp(to, statsPrintHeader, pt(), \"Put\",\n\t\t\t\tpn(s.TotalPuts())+\"(\"+pn(t.TotalPuts())+\")\",\n\t\t\t\tpb(s.TotalPutBytes())+\"(\"+pb(t.TotalPutBytes())+\")\",\n\t\t\t\tpl(s.MinPutLatency(), s.AvgPutLatency(), s.MaxPutLatency()),\n\t\t\t\tpb(s.PutThroughput(time.Now()))+\"(\"+pb(t.PutThroughput(time.Now()))+\")\",\n\t\t\t\tpn(s.TotalErrPuts())+\"(\"+pn(t.TotalErrPuts())+\")\")\n\t\t}\n\t\tif s.TotalGets() != 0 {\n\t\t\tp(to, statsPrintHeader, pt(), \"Get\",\n\t\t\t\tpn(s.TotalGets())+\"(\"+pn(t.TotalGets())+\")\",\n\t\t\t\tpb(s.TotalGetBytes())+\"(\"+pb(t.TotalGetBytes())+\")\",\n\t\t\t\tpl(s.MinGetLatency(), s.AvgGetLatency(), s.MaxGetLatency()),\n\t\t\t\tpb(s.GetThroughput(time.Now()))+\"(\"+pb(t.GetThroughput(time.Now()))+\")\",\n\t\t\t\tpn(s.TotalErrGets())+\"(\"+pn(t.TotalErrGets())+\")\")\n\t\t}\n\t}\n}", "func (w *progressWriter) Write(p []byte) (n int, e error) {\n\tn = len(p)\n\tw.current += int64(n)\n\tif !w.silent {\n\t\tpercent := float64(w.current) * 100 / float64(w.total)\n\t\tfmt.Printf(\"\\rReceived %d bytes in %d (%d%%)\", w.current, w.total, int(percent))\n\t}\n\treturn\n}", "func printResults() {\n\n\t// collect stats\n\ttotalLines := 0\n\ttotalCode := 0\n\ttotalComments := 0\n\tfor _, file := range filesStats {\n\t\ttotalLines = totalLines + file.TotalLines\n\t\ttotalCode = totalCode + file.CodeLines\n\t\ttotalComments = totalComments + file.CommentLines\n\t}\n\tfmt.Println(\"Overall stats:\")\n\tfmt.Printf(\" Number of files: %v\\n\", len(filesStats))\n\tfmt.Printf(\" Total lines: %v\\n\", totalLines)\n\tfmt.Printf(\" Code lines: %v\\n\", totalCode)\n\tfmt.Printf(\" Comment lines: %v\\n\", totalComments)\n\n\t// statistics for extensions\n\tif printExts == true {\n\t\t// NOTE: Sadly colprint accepts only slice, not map\n\t\t// thus conversion is needed.\n\t\ts := 
[]*models.ExtensionStats{}\n\t\tfor _, e := range extensionStats {\n\t\t\ts = append(s, e)\n\t\t}\n\t\tfmt.Println(\"\\nStats by extensions\")\n\t\tcolprint.Print(s)\n\t\tfmt.Println()\n\t}\n\n\t// statistics for individual files\n\tif printFiles == true {\n\t\tfmt.Println(\"\\nStats by files:\")\n\t\tcolprint.Print(filesStats)\n\t}\n\tfmt.Println()\n}", "func printReport(r reports.Report, w io.Writer, minPriority int8, pal *palette) {\n\n\tc.Fprint(w, \"\\n\")\n\tc.Fprint(w, c.FHeader(r.Title))\n\tc.Fprint(w, \"\\n\")\n\n\tfor _, ch := range r.Chunks {\n\t\tif ch.Priority >= minPriority {\n\t\t\tprintChunk(ch, w, minPriority, pal)\n\t\t}\n\t}\n\n\tc.Fprint(w, \"\\n\")\n}", "func (pw *ProgressWriter) Write(b []byte) (int, error) {\n\tif pw.done == 0 {\n\t\tpw.start = time.Now()\n\t}\n\tpw.done += int64(len(b))\n\tpercent := int((pw.done * 100) / pw.total)\n\twidth := 10\n\tprogress := (width * percent) / 100\n\tbps := float64(pw.done) / time.Now().Sub(pw.start).Seconds()\n\tbar := fmt.Sprintf(\"%-*s\", width, strings.Repeat(\"#\", int(progress)))\n\tspeed := fmt.Sprintf(\"%s/s %3.3s%%\", human.ByteSize(bps), strconv.Itoa(percent))\n\tpw.pm.Working(pw.key, bar, speed)\n\t// time.Sleep(2.4e7)\n\treturn pw.w.Write(b)\n}", "func (collStatList *CollectionStatList) Export(ch chan<- prometheus.Metric) {\n\tfor _, member := range collStatList.Members {\n\t\tls := prometheus.Labels{\n\t\t\t\"db\": member.Database,\n\t\t\t\"coll\": member.Name,\n\t\t}\n\t\tcollectionSize.With(ls).Set(float64(member.Size))\n\t\tcollectionObjectCount.With(ls).Set(float64(member.Count))\n\t\tcollectionAvgObjSize.With(ls).Set(float64(member.AvgObjSize))\n\t\tcollectionStorageSize.With(ls).Set(float64(member.StorageSize))\n\t\tcollectionIndexes.With(ls).Set(float64(member.Indexes))\n\t\tcollectionIndexesSize.With(ls).Set(float64(member.IndexesSize))\n\t}\n\tcollectionSize.Collect(ch)\n\tcollectionObjectCount.Collect(ch)\n\tcollectionAvgObjSize.Collect(ch)\n\tcollectionStorageSize.Collect(ch)\n\tcollectionIndexes.Collect(ch)\n\tcollectionIndexesSize.Collect(ch)\n}", "func fprintStats(w io.Writer, q *QueryBenchmarker) {\n\tmaxKeyLength := 0\n\tkeys := make([]string, 0, len(q.statMapping))\n\tfor k := range q.statMapping {\n\t\tif len(k) > maxKeyLength {\n\t\t\tmaxKeyLength = len(k)\n\t\t}\n\t\tkeys = append(keys, k)\n\t}\n\tsort.Strings(keys)\n\tfor _, k := range keys {\n\t\tv := q.statMapping[k]\n\t\tminRate := 1e3 / v.Min\n\t\tmeanRate := 1e3 / v.Mean\n\t\tmaxRate := 1e3 / v.Max\n\t\tpaddedKey := fmt.Sprintf(\"%s\", k)\n\t\tfor len(paddedKey) < maxKeyLength {\n\t\t\tpaddedKey += \" \"\n\t\t}\n\t\tkStats := make(map[string]interface{})\n\t\tkStats[\"min\"] = v.Min\n\t\tkStats[\"minRate\"] = minRate\n\t\tkStats[\"mean\"] = v.Mean\n\t\tkStats[\"meanRate\"] = meanRate\n\t\tkStats[\"max\"] = v.Max\n\t\tkStats[\"maxRate\"] = maxRate\n\t\tkStats[\"count\"] = v.Count\n\t\tkStats[\"sum\"] = v.Sum / 1e3\n\t\tq.json[k] = kStats\n\t\tif !q.doJson {\n\t\t\t_, err := fmt.Fprintf(w, \"%s : min: %8.2fms (%7.2f/sec), mean: %8.2fms (%7.2f/sec), max: %7.2fms (%6.2f/sec), count: %8d, sum: %5.1fsec \\n\", paddedKey, v.Min, minRate, v.Mean, meanRate, v.Max, maxRate, v.Count, v.Sum/1e3)\n\t\t\tif err != nil {\n\t\t\t\tlog.Fatal(err)\n\t\t\t}\n\t\t}\n\t}\n\tq.json[\"totalQueries\"] = q.totalQueries\n\tq.json[\"wallClockTime\"] = q.wallTook.Seconds()\n\tq.json[\"queryRate\"] = float64(q.totalQueries) / float64(q.wallTook.Seconds())\n\tq.json[\"workers\"] = q.workers\n\tq.json[\"batchSize\"] = q.batchSize\n\tif q.doJson {\n\t\tfor k, v := range 
q.json {\n\t\t\tif _, err := json.Marshal(v); err != nil {\n\t\t\t\tq.json[k] = \"\"\n\t\t\t}\n\t\t}\n\t\tb, err := json.Marshal(q.json)\n\t\tif err != nil {\n\t\t\tlog.Println(\"error:\", err)\n\t\t}\n\t\tos.Stdout.Write(b)\n\t}\n}", "func ( crawler *SingleCrawler ) Print() error {\n\n if err1 := IsOk(crawler); err1 != nil{\n return err1\n }\n\n stdout := os.Stdout\n outfile := stdout\n duped := true\n\n outfile, err := os.OpenFile( crawler.Filename, os.O_WRONLY | os.O_CREATE, 0644 )\n if err != nil {\n glog.Error(\"Unable to open requested file for writing. Defaulting to std out.\")\n duped = false\n } else{\n os.Stdout = outfile\n }\n\n fmt.Printf(\"SiteMap from starting URL %s, total pages found %d.\\n\\n\\n\", crawler.Site.String(), crawler.NumPages )\n for i := 0; i < crawler.NumPages; i++ {\n crawler.Sitemap[i].Print(crawler.PRINT_LIMIT)\n }\n\n if duped == true {\n outfile.Close()\n os.Stdout = stdout\n }\n\n return nil\n\n}", "func WriteHistogramSummary(scope *Scope, writer tf.Output, step tf.Output, tag tf.Output, values tf.Output) (o *tf.Operation) {\n\tif scope.Err() != nil {\n\t\treturn\n\t}\n\topspec := tf.OpSpec{\n\t\tType: \"WriteHistogramSummary\",\n\t\tInput: []tf.Input{\n\t\t\twriter, step, tag, values,\n\t\t},\n\t}\n\treturn scope.AddOperation(opspec)\n}", "func AnalyzeAllResponses(a *Analyzer, ar []*Response) {\n f, err := os.Create(\"swing-data.csv\")\n if err != nil { panic(err) }\n defer f.Close()\n w := csv.NewWriter(f)\n defer w.Flush()\n Range := histogram.Range(-1.0, 200, .01)\n h, err := histogram.NewHistogram(Range)\n if err != nil {\n panic(err)\n }\n for _, resp := range ar {\n for _, oi := range a.AnalyzeStock(resp) {\n var toWrite = []string{\n strconv.FormatFloat(oi.Swing, 'f', 4, 64),\n strconv.FormatFloat(oi.Ret, 'f', 4, 64),\n }\n w.Write(toWrite)\n h.Add(oi.Ret)\n }\n }\n fmt.Println(\"MEAN: \", h.Mean())\n fmt.Println(\"SIGMA \", h.Sigma())\n}", "func (benchmark *BenchmarkStat) PrintStats() {\n\tprintable := \"\"\n\tprintable += \"\\nSTATISTICS\\n\"\n\tprintable += \"===============\\n\"\n\tfor _, val := range benchmark.options {\n\t\tswitch val {\n\t\tcase Mean:\n\t\t\tif st, err := benchmark.GetStat(val); err == nil {\n\t\t\t\tprintable += st\n\t\t\t}\n\t\tcase Mode:\n\t\t\tif st, err := benchmark.GetStat(val); err == nil {\n\t\t\t\tprintable += st\n\t\t\t}\n\t\tcase Highest:\n\t\t\tif st, err := benchmark.GetStat(val); err == nil {\n\t\t\t\tprintable += st\n\t\t\t}\n\t\tcase Lowest:\n\t\t\tif st, err := benchmark.GetStat(val); err == nil {\n\t\t\t\tprintable += st\n\t\t\t}\n\t\tcase Sum:\n\t\t\tif st, err := benchmark.GetStat(val); err == nil {\n\t\t\t\tprintable += st\n\t\t\t}\n\t\tcase Range:\n\t\t\tif st, err := benchmark.GetStat(val); err == nil {\n\t\t\t\tprintable += st\n\t\t\t}\n\t\tcase All:\n\t\t\tif st, err := benchmark.GetStat(val); err == nil {\n\t\t\t\tprintable += st\n\t\t\t}\n\t\tdefault:\n\t\t\tif _, err := benchmark.GetStat(val); err != nil {\n\t\t\t\tprintable += err.Error()\n\t\t\t}\n\n\t\t}\n\t}\n\n\ts := printable\n\tfmt.Println(s)\n}", "func (s Set) PrintBasicHistogram() {\r\n\tsorted := make(Set, len(s))\r\n\tcopy(sorted, s)\r\n\tsorted.Sort()\r\n\r\n\tstd := s.GetStd()\r\n\tmean := s.GetMean()\r\n\r\n\tfor s := -3.; s < 3; s++ {\r\n\t\tmin, max := mean+s*std, mean+(s+1)*std\r\n\t\tgap := 100.0 * (float64(sorted.IndicesBetween(min, max)) / float64(len(sorted)))\r\n\t\tfmt.Printf(\"Between %v and %v stds: %v%% of the data\\n\", s, s+1, gap)\r\n\t}\r\n}", "func (h *Histogram) Export() *HistogramData {\n\tvar res 
HistogramData\n\tres.Count = h.Counter.Count\n\tres.Min = h.Counter.Min\n\tres.Max = h.Counter.Max\n\tres.Sum = h.Counter.Sum\n\tres.Avg = h.Counter.Avg()\n\tres.StdDev = h.Counter.StdDev()\n\tmultiplier := h.Divider\n\toffset := h.Offset\n\t// calculate the last bucket index\n\tlastIdx := -1\n\tfor i := numBuckets - 1; i >= 0; i-- {\n\t\tif h.Hdata[i] > 0 {\n\t\t\tlastIdx = i\n\t\t\tbreak\n\t\t}\n\t}\n\tif lastIdx == -1 {\n\t\treturn &res\n\t}\n\n\t// previous bucket value:\n\tprev := histogramBucketValues[0]\n\tvar total int64\n\tctrTotal := float64(h.Count)\n\t// export the data of each bucket of the histogram\n\tfor i := 0; i <= lastIdx; i++ {\n\t\tif h.Hdata[i] == 0 {\n\t\t\t// empty bucket: skip it but update prev which is needed for next iter\n\t\t\tif i < numValues {\n\t\t\t\tprev = histogramBucketValues[i]\n\t\t\t}\n\t\t\tcontinue\n\t\t}\n\t\tvar b Bucket\n\t\ttotal += int64(h.Hdata[i])\n\t\tif len(res.Data) == 0 {\n\t\t\t// First entry, start is min\n\t\t\tb.Start = h.Min\n\t\t} else {\n\t\t\tb.Start = multiplier*float64(prev) + offset\n\t\t}\n\t\tb.Percent = 100. * float64(total) / ctrTotal\n\t\tif i < numValues {\n\t\t\tcur := histogramBucketValues[i]\n\t\t\tb.End = multiplier*float64(cur) + offset\n\t\t\tprev = cur\n\t\t} else {\n\t\t\t// Last Entry\n\t\t\tb.Start = multiplier*float64(prev) + offset\n\t\t\tb.End = h.Max\n\t\t}\n\t\tb.Count = int64(h.Hdata[i])\n\t\tres.Data = append(res.Data, b)\n\t}\n\tres.Data[len(res.Data)-1].End = h.Max\n\treturn &res\n}", "func WriteScrapeResults(visitedMap map[string]struct{}, repos []GoRepo, filename string) {\n\tfile, err := os.OpenFile(filename, os.O_WRONLY|os.O_APPEND|os.O_RDWR, 0666)\n\tdefer file.Close()\n\tif err != nil {\n\t\tlog.Fatalf(\"could not open file %v\", err)\n\t}\n\toutput := csv.NewWriter(file)\n\tvar results [][]string\n\tfor _, repo := range repos {\n\t\tif _, ok := visitedMap[repo.ID()]; ok {\n\t\t\tcontinue\n\t\t}\n\t\tresults = append(results, []string{\n\t\t\trepo.Owner,\n\t\t\trepo.Repo,\n\t\t\tfmt.Sprintf(\"%s\", repo.CreatedAt.Format(time.RFC3339)),\n\t\t})\n\t\tvisitedMap[repo.ID()] = struct{}{}\n\t}\n\terr = output.WriteAll(results)\n\tif err != nil {\n\t\tlog.Fatalf(\"Could not write to file %s: %v\", filename, err)\n\t}\n}", "func (status *TopStatus) Export(ch chan<- prometheus.Metric) {\n\tstatus.TopStats.Export(ch)\n}", "func (a ASCIITableWriter) Write(out io.Writer, p *api.Project) error {\n\ttable := tablewriter.NewWriter(out)\n\ttable.SetHeader([]string{\"RELEASE\", \"Downloads\"})\n\n\tfor _, r := range p.Releases() {\n\t\ttable.Append([]string{r.Name, strconv.Itoa(r.DownloadCount())})\n\t}\n\ttable.Render()\n\treturn nil\n}", "func (b *bar) writer() {\n\tb.update()\n\tfor {\n\t\tselect {\n\t\tcase <-b.finishChan:\n\t\t\treturn\n\t\tcase <-time.After(b.opts.RefreshRate):\n\t\t\tb.update()\n\t\t}\n\t}\n}", "func (wc WriteCounter) PrintProgress() {\n\t// Clear the line by using a character return to go back to the start and remove\n\t// the remaining characters by filling it with spaces\n\tfmt.Printf(\"\\r%s\", strings.Repeat(\" \", 50))\n\n\t// Return again and print current status of download\n\t// We use the humanize package to print the bytes in a meaningful way (e.g. 10 MB)\n\tfmt.Printf(\"\\rDownloading... 
%s complete\", humanize.Bytes(wc.Total))\n}", "func Export(output string, gold string, results []string) {\n fi, err := os.Open(gold)\n if err != nil { glog.Fatal(err) }\n defer fi.Close()\n goldFiles, err := filepath.Glob(gold + \"/query_*\")\n if err != nil { glog.Fatal(err) }\n\n limit := make(map[string][]AggregatedMeasurement)\n limitRe := regexp.MustCompile(\"limit[0-9]+$\")\n for _, file := range goldFiles {\n gd := loadGold(file)\n _, name := filepath.Split(file)\n for _,res := range results {\n set := make(map[string][]AggregatedMeasurement)\n matches, err := filepath.Glob(filepath.Join(res, name) + \"-limit*\")\n if err != nil { glog.Fatal(err) }\n for _, match := range matches {\n aggm := compare(gd, match)\n l := filepath.Base(match)\n set[l] = append(set[l], aggm)\n }\n for k,v := range set {\n avg := avgAggregatedMeasurement(v)\n l := limitRe.FindString(k)\n limit[l] = append(limit[l], avg)\n }\n }\n }\n toLatex(output, limit, len(results))\n}", "func (s *counts) Report(title string, opts *options) {\n\tif opts.ShowLines {\n\t\tfmt.Printf(\"%8v\", s.lines)\n\t}\n\tif opts.ShowWords {\n\t\tfmt.Printf(\"%8v\", s.words)\n\t}\n\tif opts.ShowChars {\n\t\tfmt.Printf(\"%8v\", s.chars)\n\t}\n\tfmt.Printf(\" %8v\\n\", title)\n}", "func (wc writeCounter) PrintProgress() {\n\t// Clear the line by using a character return to go back to the start and remove\n\t// the remaining characters by filling it with spaces\n\tfmt.Printf(\"\\r%s\", strings.Repeat(\" \", 100))\n\n\t// Return again and print current status of download\n\t// We use the humanize package to print the bytes in a meaningful way (e.g. 10 MB)\n\tfmt.Printf(\"\\rDownloading (%s) %s complete\", wc.Name, humanize.Bytes(wc.Total))\n}", "func (m *Main) printStats() *time.Ticker {\n\tt := time.NewTicker(time.Second * 10)\n\tstart := time.Now()\n\tgo func() {\n\t\tfor range t.C {\n\t\t\tduration := time.Since(start)\n\t\t\tbytes := m.BytesProcessed()\n\t\t\tlog.Printf(\"Bytes: %s, Records: %v, Duration: %v, Rate: %v/s, %v rec/s\", pdk.Bytes(bytes), m.totalRecs.Get(), duration, pdk.Bytes(float64(bytes)/duration.Seconds()), float64(m.totalRecs.Get())/duration.Seconds())\n\t\t}\n\t}()\n\treturn t\n}", "func (c *Collector) String() string {\n\tc.Lock()\n\tdefer c.Unlock()\n\n\tif len(c.Buckets) == 0 {\n\t\treturn \"\"\n\t}\n\n\tnLen := printfLen(\"%.2f\", c.Min)\n\tif maxLen := printfLen(\"%.2f\", c.Max); maxLen > nLen {\n\t\tnLen = maxLen\n\t}\n\t// if c.Max is +Inf, the second-largest element can be the longest.\n\tif maxLen := printfLen(\"%.2f\", c.Buckets[len(c.Buckets)-1].Min); maxLen > nLen {\n\t\tnLen = maxLen\n\t}\n\n\tcLen := printfLen(\"%d\", c.Count)\n\tsLen := 0\n\n\tvar res strings.Builder\n\n\tfmt.Fprintf(&res, \"[%*s %*s] %*s total%%\", nLen, \"min\", nLen, \"max\", cLen, \"cnt\")\n\n\tif c.PrintSum {\n\t\tsLen = printfLen(\"%.2f\", c.Sum)\n\t\tfmt.Fprintf(&res, \" %*s\", sLen, \"sum\")\n\t}\n\n\tfmt.Fprintf(&res, \" (%d events)\\n\", c.Count)\n\n\tfor _, b := range c.Buckets {\n\t\tpercent := float64(100*b.Count) / float64(c.Count)\n\n\t\tfmt.Fprintf(&res, \"[%*.2f %*.2f] %*d %5.2f%%\", nLen, b.Min, nLen, b.Max, cLen, b.Count, percent)\n\n\t\tif c.PrintSum {\n\t\t\tfmt.Fprintf(&res, \" %*.2f\", sLen, b.Sum)\n\t\t}\n\n\t\tif dots := strings.Repeat(\".\", int(percent)); len(dots) > 0 {\n\t\t\tfmt.Fprint(&res, \" \", dots)\n\t\t}\n\n\t\tfmt.Fprintln(&res)\n\t}\n\n\treturn res.String()\n}", "func writeStatsHeader(to *os.File) {\n\tfmt.Fprintln(to)\n\tfmt.Fprintf(to, statsPrintHeader,\n\t\t\"Time\", \"OP\", \"Count\", 
\"Total Bytes\", \"Latency(min, avg, max)\", \"Throughput\", \"Error\")\n}", "func WithProgress(writer io.Writer, reader io.Reader, prefix string) error {\n\tdefer color.Unset()\n\n\tvar printed bool\n\n\tdefer func() {\n\t\tif printed {\n\t\t\tfmt.Println()\n\t\t}\n\t}()\n\n\twsz, _ := term.GetWinsize(0)\n\n\trd := bufio.NewReaderSize(reader, readerSize)\n\n\tcount := float64(0)\n\tbuf := make([]byte, readerSize)\n\tt := time.Now()\n\tfor {\n\t\trn, err := rd.Read(buf)\n\t\tcount += float64(rn)\n\n\t\tif err == io.EOF {\n\t\t\tif rn > 0 {\n\t\t\t\tgoto write\n\t\t\t} else {\n\t\t\t\treturn nil\n\t\t\t}\n\t\t}\n\t\tif err != nil {\n\t\t\treturn err\n\t\t}\n\n\t\tif NoOut {\n\t\t\tgoto write\n\t\t}\n\n\t\tif time.Since(t) > 100*time.Millisecond && !NoTTY && wsz.Width != 0 {\n\t\t\tprinted = true\n\t\t\tfmt.Print(\"\\r\")\n\n\t\t\tmbs := fmt.Sprintf(\"%.02fMB\", count/megaByte)\n\n\t\t\tcolor.New(color.FgWhite, color.Bold).Printf(\"+++ \")\n\n\t\t\tjustifiedWidth := int(wsz.Width) - len(mbs) - 9\n\t\t\tif justifiedWidth < 0 {\n\t\t\t\tgoto write\n\t\t\t}\n\n\t\t\tif len(prefix) > int(justifiedWidth) {\n\t\t\t\tprefix = prefix[:int(justifiedWidth)] + \"...\"\n\t\t\t}\n\n\t\t\tcolor.New(color.FgRed, color.Bold).Printf(\"%s: \", prefix)\n\t\t\tcolor.New(color.FgWhite).Print(mbs)\n\n\t\t\tt = time.Now()\n\t\t}\n\n\twrite:\n\t\t_, werr := writer.Write(buf[:rn])\n\t\tif werr != nil {\n\t\t\treturn werr\n\t\t}\n\n\t\tif err == io.EOF {\n\t\t\treturn nil\n\t\t}\n\t}\n}", "func Flush() error {\n\tif printer.Quiet {\n\t\treturn nil\n\t}\n\n\topts := printOpts{\n\t\tformat: printer.Format,\n\t\tsingle: printer.Single,\n\t\tnoNewline: printer.NoNewline,\n\t}\n\n\tcmd := printer.cmd\n\tif cmd != nil {\n\t\tshortStat, err := printer.cmd.Flags().GetBool(\"short-stat\")\n\t\tif err == nil && printer.cmd.Name() == \"list\" && printer.cmd.Parent().Name() != \"auth\" {\n\t\t\topts.shortStat = shortStat\n\t\t}\n\t}\n\n\tb, err := printer.linesToBytes(opts)\n\tif err != nil {\n\t\treturn err\n\t}\n\tlines := lineCount(b)\n\n\tisTTY := checkInteractiveTerminal() == nil\n\tvar enablePager bool\n\ttermHeight, err := termHeight(os.Stdout)\n\tif err == nil {\n\t\tenablePager = isTTY && (termHeight < lines) // calculate if we should enable paging\n\t}\n\n\tpager := os.Getenv(\"PAGER\")\n\tif enablePager {\n\t\tenablePager = pager != \"\"\n\t}\n\n\topts.usePager = enablePager && printer.pager\n\topts.pagerPath = pager\n\n\terr = printer.printBytes(b, opts)\n\tif err != nil {\n\t\treturn err\n\t}\n\n\t// after all, print errors\n\tprinter.printErrors()\n\n\tdefer func() {\n\t\tprinter.Lines = []interface{}{}\n\t\tprinter.ErrorLines = []interface{}{}\n\t}()\n\n\tif cmd == nil || cmd.Name() != \"list\" || printer.cmd.Parent().Name() == \"auth\" {\n\t\treturn nil\n\t}\n\n\t// the command is a list command, we may want to\n\t// take care of the stat flags\n\tnoStat, err := cmd.Flags().GetBool(\"no-stat\")\n\tif err != nil {\n\t\treturn err\n\t}\n\n\t// print stats\n\tswitch {\n\tcase noStat:\n\t\t// do nothing\n\tcase !opts.shortStat:\n\t\t// should not go to pager\n\t\tif isTTY && !enablePager {\n\t\t\tfmt.Fprintf(printer.eWriter, \"\\n\") // add a one line space before statistical data\n\t\t}\n\t\tfallthrough\n\tcase len(printer.Lines) > 0:\n\t\tentity := cmd.Parent().Name()\n\t\tcontainer := strings.TrimSuffix(printer.serverAddr, \"api/v4\")\n\t\tif container != \"\" {\n\t\t\tcontainer = fmt.Sprintf(\" on %s\", container)\n\t\t}\n\t\tfmt.Fprintf(printer.eWriter, \"There are %d %ss%s\\n\", len(printer.Lines), entity, 
container)\n\t}\n\n\treturn nil\n}", "func PrintGCSummary(w io.Writer) {\r\n\tmemStats := &runtime.MemStats{}\r\n\truntime.ReadMemStats(memStats)\r\n\tgcstats := &debug.GCStats{PauseQuantiles: make([]time.Duration, 100)}\r\n\tdebug.ReadGCStats(gcstats)\r\n\r\n\tprintGC(memStats, gcstats, w)\r\n}", "func Fprint(w io.Writer, a ...interface{}) (n int, err error) { return fmt.Fprint(w, a...) }", "func (pt *ProgressWriter) Write(p []byte) (int, error) {\n\t// Do normal writer tasks\n\tn, err := pt.WriteCloser.Write(p)\n\n\t// Do the actual progress tracking\n\tif pt.Tracker != nil {\n\t\tpt.Tracker.total += int64(n)\n\t\tpt.Tracker.update(n)\n\t}\n\n\treturn n, err\n}", "func printDistribution(dmap map[string]int) {\n\tfor {\n\t\ttime.Sleep(time.Second * 5)\n\t\tvar total float64\n\t\tlock.Lock()\n\t\tkeys := []string{}\n\t\tfor k, n := range dmap {\n\t\t\ttotal = total + float64(n)\n\t\t\tkeys = append(keys, k)\n\t\t}\n\t\tsort.Strings(keys)\n\t\tfor _, k := range keys {\n\t\t\tn := dmap[k]\n\t\t\tvar nf float64 = float64(n)\n\t\t\tfmt.Printf(\"%s : %3.2f%% (%d/%d)\\n\", k, float64(nf*100.0)/total, n, int(total))\n\t\t\tdelete(dmap, k) // clear on read\n\t\t}\n\t\tlock.Unlock()\n\t\tfmt.Printf(\"------------------------\\n\")\n\t}\n}", "func (c *Contents) PrintSummary(head int) {\n\tglog.Infof(\"Loaded from %s\\n\", c.filePath)\n\tglog.Infof(\" %s\\n\", &c.Fingerprint)\n\n\tfor k, v := range c.Metrics {\n\t\tglog.Infof(\" metrics for weight_name: %s\", k)\n\t\tglog.Infof(\" exclude metrics meta %d count %d\\n\", v.Meta, len(v.CellMetrics))\n\t\tfor i := 0; i < len(v.CellMetrics); i++ {\n\t\t\tglog.Infof(\" metrics[%d] weights meta %d count %d\\n\", i, v.CellMetrics[i].WeightsMeta, len(v.CellMetrics[i].Weights))\n\t\t\tfor j := 0; j < head && j < len(v.CellMetrics[i].Weights); j++ {\n\t\t\t\tglog.Infof(\" metrics[%d] weigths[%d] %+v\", i, j, v.CellMetrics[i].Weights[j])\n\t\t\t}\n\n\t\t\tglog.Infof(\" metrics[%d] durations meta %d count %d\\n\", i, v.CellMetrics[i].DurationsMeta, len(v.CellMetrics[i].Durations))\n\t\t\tfor j := 0; j < head && j < len(v.CellMetrics[i].Durations); j++ {\n\t\t\t\tglog.Infof(\" metrics[%d] durations[%d] %+v\", i, j, v.CellMetrics[i].Durations[j])\n\t\t\t}\n\n\t\t\tglog.Infof(\" metrics[%d] distances meta %d count %d\\n\", i, v.CellMetrics[i].DistancesMeta, len(v.CellMetrics[i].Distances))\n\t\t\tfor j := 0; j < head && j < len(v.CellMetrics[i].Distances); j++ {\n\t\t\t\tglog.Infof(\" metrics[%d] distances[%d] %+v\", i, j, v.CellMetrics[i].Distances[j])\n\t\t\t}\n\t\t}\n\t}\n}", "func (p *Printer) Write(w io.Writer, v interface{}) (_ int, err error) {\n\tdefer func() {\n\t\tswitch r := recover().(type) {\n\t\tcase panicSentinel:\n\t\t\terr = r.Err\n\t\tdefault:\n\t\t\tpanic(r)\n\t\tcase nil:\n\t\t\t// no error\n\t\t}\n\t}()\n\n\tcfg := p.Config\n\n\tif len(cfg.Indent) == 0 {\n\t\tcfg.Indent = DefaultIndent\n\t}\n\n\tif cfg.ZeroValueMarker == \"\" {\n\t\tcfg.ZeroValueMarker = DefaultZeroValueMarker\n\t}\n\n\tif cfg.RecursionMarker == \"\" {\n\t\tcfg.RecursionMarker = DefaultRecursionMarker\n\t}\n\n\tcounter := &stream.Counter{\n\t\tTarget: w,\n\t}\n\n\tr := &renderer{\n\t\tIndenter: stream.Indenter{\n\t\t\tTarget: counter,\n\t\t\tIndent: []byte(cfg.Indent),\n\t\t},\n\t\tConfiguration: cfg,\n\t\tRecursionSet: map[uintptr]struct{}{},\n\t}\n\n\trv := reflect.ValueOf(v)\n\tvar rt reflect.Type\n\n\tif rv.Kind() != reflect.Invalid {\n\t\trt = rv.Type()\n\t}\n\n\tr.WriteValue(\n\t\tValue{\n\t\t\tValue: rv,\n\t\t\tDynamicType: rt,\n\t\t\tStaticType: 
typeOf[any](),\n\t\t\tIsAmbiguousDynamicType: true,\n\t\t\tIsAmbiguousStaticType: true,\n\t\t\tIsUnexported: false,\n\t\t},\n\t)\n\n\treturn counter.Count(), nil\n}", "func (h *PCPHistogram) Percentile(p float64) int64 { return h.h.ValueAtQuantile(p) }", "func printFooter(w io.Writer, info *athena.QueryExecution) {\n\tstats := info.Statistics\n\trunTimeMs := aws.Int64Value(stats.EngineExecutionTimeInMillis)\n\tscannedBytes := aws.Int64Value(stats.DataScannedInBytes)\n\tloc := aws.StringValue(info.ResultConfiguration.OutputLocation)\n\tlog.Printf(\"EngineExecutionTimeInMillis: %d milliseconds\\n\", runTimeMs)\n\tlog.Printf(\"DataScannedInBytes: %d bytes\\n\", scannedBytes)\n\tlog.Printf(\"OutputLocation: %s\\n\", loc)\n\tfmt.Fprintf(w, \"Run time: %.2f seconds | Data scanned: %s\\nLocation: %s\\n\",\n\t\tfloat64(runTimeMs)/1000, FormatBytes(scannedBytes), loc)\n}", "func (pb *Pbar) printValues() {\n\tif pb.printNumbers == 1 {\n\t\tansi.Printf(\"\\u001b[1B\")\n\t\tansi.Printf(\"\\033[2G\\033[0m%d / %d\\u001b[1A\", pb.currentAmount, pb.totalAmount)\n\t} else if pb.printNumbers == 2 {\n\t\tnewPercent := int(100*float32(pb.currentAmount)/float32(pb.totalAmount))\n\t\tif newPercent > pb.currentPercent {\n\t\t\tansi.Printf(\"\\u001b[1B\")\n\t\t\tansi.Printf(\"\\033[2G\\033[0m%d%s\\u001b[1A\", newPercent, \"%\")\n\t\t\tpb.currentPercent = newPercent\n\t\t}\n\t}\n}", "func (networkStats *NetworkStats) Export(ch chan<- prometheus.Metric) {\n\tch <- prometheus.MustNewConstMetric(networkBytesTotalDesc, prometheus.CounterValue, networkStats.BytesIn, \"in_bytes\")\n\tch <- prometheus.MustNewConstMetric(networkBytesTotalDesc, prometheus.CounterValue, networkStats.BytesOut, \"out_bytes\")\n\n\tch <- prometheus.MustNewConstMetric(networkMetricsNumRequestsTotalDesc, prometheus.CounterValue, networkStats.NumRequests)\n}", "func (m *Printer) Serialize(writer i878a80d2330e89d26896388a3f487eef27b0a0e6c010c493bf80be1452208f91.SerializationWriter)(error) {\n err := m.PrinterBase.Serialize(writer)\n if err != nil {\n return err\n }\n if m.GetConnectors() != nil {\n cast := make([]i878a80d2330e89d26896388a3f487eef27b0a0e6c010c493bf80be1452208f91.Parsable, len(m.GetConnectors()))\n for i, v := range m.GetConnectors() {\n if v != nil {\n cast[i] = v.(i878a80d2330e89d26896388a3f487eef27b0a0e6c010c493bf80be1452208f91.Parsable)\n }\n }\n err = writer.WriteCollectionOfObjectValues(\"connectors\", cast)\n if err != nil {\n return err\n }\n }\n {\n err = writer.WriteBoolValue(\"hasPhysicalDevice\", m.GetHasPhysicalDevice())\n if err != nil {\n return err\n }\n }\n {\n err = writer.WriteBoolValue(\"isShared\", m.GetIsShared())\n if err != nil {\n return err\n }\n }\n {\n err = writer.WriteTimeValue(\"lastSeenDateTime\", m.GetLastSeenDateTime())\n if err != nil {\n return err\n }\n }\n {\n err = writer.WriteTimeValue(\"registeredDateTime\", m.GetRegisteredDateTime())\n if err != nil {\n return err\n }\n }\n if m.GetShares() != nil {\n cast := make([]i878a80d2330e89d26896388a3f487eef27b0a0e6c010c493bf80be1452208f91.Parsable, len(m.GetShares()))\n for i, v := range m.GetShares() {\n if v != nil {\n cast[i] = v.(i878a80d2330e89d26896388a3f487eef27b0a0e6c010c493bf80be1452208f91.Parsable)\n }\n }\n err = writer.WriteCollectionOfObjectValues(\"shares\", cast)\n if err != nil {\n return err\n }\n }\n if m.GetTaskTriggers() != nil {\n cast := make([]i878a80d2330e89d26896388a3f487eef27b0a0e6c010c493bf80be1452208f91.Parsable, len(m.GetTaskTriggers()))\n for i, v := range m.GetTaskTriggers() {\n if v != nil {\n cast[i] = 
v.(i878a80d2330e89d26896388a3f487eef27b0a0e6c010c493bf80be1452208f91.Parsable)\n }\n }\n err = writer.WriteCollectionOfObjectValues(\"taskTriggers\", cast)\n if err != nil {\n return err\n }\n }\n return nil\n}", "func WriteStats(path string, hot, cold []int64) error {\n\t// Copy before sort.\n\tcold = append([]int64{}, cold...)\n\thot = append([]int64{}, hot...)\n\n\tsort.Slice(cold, func(i, j int) bool { return cold[i] < cold[j] })\n\tsort.Slice(hot, func(i, j int) bool { return hot[i] < hot[j] })\n\n\tpackedCold, err := Pack(cold)\n\tif err != nil {\n\t\treturn errors.Annotate(err, \"failed to pack uploaded items\").Err()\n\t}\n\n\tpackedHot, err := Pack(hot)\n\tif err != nil {\n\t\treturn errors.Annotate(err, \"failed to pack not uploaded items\").Err()\n\t}\n\n\tstatsJSON, err := json.Marshal(struct {\n\t\tItemsCold []byte `json:\"items_cold\"`\n\t\tItemsHot []byte `json:\"items_hot\"`\n\t}{\n\t\tItemsCold: packedCold,\n\t\tItemsHot: packedHot,\n\t})\n\tif err != nil {\n\t\treturn errors.Annotate(err, \"failed to marshal stats json\").Err()\n\t}\n\tif err := ioutil.WriteFile(path, statsJSON, 0600); err != nil {\n\t\treturn errors.Annotate(err, \"failed to write stats json\").Err()\n\t}\n\n\treturn nil\n}", "func (p *Percentiles) Flush() ([]processorResult, bool) {\n\n\tsize := len(p.values)\n\tif size == 0 {\n\t\treturn nil, false\n\t}\n\n\tvar results []processorResult\n\tsort.Float64s(p.values)\n\n\tfor fcnName, percent := range p.percents {\n\t\trank := (percent / 100) * (float64(size) + 1)\n\t\tfloor := int(rank)\n\n\t\tif rank < 1 {\n\t\t\tresults = append(results, processorResult{fcnName, p.values[0]})\n\t\t} else if floor >= size {\n\t\t\tresults = append(results, processorResult{fcnName, p.values[size-1]})\n\t\t} else {\n\t\t\tfrac := rank - float64(floor)\n\t\t\tupper := floor + 1\n\t\t\tpercentile := p.values[floor-1] + frac*(p.values[upper-1]-p.values[floor-1])\n\t\t\tresults = append(results, processorResult{fcnName, percentile})\n\t\t}\n\t}\n\n\treturn results, true\n}", "func (as Authors) Write() {\n\tfor _, a := range as {\n\t\tfmt.Println(a.Full_name)\n\t}\n\tnames := func(fullName string) []string { return strings.Split(fullName, \" \") }\n\tfirstName := func(fullName string) string { return names(fullName)[0] }\n\tfuncMap := map[string]interface{}{\n\t\t\"funcName\": func() string { return \"hlsm_authors\" },\n\t\t\"userLogin\": func(fullName string) string {\n\t\t\treturn strings.ToLower(firstName(fullName))\n\t\t},\n\t\t\"firstName\": firstName,\n\t\t\"lastName\": func(fullName string) string { return names(fullName)[1] },\n\t}\n\twriter.Execute(\"authors\", as, funcMap)\n}", "func ReportPrinter(w io.Writer, minPriority int8, colors bool) func(r reports.Report) error {\n\tvar pal *palette\n\tif colors {\n\t\tpal = &colored\n\t} else {\n\t\tpal = &notcolored\n\t}\n\n\tif w == nil {\n\t\tw = os.Stdout\n\t}\n\n\treturn func(r reports.Report) error {\n\t\tprintReport(r, w, minPriority, pal)\n\t\treturn nil\n\t}\n}", "func PrintDownloadPercent(done chan int64, path string, total int64) {\n\n\tvar stop bool = false\n\n\tfor {\n\t\tselect {\n\t\tcase <-done:\n\t\t\tstop = true\n\t\tdefault:\n\n\t\t\tfile, err := os.Open(path)\n\t\t\tif err != nil {\n\t\t\t\tlog.Fatal(err)\n\t\t\t}\n\n\t\t\tfi, err := file.Stat()\n\t\t\tif err != nil {\n\t\t\t\tlog.Fatal(err)\n\t\t\t}\n\n\t\t\tsize := fi.Size()\n\n\t\t\tif size == 0 {\n\t\t\t\tsize = 1\n\t\t\t}\n\n\t\t\tvar percent float64 = float64(size) / float64(total) * 100\n\n\t\t\tfmt.Printf(\"%.0f\", percent)\n\t\t\tfmt.Print(\"% 
... \")\n\t\t}\n\n\t\tif stop {\n\t\t\tbreak\n\t\t}\n\n\t\ttime.Sleep(time.Second)\n\t}\n}", "func (bc *Benchmark) PrintStats() {\n\tbc.stats.Print()\n}", "func DrawProbabilitiesHistogramScore(p map[int]int) {\n\tvar keys []int\n\tmaxY := 0\n\tmaxX := 0\n\tfor k, freq := range p {\n\t\tkeys = append(keys, k)\n\t\tif maxY < freq {\n\t\t\tmaxY = freq\n\t\t}\n\t\tif maxX < k {\n\t\t\tmaxX = k\n\t\t}\n\t}\n\tsort.Ints(keys)\n\tfmt.Println()\n\tdigits := int(math.Log10(float64(maxX))) + 1\n\tfor i := maxY; i > 0; i-- {\n\t\t// Print Y axes.\n\t\tfmt.Printf(\"%v%%\", indentInt(i, 3))\n\t\tfor _, k := range keys {\n\t\t\tfmt.Print(\" \")\n\t\t\tif p[k] == i {\n\t\t\t\tfor j := 0; j < digits; j++ {\n\t\t\t\t\tfmt.Print(\"▄\")\n\t\t\t\t}\n\t\t\t} else if p[k] > i {\n\t\t\t\tfor j := 0; j < digits; j++ {\n\t\t\t\t\tfmt.Print(\"█\")\n\t\t\t\t}\n\t\t\t} else {\n\t\t\t\tfor j := 0; j < digits; j++ {\n\t\t\t\t\tfmt.Print(\" \")\n\t\t\t\t}\n\t\t\t}\n\t\t}\n\t\tfmt.Print(\"\\n\")\n\t}\n\t// Print X axes.\n\tfmt.Print(\" \")\n\tfor _, k := range keys {\n\t\tfmt.Printf(\"%v\", indentInt(k, digits+1))\n\t}\n\tfmt.Println()\n}", "func (cfw *CoverageDataWriter) Write(metaFileHash [16]byte, args map[string]string, visitor CounterVisitor) error {\n\tif err := cfw.writeHeader(metaFileHash); err != nil {\n\t\treturn err\n\t}\n\treturn cfw.AppendSegment(args, visitor)\n}", "func (m *PrinterDefaults) Serialize(writer i878a80d2330e89d26896388a3f487eef27b0a0e6c010c493bf80be1452208f91.SerializationWriter)(error) {\n if m.GetColorMode() != nil {\n cast := (*m.GetColorMode()).String()\n err := writer.WriteStringValue(\"colorMode\", &cast)\n if err != nil {\n return err\n }\n }\n {\n err := writer.WriteStringValue(\"contentType\", m.GetContentType())\n if err != nil {\n return err\n }\n }\n {\n err := writer.WriteInt32Value(\"copiesPerJob\", m.GetCopiesPerJob())\n if err != nil {\n return err\n }\n }\n {\n err := writer.WriteInt32Value(\"dpi\", m.GetDpi())\n if err != nil {\n return err\n }\n }\n if m.GetDuplexMode() != nil {\n cast := (*m.GetDuplexMode()).String()\n err := writer.WriteStringValue(\"duplexMode\", &cast)\n if err != nil {\n return err\n }\n }\n if m.GetFinishings() != nil {\n err := writer.WriteCollectionOfStringValues(\"finishings\", SerializePrintFinishing(m.GetFinishings()))\n if err != nil {\n return err\n }\n }\n {\n err := writer.WriteBoolValue(\"fitPdfToPage\", m.GetFitPdfToPage())\n if err != nil {\n return err\n }\n }\n {\n err := writer.WriteStringValue(\"inputBin\", m.GetInputBin())\n if err != nil {\n return err\n }\n }\n {\n err := writer.WriteStringValue(\"mediaColor\", m.GetMediaColor())\n if err != nil {\n return err\n }\n }\n {\n err := writer.WriteStringValue(\"mediaSize\", m.GetMediaSize())\n if err != nil {\n return err\n }\n }\n {\n err := writer.WriteStringValue(\"mediaType\", m.GetMediaType())\n if err != nil {\n return err\n }\n }\n if m.GetMultipageLayout() != nil {\n cast := (*m.GetMultipageLayout()).String()\n err := writer.WriteStringValue(\"multipageLayout\", &cast)\n if err != nil {\n return err\n }\n }\n {\n err := writer.WriteStringValue(\"@odata.type\", m.GetOdataType())\n if err != nil {\n return err\n }\n }\n if m.GetOrientation() != nil {\n cast := (*m.GetOrientation()).String()\n err := writer.WriteStringValue(\"orientation\", &cast)\n if err != nil {\n return err\n }\n }\n {\n err := writer.WriteStringValue(\"outputBin\", m.GetOutputBin())\n if err != nil {\n return err\n }\n }\n {\n err := writer.WriteInt32Value(\"pagesPerSheet\", m.GetPagesPerSheet())\n if err != nil 
{\n return err\n }\n }\n if m.GetQuality() != nil {\n cast := (*m.GetQuality()).String()\n err := writer.WriteStringValue(\"quality\", &cast)\n if err != nil {\n return err\n }\n }\n if m.GetScaling() != nil {\n cast := (*m.GetScaling()).String()\n err := writer.WriteStringValue(\"scaling\", &cast)\n if err != nil {\n return err\n }\n }\n {\n err := writer.WriteAdditionalData(m.GetAdditionalData())\n if err != nil {\n return err\n }\n }\n return nil\n}", "func PrintDependencyCounts(ch chan DependencyCountItem) {\n\tout := getOutputFile()\n\tfor item := range ch {\n\t\t_, err := fmt.Fprintf(out, \"%s: %d\\n\", item.Dependency, item.Count)\n\t\tif err != nil {\n\t\t\tfatalError(err)\n\t\t}\n\t}\n\tcloseOutputFile(out)\n}", "func PrintProgressBar(progress int64, complete int64) {\n amount := int((int64(progressBarLength-1) * progress) / complete)\n rest := (progressBarLength - 1) - amount\n bar := strings.Repeat(\"=\", amount) + \">\" + strings.Repeat(\".\", rest)\n _, err := os.Stdout.Write([]byte(\"Progress: [\" + bar + \"]\\r\"))\n Checkerr(err)\n}", "func dumpMetrics(t *testing.T, metrics []map[string]float64, outfile string) {\n\toutFile := getOutFile(outfile)\n\n\tf, err := os.OpenFile(outFile, os.O_CREATE|os.O_WRONLY, 0666)\n\trequire.NoError(t, err, \"Failed opening file\")\n\tdefer f.Close()\n\n\theaderSet := make(map[string]bool)\n\tfor _, metric := range metrics {\n\t\tfor area := range metric {\n\t\t\theaderSet[area] = true\n\t\t}\n\t}\n\n\tvar headers []string\n\tfor area := range headerSet {\n\t\theaders = append(headers, area)\n\t}\n\n\twriter := csv.NewWriter(f)\n\n\terr = writer.Write(headers)\n\trequire.NoError(t, err, \"Failed writting file\")\n\twriter.Flush()\n\n\tfor _, metric := range metrics {\n\t\tvar data []string\n\t\tfor _, header := range headers {\n\t\t\tvalue, isPresent := metric[header]\n\t\t\tif isPresent {\n\t\t\t\tvStr := strconv.FormatFloat(value, 'f', -1, 64)\n\t\t\t\tdata = append(data, vStr)\n\t\t\t} else {\n\t\t\t\tdata = append(data, \"\")\n\t\t\t}\n\t\t}\n\t\terr = writer.Write(data)\n\t\trequire.NoError(t, err, \"Failed writting file\")\n\t\twriter.Flush()\n\t}\n}", "func (c *LoggerClient) Histogram(name string, value float64) {\n\tc.print(\"Histogram\", name, value, value)\n}", "func (r *Report) Print() {\n\tresTotal := 0\n\tfor i := range r.StatusCodes {\n\t\tresTotal += r.StatusCodes[i]\n\t}\n\n\terrTotal := 0\n\tfor i := range r.Errors {\n\t\terrTotal += r.Errors[i]\n\t}\n\n\tfmt.Printf(\" Duration: %0.3fs\\n\", r.Duration.Seconds())\n\tfmt.Printf(\" Requests: %d (%0.1f/s) (%0.5fs/r)\\n\",\n\t\tr.RequestCount,\n\t\tfloat64(r.RequestCount)/r.Duration.Seconds(),\n\t\tr.Duration.Seconds()/float64(r.RequestCount),\n\t)\n\n\tif errTotal > 0 {\n\t\tfmt.Printf(\" Errors: %d\\n\", errTotal)\n\t}\n\n\tfmt.Printf(\"Responses: %d (%0.1f/s) (%0.5fs/r)\\n\",\n\t\tresTotal,\n\t\tfloat64(resTotal)/r.Duration.Seconds(),\n\t\tr.Duration.Seconds()/float64(resTotal),\n\t)\n\tfor code, count := range r.StatusCodes {\n\t\tfmt.Printf(\" [%d]: %d\\n\", code, count)\n\t}\n\tfor err, count := range r.Errors {\n\t\tfmt.Printf(\"\\n%d times:\\n%s\\n\", count, err)\n\t}\n}", "func (mw *multiWriterHolder) Write(p []byte) (int, error) {\n\ttype data struct {\n\t\tn int\n\t\terr error\n\t}\n\n\tresults := make(chan data, len(mw.writers))\n\tmw.mu.RLock()\n\tdefer mw.mu.RUnlock()\n\tfor x := range mw.writers {\n\t\tgo func(w io.Writer, p []byte, ch chan<- data) {\n\t\t\tn, err := w.Write(p)\n\t\t\tif err != nil {\n\t\t\t\tch <- data{n, fmt.Errorf(\"%T %w\", w, 
err)}\n\t\t\t\treturn\n\t\t\t}\n\t\t\tif n != len(p) {\n\t\t\t\tch <- data{n, fmt.Errorf(\"%T %w\", w, io.ErrShortWrite)}\n\t\t\t\treturn\n\t\t\t}\n\t\t\tch <- data{n, nil}\n\t\t}(mw.writers[x], p, results)\n\t}\n\n\tfor range mw.writers {\n\t\t// NOTE: These results do not necessarily reflect the current io.writer\n\t\t// due to the go scheduler and writer finishing at different times, the\n\t\t// response coming from the channel might not match up with the for loop\n\t\t// writer.\n\t\td := <-results\n\t\tif d.err != nil {\n\t\t\treturn d.n, d.err\n\t\t}\n\t}\n\treturn len(p), nil\n}", "func (dsc *DefaultConsumerStatsCollector) PrintStats() {\n\tlog.Printf(\"Consumer stats: Consumed: [%d]\\n\", dsc.Consumed.Count())\n\tlog.Printf(\"Consumer stats: Delivered: [%d]\\n\", dsc.Delivered.Count())\n\tlog.Printf(\"Consumer stats: Processed: [%d]\\n\", dsc.Processed.Count())\n\tlog.Printf(\"Consumer stats: Batch Size: [%d]\\n\", dsc.BatchSize.Value())\n\tlog.Printf(\"Consumer stats: GetRecords Called: [%d]\\n\", dsc.GetRecordsCalled.Count())\n\tlog.Printf(\"Consumer stats: GetRecords Timeout: [%d]\\n\", dsc.GetRecordsTimeout.Count())\n\tlog.Printf(\"Consumer stats: GetRecords Read Timeout: [%d]\\n\", dsc.GetRecordsReadTimeout.Count())\n\tlog.Printf(\"Consumer stats: GetRecords Provisioned Throughput Exceeded: [%d]\\n\", dsc.ReadProvisionedThroughputExceeded.Count())\n\tlog.Printf(\"Consumer stats: Processed Duration (ns): [%d]\\n\", dsc.ProcessedDuration.Value())\n\tlog.Printf(\"Consumer stats: GetRecords Duration (ns): [%d]\\n\", dsc.GetRecordsDuration.Value())\n\tlog.Printf(\"Consumer stats: GetRecords Read Response Duration (ns): [%d]\\n\", dsc.GetRecordsReadResponseDuration.Value())\n\tlog.Printf(\"Consumer stats: GetRecords Unmarshal Duration (ns): [%d]\\n\", dsc.GetRecordsUnmarshalDuration.Value())\n\tlog.Printf(\"Consumer stats: Checkpoint Insert: [%d]\\n\", dsc.CheckpointInsert.Count())\n\tlog.Printf(\"Consumer stats: Checkpoint Done: [%d]\\n\", dsc.CheckpointDone.Count())\n\tlog.Printf(\"Consumer stats: Checkpoint Size: [%d]\\n\", dsc.CheckpointSize.Value())\n\tlog.Printf(\"Consumer stats: Checkpoint Sent: [%d]\\n\", dsc.CheckpointSent.Count())\n\tlog.Printf(\"Consumer stats: Checkpoint Success: [%d]\\n\", dsc.CheckpointSuccess.Count())\n\tlog.Printf(\"Consumer stats: Checkpoint Error: [%d]\\n\", dsc.CheckpointError.Count())\n}", "func Print(s stat.Stat) {\n\tw := tabwriter.NewWriter(os.Stdout, 0, 0, 1, ' ', tabwriter.AlignRight|tabwriter.Debug)\n\tfmt.Fprintln(w, \"Total\\tTests\\tPass\\tFail\\tSkip\\t\")\n\tfmt.Fprintf(w, \"%v\\t%v\\t%v\\t%v\\t%v\\t\\n\", resultTotal(s), s.Tests, s.Pass, s.Fail, s.Skip)\n\tw.Flush()\n\n\tfmt.Printf(\"Elapsed: %v\\n\", s.Elapsed)\n\tfmt.Printf(\"Packages without Tests (%v/%v):\\n%v \\n\", s.Packages, len(s.EmptyPackages), s.EmptyPackages)\n}", "func (b *Bench) String() string {\n\tprefix := \" \"\n\tvar buf bytes.Buffer\n\tpercentiles := []float64{5, 50, 70, 90, 95, 99, 99.9, 99.95, 99.99, 100}\n\n\tif b.rps <= 0 {\n\t\tfmt.Fprintf(&buf, \"Duration: %2.2fs, Concurrency: %d, Total runs: %d\\n\", b.timeTaken.Seconds(), b.concurrentRuns, b.calls)\n\t} else {\n\t\tfmt.Fprintf(&buf, \"Rate: %d calls/sec, Duration: %2.2fs, Concurrency: %d, Total runs: %d\\n\", b.rps, b.timeTaken.Seconds(), b.concurrentRuns, b.calls)\n\t}\n\n\tfor n, h := range b.timers {\n\t\tfmt.Fprintf(&buf, \"%s>>Timer: %s \\n\", prefix, n)\n\t\tfor _, p := range percentiles {\n\t\t\tfmt.Fprintf(&buf, \"%s%s%2.2fth percentile: %.2fms\\n\", prefix, prefix, p, 
float64(h.ValueAtQuantile(p))/1000000)\n\t\t}\n\t\tfmt.Fprintf(&buf, \"%s%sMean: %.2fms\\n\", prefix, prefix, float64(h.Mean())/1000000.0)\n\t}\n\tfor n, count := range b.counters {\n\t\tfmt.Fprintf(&buf, \"%s>>Counter: %s\\n\", prefix, n)\n\t\tfmt.Fprintf(&buf, \"%s%sValue: %d \\n\", prefix, prefix, count)\n\t}\n\treturn buf.String()\n}", "func (h *History) WriteFile(path string) error {\n\tfile, err := os.Create(path + \".csv\")\n\tif err != nil {\n\t\treturn err\n\t}\n\tdefer file.Close()\n\n\tw := bufio.NewWriter(file)\n\th.RLock()\n\tdefer h.RUnlock()\n\n\tsort.Sort(byTime(h.operations))\n\n\tlatency := 0.0\n\tthroughput := 0\n\ts := 1.0\n\tfor _, o := range h.operations {\n\t\tstart := float64(o.start) / 1000000000.0\n\t\tend := float64(o.end) / 1000000000.0\n\t\tfmt.Fprintf(w, \"%v,%v,%f,%f\\n\", o.input, o.output, start, end)\n\t\tlatency += end - start\n\t\tthroughput++\n\t\tif end > s {\n\t\t\tfmt.Fprintf(w, \"PerSecond %f %d\\n\", latency/float64(throughput)*1000.0, throughput)\n\t\t\tlatency = 0\n\t\t\tthroughput = 0\n\t\t\ts++\n\t\t}\n\n\t\t// fmt.Fprintln(w, o)\n\t}\n\n\t// for k, ops := range h.shard {\n\t// \tfmt.Fprintf(w, \"key=%d\\n\", k)\n\t// \tfor _, o := range ops {\n\t// \t\tfmt.Fprintln(w, o)\n\t// \t}\n\t// }\n\treturn w.Flush()\n}", "func (h *History) WriteFile(path string) error {\n\tfile, err := os.Create(path + \".csv\")\n\tif err != nil {\n\t\treturn err\n\t}\n\tdefer file.Close()\n\n\tw := bufio.NewWriter(file)\n\th.RLock()\n\tdefer h.RUnlock()\n\n\tsort.Sort(byTime(h.operations))\n\n\tlatency := 0.0\n\tthroughput := 0\n\ts := 1.0\n\tfor _, o := range h.operations {\n\t\tstart := float64(o.start) / 1000000000.0\n\t\tend := float64(o.end) / 1000000000.0\n\t\tfmt.Fprintf(w, \"%v,%v,%f,%f\\n\", o.input, o.output, start, end)\n\t\tlatency += end - start\n\t\tthroughput++\n\t\tif end > s {\n\t\t\tfmt.Fprintf(w, \"PerSecond %f %d\\n\", latency/float64(throughput)*1000.0, throughput)\n\t\t\tlatency = 0\n\t\t\tthroughput = 0\n\t\t\ts++\n\t\t}\n\n\t\t// fmt.Fprintln(w, o)\n\t}\n\n\t// for k, ops := range h.shard {\n\t// \tfmt.Fprintf(w, \"key=%d\\n\", k)\n\t// \tfor _, o := range ops {\n\t// \t\tfmt.Fprintln(w, o)\n\t// \t}\n\t// }\n\treturn w.Flush()\n}", "func (results WritableResults) Write() error {\n\tfor _, result := range results {\n\t\tif err := result.Write(); err != nil {\n\t\t\treturn err\n\t\t}\n\t}\n\treturn nil\n}", "func printResults(finalCount map[string]int) {\n\tfor index, phrase := range sortedKeys(finalCount) {\n\t\tif index == 100 {\n\t\t\tbreak\n\t\t}\n\t\tfmt.Printf(\"%v: %v\\n\", phrase, finalCount[phrase])\n\t}\n}", "func (c *Printer) PrintBuckets() {\n\tfor {\n\t\tbucket, more := <-c.channel\n\t\tif more {\n\t\t\tfmt.Fprintf(out, \"%s\\n\", bucket)\n\t\t} else {\n\t\t\tc.done <- true\n\t\t\treturn\n\t\t}\n\t}\n}", "func (h *Health) Write(metrics []telegraf.Metric) error {\n\thealthy := true\n\tfor _, checker := range h.checkers {\n\t\tsuccess := checker.Check(metrics)\n\t\tif !success {\n\t\t\thealthy = false\n\t\t}\n\t}\n\n\th.setHealthy(healthy)\n\treturn nil\n}", "func writingWorker(\n workerIndex int,\n outputFolder string,\n in <-chan result,\n group *sync.WaitGroup,\n exportFunc io.Exporter) {\n\n defer group.Done()\n\n for result := range in {\n outputFile := path.Join(outputFolder, utils.SimpleRandomString(20))\n log.Printf(\"[worker:%d] saving file %s\", workerIndex, outputFile)\n if result.err != nil {\n log.Printf(result.err.Error())\n } else {\n query := result.collection.Query\n log.Printf(\n \"[worker:%d] exporting query 
results for '%s' into file '%s\",\n workerIndex, query, outputFile)\n if err := exportFunc(result.collection, outputFile); err != nil {\n log.Printf(err.Error())\n }\n }\n }\n\n log.Printf(\"[worker:%d] terminated\", workerIndex)\n}", "func (h *Histogram) String() string {\n\tvar strs []string\n\tfor _, b := range h.Buckets {\n\t\tstrs = append(strs, fmt.Sprintf(\"[%d,%d):%d\", b.Min, b.Max, b.Count))\n\t}\n\treturn h.Name + \": [\" + strings.Join(strs, \" \") + \"]; \" + fmt.Sprintf(\"sum %d\", h.Sum)\n}", "func (h *Histogram) Log(msg string, percentiles []float64) {\n\tvar b bytes.Buffer\n\tw := bufio.NewWriter(&b)\n\th.Print(w, msg, percentiles)\n\tw.Flush() // nolint: gas,errcheck\n\tlog.Infof(\"%s\", b.Bytes())\n}", "func (w *workTally) Print() {\n\ttable := tablewriter.NewWriter(os.Stdout)\n\ttable.SetHeader([]string{\"Issue ID\", \"Summary\", \"Time Spent\"})\n\ttable.SetFooter([]string{\"\", \"Total\", w.total.String()})\n\ttable.SetBorder(false)\n\tfor _, key := range w.sortedKeys() {\n\t\tentry := w.durationMap[key]\n\t\ttable.Append([]string{\n\t\t\tkey,\n\t\t\ttruncateString(entry.summary, 64),\n\t\t\tentry.duration.String(),\n\t\t})\n\t}\n\tfmt.Println(\"\")\n\ttable.Render()\n}", "func (s *StressReport) Print(t *testing.T) {\n\tfmt.Printf(\"----- Stress Report for %s -----\\n\", t.Name())\n\tw := tabwriter.NewWriter(os.Stdout, 0, 8, 1, '\\t', tabwriter.AlignRight)\n\tfmt.Fprintf(w, \"%s\\t\\t%d iterations\\t%15.4f ns/iteration\", s.Duration, s.Iteration, float64(s.Duration.Nanoseconds())/float64(s.Iteration))\n\tif s.Extras != nil {\n\t\tfor _, metric := range s.Extras {\n\t\t\tfmt.Fprintf(w, \"\\t%15.4f %s\", metric.Value, metric.Unit)\n\t\t}\n\t}\n\n\tif delta := s.Delta(); delta != 0 {\n\t\tfmt.Fprintf(w, \"\\t%15.4f %%iterations\", delta)\n\t}\n\n\tfmt.Fprintln(w)\n\tw.Flush()\n\n\tfmt.Println()\n\tfmt.Printf(\"----- Profiling Report CPU for %s -----\\n\", t.Name())\n\tfmt.Println(string(s.TopCPU))\n\tfmt.Println()\n\n\tfmt.Println()\n\tfmt.Printf(\"----- Profiling Report Memory for %s -----\\n\", t.Name())\n\tfmt.Println(string(s.TopMem))\n\tfmt.Println()\n}", "func PrintDiskStats() {\n\tfmt.Println(DiskStats)\n}", "func (w *Writer) Write() (err error) {\n\tfor _, space := range w.spaces {\n\t\tif err = w.writeSpace(space); err != nil {\n\t\t\treturn\n\t\t}\n\t}\n\n\tlength, err := w.writeIndex()\n\tif err != nil {\n\t\treturn err\n\t}\n\n\treturn w.writeFooter(length)\n}", "func (m *StatsfsMetrics) Print() {\n\tfmt.Print(\"\\n####################################\\n\")\n\tfmt.Printf(\"StatsfsPath: %v\\n\\n\", m.StatsfsPath)\n\tfor subsysName, subsysMetrics := range m.Metrics {\n\t\tfmt.Println(\"------------------\")\n\t\tfmt.Printf(\"Statsfs metrics for subsystem %v:\\n\\n\", subsysName)\n\t\tsubsysMetrics.print()\n\t}\n\tfmt.Printf(\"####################################\\n\\n\")\n}", "func main() {\n// \tfmt.Println(os.Args[0])\n\t// Read the file and dump into the slice of structs\n\tdm := SummaryReport.ReadFile(infile)\n// \tfor _, v := range dm {\n\t\tfmt.Println(*dm[0])\n// \t}\n\t\n// \tCompute number of subjects by treatment group\n\t\t\n// \tTurn values into strings\n\t\t\n// \tCompute number of non-mising Age values by TG\n\t\t\n// \tTurn values into strings\n\t\t\n// \tCompute mean of age by TG and SD by TG\n\t\t\n// \tTurn Mean and SD values into strings\n\t\t\n// \tCompute median values of Age by TG\n\t\t\n// \tTurn median values into strings\n\t\t\n// \tCompute min values of Age by TG\n\t\t\n// \tTurn min values into strings\n\t\t\n// \tCompute max values 
of Age by TG\n\t\t\n// \tTurn max values into strings\n\t\n// \tNew Report \n\t\n\th := titles()\n\terr := SummaryReport.WriteReport(outfile, h)\n\n\tfmt.Println(err)\n// \t\n// \t\n// \t\n// \tpdf.AddPage()\n// // \tbasicTable()\n// \n// \terr := pdf.OutputFileAndClose(*outputFile)\n// \tfmt.Println(err)\n}", "func (w *PrometheusWriter) Write(metric model.Metric) error {\n\tduration := float64(metric.Duration / time.Millisecond)\n\treason := \"\"\n\tif metric.Error != \"\" {\n\t\treason = strings.SplitN(metric.Error, \":\", 2)[0]\n\t\treason = strings.ToLower(reason)\n\t\thealthCheckStatusGauge.With(prometheus.Labels{\n\t\t\t\"name\": metric.Name,\n\t\t}).Set(0)\n\t\thealthCheckErrorCounter.With(prometheus.Labels{\n\t\t\t\"name\": metric.Name,\n\t\t\t\"reason\": reason,\n\t\t}).Inc()\n\t} else {\n\t\thealthCheckStatusGauge.With(prometheus.Labels{\n\t\t\t\"name\": metric.Name,\n\t\t}).Set(1)\n\t}\n\thealthCheckResponseTimeGauge.With(prometheus.Labels{\n\t\t\"name\": metric.Name,\n\t}).Set(duration)\n\n\treturn nil\n}", "func printTable(out io.Writer, rows [][]string) {\n\ttw := tablewriter.NewWriter(out)\n\ttw.AppendBulk(rows)\n\ttw.Render()\n}", "func (s *Set) WritePrometheus(w io.Writer) {\n\t// Collect all the metrics in in-memory buffer in order to prevent from long locking due to slow w.\n\tvar bb bytes.Buffer\n\tlessFunc := func(i, j int) bool {\n\t\treturn s.a[i].name < s.a[j].name\n\t}\n\ts.mu.Lock()\n\tfor _, sm := range s.summaries {\n\t\tsm.updateQuantiles()\n\t}\n\tif !sort.SliceIsSorted(s.a, lessFunc) {\n\t\tsort.Slice(s.a, lessFunc)\n\t}\n\tsa := append([]*namedMetric(nil), s.a...)\n\ts.mu.Unlock()\n\n\t// Call marshalTo without the global lock, since certain metric types such as Gauge\n\t// can call a callback, which, in turn, can try calling s.mu.Lock again.\n\tfor _, nm := range sa {\n\t\tnm.metric.marshalTo(nm.name, &bb)\n\t}\n\tw.Write(bb.Bytes())\n}", "func (m *CloudPcBulkActionSummary) Serialize(writer i878a80d2330e89d26896388a3f487eef27b0a0e6c010c493bf80be1452208f91.SerializationWriter)(error) {\n {\n err := writer.WriteInt32Value(\"failedCount\", m.GetFailedCount())\n if err != nil {\n return err\n }\n }\n {\n err := writer.WriteInt32Value(\"inProgressCount\", m.GetInProgressCount())\n if err != nil {\n return err\n }\n }\n {\n err := writer.WriteInt32Value(\"notSupportedCount\", m.GetNotSupportedCount())\n if err != nil {\n return err\n }\n }\n {\n err := writer.WriteStringValue(\"@odata.type\", m.GetOdataType())\n if err != nil {\n return err\n }\n }\n {\n err := writer.WriteInt32Value(\"pendingCount\", m.GetPendingCount())\n if err != nil {\n return err\n }\n }\n {\n err := writer.WriteInt32Value(\"successfulCount\", m.GetSuccessfulCount())\n if err != nil {\n return err\n }\n }\n {\n err := writer.WriteAdditionalData(m.GetAdditionalData())\n if err != nil {\n return err\n }\n }\n return nil\n}", "func (self *TravellerBots) reportSummary(mp ModelParams) {\n\n\t// Compile results\n\tvar compiledBands []botStatsCompiled\n\tfor _,bot := range(self.bots) {\n\t\tcompiledBands = append(compiledBands, bot.stats.compile(bot.numInstances, mp.ReportDayDelta))\n\t}\n\t\n\t// Output CSV\n\tfn := filepath.Join(mp.WorkingFolder,\"bands.csv\")\n\tfh,_ := os.Create(fn)\n\tif fh == nil {\n\t\treturn\n\t}\n\tline:=\"Day\"\n\tfor i,_ := range(compiledBands) {\n\t\tline += fmt.Sprintf(\",refusedpercent_%d,cancelledpercent_%d,distance_%d\",i,i,i) \n\t}\n\tline +=\"\\n\"\n\tfh.WriteString(line)\n\tfor i:=0; i < len(compiledBands[0].lines); i++ {\n\t\tlineOut := 
fmt.Sprintf(\"%d,\",flap.Days(i+1) *mp.ReportDayDelta)\n\t\tfor _,compiled := range(compiledBands) {\n\t\t\tlineOut += compiled.lines[i]\n\t\t}\n\t\tlineOut=strings.TrimRight(lineOut,\",\")\n\t\tlineOut+=\"\\n\"\n\t\tfh.WriteString(lineOut)\n\t}\n\n\t// Output graphs\n\treportBandsDistance(compiledBands,mp)\n\treportBandsCancelled(compiledBands,mp)\n}", "func New(writer io.Writer, rate time.Duration) func(float64) error {\n\tvar (\n\t\ti int\n\t\tthrottle = time.Tick(rate)\n\t)\n\n\treturn func(pc float64) error {\n\t\t<-throttle\n\t\tif pc > 1 || pc < 0 {\n\t\t\treturn fmt.Errorf(\"percent %f invalid\", pc)\n\t\t}\n\t\t// Render text and padding.\n\t\tpl := 6 - len(strconv.Itoa(int(pc*1e+2)))\n\t\tstr := fmt.Sprintf(\"\\r%.2f%%%s\", pc*1e+2, strings.Repeat(\" \", pl))\n\t\t// Render bar.\n\t\tn := int(pc * 1e+2 / (float64(100) / float64(60)))\n\t\tstr += fmt.Sprintf(\"[%s%s]\", strings.Repeat(\"█\", n), strings.Repeat(\"-\", 60-n))\n\t\t// Render spinner.\n\t\tio.WriteString(writer, str+fmt.Sprintf(\" %c\", `-\\|/`[i%4]))\n\t\ti++\n\t\treturn nil\n\t}\n}", "func Stats(path, prefix string) {\n\tif _, err := os.Stat(path); os.IsNotExist(err) {\n\t\tfatal(err)\n\t\treturn\n\t}\n\n\tdb, err := bolt.Open(path, 0600, nil)\n\tif err != nil {\n\t\tfatal(err)\n\t\treturn\n\t}\n\tdefer db.Close()\n\n\terr = db.View(func(tx *bolt.Tx) error {\n\t\tvar s bolt.BucketStats\n\t\tvar count int\n\t\tvar prefix = []byte(prefix)\n\t\ttx.ForEach(func(name []byte, b *bolt.Bucket) error {\n\t\t\tif bytes.HasPrefix(name, prefix) {\n\t\t\t\ts.Add(b.Stats())\n\t\t\t\tcount += 1\n\t\t\t}\n\t\t\treturn nil\n\t\t})\n\t\tprintf(\"Aggregate statistics for %d buckets\\n\\n\", count)\n\n\t\tprintln(\"Page count statistics\")\n\t\tprintf(\"\\tNumber of logical branch pages: %d\\n\", s.BranchPageN)\n\t\tprintf(\"\\tNumber of physical branch overflow pages: %d\\n\", s.BranchOverflowN)\n\t\tprintf(\"\\tNumber of logical leaf pages: %d\\n\", s.LeafPageN)\n\t\tprintf(\"\\tNumber of physical leaf overflow pages: %d\\n\", s.LeafOverflowN)\n\n\t\tprintln(\"Tree statistics\")\n\t\tprintf(\"\\tNumber of keys/value pairs: %d\\n\", s.KeyN)\n\t\tprintf(\"\\tNumber of levels in B+tree: %d\\n\", s.Depth)\n\n\t\tprintln(\"Page size utilization\")\n\t\tprintf(\"\\tBytes allocated for physical branch pages: %d\\n\", s.BranchAlloc)\n\t\tvar percentage int\n\t\tif s.BranchAlloc != 0 {\n\t\t\tpercentage = int(float32(s.BranchInuse) * 100.0 / float32(s.BranchAlloc))\n\t\t}\n\t\tprintf(\"\\tBytes actually used for branch data: %d (%d%%)\\n\", s.BranchInuse, percentage)\n\t\tprintf(\"\\tBytes allocated for physical leaf pages: %d\\n\", s.LeafAlloc)\n\t\tpercentage = 0\n\t\tif s.LeafAlloc != 0 {\n\t\t\tpercentage = int(float32(s.LeafInuse) * 100.0 / float32(s.LeafAlloc))\n\t\t}\n\t\tprintf(\"\\tBytes actually used for leaf data: %d (%d%%)\\n\", s.LeafInuse, percentage)\n\n\t\tprintln(\"Bucket statistics\")\n\t\tprintf(\"\\tTotal number of buckets: %d\\n\", s.BucketN)\n\t\tpercentage = int(float32(s.InlineBucketN) * 100.0 / float32(s.BucketN))\n\t\tprintf(\"\\tTotal number on inlined buckets: %d (%d%%)\\n\", s.InlineBucketN, percentage)\n\t\tpercentage = 0\n\t\tif s.LeafInuse != 0 {\n\t\t\tpercentage = int(float32(s.InlineBucketInuse) * 100.0 / float32(s.LeafInuse))\n\t\t}\n\t\tprintf(\"\\tBytes used for inlined buckets: %d (%d%%)\\n\", s.InlineBucketInuse, percentage)\n\n\t\treturn nil\n\t})\n\tif err != nil {\n\t\tfatal(err)\n\t\treturn\n\t}\n}", "func DumpTo(out io.Writer, format string) (int64, error) { return dc.DumpTo(out, format) }", "func 
(buckets HistogramBuckets) Table() string {\n\tif len(buckets) == 0 {\n\t\treturn \"\"\n\t}\n\tbuf := bytes.NewBuffer(nil)\n\ttb := tablewriter.NewWriter(buf)\n\ttb.SetAutoWrapText(false)\n\ttb.SetColWidth(1500)\n\ttb.SetCenterSeparator(\"*\")\n\ttb.SetAlignment(tablewriter.ALIGN_CENTER)\n\ttb.SetCaption(true, fmt.Sprintf(\"\t(%q scale)\", buckets[0].Scale))\n\ttb.SetHeader([]string{\"lower bound\", \"upper bound\", \"count\"})\n\tfor _, v := range buckets {\n\t\tlo := fmt.Sprintf(\"%f\", v.LowerBound)\n\t\tif v.Scale == \"milliseconds\" {\n\t\t\tlo = fmt.Sprintf(\"%.3f\", v.LowerBound)\n\t\t}\n\t\thi := fmt.Sprintf(\"%f\", v.UpperBound)\n\t\tif v.Scale == \"milliseconds\" {\n\t\t\thi = fmt.Sprintf(\"%.3f\", v.UpperBound)\n\t\t}\n\t\tif v.UpperBound == math.MaxFloat64 {\n\t\t\thi = \"math.MaxFloat64\"\n\t\t}\n\t\ttb.Append([]string{lo, hi, fmt.Sprintf(\"%d\", v.Count)})\n\t}\n\ttb.Render()\n\treturn buf.String()\n}", "func (p *Printer) Write(w io.Writer, err error) error {\n\talloc := p.pool.Get().(*printerAlloc)\n\n\tif err := p.write(w, alloc.s, err, &alloc.buf); err != nil {\n\t\t// do not return to the pool\n\t\treturn err\n\t}\n\n\talloc.s.Reset()\n\n\tp.pool.Put(alloc)\n\n\treturn nil\n}", "func (h *Hist) SaveImage(f string) {\n\tdata := plotter.Values(h.Counts)\n\n\tif h.Normalize {\n\t\tdata = h.NormCounts()\n\t}\n\n\tp, err := plot.New()\n\tif err != nil {\n\t\tpanic(err)\n\t}\n\n\tp.Title.Text = h.Title\n\tp.Y.Label.Text = \"Count\"\n\tif h.Normalize {\n\t\tp.Y.Label.Text = \"Frequency\"\n\t}\n\n\tbins := make([]plotter.HistogramBin, len(h.BinStart))\n\tfor i, binStart := range h.BinStart {\n\t\tbins[i] = plotter.HistogramBin{binStart, h.BinEnd[i], data[i]}\n\t}\n\n\tph := &plotter.Histogram{\n\t\tBins: bins,\n\t\tWidth: h.DataMax - h.DataMin,\n\t\tFillColor: plotutil.Color(2),\n\t\tLineStyle: plotter.DefaultLineStyle,\n\t}\n\tph.LineStyle.Width = vg.Length(0.5)\n\tph.Color = plotutil.Color(0)\n\n\tp.Add(ph)\n\tdigits := strconv.Itoa(int(h.Precision))\n\tmodeStr := fmt.Sprintf(\" ApproxMode: %.\"+digits+\"f\", h.GetMode())\n\tinfo := strings.TrimRight(h.Info, \"\\n\") + modeStr\n\tp.X.Label.Text = info\n\n\tif err := p.Save(11.69*vg.Inch, 8.27*vg.Inch, f); err != nil {\n\t\tpanic(err)\n\t}\n}", "func WriteReport(w io.Writer, suites []*TestSuite, ctxLen int, quiet bool) error {\n\ttests, failed, skipped := 0, 0, 0\n\n\tfor _, s := range suites {\n\t\tif s.Failed() {\n\t\t\tfailed++\n\t\t\tif !quiet {\n\t\t\t\tif err := s.WriteDiff(w, ctxLen); err != nil {\n\t\t\t\t\treturn fmt.Errorf(\"couldn't write %q: %s\", s.Name+\".err\", err)\n\t\t\t\t}\n\t\t\t}\n\t\t} else if s.Skipped() {\n\t\t\tskipped++\n\t\t}\n\t\ttests++\n\t}\n\n\tplural := \"s\"\n\tif tests == 1 {\n\t\tplural = \"\"\n\t}\n\n\t_, err := fmt.Fprintf(w, \"# Ran %d test%s, %d skipped, %d failed.\\n\", tests, plural, skipped, failed)\n\treturn err\n}", "func (S *Simulation) PrintHistory(file io.Writer) {\n\tfor group := range S.agentsByGroup {\n\t\tS.PrintGroupHistory(file, group)\n\t}\n}", "func (c *Collector) Percentile(percent float64) float64 {\n\tc.Lock()\n\tdefer c.Unlock()\n\ttargetCount := int(percent * float64(c.Count) / 100)\n\n\tcount := 0\n\tfor _, b := range c.Buckets {\n\t\tcount += b.Count\n\t\tif count >= targetCount {\n\t\t\treturn b.Max\n\t\t}\n\t}\n\n\treturn c.Max\n}", "func PublishHistogram(ctx context.Context, key string, value float64) {\n\n\t// Spans are not processed by the collector until the span ends, so to prevent any delay\n\t// in processing the stats when the current span is long-lived we 
create a new span for every call.\n\t// suffix the span name with SpannameSuffixDummy to denote that it is used only to hold a metric and isn't itself of any interest\n\tspan, ctx := opentracing.StartSpanFromContext(ctx, \"histogram_metrics\"+SpannameSuffixDummy)\n\tdefer span.Finish()\n\n\t// The field name we use is the metric name prepended with FieldnamePrefixHistogram to designate that it is a Prometheus histogram metric\n\t// The collector will replace that prefix with \"fn_\" and use the result as the Prometheus metric name.\n\tfieldname := FieldnamePrefixHistogram + key\n\tspan.LogFields(log.Float64(fieldname, value))\n}", "func (c *Client) Histogram(stat string, value int, rate float64) error {\n\treturn c.send(stat, rate, \"%d|ms\", value)\n}", "func (s *Scanner) WriteTopN(n int, w io.Writer) {\n\tfor i := 0; i < n && i < len(s.Freqs); i++ {\n\t\tfmt.Fprintln(w, s.Freqs[i])\n\t}\n}", "func (writer *testWriter) Write(b []byte) (int, error) {\n\tfmt.Print(\"[OUT] > \")\n\treturn os.Stdout.Write(b)\n}", "func (m *MongoDB) Write(r *report.Report) error {\n\tm.changes <- agents.Change{MetricsBucket: r.Buckets}\n\treturn nil\n}" ]
[ "0.64179385", "0.6303539", "0.61359304", "0.5873298", "0.56145483", "0.53762704", "0.5344412", "0.52478766", "0.5165365", "0.51300883", "0.5050418", "0.49999088", "0.497158", "0.49383005", "0.49119425", "0.49078703", "0.48954996", "0.48871756", "0.48845878", "0.48510522", "0.48477495", "0.48327446", "0.48311746", "0.48165378", "0.4767023", "0.4759347", "0.47359937", "0.47130275", "0.4677794", "0.46740982", "0.4655146", "0.46477365", "0.46376607", "0.46212012", "0.46195847", "0.4612671", "0.46026033", "0.46001118", "0.4583494", "0.4567324", "0.4557615", "0.455371", "0.45375326", "0.45270237", "0.45269954", "0.4520635", "0.451682", "0.45039448", "0.44985047", "0.4483703", "0.44714293", "0.44536307", "0.44264522", "0.4415632", "0.44042963", "0.44011885", "0.4394385", "0.43928617", "0.43907824", "0.43904266", "0.4385966", "0.4363723", "0.4353598", "0.43481213", "0.4337952", "0.43309864", "0.43151855", "0.43151855", "0.43112907", "0.42988873", "0.42985827", "0.42964548", "0.42917657", "0.42917112", "0.42911118", "0.42839476", "0.42830184", "0.4283011", "0.42788243", "0.42746177", "0.42671308", "0.42612073", "0.42538714", "0.42491335", "0.42421803", "0.42357546", "0.4229016", "0.42126867", "0.42118162", "0.42097366", "0.4203535", "0.41999042", "0.41992566", "0.41974005", "0.41923103", "0.41905794", "0.41864535", "0.41832745", "0.41818073", "0.41714585" ]
0.7136059
0
Log Logs the histogram to the counter.
func (h *Histogram) Log(msg string, percentiles []float64) { var b bytes.Buffer w := bufio.NewWriter(&b) h.Print(w, msg, percentiles) w.Flush() // nolint: gas,errcheck log.Infof("%s", b.Bytes()) }
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "func (c *LoggerClient) Histogram(name string, value float64) {\n\tc.print(\"Histogram\", name, value, value)\n}", "func (c *Counter) Log(msg string) {\n\tlog.Infof(\"%s : count %d avg %.8g +/- %.4g min %g max %g sum %.9g\",\n\t\tmsg, c.Count, c.Avg(), c.StdDev(), c.Min, c.Max, c.Sum)\n}", "func (a *Aggregator) Log(l *logrus.Entry, round uint64, step uint8) {\n\ttarget := a.handler.Quorum(round)\n\n\tfor hash, sv := range a.voteSets {\n\t\ttotal := sv.Cluster.TotalOccurrences()\n\n\t\tl.WithField(\"hash\", util.StringifyBytes([]byte(hash))).\n\t\t\tWithField(\"total\", total).\n\t\t\tWithField(\"round\", round).\n\t\t\tWithField(\"step\", step).\n\t\t\tWithField(\"quorum_target\", target).Info()\n\t}\n}", "func (c *Client) Histogram(stat string, value int, rate float64) error {\n\treturn c.send(stat, rate, \"%d|ms\", value)\n}", "func Histogram(name string, requestTime float64, tags []string, rate float64) {\n\tif ddog == nil {\n\t\tlog.Error(\"datadog client is not initialized\")\n\t\treturn\n\t}\n\n\terr := ddog.Client.Histogram(name, requestTime, tags, rate)\n\tif err != nil {\n\t\tlog.WithFields(logrus.Fields{\n\t\t\t\"error\": err,\n\t\t\t\"name\": name,\n\t\t}).Error(\"Failed to send histogram data to datadog\")\n\t}\n}", "func (c *Context) Histogram(stat string, value float64) {\n\tfor _, sink := range c.sinks {\n\t\tsink.Histogram(c, stat, value)\n\t}\n}", "func (_m *Reporter) Histogram(name string, value float64, tags ...monitoring.Tag) {\n\t_va := make([]interface{}, len(tags))\n\tfor _i := range tags {\n\t\t_va[_i] = tags[_i]\n\t}\n\tvar _ca []interface{}\n\t_ca = append(_ca, name, value)\n\t_ca = append(_ca, _va...)\n\t_m.Called(_ca...)\n}", "func (c *Client) Histogram(name string, value int) error {\n\treturn c.DataDog.Histogram(name, float64(value), nil, 1)\n}", "func (h *Histogram) Record(v float64) {\n\th.RecordN(v, 1)\n}", "func (c *StatsClient) Histogram(name string, value float64) {\n\tif err := c.client.Histogram(name, value, c.tags, Rate); err != nil {\n\t\tc.logger().Printf(\"datadog.StatsClient.Histogram error: %s\", err)\n\t}\n}", "func (d TestSink) Histogram(c *telemetry.Context, stat string, value float64) {\n\td[stat] = TestMetric{\"Histogram\", value, c.Tags()}\n}", "func (datadog *Datadog) Histogram(name string, startTime time.Time, tags []string) error {\n\telapsedTime := time.Since(startTime).Seconds() * 1000\n\terr := datadog.client.Histogram(name, elapsedTime, tags, float64(1))\n\tif err != nil {\n\t\treturn err\n\t}\n\treturn nil\n}", "func Log(r Registry, freq time.Duration, l Logger) {\n\tLogScaled(r, freq, time.Nanosecond, l)\n}", "func PublishHistogram(ctx context.Context, key string, value float64) {\n\n\t// Spans are not processed by the collector until the span ends, so to prevent any delay\n\t// in processing the stats when the current span is long-lived we create a new span for every call.\n\t// suffix the span name with SpannameSuffixDummy to denote that it is used only to hold a metric and isn't itself of any interest\n\tspan, ctx := opentracing.StartSpanFromContext(ctx, \"histogram_metrics\"+SpannameSuffixDummy)\n\tdefer span.Finish()\n\n\t// The field name we use is the metric name prepended with FieldnamePrefixHistogram to designate that it is a Prometheus histogram metric\n\t// The collector will replace that prefix with \"fn_\" and use the result as the Prometheus metric name.\n\tfieldname := FieldnamePrefixHistogram + key\n\tspan.LogFields(log.Float64(fieldname, value))\n}", "func (g *GCMMessageHandler) LogStats() {\n\tl := 
g.Logger.WithFields(log.Fields{\n\t\t\"method\": \"logStats\",\n\t\t\"interval(ns)\": g.LogStatsInterval,\n\t})\n\n\tticker := time.NewTicker(g.LogStatsInterval)\n\tfor range ticker.C {\n\t\tapnsResMutex.Lock()\n\t\tif g.sentMessages > 0 || g.responsesReceived > 0 || g.ignoredMessages > 0 || g.successesReceived > 0 || g.failuresReceived > 0 {\n\t\t\tl.WithFields(log.Fields{\n\t\t\t\t\"sentMessages\": g.sentMessages,\n\t\t\t\t\"responsesReceived\": g.responsesReceived,\n\t\t\t\t\"ignoredMessages\": g.ignoredMessages,\n\t\t\t\t\"successesReceived\": g.successesReceived,\n\t\t\t\t\"failuresReceived\": g.failuresReceived,\n\t\t\t}).Info(\"flushing stats\")\n\t\t\tg.sentMessages = 0\n\t\t\tg.responsesReceived = 0\n\t\t\tg.successesReceived = 0\n\t\t\tg.ignoredMessages = 0\n\t\t\tg.failuresReceived = 0\n\t\t}\n\t\tapnsResMutex.Unlock()\n\t}\n}", "func (m *Metrics) Log(l Logger, all bool) {\n\tif !m.config.Enable {\n\t\treturn\n\t}\n\n\tw := writer{l, all}\n\n\tenc := expfmt.NewEncoder(w, expfmt.FmtText)\n\n\tmfs, err := prometheus.DefaultGatherer.Gather()\n\tif err != nil {\n\t\tfmt.Println(\"error gathering metrics:\", err)\n\t\treturn\n\t}\n\n\tfor _, mf := range mfs {\n\t\tif err := enc.Encode(mf); err != nil {\n\t\t\tfmt.Println(\"error encoding and sending metric family:\", err)\n\t\t\treturn\n\t\t}\n\t}\n}", "func (h *Histogram) record(v float64, count int) {\n\t// Scaled value to bucketize - we subtract epsilon because the interval\n\t// is open to the left ] start, end ] so when exactly on start it has\n\t// to fall on the previous bucket. TODO add boundary tests\n\tscaledVal := (v-h.Offset)/h.Divider - 0.0001\n\tvar idx int\n\tif scaledVal <= firstValue {\n\t\tidx = 0\n\t} else if scaledVal > lastValue {\n\t\tidx = numBuckets - 1 // last bucket is for > last value\n\t} else {\n\t\t// else we look it up\n\t\tidx = lookUpIdx(int(scaledVal))\n\t}\n\th.Hdata[idx] += int32(count)\n}", "func (te *TelemetryEmitter) emitHistogram(metric Metric, timestamp time.Time) error {\n\thist, ok := metric.value.(*dto.Histogram)\n\tif !ok {\n\t\treturn fmt.Errorf(\"unknown histogram metric type for %q: %T\", metric.name, metric.value)\n\t}\n\n\tif m, ok := te.deltaCalculator.CountMetric(metric.name+\".sum\", metric.attributes, hist.GetSampleSum(), timestamp); ok {\n\t\tte.harvester.RecordMetric(m)\n\t}\n\n\tmetricName := metric.name + \".buckets\"\n\tbuckets := make(histogram.Buckets, 0, len(hist.Bucket))\n\tfor _, b := range hist.GetBucket() {\n\t\tupperBound := b.GetUpperBound()\n\t\tcount := float64(b.GetCumulativeCount())\n\t\tif !math.IsInf(upperBound, 1) {\n\t\t\tbucketAttrs := copyAttrs(metric.attributes)\n\t\t\tbucketAttrs[\"histogram.bucket.upperBound\"] = upperBound\n\t\t\tif m, ok := te.deltaCalculator.CountMetric(metricName, bucketAttrs, count, timestamp); ok {\n\t\t\t\tte.harvester.RecordMetric(m)\n\t\t\t}\n\t\t}\n\t\tbuckets = append(\n\t\t\tbuckets,\n\t\t\thistogram.Bucket{\n\t\t\t\tUpperBound: upperBound,\n\t\t\t\tCount: count,\n\t\t\t},\n\t\t)\n\t}\n\n\tvar results error\n\tmetricName = metric.name + \".percentiles\"\n\tfor _, p := range te.percentiles {\n\t\tv, err := histogram.Percentile(p, buckets)\n\t\tif err != nil {\n\t\t\tif results == nil {\n\t\t\t\tresults = err\n\t\t\t} else {\n\t\t\t\tresults = fmt.Errorf(\"%v: %w\", err, results)\n\t\t\t}\n\t\t\tcontinue\n\t\t}\n\n\t\tpercentileAttrs := copyAttrs(metric.attributes)\n\t\tpercentileAttrs[\"percentile\"] = p\n\t\tte.harvester.RecordMetric(telemetry.Gauge{\n\t\t\tName: metricName,\n\t\t\tAttributes: percentileAttrs,\n\t\t\tValue: 
v,\n\t\t\tTimestamp: timestamp,\n\t\t})\n\t}\n\n\treturn results\n}", "func (info HistogramInt64) Record(e *Config, key *keys.Int64) {\n\tdata := &HistogramInt64Data{Info: &info, key: key}\n\te.subscribe(key, data.record)\n}", "func (c *Client) LogStats(out io.Writer) {\n\tif !c.LoggingStats {\n\t\treturn\n\t}\n\n\tfmt.Fprintf(out, \"concurrent=%d time=%d version=%s\\n\", c.ConcurrentTransfers, time.Now().Unix(), UserAgent)\n\n\tfor key, responses := range c.transferBuckets {\n\t\tfor _, response := range responses {\n\t\t\tstats := c.transfers[response]\n\t\t\tfmt.Fprintf(out, \"key=%s reqheader=%d reqbody=%d resheader=%d resbody=%d restime=%d status=%d url=%s\\n\",\n\t\t\t\tkey,\n\t\t\t\tstats.requestStats.HeaderSize,\n\t\t\t\tstats.requestStats.BodySize,\n\t\t\t\tstats.responseStats.HeaderSize,\n\t\t\t\tstats.responseStats.BodySize,\n\t\t\t\tstats.responseStats.Stop.Sub(stats.responseStats.Start).Nanoseconds(),\n\t\t\t\tresponse.StatusCode,\n\t\t\t\tresponse.Request.URL)\n\t\t}\n\t}\n}", "func (r *reporter) Log(args ...interface{}) {\n\tr.logs.log(fmt.Sprint(args...))\n}", "func (n *Node) LogMetrics(ctx context.Context) {\n\tn.Metrics = make(chan Metrics)\n\tgo func() {\n\t\tfor {\n\t\t\tselect {\n\t\t\tcase <-ctx.Done():\n\t\t\t\treturn\n\t\t\tcase m := <-n.Metrics:\n\t\t\t\tlog.Printf(\"%s\", JSON(m))\n\t\t\t}\n\t\t}\n\t}()\n}", "func (h *timeHistogram) record(duration int64) {\n\t// If the duration is negative, capture that in underflow.\n\tif duration < 0 {\n\t\th.underflow.Add(1)\n\t\treturn\n\t}\n\t// bucketBit is the target bit for the bucket which is usually the\n\t// highest 1 bit, but if we're less than the minimum, is the highest\n\t// 1 bit of the minimum (which will be zero in the duration).\n\t//\n\t// bucket is the bucket index, which is the bucketBit minus the\n\t// highest bit of the minimum, plus one to leave room for the catch-all\n\t// bucket for samples lower than the minimum.\n\tvar bucketBit, bucket uint\n\tif l := sys.Len64(uint64(duration)); l < timeHistMinBucketBits {\n\t\tbucketBit = timeHistMinBucketBits\n\t\tbucket = 0 // bucketBit - timeHistMinBucketBits\n\t} else {\n\t\tbucketBit = uint(l)\n\t\tbucket = bucketBit - timeHistMinBucketBits + 1\n\t}\n\t// If the bucket we computed is greater than the number of buckets,\n\t// count that in overflow.\n\tif bucket >= timeHistNumBuckets {\n\t\th.overflow.Add(1)\n\t\treturn\n\t}\n\t// The sub-bucket index is just next timeHistSubBucketBits after the bucketBit.\n\tsubBucket := uint(duration>>(bucketBit-1-timeHistSubBucketBits)) % timeHistNumSubBuckets\n\th.counts[bucket*timeHistNumSubBuckets+subBucket].Add(1)\n}", "func (bench *Stopwatch) Histogram(binCount int) *Histogram {\n\tbench.mustBeCompleted()\n\n\topts := defaultOptions\n\topts.BinCount = binCount\n\n\treturn NewDurationHistogram(bench.Durations(), &opts)\n}", "func (info HistogramFloat64) Record(e *Config, key *keys.Float64) {\n\tdata := &HistogramFloat64Data{Info: &info, key: key}\n\te.subscribe(key, data.record)\n}", "func (r *Recorder) Histogram(ctx context.Context, tconn *chrome.TestConn) ([]*Histogram, error) {\n\tnames := r.names()\n\n\ts, err := GetHistograms(ctx, tconn, names)\n\tif err != nil {\n\t\treturn nil, errors.Wrap(err, \"failed to get snapshot\")\n\t}\n\n\treturn DiffHistograms(r.snapshot, s)\n}", "func (lx *LXRHash) Log(msg string) {\n\tif lx.verbose {\n\t\tfmt.Println(msg)\n\t}\n}", "func (e *Executor) LogProgress(ctx context.Context, target Target) {\n\tlogProgress(ctx, target, nil)\n}", "func (hm *HM) Log(f string, a ...interface{}) 
{\n\ts := fmt.Sprintf(\"[%d]-\", hm.pid) + fmt.Sprintf(f, a...) + \"\\n\"\n\tLogChan <- s\n}", "func (s *fseEncoder) Histogram() *[256]uint32 {\n\treturn &s.count\n}", "func (u *Utils) LogMetrics(startTime time.Time, handler string, httpCode *int) {\n\ttimeTaken := time.Since(startTime).Seconds()\n\tif u.options.PrometheusRequestsVector != nil {\n\t\tu.options.PrometheusRequestsVector.WithLabelValues(fmt.Sprintf(\"%d\", *httpCode)).Observe(timeTaken)\n\t}\n\tif u.options.PrometheusHandlersVector != nil {\n\t\tu.options.PrometheusHandlersVector.WithLabelValues(fmt.Sprintf(\"%d\", *httpCode), handler).Observe(timeTaken)\n\t}\n}", "func (r *rawHist) record(t int64, v float64) {\n\tr.mutex.Lock()\n\tdefer r.mutex.Unlock()\n\tr.buf = append(r.buf, &pb.Sample{\n\t\tDatetime: t,\n\t\tValue: v,\n\t})\n\tif int32(len(r.buf)) >= GetRawHistBufLimit() {\n\t\tbuf := r.buf\n\t\tr.buf = make([]*pb.Sample, 0, GetRawHistBufLimit()/2)\n\t\tgo r.assembleAndSend(buf)\n\t}\n}", "func (cm *customMetrics) ObserveHistogram(histogram string, observation float64) {\n\n\tcm.histograms[histogram].Observe(observation)\n}", "func (s *Service) log(msg string) {\n\ts.lastLog = msg\n\ts.logwin.Clear()\n\ts.logwin.ColorOn(2)\n\ts.logwin.Box(0, 0)\n\ts.logwin.ColorOff(2)\n\ts.logwin.ColorOn(1)\n\ts.logwin.MovePrint(0, 0, msg)\n\ts.logwin.ColorOff(1)\n\ts.logwin.Refresh()\n}", "func (w *NamedHistogram) Record(elapsed time.Duration) {\n\tmaxLatency := time.Duration(w.mu.current.HighestTrackableValue())\n\tif elapsed < minLatency {\n\t\telapsed = minLatency\n\t} else if elapsed > maxLatency {\n\t\telapsed = maxLatency\n\t}\n\n\tw.mu.Lock()\n\terr := w.mu.current.RecordValue(elapsed.Nanoseconds())\n\tw.mu.Unlock()\n\n\tif err != nil {\n\t\t// Note that a histogram only drops recorded values that are out of range,\n\t\t// but we clamp the latency value to the configured range to prevent such\n\t\t// drops. This code path should never happen.\n\t\tpanic(fmt.Sprintf(`%s: recording value: %s`, w.name, err))\n\t}\n}", "func (ac *Accumulator) AddHistogram(measurement string, fields map[string]interface{},\n\ttags map[string]string, t ...time.Time) {\n\t// as of right now metric always returns a nil error\n\tm, _ := metric.New(measurement, tags, fields, getTime(t), telegraf.Histogram)\n\tac.AddMetric(m)\n}", "func NewHistogramLog(name string, observation float64, labels ...string) MetricsLogger {\n\treturn &histogram{\n\t\tname: name,\n\t\tobservation: observation,\n\t\tlabels: labels,\n\t}\n}", "func (b *BandwidthCollector) LogSentMessage(int64) {}", "func (r *JobsService) Histogram(gethistogramrequest *GetHistogramRequest) *JobsHistogramCall {\n\tc := &JobsHistogramCall{s: r.s, urlParams_: make(gensupport.URLParams)}\n\tc.gethistogramrequest = gethistogramrequest\n\treturn c\n}", "func (svr *Saver) LogCacheStats(localCount, errCount int) {\n\tstats := svr.stats.Copy() // Get a copy\n\tlog.Printf(\"Cache info total %d local %d same %d diff %d new %d err %d\\n\",\n\t\tstats.TotalCount+(int64)(localCount), localCount,\n\t\tstats.TotalCount-((int64)(errCount)+stats.NewCount+stats.DiffCount+(int64)(localCount)),\n\t\tstats.DiffCount, stats.NewCount, errCount)\n}", "func (e *Huobi) Log(msgs ...interface{}) {\n\te.logger.Log(constant.INFO, \"\", 0.0, 0.0, msgs...)\n}", "func (s *DBStore) LogStats() {\n\tvar requestCount, statisticCount int\n\ts.db.Table(\"requests\").Count(&requestCount)\n\ts.db.Table(\"statistics\").Count(&statisticCount)\n\tdbLogger.Infof(\"Cached requests: %d. 
Statistic entries count: %d\", requestCount, statisticCount)\n}", "func (h *Histogram) Add(value int64) {\n\tfor i := range h.labels {\n\t\tif i == len(h.labels)-1 || value <= h.cutoffs[i] {\n\t\t\th.buckets[i].Add(1)\n\t\t\th.total.Add(value)\n\t\t\tbreak\n\t\t}\n\t}\n\tif h.hook != nil {\n\t\th.hook(value)\n\t}\n\tif defaultStatsdHook.histogramHook != nil && h.name != \"\" {\n\t\tdefaultStatsdHook.histogramHook(h.name, value)\n\t}\n}", "func (b *BandwidthCollector) LogRecvMessage(int64) {}", "func (h LogHook) Run(e *zerolog.Event, level zerolog.Level, msg string) {\n\t// Send log type metric\n\th.metric.RecordByLogTypes(level.String())\n\t// Send version in each log entry\n\te.Str(\"version\", Version)\n}", "func (hlt *HttpLogTracker) AddLog(hl *HttpLog) {\n\tsection := hl.GetSection()\n\tif !hlt.ContainsSection(section) {\n\t\t// Create new bucket of count 1, or add to existing\n\t\tvar bucket *list.Element\n\t\tif hlt.NumSections() == 0 || hlt.sections.Front().Value.(*sectionBucket).numLogs != 1 {\n\t\t\tbucket = hlt.sections.PushFront(&sectionBucket {\n\t\t\t\tnumLogs: 1,\n\t\t\t\tsections: map[string]bool{section: true},\n\t\t\t})\n\t\t} else {\n\t\t\tbucket = hlt.sections.Front();\n\t\t\tbucket.Value.(*sectionBucket).sections[section] = true\n\t\t}\n\t\t// Create new stats\n\t\thlt.statsBySection[section] = &SectionStats{\n\t\t\tSection: section,\n\t\t\tNumLogs: 1,\n\t\t\tNumLogsByMethod: map[string]uint64{hl.Method: 1},\n\t\t\tNumLogsByStatus: map[uint16]uint64{hl.Status: 1},\n\t\t\tbucket: bucket,\n\t\t}\n\t} else {\n\t\t// Update stats\n\t\tstats := hlt.statsBySection[section]\n\t\tstats.addLog(hl)\n\t\t// Get next bucket, or create new next bucket, and add to it\n\t\tvar nextBucket *list.Element\n\t\tif stats.bucket.Next() == nil || stats.bucket.Next().Value.(*sectionBucket).numLogs != stats.NumLogs {\n\t\t\tnextBucket = hlt.sections.InsertAfter(&sectionBucket{\n\t\t\t\tnumLogs: stats.NumLogs,\n\t\t\t\tsections: map[string]bool{section: true},\n\t\t\t}, stats.bucket)\n\t\t} else {\n\t\t\tnextBucket = stats.bucket.Next()\n\t\t\tnextBucket.Value.(*sectionBucket).sections[section] = true\n\t\t}\n\t\t// Remove from existing bucket, deleting existing bucket if empty\n\t\tdelete(stats.bucket.Value.(*sectionBucket).sections, section)\n\t\tif len(stats.bucket.Value.(*sectionBucket).sections) == 0 {\n\t\t\thlt.sections.Remove(stats.bucket)\n\t\t}\n\t\t// Set new bucket on stats struct\n\t\tstats.bucket = nextBucket\n\t}\n}", "func (t *Tracker) Log() {\n\tlog.Printf(\n\t\t\"track time:\\n\"+\n\t\t\t\" - label [%v]\\n\"+\n\t\t\t\" - run at [%v]\\n\"+\n\t\t\t\" - finish at [%v]\\n\"+\n\t\t\t\" - duration [%v]\\n\",\n\t\tt.Label,\n\t\tt.Run,\n\t\tt.Finish,\n\t\tt.Duration,\n\t)\n}", "func (h *Hist) SaveImage(f string) {\n\tdata := plotter.Values(h.Counts)\n\n\tif h.Normalize {\n\t\tdata = h.NormCounts()\n\t}\n\n\tp, err := plot.New()\n\tif err != nil {\n\t\tpanic(err)\n\t}\n\n\tp.Title.Text = h.Title\n\tp.Y.Label.Text = \"Count\"\n\tif h.Normalize {\n\t\tp.Y.Label.Text = \"Frequency\"\n\t}\n\n\tbins := make([]plotter.HistogramBin, len(h.BinStart))\n\tfor i, binStart := range h.BinStart {\n\t\tbins[i] = plotter.HistogramBin{binStart, h.BinEnd[i], data[i]}\n\t}\n\n\tph := &plotter.Histogram{\n\t\tBins: bins,\n\t\tWidth: h.DataMax - h.DataMin,\n\t\tFillColor: plotutil.Color(2),\n\t\tLineStyle: plotter.DefaultLineStyle,\n\t}\n\tph.LineStyle.Width = vg.Length(0.5)\n\tph.Color = plotutil.Color(0)\n\n\tp.Add(ph)\n\tdigits := strconv.Itoa(int(h.Precision))\n\tmodeStr := fmt.Sprintf(\" ApproxMode: 
%.\"+digits+\"f\", h.GetMode())\n\tinfo := strings.TrimRight(h.Info, \"\\n\") + modeStr\n\tp.X.Label.Text = info\n\n\tif err := p.Save(11.69*vg.Inch, 8.27*vg.Inch, f); err != nil {\n\t\tpanic(err)\n\t}\n}", "func (bi *blockImporter) logProgress() {\n\tbi.receivedLogBlocks++\n\n\tnow := time.Now()\n\tduration := now.Sub(bi.lastLogTime)\n\tif duration < time.Second*time.Duration(cfg.Progress) {\n\t\treturn\n\t}\n\n\t// Truncate the duration to 10s of milliseconds.\n\tdurationMillis := int64(duration / time.Millisecond)\n\ttDuration := 10 * time.Millisecond * time.Duration(durationMillis/10)\n\n\t// Log information about new block height.\n\tblockStr := \"blocks\"\n\tif bi.receivedLogBlocks == 1 {\n\t\tblockStr = \"block\"\n\t}\n\ttxStr := \"transactions\"\n\tif bi.receivedLogTx == 1 {\n\t\ttxStr = \"transaction\"\n\t}\n\tlog.Infof(\"Processed %d %s in the last %s (%d %s, height %d, %s)\",\n\t\tbi.receivedLogBlocks, blockStr, tDuration, bi.receivedLogTx,\n\t\ttxStr, bi.lastHeight, bi.lastBlockTime)\n\n\tbi.receivedLogBlocks = 0\n\tbi.receivedLogTx = 0\n\tbi.lastLogTime = now\n}", "func (c *Console) handleLog(entries <-chan *log.Entry) {\n\n\tvar e *log.Entry\n\tvar color log.ANSIEscSeq\n\tvar l int\n\n\tfor e = range entries {\n\n\t\tl = len(e.Fields)\n\t\tcolor = c.colors[e.Level]\n\n\t\tif c.miniTimestamp {\n\t\t\tif l == 0 {\n\t\t\t\tfmt.Fprintf(c.writer, \"%s%6s%s[%04d] %s\", color, e.Level, c.ansiReset, c.parseMiniTimestamp(), e.Message)\n\t\t\t} else {\n\t\t\t\tfmt.Fprintf(c.writer, \"%s%6s%s[%04d] %-25s\", color, e.Level, c.ansiReset, c.parseMiniTimestamp(), e.Message)\n\t\t\t}\n\t\t} else {\n\t\t\tif l == 0 {\n\t\t\t\tfmt.Fprintf(c.writer, \"%s%6s%s[%s] %s\", color, e.Level, c.ansiReset, e.Timestamp.Format(c.timestampFormat), e.Message)\n\t\t\t} else {\n\t\t\t\tfmt.Fprintf(c.writer, \"%s%6s%s[%s] %-25s\", color, e.Level, c.ansiReset, e.Timestamp.Format(c.timestampFormat), e.Message)\n\t\t\t}\n\t\t}\n\n\t\tfor _, f := range e.Fields {\n\t\t\tfmt.Fprintf(c.writer, \" %s%s%s=%v\", color, f.Key, c.ansiReset, f.Value)\n\t\t}\n\n\t\tfmt.Fprintln(c.writer)\n\n\t\te.WG.Done()\n\t}\n}", "func (p PollerJob)Log(args...interface{}){\n\tlog.Println(args...)\n}", "func (c *Aggregator) Histogram() (aggregation.Buckets, error) {\n\treturn aggregation.Buckets{\n\t\tBoundaries: c.boundaries,\n\t\tCounts: c.state.bucketCounts,\n\t}, nil\n}", "func initLogLut() {\n l2Bval := math.Log(2.0)\n\n numSegs := pageSize / hintSize\n\n for ind := 1; ind < numSegs; ind++ {\n logLut[ind] = (math.Log(float64(ind)) / l2Bval)\n }\n}", "func LogScaled(r Registry, freq time.Duration, scale time.Duration, l Logger) {\n\tch := make(chan interface{})\n\tgo func(channel chan interface{}) {\n\t\tfor _ = range time.Tick(freq) {\n\t\t\tchannel <- struct{}{}\n\t\t}\n\t}(ch)\n\tLogScaledOnCue(r, ch, scale, l)\n}", "func (h *Histogram) Transfer(src *Histogram) {\n\tif src.Count == 0 {\n\t\treturn\n\t}\n\tif h.Count == 0 {\n\t\th.CopyFrom(src)\n\t\tsrc.Reset()\n\t\treturn\n\t}\n\th.copyHDataFrom(src)\n\th.Counter.Transfer(&src.Counter)\n\tsrc.Reset()\n}", "func logMetric(level int, key string, value string, unit string) {\n\tif level < logLevel {\n\t\tfmt.Fprintf(&buf, \"METRIC|%d|%s|%s|%s\\n\", level, key, value, unit)\n\t}\n}", "func (hist *Histogram) WriteTo(w io.Writer) (int64, error) {\n\twritten, err := hist.WriteStatsTo(w)\n\tif err != nil {\n\t\treturn written, err\n\t}\n\n\t// TODO: use consistently single unit instead of multiple\n\tmaxCountLength := 3\n\tfor i := range hist.Bins {\n\t\tx := 
(int)(math.Ceil(math.Log10(float64(hist.Bins[i].Count + 1))))\n\t\tif x > maxCountLength {\n\t\t\tmaxCountLength = x\n\t\t}\n\t}\n\n\tvar n int\n\tfor _, bin := range hist.Bins {\n\t\tif bin.andAbove {\n\t\t\tn, err = fmt.Fprintf(w, \" %10v+[%[2]*[3]v] \", time.Duration(round(bin.Start, 3)), maxCountLength, bin.Count)\n\t\t} else {\n\t\t\tn, err = fmt.Fprintf(w, \" %10v [%[2]*[3]v] \", time.Duration(round(bin.Start, 3)), maxCountLength, bin.Count)\n\t\t}\n\n\t\twritten += int64(n)\n\t\tif err != nil {\n\t\t\treturn written, err\n\t\t}\n\n\t\twidth := float64(hist.Width) * bin.Width\n\t\tfrac := width - math.Trunc(width)\n\n\t\tn, err = io.WriteString(w, strings.Repeat(\"█\", int(width)))\n\t\twritten += int64(n)\n\t\tif err != nil {\n\t\t\treturn written, err\n\t\t}\n\n\t\tif frac > 0.5 {\n\t\t\tn, err = io.WriteString(w, `▌`)\n\t\t\twritten += int64(n)\n\t\t\tif err != nil {\n\t\t\t\treturn written, err\n\t\t\t}\n\t\t}\n\n\t\tn, err = fmt.Fprintf(w, \"\\n\")\n\t\twritten += int64(n)\n\t\tif err != nil {\n\t\t\treturn written, err\n\t\t}\n\t}\n\treturn written, nil\n}", "func (sa *ScanAPI) Log() {\n\t// Check access permissions\n\tif !sa.RequireProjectAccess(sa.pro.ProjectID, rbac.ActionRead, rbac.ResourceScan) {\n\t\treturn\n\t}\n\n\tuuid := sa.GetString(\":uuid\")\n\tbytes, err := scan.DefaultController.GetScanLog(uuid)\n\tif err != nil {\n\t\tsa.SendInternalServerError(errors.Wrap(err, \"scan API: log\"))\n\t\treturn\n\t}\n\n\tif bytes == nil {\n\t\t// Not found\n\t\tsa.SendNotFoundError(errors.Errorf(\"report with uuid %s does not exist\", uuid))\n\t\treturn\n\t}\n\n\tsa.Ctx.ResponseWriter.Header().Set(http.CanonicalHeaderKey(\"Content-Length\"), strconv.Itoa(len(bytes)))\n\tsa.Ctx.ResponseWriter.Header().Set(http.CanonicalHeaderKey(\"Content-Type\"), \"text/plain\")\n\t_, err = sa.Ctx.ResponseWriter.Write(bytes)\n\tif err != nil {\n\t\tsa.SendInternalServerError(errors.Wrap(err, \"scan API: log\"))\n\t}\n}", "func (c *B) Log(args ...interface{})", "func (lp LogPrinter) Log(event log.Event) {\n\tvar fields []string\n\tfor _, key := range event.Index {\n\t\tfields = append(fields, fmt.Sprintf(\"%s=%s\", key, event.Fields[key]))\n\t}\n\tfmt.Println(strings.Join(fields, \", \"))\n}", "func (f *StatsReq) AddLog(pageLog *PageLog) {\n\n}", "func (hf *historyFile) Log(line string) {\n\thf.l.Println(line)\n}", "func ResetHistogram(target uint32) {\n C.glowResetHistogram(gpResetHistogram, (C.GLenum)(target))\n}", "func (h *expiringHistogram) Observe(val float64) {\n\th.Histogram.Observe(val)\n\th.lastModSec = time.Now().Unix()\n}", "func funcHistogramCount(vals []parser.Value, args parser.Expressions, enh *EvalNodeHelper) Vector {\n\tinVec := vals[0].(Vector)\n\n\tfor _, sample := range inVec {\n\t\t// Skip non-histogram samples.\n\t\tif sample.H == nil {\n\t\t\tcontinue\n\t\t}\n\t\tenh.Out = append(enh.Out, Sample{\n\t\t\tMetric: enh.DropMetricName(sample.Metric),\n\t\t\tF: sample.H.Count,\n\t\t})\n\t}\n\treturn enh.Out\n}", "func (client *Client) DescribeAlertLogHistogramWithCallback(request *DescribeAlertLogHistogramRequest, callback func(response *DescribeAlertLogHistogramResponse, err error)) <-chan int {\n\tresult := make(chan int, 1)\n\terr := client.AddAsyncTask(func() {\n\t\tvar response *DescribeAlertLogHistogramResponse\n\t\tvar err error\n\t\tdefer close(result)\n\t\tresponse, err = client.DescribeAlertLogHistogram(request)\n\t\tcallback(response, err)\n\t\tresult <- 1\n\t})\n\tif err != nil {\n\t\tdefer close(result)\n\t\tcallback(nil, err)\n\t\tresult <- 0\n\t}\n\treturn 
result\n}", "func (p *Provider) Histogram(name string, value float64, tags map[string]string) error {\n\treturn p.client.Histogram(name, value, p.formatTags(tags), p.rate)\n}", "func (e *HistogramData) Print(out io.Writer, msg string) {\n\tif len(e.Data) == 0 {\n\t\t_, _ = fmt.Fprintf(out, \"%s : no data\\n\", msg) // nolint: gas\n\t\treturn\n\t}\n\t// the base counter part:\n\t_, _ = fmt.Fprintf(out, \"%s : count %d avg %.8g +/- %.4g min %g max %g sum %.9g\\n\",\n\t\tmsg, e.Count, e.Avg, e.StdDev, e.Min, e.Max, e.Sum)\n\t_, _ = fmt.Fprintln(out, \"# range, mid point, percentile, count\")\n\tsep := \">=\"\n\tfor i, b := range e.Data {\n\t\tif i > 0 {\n\t\t\tsep = \">\" // last interval is inclusive (of max value)\n\t\t}\n\t\t_, _ = fmt.Fprintf(out, \"%s %.6g <= %.6g , %.6g , %.2f, %d\\n\", sep, b.Start, b.End, (b.Start+b.End)/2., b.Percent, b.Count)\n\t}\n\n\t// print the information of target percentiles\n\tfor _, p := range e.Percentiles {\n\t\t_, _ = fmt.Fprintf(out, \"# target %g%% %.6g\\n\", p.Percentile, p.Value) // nolint: gas\n\t}\n}", "func logConsumer(executor *executor.BashExecutor, logDir string) {\n\tconfig := config.GetInstance()\n\tlogChannel := executor.LogChannel()\n\n\t// init path for shell, log and raw log\n\tlogPath := filepath.Join(logDir, executor.CmdID()+\".log\")\n\tf, _ := os.Create(logPath)\n\twriter := bufio.NewWriter(f)\n\n\t// upload log after flush!!\n\tdefer func() {\n\t\t_ = writer.Flush()\n\t\t_ = f.Close()\n\n\t\terr := uploadLog(logPath)\n\t\tutil.LogIfError(err)\n\n\t\tutil.LogDebug(\"[Exit]: logConsumer\")\n\t}()\n\n\tfor {\n\t\titem, ok := <-logChannel\n\t\tif !ok {\n\t\t\tbreak\n\t\t}\n\n\t\tutil.LogDebug(\"[LOG]: %s\", item)\n\n\t\t// write to file\n\t\twriteLogToFile(writer, item)\n\n\t\t// send to queue\n\t\tif config.HasQueue() {\n\t\t\texchangeName := config.Settings.Queue.LogsExchange\n\t\t\tchannel := config.Queue.LogChannel\n\n\t\t\twriteLogToQueue(exchangeName, channel, item)\n\t\t}\n\t}\n}", "func LogScaledOnCue(r Registry, ch chan interface{}, scale time.Duration, l Logger) {\n\tdu := float64(scale)\n\tduSuffix := scale.String()[1:]\n\n\tfor _ = range ch {\n\t\tr.Each(func(name string, i interface{}) {\n\t\t\tswitch metric := i.(type) {\n\t\t\tcase Counter:\n\t\t\t\tl.Printf(\"counter %s\\n\", name)\n\t\t\t\tl.Printf(\" count: %9d\\n\", metric.Count())\n\t\t\tcase Gauge:\n\t\t\t\tl.Printf(\"gauge %s\\n\", name)\n\t\t\t\tl.Printf(\" value: %9d\\n\", metric.Value())\n\t\t\tcase GaugeFloat64:\n\t\t\t\tl.Printf(\"gauge %s\\n\", name)\n\t\t\t\tl.Printf(\" value: %f\\n\", metric.Value())\n\t\t\tcase Healthcheck:\n\t\t\t\tmetric.Check()\n\t\t\t\tl.Printf(\"healthcheck %s\\n\", name)\n\t\t\t\tl.Printf(\" error: %v\\n\", metric.Error())\n\t\t\tcase Histogram:\n\t\t\t\th := metric.Snapshot()\n\t\t\t\tps := h.Percentiles([]float64{0.5, 0.75, 0.95, 0.99, 0.999})\n\t\t\t\tl.Printf(\"histogram %s\\n\", name)\n\t\t\t\tl.Printf(\" count: %9d\\n\", h.Count())\n\t\t\t\tl.Printf(\" min: %9d\\n\", h.Min())\n\t\t\t\tl.Printf(\" max: %9d\\n\", h.Max())\n\t\t\t\tl.Printf(\" mean: %12.2f\\n\", h.Mean())\n\t\t\t\tl.Printf(\" stddev: %12.2f\\n\", h.StdDev())\n\t\t\t\tl.Printf(\" median: %12.2f\\n\", ps[0])\n\t\t\t\tl.Printf(\" 75%%: %12.2f\\n\", ps[1])\n\t\t\t\tl.Printf(\" 95%%: %12.2f\\n\", ps[2])\n\t\t\t\tl.Printf(\" 99%%: %12.2f\\n\", ps[3])\n\t\t\t\tl.Printf(\" 99.9%%: %12.2f\\n\", ps[4])\n\t\t\tcase Meter:\n\t\t\t\tm := metric.Snapshot()\n\t\t\t\tl.Printf(\"meter %s\\n\", name)\n\t\t\t\tl.Printf(\" count: %9d\\n\", m.Count())\n\t\t\t\tl.Printf(\" 1-min rate: 
%12.2f\\n\", m.Rate1())\n\t\t\t\tl.Printf(\" 5-min rate: %12.2f\\n\", m.Rate5())\n\t\t\t\tl.Printf(\" 15-min rate: %12.2f\\n\", m.Rate15())\n\t\t\t\tl.Printf(\" mean rate: %12.2f\\n\", m.RateMean())\n\t\t\tcase Timer:\n\t\t\t\tt := metric.Snapshot()\n\t\t\t\tps := t.Percentiles([]float64{0.5, 0.75, 0.95, 0.99, 0.999})\n\t\t\t\tl.Printf(\"timer %s\\n\", name)\n\t\t\t\tl.Printf(\" count: %9d\\n\", t.Count())\n\t\t\t\tl.Printf(\" min: %12.2f%s\\n\", float64(t.Min())/du, duSuffix)\n\t\t\t\tl.Printf(\" max: %12.2f%s\\n\", float64(t.Max())/du, duSuffix)\n\t\t\t\tl.Printf(\" mean: %12.2f%s\\n\", t.Mean()/du, duSuffix)\n\t\t\t\tl.Printf(\" stddev: %12.2f%s\\n\", t.StdDev()/du, duSuffix)\n\t\t\t\tl.Printf(\" median: %12.2f%s\\n\", ps[0]/du, duSuffix)\n\t\t\t\tl.Printf(\" 75%%: %12.2f%s\\n\", ps[1]/du, duSuffix)\n\t\t\t\tl.Printf(\" 95%%: %12.2f%s\\n\", ps[2]/du, duSuffix)\n\t\t\t\tl.Printf(\" 99%%: %12.2f%s\\n\", ps[3]/du, duSuffix)\n\t\t\t\tl.Printf(\" 99.9%%: %12.2f%s\\n\", ps[4]/du, duSuffix)\n\t\t\t\tl.Printf(\" 1-min rate: %12.2f\\n\", t.Rate1())\n\t\t\t\tl.Printf(\" 5-min rate: %12.2f\\n\", t.Rate5())\n\t\t\t\tl.Printf(\" 15-min rate: %12.2f\\n\", t.Rate15())\n\t\t\t\tl.Printf(\" mean rate: %12.2f\\n\", t.RateMean())\n\t\t\t}\n\t\t})\n\t}\n}", "func (self *State)Log(a any)any{\n self.IncOperations(self.coeff[\"log\"]+self.off[\"log\"])\n return wrap1(a,math.Log)\n}", "func (h *Histogram) RecordN(v float64, n int) {\n\th.Counter.RecordN(v, n)\n\th.record(v, n)\n}", "func Log(values map[string]interface{}) {\n\t_, ok := values[LevelKey]\n\tif !ok {\n\t\tvalues[LevelKey] = LevelInfo\n\t}\n\n\tfor _, l := range loggers {\n\t\tl.Log(values)\n\t}\n}", "func logStats() {\n\tif requestsReceived > 0 {\n\t\tavgRequestMs := strconv.FormatFloat(totalRequestTime/float64(requestsReceived), 'f', 3, 64)\n\t\tlogger.Printf(\"%d requests, %d bytes received (avg. 
%sms)\\n\", requestsReceived, bytesReceived, avgRequestMs)\n\t}\n\trequestsReceived = 0\n\tbytesReceived = 0\n\ttotalRequestTime = 0.0\n}", "func PublishHistograms(ctx context.Context, metrics map[string]float64) {\n\n\t// Spans are not processed by the collector until the span ends, so to prevent any delay\n\t// in processing the stats when the current span is long-lived we create a new span for every call.\n\t// suffix the span name with SpannameSuffixDummy to denote that it is used only to hold a metric and isn't itself of any interest\n\tspan, ctx := opentracing.StartSpanFromContext(ctx, \"histogram_metrics\"+SpannameSuffixDummy)\n\tdefer span.Finish()\n\n\tfor key, value := range metrics {\n\t\t// The field name we use is the metric name prepended with FieldnamePrefixHistogram to designate that it is a Prometheus histogram metric\n\t\t// The collector will replace that prefix with \"fn_\" and use the result as the Prometheus metric name.\n\t\tfieldname := FieldnamePrefixHistogram + key\n\t\tspan.LogFields(log.Float64(fieldname, value))\n\t}\n}", "func (it *emptyIterator) AtHistogram() (int64, *histogram.Histogram) { return 0, nil }", "func AnalyzeAllResponses(a *Analyzer, ar []*Response) {\n f, err := os.Create(\"swing-data.csv\")\n if err != nil { panic(err) }\n defer f.Close()\n w := csv.NewWriter(f)\n defer w.Flush()\n Range := histogram.Range(-1.0, 200, .01)\n h, err := histogram.NewHistogram(Range)\n if err != nil {\n panic(err)\n }\n for _, resp := range ar {\n for _, oi := range a.AnalyzeStock(resp) {\n var toWrite = []string{\n strconv.FormatFloat(oi.Swing, 'f', 4, 64),\n strconv.FormatFloat(oi.Ret, 'f', 4, 64),\n }\n w.Write(toWrite)\n h.Add(oi.Ret)\n }\n }\n fmt.Println(\"MEAN: \", h.Mean())\n fmt.Println(\"SIGMA \", h.Sigma())\n}", "func LogHeap() {\n\tlogHeap(os.Stderr)\n}", "func (l *logger) WriteLog(s ...interface{}) {\n\tl.Lock()\n\tdefer l.Unlock()\n\tl.buffer.WriteString(fmt.Sprintf(\"%s %s\", l.timeInterface.GetNow(), l.GetLogLevel()))\n\tl.makeLogContent(s...)\n\t//l.mysqlChan <- l.buffer.String()\n\tl.buffer.WriteString(\"\\n\")\n\tl.fd.Write(l.buffer.Bytes())\n\tl.buffer.Reset()\n}", "func (th *telemetryHandle) TrackLog(report Report) {\n\t// Initialize new trace message\n\ttrace := appinsights.NewTraceTelemetry(report.Message, appinsights.Warning)\n\n\t// will be empty if cns used as telemetry service for cni\n\tif th.appVersion == \"\" {\n\t\tth.appVersion = report.AppVersion\n\t}\n\n\t// Override few of existing columns with metadata\n\ttrace.Tags.User().SetAuthUserId(runtime.GOOS)\n\ttrace.Tags.Operation().SetId(report.Context)\n\ttrace.Tags.Operation().SetParentId(th.appVersion)\n\ttrace.Tags.Application().SetVer(th.appVersion)\n\ttrace.Properties[hostNameKey], _ = os.Hostname()\n\n\t// copy app specified custom dimension\n\tfor key, value := range report.CustomDimensions {\n\t\ttrace.Properties[key] = value\n\t}\n\n\ttrace.Properties[appNameStr] = th.appName\n\n\t// Acquire read lock to read metadata\n\tth.rwmutex.RLock()\n\tmetadata := th.metadata\n\tth.rwmutex.RUnlock()\n\n\t// Check if metadata is populated\n\tif metadata.SubscriptionID != \"\" {\n\t\t// copy metadata from wireserver to trace\n\t\ttrace.Tags.User().SetAccountId(metadata.SubscriptionID)\n\t\ttrace.Tags.User().SetId(metadata.VMName)\n\t\ttrace.Properties[locationStr] = metadata.Location\n\t\ttrace.Properties[resourceGroupStr] = metadata.ResourceGroupName\n\t\ttrace.Properties[vmSizeStr] = metadata.VMSize\n\t\ttrace.Properties[osVersionStr] = metadata.OSVersion\n\t\ttrace.Properties[vmIDStr] 
= metadata.VMID\n\t\ttrace.Tags.Session().SetId(metadata.VMID)\n\t}\n\n\t// send to appinsights resource\n\tth.client.Track(trace)\n}", "func PublishHistogramToSpan(span opentracing.Span, key string, value float64) {\n\n\t// The field name we use is the metric name prepended with FieldnamePrefixHistogram to designate that it is a Prometheus histogram metric\n\t// The collector will replace that prefix with \"fn_\" and use the result as the Prometheus metric name.\n\tfieldname := FieldnamePrefixHistogram + key\n\tspan.LogFields(log.Float64(fieldname, value))\n}", "func (o *StatsAppliancesListAllOf) SetLogForwarderCount(v float32) {\n\to.LogForwarderCount = &v\n}", "func (e errChunkIterator) AtHistogram() (int64, *histogram.Histogram) { panic(\"not implemented\") }", "func (c *Clac) Log() error {\n\treturn c.applyFloat(1, func(vals []value.Value) (value.Value, error) {\n\t\treturn binary(value.Int(10), \"log\", vals[0])\n\t})\n}", "func (lr *LogRecorder) Log(log string) {\n\tlr.logs = append(lr.logs, log)\n\t//fmt.Printf(\"DR: %s\\n\", log)\n}", "func (v HistogramValue) Print(w io.Writer) {\n\tavg := float64(v.Sum) / float64(v.Count)\n\tfmt.Fprintf(w, \"Count: %d Min: %d Max: %d Avg: %.2f\\n\", v.Count, v.Min, v.Max, avg)\n\tfmt.Fprintf(w, \"%s\\n\", strings.Repeat(\"-\", 60))\n\tif v.Count <= 0 {\n\t\treturn\n\t}\n\n\tmaxBucketDigitLen := len(strconv.FormatFloat(v.Buckets[len(v.Buckets)-1].LowBound, 'f', 6, 64))\n\tif maxBucketDigitLen < 3 {\n\t\t// For \"inf\".\n\t\tmaxBucketDigitLen = 3\n\t}\n\tmaxCountDigitLen := len(strconv.FormatInt(v.Count, 10))\n\tpercentMulti := 100 / float64(v.Count)\n\n\taccCount := int64(0)\n\tfor i, b := range v.Buckets {\n\t\tfmt.Fprintf(w, \"[%*f, \", maxBucketDigitLen, b.LowBound)\n\t\tif i+1 < len(v.Buckets) {\n\t\t\tfmt.Fprintf(w, \"%*f)\", maxBucketDigitLen, v.Buckets[i+1].LowBound)\n\t\t} else {\n\t\t\tfmt.Fprintf(w, \"%*s)\", maxBucketDigitLen, \"inf\")\n\t\t}\n\n\t\taccCount += b.Count\n\t\tfmt.Fprintf(w, \" %*d %5.1f%% %5.1f%%\", maxCountDigitLen, b.Count, float64(b.Count)*percentMulti, float64(accCount)*percentMulti)\n\n\t\tconst barScale = 0.1\n\t\tbarLength := int(float64(b.Count)*percentMulti*barScale + 0.5)\n\t\tfmt.Fprintf(w, \" %s\\n\", strings.Repeat(\"#\", barLength))\n\t}\n}", "func (a *LogsApiService) GetLogHistogramData(ctx _context.Context) ApiGetLogHistogramDataRequest {\n\treturn ApiGetLogHistogramDataRequest{\n\t\tApiService: a,\n\t\tctx: ctx,\n\t}\n}", "func (c *T) Log(args ...interface{})", "func (c *Client) Log(lvl level, message string, extra map[string]string) error {\n\tfor _, o := range c.cfg.Outputs {\n\t\terr := o.output(newLogBody(lvl.toString(), message, extra))\n\t\tif err != nil {\n\t\t\treturn err\n\t\t}\n\t}\n\treturn nil\n}", "func (h *Histogram) Print(out io.Writer, msg string, percentiles []float64) {\n\th.Export().CalcPercentiles(percentiles).Print(out, msg)\n}", "func NewHistogram(w io.Writer, key string, interval time.Duration) metrics.Histogram {\n\th := make(chan string)\n\tgo fwd(w, key, interval, h)\n\treturn statsdHistogram(h)\n}", "func (uplink *SenseUplink) LogMetric(invocation *enigma.Invocation, metrics *enigma.InvocationMetrics, result *enigma.InvocationResponse) {\n\trequestID := -1\n\tif result != nil {\n\t\trequestID = result.RequestID\n\t}\n\n\tvar method string\n\tvar params string\n\tif invocation != nil {\n\t\tmethod = invocation.Method\n\t\tif invocation.RemoteObject != nil && strings.TrimSpace(invocation.RemoteObject.GenericId) != \"\" {\n\t\t\tbuf := 
helpers.NewBuffer()\n\t\t\tbuf.WriteString(method)\n\t\t\tbuf.WriteString(\" [\")\n\t\t\tbuf.WriteString(invocation.RemoteObject.GenericId)\n\t\t\tbuf.WriteString(\"]\")\n\t\t\tif buf.Error == nil {\n\t\t\t\tmethod = buf.String()\n\t\t\t}\n\t\t}\n\t\tif invocation.Params != nil {\n\t\t\tif jB, err := json.Marshal(invocation.Params); err == nil && jB != nil {\n\t\t\t\tparams = string(jB)\n\t\t\t}\n\t\t}\n\t}\n\n\tif err := uplink.trafficMetrics.Update(metrics.SocketWriteTimestamp, metrics.SocketReadTimestamp,\n\t\tint64(metrics.RequestMessageSize), int64(metrics.ResponseMessageSize)); err != nil {\n\t\tuplink.logEntry.LogError(err)\n\t}\n\n\tif uplink.Traffic != nil {\n\t\tuplink.logEntry.LogTrafficMetric(metrics.SocketReadTimestamp.Sub(metrics.SocketWriteTimestamp).Nanoseconds(),\n\t\t\tuint64(metrics.RequestMessageSize), uint64(metrics.ResponseMessageSize), requestID, method, params, \"WS\", \"\")\n\t}\n\n\treqStall := metrics.SocketWriteTimestamp.Sub(metrics.InvocationRequestTimestamp)\n\tif reqStall > constant.MaxStallTime {\n\t\tuplink.logEntry.LogDetail(logger.WarningLevel, \"WS request stall\", strconv.FormatInt(reqStall.Nanoseconds(), 10))\n\t}\n\n\trespStall := metrics.InvocationResponseTimestamp.Sub(metrics.SocketReadTimestamp)\n\tif !metrics.InvocationRequestTimestamp.IsZero() && !metrics.SocketReadTimestamp.IsZero() && respStall > constant.MaxStallTime {\n\t\tuplink.logEntry.LogDetail(logger.WarningLevel, \"WS response stall\", strconv.FormatInt(respStall.Nanoseconds(), 10))\n\t}\n\n}", "func (b *raftBadger) StoreLogs(logs []*raft.Log) error {\n\n\tmaxBatchSize := b.gs.GetStore().(*badger.DB).MaxBatchSize()\n\tmin := uint64(0)\n\tmax := uint64(len(logs))\n\tranges := b.generateRanges(min, max, maxBatchSize)\n\tfor _, r := range ranges {\n\t\ttxn := b.gs.GetStore().(*badger.DB).NewTransaction(true)\n\t\tdefer txn.Discard()\n\t\tfor index := r.from; index < r.to; index++ {\n\t\t\tlog := logs[index]\n\t\t\tvar out bytes.Buffer\n\t\t\tenc := gob.NewEncoder(&out)\n\t\t\tb.logger.Info(\"StoreLogs\", \"index\", hclog.Fmt(\"%d\", log.Index), hclog.Fmt(\"%v\", \"term\"), log.Term)\n\t\t\tenc.Encode(log)\n\t\t\tif err := txn.Set(logKeyOf(log.Index), out.Bytes()); err != nil {\n\t\t\t\tb.logger.Error(\"failed saving log\", \"index\", hclog.Fmt(\"%d\", log.Index), hclog.Fmt(\"%v\", \"term\"), log.Term)\n\t\t\t\treturn err\n\t\t\t}\n\t\t}\n\t\tif err := txn.Commit(); err != nil {\n\t\t\treturn err\n\t\t}\n\t}\n\tb.logger.Info(\"LogStored\")\n\n\treturn nil\n}", "func (lgr Logger) LogBloomFilter(brm *internal.BloomReachabilityMap) error {\n\tlgr.lock.Lock()\n\tdefer lgr.lock.Unlock()\n\treturn lgr.log_enc.Encode(brm.Conglomerate)\n}", "func (self *GameHeart) Logs(msg *HeartMessageType) {\n\n}", "func (m *metricMysqlLogOperations) emit(metrics pmetric.MetricSlice) {\n\tif m.settings.Enabled && m.data.Sum().DataPoints().Len() > 0 {\n\t\tm.updateCapacity()\n\t\tm.data.MoveTo(metrics.AppendEmpty())\n\t\tm.init()\n\t}\n}", "func (rest *Restful) getLog(w http.ResponseWriter, r *http.Request) {\n\n\tlog := rest.server.raftState.getLog()\n\tw.Header().Set(\"Content-Type\", \"application/json\")\n\n\tvars := mux.Vars(r)\n\tlogSize := vars[\"logsize\"]\n\tlimit := DefaultLogSize\n\n\tif len(logSize) > 0 {\n\t\tif i2, err := strconv.ParseInt(logSize, 10, 64); err == nil {\n\t\t\tlimit = int(i2)\n\t\t}\n\t}\n\n\tvar resp LogRespond\n\tchunkSize := Max(0, len(log)-limit)\n\tresp.Last_page = 0\n\tfor i := len(log) - 1; i >= chunkSize; i-- {\n\t\tresp.Data = append(resp.Data, LogRespondEntry{\n\t\t\tKey: 
log[i].Command.Key,\n\t\t\tValue: string(log[i].Command.Value),\n\t\t\tTerm: log[i].Term,\n\t\t\tSynced: true,\n\t\t})\n\t\tresp.Last_page++\n\t}\n\n\t//\n\tjs, err := json.Marshal(resp)\n\tif err != nil {\n\t\thttp.Error(w, err.Error(), http.StatusInternalServerError)\n\t\treturn\n\t}\n\n\trespSize, err := w.Write(js)\n\tif err != nil {\n\t\thttp.Error(w, err.Error(), http.StatusInternalServerError)\n\t\treturn\n\t}\n\n\tif respSize == 0 {\n\t\thttp.Error(w, fmt.Errorf(\"empty respond\").Error(), http.StatusInternalServerError)\n\t\treturn\n\t}\n}", "func (ptr *KeyholeInfo) Log(s string) {\n\tptr.Logs = append(ptr.Logs, fmt.Sprintf(`%v %v`, time.Now().Format(time.RFC3339), s))\n}", "func startContinuousLog(dir string, sleepCap time.Duration, restart func()) {\n\t// Create the folder for all of the profiling results.\n\terr := os.MkdirAll(dir, 0700)\n\tif err != nil {\n\t\tfmt.Println(err)\n\t\treturn\n\t}\n\t// Continuously log statistics about the running Sia application.\n\tgo func() {\n\t\t// Create the logger.\n\t\tlog, err := persist.NewFileLogger(filepath.Join(dir, \"continuousStats.log\"))\n\t\tif err != nil {\n\t\t\tfmt.Println(\"Stats logging failed:\", err)\n\t\t\treturn\n\t\t}\n\t\t// Collect statistics in an infinite loop.\n\t\tsleepTime := time.Second * 10\n\t\tfor {\n\t\t\t// Sleep for an exponential amount of time each iteration, this\n\t\t\t// keeps the size of the log small while still providing lots of\n\t\t\t// information.\n\t\t\trestart()\n\t\t\ttime.Sleep(sleepTime)\n\t\t\tsleepTime = time.Duration(1.2 * float64(sleepTime))\n\t\t\tif sleepCap != 0*time.Second && sleepTime > sleepCap {\n\t\t\t\tsleepTime = sleepCap\n\t\t\t}\n\t\t\tvar m runtime.MemStats\n\t\t\truntime.ReadMemStats(&m)\n\t\t\tlog.Printf(\"\\n\\tGoroutines: %v\\n\\tAlloc: %v\\n\\tTotalAlloc: %v\\n\\tHeapAlloc: %v\\n\\tHeapSys: %v\\n\", runtime.NumGoroutine(), m.Alloc, m.TotalAlloc, m.HeapAlloc, m.HeapSys)\n\t\t}\n\t}()\n}", "func (l *SyncLog) Log(v ...interface{}) {\n\tl.mu.Lock()\n\tfmt.Fprintln(l.w, v...)\n\tl.mu.Unlock()\n}" ]
[ "0.6535885", "0.617781", "0.5926228", "0.58287036", "0.56685245", "0.56133246", "0.5592528", "0.5540666", "0.5535175", "0.5519587", "0.5511253", "0.54948723", "0.54137176", "0.53197587", "0.5298534", "0.5259716", "0.5244614", "0.52109516", "0.5198363", "0.51469094", "0.51284784", "0.5104465", "0.5073101", "0.5030912", "0.50155145", "0.5010277", "0.50078166", "0.49849936", "0.49712688", "0.49550155", "0.49522418", "0.49515864", "0.49494886", "0.4948726", "0.49440366", "0.4943279", "0.49419263", "0.4941634", "0.49362165", "0.49343267", "0.49147543", "0.49117485", "0.49100372", "0.49077117", "0.48943338", "0.48792464", "0.48660988", "0.4865659", "0.48561016", "0.48434392", "0.4836941", "0.4832549", "0.4825747", "0.48215714", "0.4821032", "0.48177293", "0.4808029", "0.48040548", "0.4800645", "0.47936714", "0.47874433", "0.4783592", "0.4780531", "0.47749004", "0.47722965", "0.4765613", "0.47582665", "0.4747368", "0.474201", "0.47329742", "0.47285154", "0.47284606", "0.47114858", "0.47058958", "0.47035697", "0.469729", "0.46875244", "0.4685019", "0.46747407", "0.46733958", "0.4668372", "0.46611694", "0.46608278", "0.4650004", "0.4645615", "0.46380416", "0.46368247", "0.463146", "0.46308133", "0.46265557", "0.46091154", "0.4602206", "0.4600502", "0.4600214", "0.45956066", "0.45907786", "0.4590538", "0.4589135", "0.45856166", "0.4584005" ]
0.6701168
0
Reset clears the data. Reset it to NewHistogram state.
func (h *Histogram) Reset() {
	h.Counter.Reset()
	// Leave Offset and Divider alone
	for i := 0; i < len(h.Hdata); i++ {
		h.Hdata[i] = 0
	}
}
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "func ResetHistogram(target uint32) {\n\tC.glowResetHistogram(gpResetHistogram, (C.GLenum)(target))\n}", "func (o *CompartimentoHistorico) UnsetData() {\n\to.Data.Unset()\n}", "func (ms HistogramDataPoint) InitEmpty() {\n\t*ms.orig = &otlpmetrics.HistogramDataPoint{}\n}", "func (m *metricVec) Reset() { m.metricMap.Reset() }", "func (m *Metrics) Reset() {\n\tmetrics.Reset()\n\tm.Lock()\n\tm.gauges = make(map[string]metrics.Gauge)\n\tm.counters = make(map[string]metrics.Counter)\n\tm.histograms = make(map[string]*metrics.Histogram)\n\tm.Unlock()\n}", "func (s *dataSet) Reset() {\n\ts.dataPtr = len(s.buf)\n\ts.dataWritten = 0\n\ts.idxPtr = 0\n\ts.idxWritten = 0\n}", "func (ds *Dataset) Clear() {\n\tds.min = math.MaxFloat64\n\tds.max = math.SmallestNonzeroFloat64\n\tds.product = 1\n\tds.total = 0\n\tds.recipsum = 0\n\tds.values = ds.values[:0]\n}", "func Reset() {\n\tmutex.Lock()\n\taverages = make(map[string]*currentAverage)\n\tmutex.Unlock()\n}", "func ResetHistogram(target uint32) {\n C.glowResetHistogram(gpResetHistogram, (C.GLenum)(target))\n}", "func (r *BasicResampler) Reset() {\n\tr.sampleAggregates = r.sampleAggregates[:0]\n}", "func (m *RestaurantMutation) ResetHistories() {\n\tm.histories = nil\n\tm.clearedhistories = false\n\tm.removedhistories = nil\n}", "func (r *PresampledResampler) Reset() {\n\tr.sampleAggregates = r.sampleAggregates[:0]\n}", "func (da *DataFrame) Reset() {\n\tda.chunk = 0\n\tda.nobs = 0\n\tda.done = false\n}", "func (ms HistogramBucket) InitEmpty() {\n\t*ms.orig = &otlpmetrics.HistogramDataPoint_Bucket{}\n}", "func (tt *TtTable) Clear() {\n\t// Create new slice/array - garbage collections takes care of cleanup\n\ttt.data = make([]TtEntry, tt.maxNumberOfEntries, tt.maxNumberOfEntries)\n\ttt.numberOfEntries = 0\n\ttt.Stats = TtStats{}\n}", "func (r *RegionStatistics) Reset() {\n\tregionMissPeerRegionCounter.Set(0)\n\tregionExtraPeerRegionCounter.Set(0)\n\tregionDownPeerRegionCounter.Set(0)\n\tregionPendingPeerRegionCounter.Set(0)\n\tregionOfflinePeerRegionCounter.Set(0)\n\tregionLearnerPeerRegionCounter.Set(0)\n\tregionEmptyRegionCounter.Set(0)\n\tregionOversizedRegionCounter.Set(0)\n\tregionUndersizedRegionCounter.Set(0)\n\tregionWitnessLeaderRegionCounter.Set(0)\n}", "func (m *metricMap) Reset() {\n\tm.mtx.Lock()\n\tdefer m.mtx.Unlock()\n\n\tfor h := range m.metrics {\n\t\tdelete(m.metrics, h)\n\t}\n}", "func (s *UniformSample) Clear() {\n\ts.mutex.Lock()\n\tdefer s.mutex.Unlock()\n\ts.count = 0\n\ts.values = make([]int64, 0, s.reservoirSize)\n}", "func (d *RabinKarp64) Reset() {\n\td.tables = nil\n\td.value = 0\n\td.window = d.window[:0]\n\td.oldest = 0\n\td.updateTables()\n}", "func (s *Slab) Reset() {\n\ts.entries = s.entries[:0]\n\ts.next = 0\n\ts.len = 0\n}", "func (ht *HashTable) Clear() (err error) {\n\tif err = ht.DataFile.Clear(); err != nil {\n\t\treturn\n\t}\n\tht.calculateNumBuckets()\n\treturn\n}", "func (s *Store) Reset() {\n\ts.access.Lock()\n\tdefer s.access.Unlock()\n\n\tfor key, _ := range s.data {\n\t\tdelete(s.data, key)\n\t\ts.sendDataChanged()\n\t\ts.doKeyChanged(key)\n\t}\n\n\ts.data = make(map[string]interface{})\n}", "func (b *Bucket) Reset() {\n\tb[0].Store(0)\n\tb[1].Store(0)\n}", "func (ds *DataSet) ResetData() {\n\tds.data = nil\n}", "func (c *ChannelData) Reset() {\n\tc.Raw = c.Raw[:0]\n\tc.Length = 0\n\tc.Data = c.Data[:0]\n}", "func (o *Options) ResetSetData(aggTypes aggregation.Types) {\n\to.HasExpensiveAggregations = isExpensive(aggTypes)\n}", "func (e *fastGen) Reset() {\n\tif cap(e.hist) < allocHistory {\n\t\te.hist = 
make([]byte, 0, allocHistory)\n\t}\n\t// We offset current position so everything will be out of reach.\n\t// If we are above the buffer reset it will be cleared anyway since len(hist) == 0.\n\tif e.cur <= bufferReset {\n\t\te.cur += maxMatchOffset + int32(len(e.hist))\n\t}\n\te.hist = e.hist[:0]\n}", "func (a *MetricAggregator) Reset() {\n\ta.metricMapsReceived = 0\n\tnowNano := gostatsd.Nanotime(a.now().UnixNano())\n\n\ta.metricMap.Counters.Each(func(key, tagsKey string, counter gostatsd.Counter) {\n\t\tif isExpired(a.expiryIntervalCounter, nowNano, counter.Timestamp) {\n\t\t\tdeleteMetric(key, tagsKey, a.metricMap.Counters)\n\t\t} else {\n\t\t\ta.metricMap.Counters[key][tagsKey] = gostatsd.Counter{\n\t\t\t\tTimestamp: counter.Timestamp,\n\t\t\t\tSource: counter.Source,\n\t\t\t\tTags: counter.Tags,\n\t\t\t}\n\t\t}\n\t})\n\n\ta.metricMap.Timers.Each(func(key, tagsKey string, timer gostatsd.Timer) {\n\t\tif isExpired(a.expiryIntervalTimer, nowNano, timer.Timestamp) {\n\t\t\tdeleteMetric(key, tagsKey, a.metricMap.Timers)\n\t\t} else {\n\t\t\tif hasHistogramTag(timer) {\n\t\t\t\ta.metricMap.Timers[key][tagsKey] = gostatsd.Timer{\n\t\t\t\t\tTimestamp: timer.Timestamp,\n\t\t\t\t\tSource: timer.Source,\n\t\t\t\t\tTags: timer.Tags,\n\t\t\t\t\tValues: timer.Values[:0],\n\t\t\t\t\tHistogram: emptyHistogram(timer, a.histogramLimit),\n\t\t\t\t}\n\t\t\t} else {\n\t\t\t\ta.metricMap.Timers[key][tagsKey] = gostatsd.Timer{\n\t\t\t\t\tTimestamp: timer.Timestamp,\n\t\t\t\t\tSource: timer.Source,\n\t\t\t\t\tTags: timer.Tags,\n\t\t\t\t\tValues: timer.Values[:0],\n\t\t\t\t}\n\t\t\t}\n\t\t}\n\t})\n\n\ta.metricMap.Gauges.Each(func(key, tagsKey string, gauge gostatsd.Gauge) {\n\t\tif isExpired(a.expiryIntervalGauge, nowNano, gauge.Timestamp) {\n\t\t\tdeleteMetric(key, tagsKey, a.metricMap.Gauges)\n\t\t}\n\t\t// No reset for gauges, they keep the last value until expiration\n\t})\n\n\ta.metricMap.Sets.Each(func(key, tagsKey string, set gostatsd.Set) {\n\t\tif isExpired(a.expiryIntervalSet, nowNano, set.Timestamp) {\n\t\t\tdeleteMetric(key, tagsKey, a.metricMap.Sets)\n\t\t} else {\n\t\t\ta.metricMap.Sets[key][tagsKey] = gostatsd.Set{\n\t\t\t\tValues: make(map[string]struct{}),\n\t\t\t\tTimestamp: set.Timestamp,\n\t\t\t\tSource: set.Source,\n\t\t\t\tTags: set.Tags,\n\t\t\t}\n\t\t}\n\t})\n}", "func (frac *Fractal) Clear() {\n\tfrac.R = histo.New(frac.Width, frac.Height)\n\tfrac.G = histo.New(frac.Width, frac.Height)\n\tfrac.B = histo.New(frac.Width, frac.Height)\n}", "func Reset() {\n\tstats.Reset()\n}", "func (l *LabelStatistics) Reset() {\n\tregionLabelLevelGauge.Reset()\n}", "func (ref *digest) Reset() {\n\tref.ptr = 0\n\tref.cnt = 0\n\tcopy(ref.h[:], kInit[:])\n}", "func (e *Timing) Reset() {\n\te.Min = 0\n\te.Max = 0\n\te.Value = 0\n\te.Values = make(float64Slice, 0)\n\te.Count = 0\n}", "func (h *ihash) Reset() {}", "func (h *MemHash) Reset() {\n\th.buf = h.buf[:0]\n}", "func (mu *MuHash) Reset() {\n\tmu.numerator.SetToOne()\n\tmu.denominator.SetToOne()\n}", "func (m *MetricUnion) Reset() { *m = emptyMetricUnion }", "func (s *State) Reset() {\n\ts.hash = 0\n\ts.clen = 0\n\ts.tail = nil\n}", "func (b *Buffer) Clear() {\n\tb.series = make(map[string]*influxdb.Series)\n\tb.size = 0\n}", "func (h *LogHook) Reset() {\n\th.Lock()\n\tdefer h.Unlock()\n\th.Entries = make([]LogEntry, 0)\n}", "func (m *ValuesResultArrayHash) Reset() {\n\tfor hash, entry := range m.lookup {\n\t\tm.removevaluesResultArrayHashKey(hash, entry.key)\n\t}\n}", "func (r *RunningStats) Clear() {\n\tr.n = 0\n\tr.m1 = 0.0\n\tr.m2 = 0.0\n\tr.m3 = 
0.0\n\tr.m4 = 0.0\n}", "func (dc *DatadogCollector) Reset() {}", "func (g *Graph) Reset() {\n\tg.edges = make(map[uint32](map[uint32]float64))\n\tg.nodes = make(map[uint32]*Page)\n}", "func (i *IQR) Clear() {\n\ti.quantile.Clear()\n}", "func (m *dirtySeriesMap) Reset() {\n\tfor hash, entry := range m.lookup {\n\t\tm.removeMapKey(hash, entry.key)\n\t}\n}", "func (h *HashReader) Reset() {\n\th.count = 0\n\th.hasher.Reset()\n}", "func (fb *FlatBatch) Reset() {\n\tfb.lock.Lock()\n\tdefer fb.lock.Unlock()\n\n\tfb.keysize, fb.valsize = 0, 0\n\tfb.keys = fb.keys[:0]\n\tfb.vals = fb.vals[:0]\n}", "func (p *ProgressBar) Reset() {\n\tp.lock.Lock()\n\tdefer p.lock.Unlock()\n\n\tp.state = getBlankState()\n}", "func (m *AreaMutation) ResetStatistic() {\n\tm.statistic = nil\n\tm.clearedstatistic = false\n}", "func (m *AreaMutation) ResetStatistic() {\n\tm.statistic = nil\n\tm.clearedstatistic = false\n}", "func (sl *StagesLatency) ResetStatistics() {\n\tsl.first = duplicateSlice(sl.last)\n\tsl.FirstCollected = sl.LastCollected\n\n\tsl.calculate()\n}", "func (a *MovAvg) Reset() {\n\ta.r, a.w = 0, 0\n\tfor i := range a.sum {\n\t\ta.sum[i] = 0\n\t\ta.v[i] = 0\n\t}\n}", "func (c *Termination) Reset() {\n\tc.pushIndex = 0\n\tif c.etc != nil {\n\t\tc.etc = new([]Value)\n\t}\n}", "func (reader *Reader) Reset() (e error) {\n\te = reader.Flush()\n\tif e != nil {\n\t\treturn\n\t}\n\te = reader.dataset.(tabula.DatasetInterface).Reset()\n\treturn\n}", "func (this *channelStruct) Clear() {\n\tthis.samples = make([]float64, 0)\n}", "func (r *TimeBucketResults) Clear() {\n\tr.buckets = nil\n}", "func (a *Agent) Reset() {\n\ta.Sketch.Reset()\n\ta.Buf = nil // TODO: pool\n}", "func (v *Eth1DataVotesView) Reset() error {\n\treturn v.SetBacking(v.Type().DefaultNode())\n}", "func (d *Dropping) Reset() {\n\t// We need to create a new slice because the existing one\n\t// may have already been put onto o channel or referenced\n\t// in another way.\n\td.batch = make(Batch, d.maxSize)\n\td.next = 0\n}", "func (bbo *TabularBBO) Reset(rng *mathlib.Random) {\n\tbbo.ep.Wipe()\n}", "func (c *TimeAvgAggregator) Reset(w Window) {\n\tc.integral = 0\n\tif c.initialized {\n\t\tc.startTime = w.StartTime\n\t\tc.startValue = c.endValue\n\t\tc.endTime = w.EndTime\n\t}\n}", "func (m *MetricVec) Reset() {\n\tm.mtx.Lock()\n\tdefer m.mtx.Unlock()\n\n\tfor h := range m.children {\n\t\tdelete(m.children, h)\n\t}\n}", "func (mb *MutableBag) Reset() {\n\tmb.values = make(map[string]interface{})\n}", "func (hc *cmdCollector) Reset() {\n}", "func (rc *ReadCache) Clear() {\n\trc.lock.Lock()\n\trc.bins = nil\n\trc.bins = make(map[hash.Hash160][]byte)\n\trc.lock.Unlock()\n}", "func (s *Stats) Reset() {\n\ts.Client.Init()\n\ts.Error.Init()\n\ts.Subscription.Init()\n}", "func (h *History) Clear() {\n\th.tmp = make([]string, len(h.histories))\n\tfor i := range h.histories {\n\t\th.tmp[i] = h.histories[i]\n\t}\n\th.tmp = append(h.tmp, \"\")\n\th.selected = len(h.tmp) - 1\n}", "func (s *Snapshot) Reset() {\n\t*s = Snapshot{\n\t\tCounters: events.Counters(s.Counters).Reset(),\n\t}\n}", "func (h *Split) Reset() {\n\tif h.Max == 0 {\n\t\th.Max = 1<<31 - 1\n\t}\n\tif h.Mask == 0 {\n\t\th.Mask = 1<<12 - 1\n\t}\n\tif h.Cond == 0 {\n\t\th.Cond = 1<<12 - 1\n\t}\n\tif h.Window == 0 {\n\t\th.Window = 1 << 13\n\t}\n\tif h.Ring == nil || len(h.Ring) != h.Window {\n\t\th.Ring = make([]uint8, h.Window)\n\t} else {\n\t\tfor j := range h.Ring {\n\t\t\th.Ring[j] = 0\n\t\t}\n\t}\n}", "func (m *Tracker) Reset() {\n\tm.kmap = nil\n\tm.deepcopy = false\n}", "func (b *batch) 
Reset() {\n\tb.batch.Clear()\n\tb.size = 0\n}", "func (s *Statistics) reset() {\n\ts.cycles++\n\ts.totalMessagesCleared += s.messagesCleared\n\n\ts.memoryCleared = 0\n\ts.messagesCleared = 0\n}", "func (a *AzureMonitor) Reset() {\n\tfor tbucket := range a.cache {\n\t\t// Remove aggregates older than 30 minutes\n\t\tif tbucket.Before(a.timeFunc().Add(-time.Minute * 30)) {\n\t\t\tdelete(a.cache, tbucket)\n\t\t\tcontinue\n\t\t}\n\t\t// Metrics updated within the latest 1m have not been pushed and should\n\t\t// not be cleared.\n\t\tif tbucket.After(a.timeFunc().Add(-time.Minute)) {\n\t\t\tcontinue\n\t\t}\n\t\tfor id := range a.cache[tbucket] {\n\t\t\ta.cache[tbucket][id].updated = false\n\t\t}\n\t}\n}", "func (ms HistogramBucketExemplar) InitEmpty() {\n\t*ms.orig = &otlpmetrics.HistogramDataPoint_Bucket_Exemplar{}\n}", "func (b *BloomIndexer) Reset(section uint64, lastSectionHead common.Hash) error {\n\tgen, err := bloombits.NewGenerator(uint(b.size))\n\tb.gen, b.section, b.head = gen, section, common.Hash{}\n\treturn err\n}", "func (d *state) Reset() {\n\t// Zero the permutation's state.\n\tfor i := range d.a {\n\t\td.a[i] = 0\n\t}\n\td.state = spongeAbsorbing\n\td.buf = d.storage[:0]\n}", "func (m *UserMutation) ResetAuthHistories() {\n\tm.authHistories = nil\n\tm.clearedauthHistories = false\n\tm.removedauthHistories = nil\n}", "func (c *PrometheusCollector) Reset() {\n\tc.RWMutex.Lock()\n\tdefer c.RWMutex.Unlock()\n}", "func (ms SummaryDataPoint) InitEmpty() {\n\t*ms.orig = &otlpmetrics.SummaryDataPoint{}\n}", "func (m *AuditLogMutation) ResetMutationData() {\n\tm.mutation_data = nil\n}", "func (ht *HashTable) Clear() {\n\tfor i := range ht.table {\n\t\tht.table[i] = hashEntry{}\n\t}\n}", "func (self *Weights) reset() {\n\tif len(self.Scale)>0 {\n\t\tself.Scale = self.Scale[:0]\n\t}\n}", "func (r *PackageAggRow) ClearCount() { r.Data.Count = nil }", "func (s *Stack) Reset() {\n\ts.data = nil\n}", "func (mb *MutableBag) Reset() {\n\t// my kingdom for a clear method on maps!\n\tfor k := range mb.values {\n\t\tdelete(mb.values, k)\n\t}\n}", "func (a *Array) Reset() {\n\ta.Value = nil\n\ta.Initialized = false\n\tArrayPool.Put(a)\n}", "func (b *MemoryBackend) Reset() {\n\tb.mu.Lock()\n\tdefer b.mu.Unlock()\n\n\tb.Notices = b.Notices[:0]\n}", "func (r *statusesRepository) Reset() {\n\tr.statuses = nil\n}", "func (t *Tags) Reset(values []Tag) {\n\tt.values = values\n}", "func (a *Array) Reset() {\n\tfor i := range a.bits {\n\t\ta.bits[i] = 0\n\t}\n\n\ta.bits = a.bits[0:1]\n\ta.length = 0\n}", "func (ref *digest) Reset() {\n\tref.ptr = 0\n\n\tref.h[0][0] = uint64(8 * HashSize)\n\tref.h[0][1] = 0\n\tref.h[1][0] = uint64(8 * HashSize)\n\tref.h[1][1] = 0\n\tref.h[2][0] = uint64(8 * HashSize)\n\tref.h[2][1] = 0\n\tref.h[3][0] = uint64(8 * HashSize)\n\tref.h[3][1] = 0\n\tref.h[4][0] = uint64(8 * HashSize)\n\tref.h[4][1] = 0\n\tref.h[5][0] = uint64(8 * HashSize)\n\tref.h[5][1] = 0\n\tref.h[6][0] = uint64(8 * HashSize)\n\tref.h[6][1] = 0\n\tref.h[7][0] = uint64(8 * HashSize)\n\tref.h[7][1] = 0\n\n\tmemset32(ref.c[:], 0)\n}", "func (m *RestaurantMutation) ClearHistories() {\n\tm.clearedhistories = true\n}", "func (th *TxHistory) Clear() {\n\tfor i := 0; i < dbtypes.NumIntervals; i++ {\n\t\tth.TypeByInterval[i] = nil\n\t\tth.AmtFlowByInterval[i] = nil\n\t}\n}", "func (c *Clac) Reset() error {\n\tc.working = Stack{}\n\tc.hist = newStackHist()\n\treturn ErrNoHistUpdate\n}", "func (hpack *HPACK) Reset() {\n\thpack.releaseTable()\n\thpack.releaseFields()\n\thpack.tableSize = 0\n\thpack.maxTableSize = 
int(defaultHeaderTableSize)\n\thpack.DisableCompression = false\n}", "func (s *MockStore) Reset() {\n\ts.Data = map[string]map[string]string{}\n}", "func (b *batch) Reset() {\n\tb.writes = b.writes[:0]\n\tb.size = 0\n}", "func (m *UserMutation) ResetJobHistories() {\n\tm.jobHistories = nil\n\tm.clearedjobHistories = false\n\tm.removedjobHistories = nil\n}", "func (m *BarTimeRangeMutation) ResetCount() {\n\tm.count = nil\n\tm.addcount = nil\n}" ]
[ "0.658996", "0.64232826", "0.6421033", "0.6413384", "0.62674403", "0.62575907", "0.6256979", "0.6256893", "0.6247013", "0.6219137", "0.6209987", "0.61977667", "0.6099206", "0.6053001", "0.6011185", "0.6005332", "0.60049397", "0.5980429", "0.5969854", "0.59510183", "0.59492487", "0.59087515", "0.5898672", "0.5885913", "0.58841354", "0.5871771", "0.5851984", "0.5845106", "0.58389556", "0.5835112", "0.58328444", "0.5831993", "0.5823467", "0.5818879", "0.58103937", "0.5787937", "0.57836527", "0.57783663", "0.57606053", "0.57577425", "0.57414067", "0.5733819", "0.5729765", "0.5726557", "0.5692525", "0.56865215", "0.566995", "0.56615865", "0.56590885", "0.5647033", "0.5647033", "0.56408167", "0.56366277", "0.5628814", "0.56272143", "0.56187606", "0.56182367", "0.5613837", "0.56056917", "0.55813205", "0.5577862", "0.55737495", "0.55588543", "0.5556921", "0.5547688", "0.5545413", "0.55372715", "0.55344987", "0.55343604", "0.5531918", "0.5527823", "0.5525475", "0.5519538", "0.5516254", "0.5503084", "0.54944026", "0.54875034", "0.5477766", "0.5477007", "0.5473479", "0.54544955", "0.5453787", "0.54524803", "0.54420096", "0.5441047", "0.5439458", "0.5436584", "0.54335904", "0.54195726", "0.541797", "0.5416533", "0.54063934", "0.54035574", "0.5403209", "0.5392495", "0.5391365", "0.5389623", "0.53852606", "0.53833044", "0.5374436" ]
0.80136377
0
Clone returns a copy of the histogram.
func (h *Histogram) Clone() *Histogram {
	copy := NewHistogram(h.Offset, h.Divider)
	copy.CopyFrom(h)
	return copy
}
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "func Copy(h *hdrhistogram.Histogram) *hdrhistogram.Histogram {\n\tdup := hdrhistogram.New(h.LowestTrackableValue(), h.HighestTrackableValue(),\n\t\tint(h.SignificantFigures()))\n\tdup.Merge(h)\n\treturn dup\n}", "func NewHistogram() *Histogram {\n\treturn &Histogram{make([]int, HistogramSize), &sync.Mutex{}}\n}", "func (h *Histogram) CopyFrom(src *Histogram) {\n\th.Counter = src.Counter\n\th.copyHDataFrom(src)\n}", "func (h *PrometheusInstrumentHandler) Clone() model.Part {\n\th0 := *h\n\treturn &h0\n}", "func (rb *RingBuffer) Clone() *RingBuffer {\n\trb.lock.RLock()\n\tdefer rb.lock.RUnlock()\n\tcp := make([]stats.Record, len(rb.data))\n\tcopy(cp, rb.data)\n\treturn &RingBuffer{seq: rb.seq, data: cp}\n}", "func NewHistogram(labels map[string]string) *Histogram {\n\treturn &Histogram{\n\t\tLabels: labels,\n\t\tBuckets: make(map[string]int),\n\t}\n}", "func (r *Recorder) Histogram(ctx context.Context, tconn *chrome.TestConn) ([]*Histogram, error) {\n\tnames := r.names()\n\n\ts, err := GetHistograms(ctx, tconn, names)\n\tif err != nil {\n\t\treturn nil, errors.Wrap(err, \"failed to get snapshot\")\n\t}\n\n\treturn DiffHistograms(r.snapshot, s)\n}", "func (bv Values) Clone() Values {\n\tif n := len(bv); n > 0 {\n\t\tvalues := make(Values, n)\n\t\tcopy(values, bv)\n\t\treturn values\n\t}\n\n\treturn NewValues()\n}", "func (c *ColoredBalances) Clone() *ColoredBalances {\n\tcopiedBalances := orderedmap.New()\n\tc.balances.ForEach(copiedBalances.Set)\n\n\treturn &ColoredBalances{\n\t\tbalances: copiedBalances,\n\t}\n}", "func (bench *Stopwatch) Histogram(binCount int) *Histogram {\n\tbench.mustBeCompleted()\n\n\topts := defaultOptions\n\topts.BinCount = binCount\n\n\treturn NewDurationHistogram(bench.Durations(), &opts)\n}", "func (c *Aggregator) Histogram() (aggregation.Buckets, error) {\n\treturn aggregation.Buckets{\n\t\tBoundaries: c.boundaries,\n\t\tCounts: c.state.bucketCounts,\n\t}, nil\n}", "func NewHistogram(Offset float64, Divider float64) *Histogram {\n\th := new(Histogram)\n\th.Offset = Offset\n\tif Divider == 0 {\n\t\treturn nil\n\t}\n\th.Divider = Divider\n\th.Hdata = make([]int32, numBuckets)\n\treturn h\n}", "func (r Registers) Clone() Registers {\n\tregs := make(Registers)\n\n\tfor k, v := range r {\n\t\tregs[k] = v\n\t}\n\n\treturn regs\n}", "func (bindings Bindings) Clone() *Bindings {\n\tresult := &Bindings{\n\t\tshared: true,\n\t\tValues: bindings.Values,\n\t}\n\tbindings.shared = true\n\n\treturn result\n}", "func (h *hash) Copy() *hash {\n\tif h == nil {\n\t\treturn nil\n\t}\n\n\th.RLock()\n\tdefer h.RUnlock()\n\n\tfresh := New()\n\tfor k, v := range h.m {\n\t\tfresh.m[k] = v.Copy()\n\t}\n\n\treturn fresh\n}", "func (mu MuHash) Clone() *MuHash {\n\treturn &mu\n}", "func (rn RangedNumber) Clone() *RangedNumber {\n\treturn &rn\n}", "func (s *HashSet) Clone() *HashSet {\n\tresult := NewHashSet()\n\tresult.AddSet(s)\n\treturn result\n}", "func (e *Exporter) NewHistogram(name, help string, cutoffs []int64) *stats.Histogram {\n\tif e.name == \"\" || name == \"\" {\n\t\tv := stats.NewHistogram(name, help, cutoffs)\n\t\taddUnnamedExport(name, v)\n\t\treturn v\n\t}\n\thist := stats.NewHistogram(\"\", help, cutoffs)\n\te.addToOtherVars(name, hist)\n\treturn hist\n}", "func (b Backoff) Clone() Backoff {\n\treturn Backoff{\n\t\tstep: b.step,\n\t\tMaxBackoff: b.MaxBackoff,\n\t\tMinBackoff: b.MinBackoff,\n\t\tJitter: b.Jitter,\n\t\tExpFactor: b.ExpFactor,\n\t}\n}", "func NewHistogram(opts HistogramOptions) *Histogram {\n\tif opts.NumBuckets == 0 {\n\t\topts.NumBuckets = 32\n\t}\n\tif 
opts.BaseBucketSize == 0.0 {\n\t\topts.BaseBucketSize = 1.0\n\t}\n\th := Histogram{\n\t\topts: opts,\n\t\tbuckets: make([]bucketInternal, opts.NumBuckets),\n\t\tcount: newCounter(),\n\t\tsum: newCounter(),\n\t\tsumOfSquares: newCounter(),\n\t\ttracker: newTracker(),\n\n\t\tlogBaseBucketSize: math.Log(opts.BaseBucketSize),\n\t\toneOverLogOnePlusGrowthFactor: 1 / math.Log(1+opts.GrowthFactor),\n\t}\n\tm := 1.0 + opts.GrowthFactor\n\tdelta := opts.BaseBucketSize\n\th.buckets[0].lowBound = float64(opts.MinValue)\n\th.buckets[0].count = newCounter()\n\tfor i := 1; i < opts.NumBuckets; i++ {\n\t\th.buckets[i].lowBound = float64(opts.MinValue) + delta\n\t\th.buckets[i].count = newCounter()\n\t\tdelta = delta * m\n\t}\n\treturn &h\n}", "func (v *Values) Clone() *Values {\n\tv.lock.RLock()\n\tdefer v.lock.RUnlock()\n\n\treturn newValues(v.root)\n}", "func (r *Range) Clone() *Range {\n\tif r == nil {\n\t\treturn nil\n\t}\n\tother := *r\n\tother.X = CloneExpr(r.X)\n\tother.Y = CloneExpr(r.Y)\n\treturn &other\n}", "func NewClone() *Clone {\n\treturn &Clone{\n\t\tkeys: make(map[string][]byte),\n\t}\n}", "func NewHistogram(name string, options ...Option) Histogram {\n\treturn newHistogram(name, options...)\n}", "func (m *Mocker) Clone(t *testing.T) (clone *Mocker) {\n\tm.Close()\n\n\tclone = New(t)\n\n\tclone.handlers = m.deepCopyHandlers()\n\n\treturn\n}", "func (b BoundingBox) Clone() BoundingBoxer {\n\treturn b\n}", "func (c Collector) Clone() Collector {\n\tcol := make(Collector)\n\tcol.Copy(c)\n\treturn col\n}", "func (ms HistogramBucket) CopyTo(dest HistogramBucket) {\n\tif ms.IsNil() {\n\t\t*dest.orig = nil\n\t\treturn\n\t}\n\tif dest.IsNil() {\n\t\tdest.InitEmpty()\n\t}\n\tdest.SetCount(ms.Count())\n\tms.Exemplar().CopyTo(dest.Exemplar())\n}", "func NewHistogram(w io.Writer, key string, interval time.Duration) metrics.Histogram {\n\th := make(chan string)\n\tgo fwd(w, key, interval, h)\n\treturn statsdHistogram(h)\n}", "func (h *Histogram) Transfer(src *Histogram) {\n\tif src.Count == 0 {\n\t\treturn\n\t}\n\tif h.Count == 0 {\n\t\th.CopyFrom(src)\n\t\tsrc.Reset()\n\t\treturn\n\t}\n\th.copyHDataFrom(src)\n\th.Counter.Transfer(&src.Counter)\n\tsrc.Reset()\n}", "func (l *universalLister) Clone() *universalLister {\n\tvar clonedLister universalLister\n\n\tclonedLister.resourceType = l.resourceType\n\tclonedLister.tableName = l.tableName\n\tclonedLister.selectedColumns = l.selectedColumns\n\tclonedLister.tenantColumn = l.tenantColumn\n\tclonedLister.orderByParams = append(clonedLister.orderByParams, l.orderByParams...)\n\n\treturn &clonedLister\n}", "func (s *FKCascadeRuntimeStats) Clone() execdetails.RuntimeStats {\n\tnewRs := &FKCascadeRuntimeStats{\n\t\tTotal: s.Total,\n\t\tKeys: s.Keys,\n\t}\n\treturn newRs\n}", "func (h *Header) Clone() *Header {\n\thc := &Header{slice: make([]string, len(h.slice))}\n\tcopy(hc.slice, h.slice)\n\treturn hc\n}", "func (b *Bound) Clone() *Bound {\n\treturn NewBoundFromPoints(b.sw, b.ne)\n}", "func (s *fseEncoder) Histogram() *[256]uint32 {\n\treturn &s.count\n}", "func (v Data) Clone() Data {\n\tnv := make(Data, len(v))\n\tcopy(nv, v)\n\n\treturn nv\n}", "func (ns *nodeStats) clone() nodeStats {\n\treturn nodeStats{\n\t\tConnectionsAttempts: atomic.LoadInt64(&ns.ConnectionsAttempts),\n\t\tConnectionsSuccessful: atomic.LoadInt64(&ns.ConnectionsSuccessful),\n\t\tConnectionsFailed: atomic.LoadInt64(&ns.ConnectionsFailed),\n\t\tConnectionsPoolEmpty: atomic.LoadInt64(&ns.ConnectionsPoolEmpty),\n\t\tConnectionsOpen: atomic.LoadInt64(&ns.ConnectionsOpen),\n\t\tTendsTotal: 
atomic.LoadInt64(&ns.TendsTotal),\n\t\tTendsSuccessful: atomic.LoadInt64(&ns.TendsSuccessful),\n\t\tTendsFailed: atomic.LoadInt64(&ns.TendsFailed),\n\t\tPartitionMapUpdates: atomic.LoadInt64(&ns.PartitionMapUpdates),\n\t\tNodeAdded: atomic.LoadInt64(&ns.NodeAdded),\n\t\tNodeRemoved: atomic.LoadInt64(&ns.NodeRemoved),\n\t}\n}", "func (o *ConsumableOutput) Clone() *ConsumableOutput {\n\tret := &ConsumableOutput{\n\t\toutput: o.output.Clone(),\n\t\tremaining: make(map[ledgerstate.Color]uint64),\n\t\twasConsumed: o.wasConsumed,\n\t}\n\tfor col, bal := range o.remaining {\n\t\tret.remaining[col] = bal\n\t}\n\treturn ret\n}", "func clone(s *Scroller) *Scroller {\n\tclone := &Scroller{\n\t\tpos: s.pos,\n\t\tline: s.line,\n\t\toffset: s.offset,\n\t\tdir: s.dir,\n\t\tscrolled: s.scrolled,\n\t\teditor: s.editor,\n\t\tctrl: s.ctrl,\n\t}\n\tfor _, h := range s.scrolled {\n\t\tclone.scrolled = append(clone.scrolled, h)\n\t}\n\treturn clone\n}", "func (d *dataUsageCache) clone() dataUsageCache {\n\tclone := dataUsageCache{\n\t\tInfo: d.Info,\n\t\tCache: make(map[string]dataUsageEntry, len(d.Cache)),\n\t}\n\tfor k, v := range d.Cache {\n\t\tclone.Cache[k] = v\n\t}\n\treturn clone\n}", "func (s *preFilterState) Clone() *preFilterState {\n\tif s == nil {\n\t\treturn nil\n\t}\n\n\tcopy := preFilterState{}\n\tcopy.affinityCounts = s.affinityCounts.clone()\n\tcopy.antiAffinityCounts = s.antiAffinityCounts.clone()\n\tcopy.existingAntiAffinityCounts = s.existingAntiAffinityCounts.clone()\n\t// No need to deep copy the podInfo because it shouldn't change.\n\tcopy.podInfo = s.podInfo\n\tcopy.namespaceLabels = s.namespaceLabels\n\treturn &copy\n}", "func (t *Analysis) Clone() *Analysis {\n\tshadow := new(Analysis)\n\n\tshadow.Status = t.Status\n\n\tshadow.Living = make([]life.Location, len(t.Living))\n\tcopy(shadow.Living, t.Living)\n\n\tshadow.Changes = make([]changedLocation, len(t.Changes))\n\tcopy(shadow.Changes, t.Changes)\n\n\treturn shadow\n}", "func (pi *PixelIterator) Clone() *PixelIterator {\n\tret := newPixelIterator(C.ClonePixelIterator(pi.pi))\n\truntime.KeepAlive(pi)\n\treturn ret\n}", "func (i *Index) Clone() *Index {\n\t// create clone\n\tclone := &Index{\n\t\tbtree: i.btree.Copy(),\n\t}\n\n\treturn clone\n}", "func (p Properties) Clone() Properties {\n\tto := make(Properties, len(p))\n\tfor k, v := range p {\n\t\tto[k] = v\n\t}\n\treturn to\n}", "func (fce *filteredConsoleEncoder) Clone() zapcore.Encoder {\n\treturn &filteredConsoleEncoder{\n\t\tEncoderConfig: fce.EncoderConfig,\n\t\tEncoder: fce.Encoder.Clone(),\n\t\tconsoleEncoder: fce.consoleEncoder.Clone(),\n\t\tstacktraceLength: fce.stacktraceLength,\n\t}\n}", "func (d *WindowDefinition) Clone() *WindowDefinition {\n\tif d == nil {\n\t\treturn nil\n\t}\n\tother := *d\n\tother.Base = d.Base.Clone()\n\tother.Partitions = cloneExprs(d.Partitions)\n\tother.OrderingTerms = cloneOrderingTerms(d.OrderingTerms)\n\tother.Frame = d.Frame.Clone()\n\treturn &other\n}", "func (m *Manifest) Clone() *Manifest {\n\tm2 := &Manifest{\n\t\tID: 0,\n\t\tBaseID: m.ID,\n\t\tNumDocs: m.NumDocs,\n\t\tNumDeletedDocs: m.NumDeletedDocs,\n\t\tNumItems: m.NumItems,\n\t\tChecksum: m.Checksum,\n\t\tSegments: make(map[uint32]*Segment, len(m.Segments)),\n\t\taddedSegments: make(map[uint32]struct{}),\n\t\tremovedSegments: make(map[uint32]struct{}),\n\t}\n\tfor id, segment := range m.Segments {\n\t\tm2.Segments[id] = segment.Clone()\n\t}\n\treturn m2\n}", "func NewHistogram(config HistogramConfig) (objectspec.InstrumentorHistogram, error) {\n\tnewHistogram := 
&histogram{\n\t\tHistogramConfig: config,\n\t}\n\n\tif len(newHistogram.Buckets) == 0 {\n\t\treturn nil, maskAnyf(invalidConfigError, \"buckets must not be empty\")\n\t}\n\tif newHistogram.Help == \"\" {\n\t\treturn nil, maskAnyf(invalidConfigError, \"help must not be empty\")\n\t}\n\tif newHistogram.Name == \"\" {\n\t\treturn nil, maskAnyf(invalidConfigError, \"name must not be empty\")\n\t}\n\n\tnewHistogram.ClientHistogram = prometheusclient.NewHistogram(prometheusclient.HistogramOpts{\n\t\tBuckets: newHistogram.Buckets,\n\t\tHelp: newHistogram.Help,\n\t\tName: newHistogram.Name,\n\t})\n\n\treturn newHistogram, nil\n}", "func (m *Metrics) Histogram(key string) *metrics.Histogram {\n\tm.Lock()\n\tdefer m.Unlock()\n\thist, ok := m.histograms[key]\n\tif !ok {\n\t\thist = metrics.NewHistogram(key, 0, 1e8, 5)\n\t\tm.histograms[key] = hist\n\t}\n\treturn hist\n}", "func (p Properties) Clone() (clone Properties) {\n\n\tfor k, v := range p {\n\t\tclone[k] = v\n\t}\n\treturn\n}", "func (p *Pie) Clone(generateNewID bool) *Pie {\n\tcloned := *p\n\tif generateNewID {\n\t\tcloned.Id = bson.NewObjectId()\n\t}\n\tcloned.Slices = make([]Slice, len(p.Slices))\n\tcopy(cloned.Slices, p.Slices)\n\treturn &cloned\n}", "func (f *FilterBetween) Clone() *FilterBetween {\n\tclone := &FilterBetween{\n\t\tField: f.Field,\n\t\tOperator: f.Operator,\n\t\tValueLeft: f.ValueLeft,\n\t\tValueRight: f.ValueRight,\n\t}\n\treturn clone\n}", "func NewHistogram(name, help string, cutoffs []int64) *Histogram {\n\tlabels := make([]string, len(cutoffs)+1)\n\tfor i, v := range cutoffs {\n\t\tlabels[i] = fmt.Sprintf(\"%d\", v)\n\t}\n\tlabels[len(labels)-1] = \"inf\"\n\treturn NewGenericHistogram(name, help, cutoffs, labels, \"Count\", \"Total\")\n}", "func (mm Uint64Uint64Map) Clone() Uint64Uint64Map {\n\tresult := NewUint64Uint64Map()\n\tfor k, v := range mm {\n\t\tresult[k] = v\n\t}\n\treturn result\n}", "func (t *OrderingTerm) Clone() *OrderingTerm {\n\tif t == nil {\n\t\treturn nil\n\t}\n\tother := *t\n\tother.X = CloneExpr(t.X)\n\treturn &other\n}", "func (w *Wrapper) Clone() *Wrapper {\n\treturn w.cloning(false)\n}", "func (m Metadata) Clone() Metadata {\n\tmd := Metadata{}\n\tfor k, v := range m {\n\t\tmd[k] = v\n\t}\n\treturn md\n}", "func (i *Number) Clone() *Number {\n\ttmp := NewNumber(*i)\n\treturn tmp\n}", "func (es HistogramBucketSlice) CopyTo(dest HistogramBucketSlice) {\n\tnewLen := es.Len()\n\tif newLen == 0 {\n\t\t*dest.orig = []*otlpmetrics.HistogramDataPoint_Bucket(nil)\n\t\treturn\n\t}\n\toldLen := dest.Len()\n\tif newLen <= oldLen {\n\t\t(*dest.orig) = (*dest.orig)[:newLen]\n\t\tfor i, el := range *es.orig {\n\t\t\tnewHistogramBucket(&el).CopyTo(newHistogramBucket(&(*dest.orig)[i]))\n\t\t}\n\t\treturn\n\t}\n\torigs := make([]otlpmetrics.HistogramDataPoint_Bucket, newLen)\n\twrappers := make([]*otlpmetrics.HistogramDataPoint_Bucket, newLen)\n\tfor i, el := range *es.orig {\n\t\twrappers[i] = &origs[i]\n\t\tnewHistogramBucket(&el).CopyTo(newHistogramBucket(&wrappers[i]))\n\t}\n\t*dest.orig = wrappers\n}", "func (w *Window) Clone() *Window {\n\tif w == nil {\n\t\treturn nil\n\t}\n\tother := *w\n\tother.Name = w.Name.Clone()\n\tother.Definition = w.Definition.Clone()\n\treturn &other\n}", "func (l *logger) Clone() Logger {\n\tvar clone *logger = New()\n\tclone.SetDefaults(l.Defaults())\n\tclone.AddFields(l.Fields())\n\treturn clone\n}", "func (r *View) Clone() *View {\n\treturn r.CloneLimit(r.size)\n}", "func (ms HistogramDataPoint) CopyTo(dest HistogramDataPoint) {\n\tif ms.IsNil() {\n\t\t*dest.orig = 
nil\n\t\treturn\n\t}\n\tif dest.IsNil() {\n\t\tdest.InitEmpty()\n\t}\n\tms.LabelsMap().CopyTo(dest.LabelsMap())\n\tdest.SetStartTime(ms.StartTime())\n\tdest.SetTimestamp(ms.Timestamp())\n\tdest.SetCount(ms.Count())\n\tdest.SetSum(ms.Sum())\n\tms.Buckets().CopyTo(dest.Buckets())\n\tdest.SetExplicitBounds(ms.ExplicitBounds())\n}", "func (NoopProvider) Histogram(_ string, _ Buckets, _ ...string) Histogram {\n\treturn noopInstrument{}\n}", "func (c Count) Copy() Count {\n\tc2 := Count{}\n\tfor element, count := range c {\n\t\tc2[element] = count\n\t}\n\treturn c2\n}", "func (s *FKCheckRuntimeStats) Clone() execdetails.RuntimeStats {\n\tnewRs := &FKCheckRuntimeStats{\n\t\tTotal: s.Total,\n\t\tCheck: s.Check,\n\t\tLock: s.Lock,\n\t\tKeys: s.Keys,\n\t}\n\treturn newRs\n}", "func (s Segments) Clone() Segments {\n\tres := make(Segments, len(s))\n\tcopy(res, s)\n\treturn res\n}", "func (p Page) Clone() Page {\n\tclone := make([]Section, len(p))\n\tfor i, section := range p {\n\t\tclone[i] = section.Clone()\n\t}\n\treturn clone\n}", "func (ms *MySlice) Clone() Data {\n\tscopy := make([]int, len(ms.Slice))\n\tcopy(scopy, ms.Slice)\n\tmyslice := new(MySlice)\n\tmyslice.Slice = scopy\n\treturn Data(myslice)\n}", "func (hq *HarborQuery) Clone() *HarborQuery {\n\tif hq == nil {\n\t\treturn nil\n\t}\n\treturn &HarborQuery{\n\t\tconfig: hq.config,\n\t\tlimit: hq.limit,\n\t\toffset: hq.offset,\n\t\torder: append([]OrderFunc{}, hq.order...),\n\t\tpredicates: append([]predicate.Harbor{}, hq.predicates...),\n\t\twithSettlement: hq.withSettlement.Clone(),\n\t\t// clone intermediate query.\n\t\tsql: hq.sql.Clone(),\n\t\tpath: hq.path,\n\t}\n}", "func (s *Store) Clone() map[string]Entry {\n\tcp := make(map[string]Entry)\n\ts.RLock()\n\tfor k, v := range s.data {\n\t\tcp[k] = v\n\t}\n\ts.RUnlock()\n\treturn cp\n}", "func (i *LevelIterator) Clone() LevelIterator {\n\tif i.iter.r == nil {\n\t\treturn *i\n\t}\n\t// The start and end iterators are not cloned and are treated as\n\t// immutable.\n\treturn LevelIterator{\n\t\titer: i.iter.clone(),\n\t\tstart: i.start,\n\t\tend: i.end,\n\t\tfilter: i.filter,\n\t}\n}", "func (h *Histogram) filterHistogram(newBuckets []Bucket) *Histogram {\n\tcheckBucketsValid(newBuckets)\n\n\ttotal := int64(0)\n\tfor _, b := range newBuckets {\n\t\ttotal += b.NumEq + b.NumRange\n\t}\n\n\tif total == 0 {\n\t\treturn &Histogram{}\n\t}\n\n\tselectivity := float64(total) / float64(h.RowCount)\n\n\t// Estimate the new DistinctCount based on the selectivity of this filter.\n\t// todo(rytaft): this could be more precise if we take into account the\n\t// null count of the original histogram. 
This could also be more precise for\n\t// the operators =, !=, in, and not in, since we know how these operators\n\t// should affect the distinct count.\n\tdistinctCount := int64(float64(h.DistinctCount) * selectivity)\n\tif distinctCount == 0 {\n\t\t// There must be at least one distinct value since RowCount > 0.\n\t\tdistinctCount++\n\t}\n\n\treturn &Histogram{\n\t\tRowCount: total,\n\t\tDistinctCount: distinctCount,\n\n\t\t// All the returned rows will be non-null for this column.\n\t\tNullCount: 0,\n\t\tBuckets: newBuckets,\n\t}\n}", "func (c *Counter) copy() Metric {\n\treturn &Counter{value: atomic.LoadUint64(&c.value), name: c.name}\n}", "func (ws *Words) Clone() *Words {\n\tif ws.Len() == 0 {\n\t\treturn nil\n\t}\n\twss := make([]*Word, len(ws.words), cap(ws.words))\n\tcopy(wss, ws.words)\n\treturn NewWords(wss)\n}", "func (c *Client) Clone() *Client {\n\tnc := *c\n\tnc.h = nil\n\tif len(c.h) > 0 {\n\t\tnc.h = c.h.Clone()\n\t}\n\treturn &nc\n}", "func (it *AllIterator) Clone() graph.Iterator {\n\tout := NewAllIterator(it.db, it.dir, it.qs)\n\tout.tags.CopyFrom(it)\n\treturn out\n}", "func (nsq *NamespaceSecretQuery) Clone() *NamespaceSecretQuery {\n\tif nsq == nil {\n\t\treturn nil\n\t}\n\treturn &NamespaceSecretQuery{\n\t\tconfig: nsq.config,\n\t\tlimit: nsq.limit,\n\t\toffset: nsq.offset,\n\t\torder: append([]OrderFunc{}, nsq.order...),\n\t\tpredicates: append([]predicate.NamespaceSecret{}, nsq.predicates...),\n\t\t// clone intermediate query.\n\t\tsql: nsq.sql.Clone(),\n\t\tpath: nsq.path,\n\t}\n}", "func (v Int) Clone() Node {\n\treturn v\n}", "func (s *Spec) Clone() *Spec {\n\tres := &Spec{Target: make(map[string]string)}\n\tfor k, v := range s.Target {\n\t\tres.Target[k] = v\n\t}\n\tfor _, app := range s.Apps {\n\t\tres.Apps = append(res.Apps, app.Clone())\n\t}\n\treturn res\n}", "func (h *Histogram) copyHDataFrom(src *Histogram) {\n\tif h.Divider == src.Divider && h.Offset == src.Offset {\n\t\tfor i := 0; i < len(h.Hdata); i++ {\n\t\t\th.Hdata[i] += src.Hdata[i]\n\t\t}\n\t\treturn\n\t}\n\n\thData := src.Export()\n\tfor _, data := range hData.Data {\n\t\th.record((data.Start+data.End)/2, int(data.Count))\n\t}\n}", "func (v Vector) clone() Vector {\n\tclone := v\n\tclone.data = make(map[int]float64)\n\tfor n, d := range v.data {\n\t\tclone.data[n] = d\n\t}\n\n\treturn clone\n}", "func (v IntVec) Clone() IntVec {\n\tnv := make(IntVec, len(v))\n\tcopy(nv, v)\n\n\treturn nv\n}", "func (bt *BinarySearchTree) Clone() *BinarySearchTree {\n\tt := &TreeNode{Val: bt.root.Val}\n\tclone(bt.root, t)\n\treturn &BinarySearchTree{root: t}\n}", "func (v *VersionVector) Clone() *VersionVector {\n\tdots := make(Dots)\n\n\tv.l.RLock()\n\tfor actor, t := range v.dots {\n\t\tdots[actor] = t\n\t}\n\tv.l.RUnlock()\n\n\treturn &VersionVector{\n\t\tdots: dots,\n\t}\n}", "func (c Container) Clone() Container {\n\tif n := len(c); n > 0 {\n\t\tvalues := make(Container, n, n)\n\t\tcopy(values, c)\n\t\treturn values\n\t}\n\treturn NewContainer()\n}", "func (c Counters) Copy() Counters {\n\tresult := Counters{}\n\tfor k, v := range c {\n\t\tresult[k] = v\n\t}\n\treturn result\n}", "func (a *Attributes) Clone() *Attributes {\n\treturn NewAttributesFrom(a)\n}", "func (tri *Triangle) Clone() *Triangle {\n\tnewTri := NewTriangle(tri.Mesh)\n\tfor _, vertex := range tri.Vertices {\n\t\tnewTri.SetVertices(vertex.Clone())\n\t}\n\tnewTri.RecalculateCenter()\n\treturn newTri\n}", "func (tdw *TokenDataWitness) Clone() *TokenDataWitness {\n\treturn &TokenDataWitness{\n\t\tType: tdw.Type,\n\t\tValue: 
tdw.Value.Copy(),\n\t\tBlindingFactor: tdw.BlindingFactor.Copy(),\n\t}\n}", "func (board *Board) Clone() *Board {\n\tclone := NewBoard()\n\t*clone = *board\n\tclone.previous = board\n\tclone.g = board.g + 1\n\treturn clone\n}", "func (kvs KeyValues) Clone() KeyValues {\n\tvar dst = make([]*protoMetricsV1.KeyValue, len(kvs))\n\tfor i := range kvs {\n\t\tdst[i] = &protoMetricsV1.KeyValue{\n\t\t\tKey: kvs[i].Key,\n\t\t\tValue: kvs[i].Value,\n\t\t}\n\t}\n\treturn dst\n}", "func (l Set) Clone() Set {\n\ts := make(map[Instance]struct{})\n\tfor k, v := range l {\n\t\ts[k] = v\n\t}\n\n\treturn s\n}", "func (p *Policy) ShallowClone() *Policy {\n\treturn &Policy{\n\t\tsentinelPolicy: p.sentinelPolicy,\n\t\tName: p.Name,\n\t\tPaths: p.Paths,\n\t\tRaw: p.Raw,\n\t\tType: p.Type,\n\t\tTemplated: p.Templated,\n\t\tnamespace: p.namespace,\n\t}\n}", "func (h *EventsHandlers)Clone(e interface{})(r []IAppEventHandler){\n\th.RLock()\n\tdefer h.RUnlock()\n\tif old, ok := h.handlers[e]; ok{\n\t\tif len(old) > 0{\n\t\t\tr = make([]IAppEventHandler, len(old))\n\t\t\tcopy(r, old)\n\t\t\treturn r\n\t\t}\n\t}\n\treturn nil\n}", "func (es HistogramDataPointSlice) CopyTo(dest HistogramDataPointSlice) {\n\tnewLen := es.Len()\n\tif newLen == 0 {\n\t\t*dest.orig = []*otlpmetrics.HistogramDataPoint(nil)\n\t\treturn\n\t}\n\toldLen := dest.Len()\n\tif newLen <= oldLen {\n\t\t(*dest.orig) = (*dest.orig)[:newLen]\n\t\tfor i, el := range *es.orig {\n\t\t\tnewHistogramDataPoint(&el).CopyTo(newHistogramDataPoint(&(*dest.orig)[i]))\n\t\t}\n\t\treturn\n\t}\n\torigs := make([]otlpmetrics.HistogramDataPoint, newLen)\n\twrappers := make([]*otlpmetrics.HistogramDataPoint, newLen)\n\tfor i, el := range *es.orig {\n\t\twrappers[i] = &origs[i]\n\t\tnewHistogramDataPoint(&el).CopyTo(newHistogramDataPoint(&wrappers[i]))\n\t}\n\t*dest.orig = wrappers\n}", "func (b ConstantBackoff) Clone() Policy {\n\treturn &ConstantBackoff{\n\t\tMaxRetries: b.MaxRetries,\n\t\tTime: b.Time,\n\t\tTimeUnit: b.TimeUnit,\n\t}\n}", "func (i *Instance) Clone() *Instance {\n\tclone := &Instance{}\n\tclone.TargetValue, clone.FeatureValues = i.TargetValue, make(map[string]Feature, len(i.FeatureValues))\n\tfor k, v := range i.FeatureValues {\n\t\tclone.FeatureValues[k] = v\n\t}\n\treturn clone\n}" ]
[ "0.70470625", "0.66266876", "0.6155141", "0.60981065", "0.6065608", "0.59231615", "0.5901118", "0.5894917", "0.588816", "0.58879507", "0.5879107", "0.5805754", "0.5793327", "0.5775184", "0.5770935", "0.5766878", "0.5758966", "0.5743485", "0.57046074", "0.5688048", "0.56879115", "0.5684451", "0.56707907", "0.5669069", "0.5667777", "0.56404394", "0.5635149", "0.5625626", "0.56075126", "0.55821645", "0.5568485", "0.55448234", "0.5540468", "0.55242217", "0.5512554", "0.55037993", "0.5502514", "0.54885864", "0.5487257", "0.54709154", "0.5466079", "0.5458199", "0.54444474", "0.5437055", "0.54239213", "0.5416214", "0.5409387", "0.54051983", "0.5401652", "0.53934425", "0.5383982", "0.5367419", "0.53644943", "0.5360348", "0.5359534", "0.5354872", "0.5352934", "0.53486377", "0.5345707", "0.53420573", "0.5340224", "0.5329492", "0.5321065", "0.531966", "0.5317128", "0.5313445", "0.5308196", "0.5306545", "0.53057945", "0.5301253", "0.52984035", "0.5282167", "0.52793163", "0.5274967", "0.5274003", "0.5271164", "0.5270324", "0.52702194", "0.5266025", "0.5264108", "0.5258467", "0.52563477", "0.525055", "0.52450466", "0.5240769", "0.52374786", "0.523476", "0.5227453", "0.5213186", "0.5210694", "0.520803", "0.5207819", "0.5203752", "0.5196569", "0.51959133", "0.5195529", "0.5190594", "0.5188704", "0.51886094", "0.5183077" ]
0.85082644
0
CopyFrom sets the content of this object to a copy of the src.
func (h *Histogram) CopyFrom(src *Histogram) {
	h.Counter = src.Counter
	h.copyHDataFrom(src)
}
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "func Copy(dst interface{}, src interface{}) (err error) {\n\tdstValue := reflect.ValueOf(dst)\n\tif dstValue.Kind() != reflect.Ptr {\n\t\terr = errors.New(\"dst isn't a pointer to struct\")\n\t\treturn\n\t}\n\tdstElem := dstValue.Elem()\n\tif dstElem.Kind() != reflect.Struct {\n\t\terr = errors.New(\"pointer doesn't point to struct\")\n\t\treturn\n\t}\n\n\tsrcValue := reflect.ValueOf(src)\n\tsrcType := reflect.TypeOf(src)\n\tif srcType.Kind() != reflect.Struct {\n\t\terr = errors.New(\"src isn't struct\")\n\t\treturn\n\t}\n\n\tfor i := 0; i < srcType.NumField(); i++ {\n\t\tsf := srcType.Field(i)\n\t\tsv := srcValue.FieldByName(sf.Name)\n\t\t// make sure the value which in dst is valid and can set\n\t\tif dv := dstElem.FieldByName(sf.Name); dv.IsValid() && dv.CanSet() {\n\t\t\tdv.Set(sv)\n\t\t}\n\t}\n\treturn\n}", "func Copy(dst, src interface{}) error {\n\tbuffer := new(bytes.Buffer)\n\tencoder := gob.NewEncoder(buffer)\n\tif err := encoder.Encode(src); err != nil {\n\t\treturn err\n\t}\n\tdecoder := gob.NewDecoder(buffer)\n\terr := decoder.Decode(dst)\n\treturn err\n}", "func (p *Pinger) SetSrc(src net.IP) (net.IP, error) {\r\n\tif src == nil {\r\n\t\treturn nil, errors.New(\"src must not be nil\")\r\n\t}\r\n\tp.src = src\r\n\treturn p.src, nil\r\n}", "func Copy(dst interface{}, src interface{}) error {\n\tif dst == nil {\n\t\treturn fmt.Errorf(\"dst cannot be nil\")\n\t}\n\n\tif src == nil {\n\t\treturn fmt.Errorf(\"src cannot be nil\")\n\t}\n\n\tbytes, err := json.Marshal(src)\n\n\tif err != nil {\n\t\treturn fmt.Errorf(\"unable to marshal src: %s\", err)\n\t}\n\n\terr = json.Unmarshal(bytes, dst)\n\n\tif err != nil {\n\t\treturn fmt.Errorf(\"unable to unmarshal into dst: %s\", err)\n\t}\n\n\treturn nil\n}", "func Copy(dst, src interface{}) interface{} {\n\tb, err := json.Marshal(src)\n\tif err != nil {\n\t\tpanic(err)\n\t}\n\terr = json.Unmarshal(b, dst)\n\tif err != nil {\n\t\tpanic(err)\n\t}\n\treturn dst\n}", "func (p *IPv4) SetSrc(src net.IP) {\n\tp.src = src\n}", "func (w *XPubWallet) CopyFrom(src Wallet) {\n\txpub, err := parseXPub(src.XPub())\n\tif err != nil {\n\t\tlogger.WithError(err).Panic(\"CopyFrom parseXPub failed\")\n\t}\n\tw.xpub = xpub\n\tw.Meta = src.(*XPubWallet).Meta.clone()\n\tw.Entries = src.(*XPubWallet).Entries.clone()\n}", "func Copy(dst, src interface{}) {\n\tswitch dst := dst.(type) {\n\tcase *types.Any:\n\t\tsrc := src.(*types.Any)\n\t\tdst.TypeUrl = src.TypeUrl\n\t\tif src.Value != nil {\n\t\t\tdst.Value = make([]byte, len(src.Value))\n\t\t\tcopy(dst.Value, src.Value)\n\t\t} else {\n\t\t\tdst.Value = nil\n\t\t}\n\tcase *types.Duration:\n\t\tsrc := src.(*types.Duration)\n\t\t*dst = *src\n\tcase *time.Duration:\n\t\tsrc := src.(*time.Duration)\n\t\t*dst = *src\n\tcase *types.Timestamp:\n\t\tsrc := src.(*types.Timestamp)\n\t\t*dst = *src\n\tcase *types.BoolValue:\n\t\tsrc := src.(*types.BoolValue)\n\t\t*dst = *src\n\tcase *types.Int64Value:\n\t\tsrc := src.(*types.Int64Value)\n\t\t*dst = *src\n\tcase CopierFrom:\n\t\tdst.CopyFrom(src)\n\tdefault:\n\t\tpanic(fmt.Sprintf(\"Copy for %T not implemented\", dst))\n\t}\n\n}", "func DeepCopy(dst interface{}, src interface{}) (err error) {\n\tvar bytes []byte\n\n\t// Serialize\n\tif src == nil {\n\t\treturn fmt.Errorf(\"source struct is nil\")\n\t}\n\tbytes, err = json.Marshal(src)\n\tif err != nil {\n\t\treturn errors.WrapPrefix(err, fmt.Sprintf(\"Failed to marshal src: %v\", err), 0)\n\t}\n\n\t// Deserialize\n\tif dst == nil {\n\t\treturn fmt.Errorf(\"destination struct is nil\")\n\t}\n\terr = 
json.Unmarshal(bytes, dst)\n\tif err != nil {\n\t\treturn errors.WrapPrefix(err, fmt.Sprintf(\"Failed to marshal dst: %v\", err), 0)\n\t}\n\treturn\n}", "func Copy(dst Builder, src Graph) {\n\tnodes := src.Nodes()\n\tfor _, n := range nodes {\n\t\tdst.AddNode(n)\n\t}\n\tfor _, u := range nodes {\n\t\tfor _, v := range src.From(u) {\n\t\t\tdst.SetEdge(src.Edge(u, v))\n\t\t}\n\t}\n}", "func (o *HTTPFrontendOptions) CopyFrom(src *HTTPFrontendOptions) {\n\tpatternToRgx := func(pattern string) *regexp.Regexp {\n\t\treg := regexp.QuoteMeta(strings.ToLower(pattern))\n\t\treg = strings.Replace(reg, \"\\\\*\", \".*\", -1)\n\t\treg = strings.Replace(reg, \"\\\\?\", \".\", -1)\n\t\treg = \"^\" + reg + \"$\"\n\t\treturn regexp.MustCompile(reg)\n\t}\n\n\t*o = *src\n\to.Routes = make([]HTTPFrontendRoute, len(src.Routes))\n\tcopy(o.Routes, src.Routes)\n\tfor i := range o.Routes {\n\t\troute := &o.Routes[i]\n\t\tif route.Host == \"\" {\n\t\t\troute.Host = \"*\"\n\t\t}\n\t\troute.hostRgx = patternToRgx(route.Host)\n\t\tif route.Path == \"\" {\n\t\t\troute.Path = \"*\"\n\t\t}\n\t\troute.pathRgx = patternToRgx(route.Path)\n\n\t\toldRestrictions := route.Restrictions\n\t\troute.Restrictions = make([]HTTPFrontendRestriction, len(oldRestrictions))\n\t\tcopy(route.Restrictions, oldRestrictions)\n\t\tfor j := range route.Restrictions {\n\t\t\trestriction := &route.Restrictions[j]\n\t\t\tif restriction.Path == \"\" {\n\t\t\t\trestriction.pathRgx = nil\n\t\t\t\tcontinue\n\t\t\t}\n\t\t\trestriction.pathRgx = patternToRgx(restriction.Path)\n\t\t}\n\t}\n}", "func (m *meta) copy(dest *meta) {\n\t*dest = *m\n}", "func (m *meta) copy(dest *meta) {\n\t*dest = *m\n}", "func (m *meta) copy(dest *meta) {\n\t*dest = *m\n}", "func Copy(src []byte) (dst []byte) {\n\tif len(src) == 0 {\n\t\treturn\n\t}\n\tdst = make([]byte, len(src))\n\tcopy(dst, src)\n\treturn\n}", "func (hop *ViaHop) Copy() *ViaHop {\n\treturn &ViaHop{\n\t\tProtocolName: hop.ProtocolName,\n\t\tProtocolVersion: hop.ProtocolVersion,\n\t\tTransport: hop.Transport,\n\t\tHost: hop.Host,\n\t\tPort: hop.Port.Copy(),\n\t\tParams: hop.Params.Copy(),\n\t}\n}", "func (dst Set) Copy(src Set) Set {\n\tif Same(src, dst) {\n\t\treturn dst\n\t}\n\n\tif len(dst) > 0 {\n\t\tdst = make(Set, len(src))\n\t}\n\n\tfor e, n := range src {\n\t\tdst[e] = n\n\t}\n\n\treturn dst\n}", "func (p *IPv4) Copy() Packet {\n\tvar packet IPv4\n\n\tpacket = *p\n\n\t// Take copy of the data\n\tdata := make([]byte, len(p.data))\n\tcopy(data, p.data)\n\tpacket.data = data\n\n\treturn &packet\n}", "func Copy(src interface{}) interface{} {\n\t// Make new container\n\tdstPtrValue := r.New(r.TypeOf(src))\n\n\t// Copy\n\tCopyInto(src, dstPtrValue.Interface())\n\n\t// Return dest\n\treturn r.Indirect(dstPtrValue).Interface()\n}", "func Copy(dst, src string) {\n\tWrite(dst, Read(src))\n}", "func (c *Context) SetSrc(src image.Image) {\n\tc.src = src\n}", "func (s SafeBuffer) Copy() payload.Safe {\n\tbin := make([]byte, s.Len())\n\tcopy(bin, s.Bytes())\n\n\tb := bytes.NewBuffer(bin)\n\treturn SafeBuffer{*b}\n}", "func Copy(dest interface{}, src interface{}) error {\n\tdata, err := json.Marshal(src)\n\tif err != nil {\n\t\treturn errors.ErrorMarshal.Newm(err.Error())\n\t}\n\n\terr = json.Unmarshal(data, dest)\n\tif err != nil {\n\t\treturn errors.ErrorUnmarshal.Newm(err.Error())\n\t}\n\n\treturn nil\n}", "func (r *Resources) Copy(other *Resources) {\n\tr.CPU = other.CPU\n\tr.DISK = other.DISK\n\tr.MEMORY = other.MEMORY\n\tr.GPU = other.GPU\n}", "func (s *Spec) Copy() *Spec {\n\ttypes := copyTypes(s.types, 
nil)\n\n\ttypeIDOffset := TypeID(0)\n\tif len(s.types) != 0 {\n\t\ttypeIDOffset = s.typeIDs[s.types[0]]\n\t}\n\ttypeIDs, typesByName := indexTypes(types, typeIDOffset)\n\n\t// NB: Other parts of spec are not copied since they are immutable.\n\treturn &Spec{\n\t\ts.rawTypes,\n\t\ts.strings,\n\t\ttypes,\n\t\ttypeIDs,\n\t\ttypesByName,\n\t\ts.byteOrder,\n\t}\n}", "func (a *ActiveDevice) Copy(m MetaContext, src *ActiveDevice) error {\n\n\t// Take a consistent snapshot of the src device. Be careful not to hold\n\t// locks on both devices at once.\n\tsrc.Lock()\n\tuv := src.uv\n\tdeviceID := src.deviceID\n\tsigKey := src.signingKey\n\tencKey := src.encryptionKey\n\tname := src.deviceName\n\tctime := src.deviceCtime\n\tkeychainMode := src.keychainMode\n\tsrc.Unlock()\n\n\treturn a.Set(m, uv, deviceID, sigKey, encKey, name, ctime, keychainMode)\n}", "func Copy(dst Mutable, src Const) {\n\tif err := errIfDimsNotEq(src, dst); err != nil {\n\t\tpanic(err)\n\t}\n\n\tm, n := src.Dims()\n\tfor i := 0; i < m; i++ {\n\t\tfor j := 0; j < n; j++ {\n\t\t\tdst.Set(i, j, src.At(i, j))\n\t\t}\n\t}\n}", "func (original *Place) Copy() *Place {\n\treturn &Place{original.latitude, original.longitude, original.Name}\n}", "func (in *StreamBackfillNone) DeepCopyInto(out *StreamBackfillNone) {\n\t*out = *in\n\treturn\n}", "func (v Undelegate) Copy() StakeMsg {\n\tv1 := v\n\treturn v1\n}", "func (t *Type) Copy() *Type", "func Assign(dst, src any) (ok bool) {\n\treturn AssignBuf(dst, src, nil)\n}", "func (s *Stream) From(src api.StreamSource) *Stream {\n\ts.source = src\n\treturn s\n}", "func (st *Settings) CopyTo(st2 *Settings) {\n\tst2.ack = st.ack\n\tst2.rawSettings = append(st2.rawSettings[:0], st.rawSettings...)\n\tst2.tableSize = st.tableSize\n\tst2.enablePush = st.enablePush\n\tst2.maxStreams = st.maxStreams\n\tst2.windowSize = st.windowSize\n\tst2.frameSize = st.frameSize\n\tst2.headerSize = st.headerSize\n}", "func (st *Settings) CopyTo(st2 *Settings) {\n\tst2.ack = st.ack\n\tst2.rawSettings = append(st2.rawSettings[:0], st.rawSettings...)\n\tst2.tableSize = st.tableSize\n\tst2.enablePush = st.enablePush\n\tst2.maxStreams = st.maxStreams\n\tst2.windowSize = st.windowSize\n\tst2.frameSize = st.frameSize\n\tst2.headerSize = st.headerSize\n}", "func (in *TCP) DeepCopyInto(out *TCP) {\n\t*out = *in\n\treturn\n}", "func (in *HawtioNginx) DeepCopyInto(out *HawtioNginx) {\n\t*out = *in\n}", "func (in *SourceSpec) DeepCopyInto(out *SourceSpec) {\n\t*out = *in\n}", "func (in *SourceSpec) DeepCopyInto(out *SourceSpec) {\n\t*out = *in\n}", "func (sa *SuffixArray) CopyFrom(src *SuffixArray) error {\n\treturn sa.ba.CopyFrom(src.ba)\n}", "func Copy(src interface{}, dst interface{}) error {\n\tif err := validateCopy(src, dst); err != nil {\n\t\treturn err\n\t}\n\tbytes, err := json.Marshal(src)\n\tif err != nil {\n\t\treturn err\n\t}\n\treturn json.Unmarshal(bytes, dst)\n}", "func (recv *Value) Copy(destValue *Value) {\n\tc_dest_value := (*C.GValue)(C.NULL)\n\tif destValue != nil {\n\t\tc_dest_value = (*C.GValue)(destValue.ToC())\n\t}\n\n\tC.g_value_copy((*C.GValue)(recv.native), c_dest_value)\n\n\treturn\n}", "func copy(srcVal interface{}, dstVal interface{}) error {\n\n\tcurEl := reflect.ValueOf(srcVal)\n\tif curEl.Kind() == reflect.Ptr {\n\t\tcurEl = curEl.Elem()\n\t}\n\tdstEl := reflect.ValueOf(dstVal)\n\tif dstEl.Kind() == reflect.Ptr {\n\t\tdstEl = dstEl.Elem()\n\t}\n\tif !dstEl.CanSet() {\n\t\treturn ErrInvalidDstVal\n\t}\n\tif !curEl.Type().AssignableTo(dstEl.Type()) {\n\t\treturn 
ErrCannotAssignValue\n\t}\n\tdstEl.Set(curEl)\n\treturn nil\n}", "func (t Topology) Copy() Topology {\n\treturn Topology{\n\t\tNodes: t.Nodes.Copy(),\n\t}\n}", "func (sc *SetComprehension) Copy() *SetComprehension {\n\tcpy := *sc\n\tcpy.Body = sc.Body.Copy()\n\tcpy.Term = sc.Term.Copy()\n\treturn &cpy\n}", "func Copy(dst WriteFlusher, src Reader) error {\n\treturn CopyWithContext(context.Background(), dst, src)\n}", "func (i *BytesIterator) Copy() Object {\n\treturn &BytesIterator{v: i.v, i: i.i, l: i.l}\n}", "func (b *Board) copy() *Board {\n\tnewBoard := &Board{\n\t\tPlayer1: make([]Square, len(b.Player1)),\n\t\tPlayer2: make([]Square, len(b.Player2)),\n\t\tresult: b.result,\n\t}\n\tfor i := range b.Player1 {\n\t\tnewBoard.Player1[i] = b.Player1[i]\n\t}\n\tfor i := range b.Player2 {\n\t\tnewBoard.Player2[i] = b.Player2[i]\n\t}\n\treturn newBoard\n}", "func (z *Big) Copy(x *Big) *Big {\n\tif debug {\n\t\tx.validate()\n\t}\n\tif z != x {\n\t\tsign := x.form & signbit\n\t\tz.copyAbs(x)\n\t\tz.form |= sign\n\t}\n\treturn z\n}", "func deepcopy(dst, src interface{}) error {\n\tr, w, err := os.Pipe()\n\tif err != nil {\n\t\treturn err\n\t}\n\tenc := gob.NewEncoder(w)\n\terr = enc.Encode(src)\n\tif err != nil {\n\t\treturn err\n\t}\n\tdec := gob.NewDecoder(r)\n\treturn dec.Decode(dst)\n}", "func (in *S3BackupSource) DeepCopyInto(out *S3BackupSource) {\n\t*out = *in\n\treturn\n}", "func (in *Source) DeepCopyInto(out *Source) {\n\tp := proto.Clone(in).(*Source)\n\t*out = *p\n}", "func (imp *Import) Copy() *Import {\n\tcpy := *imp\n\tcpy.Path = imp.Path.Copy()\n\treturn &cpy\n}", "func (rp *GenericParser) Copy() Parser {\n\treturn nil\n}", "func (e StartElement) Copy() StartElement {\n\tattrs := make([]Attr, len(e.Attr))\n\tcopy(attrs, e.Attr)\n\te.Attr = attrs\n\treturn e\n}", "func (in *GVK) DeepCopyInto(out *GVK) {\n\t*out = *in\n}", "func (v EditValidator) Copy() StakeMsg {\n\tv1 := v\n\tdesc := *v.Description\n\tv1.Description = &desc\n\treturn v1\n}", "func Copy(fromValue interface{}, toValue interface{}) error {\n\treturn copier.Copy(toValue, fromValue)\n}", "func (fr *Frame) Copy(orig *Frame) {\n\tfr.Status = orig.Status\n\tfor y, row := range orig.Pix {\n\t\tcopy(fr.Pix[y][:], row)\n\t}\n}", "func (b *CCSBuilder) Copy(object *CCS) *CCSBuilder {\n\tif object == nil {\n\t\treturn b\n\t}\n\tb.bitmap_ = object.bitmap_\n\tb.id = object.id\n\tb.href = object.href\n\tb.disableSCPChecks = object.disableSCPChecks\n\tb.enabled = object.enabled\n\treturn b\n}", "func (er *EmptyResult) Copy() Resulter {\n\treturn &EmptyResult{}\n}", "func (w *XPubWallet) CopyFromRef(src Wallet) {\n\txpub, err := parseXPub(src.XPub())\n\tif err != nil {\n\t\tlogger.WithError(err).Panic(\"CopyFromRef parseXPub failed\")\n\t}\n\n\t*w = *(src.(*XPubWallet))\n\tw.xpub = xpub\n}", "func (in *SourcePackageSpec) DeepCopyInto(out *SourcePackageSpec) {\n\t*out = *in\n\tout.Checksum = in.Checksum\n\treturn\n}", "func (v *Vector) CopyFrom(e *Vector) {\n\tv.X = e.X\n\tv.Y = e.Y\n\tv.Z = e.Z\n}", "func (src *Source) SetBuffer(buf []byte) {\n\tsrc.buf = buf\n}", "func (in *ComponentSource) DeepCopyInto(out *ComponentSource) {\n\t*out = *in\n\tout.Git = in.Git\n\treturn\n}", "func (packet *ReliablePacket) Copy() *ReliablePacket {\n\tnewPacket := new(ReliablePacket)\n\tnewPacket.Reliability = packet.Reliability\n\tnewPacket.HasSplitPacket = packet.HasSplitPacket\n\tnewPacket.OrderingChannel = packet.OrderingChannel\n\tnewPacket.OrderingIndex = packet.OrderingIndex\n\tnewPacket.ReliableMessageNumber = 
packet.ReliableMessageNumber\n\tnewPacket.SequencingIndex = packet.SequencingIndex\n\tnewPacket.SplitPacketCount = packet.SplitPacketCount\n\tnewPacket.SplitPacketID = packet.SplitPacketID\n\tnewPacket.SplitPacketIndex = packet.SplitPacketIndex\n\tnewPacket.SplitBuffer = packet.SplitBuffer\n\n\treturn newPacket\n}", "func (w *Wrapper) Copy() *Wrapper {\n\treturn w.cloning(true)\n}", "func (in *GhostSpec) DeepCopyInto(out *GhostSpec) {\n\t*out = *in\n\treturn\n}", "func (t Tags) Copy() Tags {\n\tout := make(Tags)\n\tout.Import(t.Export())\n\treturn out\n}", "func (self *Rectangle) CopyFrom(source interface{}) *Rectangle{\n return &Rectangle{self.Object.Call(\"copyFrom\", source)}\n}", "func (in *VolumeSource) DeepCopyInto(out *VolumeSource) {\n\t*out = *in\n\treturn\n}", "func (in *Rack) DeepCopyInto(out *Rack) {\n\t*out = *in\n}", "func (in *KubeconfigSourceFilesystem) DeepCopyInto(out *KubeconfigSourceFilesystem) {\n\t*out = *in\n}", "func (pmp *PrivateMarketplace) Copy() *PrivateMarketplace {\n\tpmpCopy := *pmp\n\n\tif pmp.Deals != nil {\n\t\tpmpCopy.Deals = []*Deal{}\n\t\tfor i := range pmp.Deals {\n\t\t\tpmpCopy.Deals = append(pmpCopy.Deals, pmp.Deals[i].Copy())\n\t\t}\n\t}\n\n\treturn &pmpCopy\n}", "func (p *Parser) copyfrom() {\n\tp.primary()\n\tp.emitByte(OP_COPYFROM)\n}", "func (in *RedisGCacheSpec) DeepCopyInto(out *RedisGCacheSpec) {\n\t*out = *in\n\tout.CommonSpec = in.CommonSpec\n\tout.Storage = in.Storage\n\treturn\n}", "func (b *ProxyBuilder) Copy(object *Proxy) *ProxyBuilder {\n\tif object == nil {\n\t\treturn b\n\t}\n\tb.bitmap_ = object.bitmap_\n\tb.httpProxy = object.httpProxy\n\tb.httpsProxy = object.httpsProxy\n\tb.noProxy = object.noProxy\n\treturn b\n}", "func (in *GitFileGeneratorItem) DeepCopyInto(out *GitFileGeneratorItem) {\n\t*out = *in\n}", "func (o *WorkflowBuildTaskMeta) SetSrc(v string) {\n\to.Src = &v\n}", "func (g *Graph) Copy() *Graph {\n\tg2 := &Graph{}\n\tg2.MinDistance = g.MinDistance\n\tg2.nodes = make([]Node, len(g.nodes))\n\tcopy(g2.nodes, g.nodes)\n\treturn g2\n}", "func (u *IPv4) DeepCopyInto(out *IPv4) {\n\t*out = *u\n}", "func (d *DataPacket) copy() DataPacket {\n\tcopySlice := make([]byte, len(d.data))\n\tcopy(copySlice, d.data)\n\treturn DataPacket{\n\t\tdata: copySlice,\n\t\tlength: d.length,\n\t}\n}", "func (ms HistogramBucketExemplar) CopyTo(dest HistogramBucketExemplar) {\n\tif ms.IsNil() {\n\t\t*dest.orig = nil\n\t\treturn\n\t}\n\tif dest.IsNil() {\n\t\tdest.InitEmpty()\n\t}\n\tdest.SetTimestamp(ms.Timestamp())\n\tdest.SetValue(ms.Value())\n\tms.Attachments().CopyTo(dest.Attachments())\n}", "func (o *Entry) Copy(s Entry, copyMacs bool) {\n\to.VlanInterface = s.VlanInterface\n\to.Interfaces = s.Interfaces\n\n\tif copyMacs {\n\t\to.StaticMacs = s.StaticMacs\n\t}\n}", "func (in *Source) DeepCopyInto(out *Source) {\n\t*out = *in\n\tout.WebFolderMeta = in.WebFolderMeta\n\tif in.Bucket != nil {\n\t\tin, out := &in.Bucket, &out.Bucket\n\t\t*out = new(Bucket)\n\t\t**out = **in\n\t}\n}", "func (in *S3Spec) DeepCopyInto(out *S3Spec) {\n\t*out = *in\n\tout.IAMUserSpec = in.IAMUserSpec\n\treturn\n}", "func copyStruct(dst *enigma, src *enigma) {\n\tdst.reflector = src.reflector\n\n\tdst.rings = make([]int, len(src.rings))\n\tcopy(dst.rings, src.rings)\n\n\tdst.positions = make([]string, len(src.positions))\n\tcopy(dst.positions, src.positions)\n\n\tdst.rotors = make([]string, len(src.rotors))\n\tcopy(dst.rotors, src.rotors)\n\n\tdst.plugboard = src.plugboard\n}", "func (in *Nginx) DeepCopyInto(out *Nginx) {\n\t*out = *in\n\treturn\n}", "func (in *Gvk) 
DeepCopyInto(out *Gvk) {\n\t*out = *in\n}", "func copySpec(in copySpecInput) error {\n\t// Backup fields that should be preserved from dest.\n\tpreservedFields := map[string]interface{}{}\n\tfor _, field := range in.fieldsToPreserve {\n\t\tvalue, found, err := unstructured.NestedFieldNoCopy(in.dest.Object, field...)\n\t\tif !found {\n\t\t\t// Continue if the field does not exist in src. fieldsToPreserve don't have to exist.\n\t\t\tcontinue\n\t\t} else if err != nil {\n\t\t\treturn errors.Wrapf(err, \"failed to get field %q from %s\", strings.Join(field, \".\"), tlog.KObj{Obj: in.dest})\n\t\t}\n\t\tpreservedFields[strings.Join(field, \".\")] = value\n\t}\n\n\t// Get spec from src.\n\tsrcSpec, found, err := unstructured.NestedFieldNoCopy(in.src.Object, strings.Split(in.srcSpecPath, \".\")...)\n\tif !found {\n\t\treturn errors.Errorf(\"missing field %q in %s\", in.srcSpecPath, tlog.KObj{Obj: in.src})\n\t} else if err != nil {\n\t\treturn errors.Wrapf(err, \"failed to get field %q from %s\", in.srcSpecPath, tlog.KObj{Obj: in.src})\n\t}\n\n\t// Set spec in dest.\n\tif err := unstructured.SetNestedField(in.dest.Object, srcSpec, strings.Split(in.destSpecPath, \".\")...); err != nil {\n\t\treturn errors.Wrapf(err, \"failed to set field %q on %s\", in.destSpecPath, tlog.KObj{Obj: in.dest})\n\t}\n\n\t// Restore preserved fields.\n\tfor path, value := range preservedFields {\n\t\tif err := unstructured.SetNestedField(in.dest.Object, value, strings.Split(path, \".\")...); err != nil {\n\t\t\treturn errors.Wrapf(err, \"failed to set field %q on %s\", path, tlog.KObj{Obj: in.dest})\n\t\t}\n\t}\n\treturn nil\n}", "func (s *Stats) Copy() *Stats {\n\treturn &Stats{\n\t\ts.FlowName,\n\t\ts.All.Copy(),\n\t\ts.Succeeded.Copy(),\n\t\ts.Failed.Copy(),\n\t\ts.Running.Copy(),\n\t\ts.Pending.Copy(),\n\t}\n}", "func (in *SriovPFSpec) DeepCopyInto(out *SriovPFSpec) {\n\t*out = *in\n\treturn\n}", "func (in *StreamSpec) DeepCopyInto(out *StreamSpec) {\n\t*out = *in\n\treturn\n}", "func (v *Values) Copy() device.Resulter {\n\tif v == nil {\n\t\treturn nil\n\t}\n\treturn &Values{\n\t\tSelectionMask: v.SelectionMask,\n\t\tValueMask: v.ValueMask}\n}", "func (in *Git) DeepCopyInto(out *Git) {\n\t*out = *in\n\treturn\n}", "func (dst *SFTP) Merge(src *SFTP) {\n\tif dst == nil || src == nil {\n\t\treturn\n\t}\n\n\tif src.Enabled != nil {\n\t\tdst.Enabled = src.Enabled\n\t}\n\tif src.Size != \"\" {\n\t\tdst.Size = src.Size\n\t}\n}", "func (t *Time) DeepCopyInto(out *Time) {\n\t*out = *t\n}", "func (in *SlackSender) DeepCopyInto(out *SlackSender) {\n\t*out = *in\n\treturn\n}", "func (x *BigUInt) Copy() *BigUInt {\n\ty := &BigUInt{\n\t\tdata: make([]uint8, len(x.data)),\n\t}\n\tfor i, v := range x.data {\n\t\ty.data[i] = v\n\t}\n\treturn y\n}" ]
[ "0.57939065", "0.5785679", "0.5720426", "0.5675303", "0.5671897", "0.56032854", "0.55808574", "0.5577658", "0.549812", "0.5485337", "0.547391", "0.5458561", "0.5458561", "0.5458561", "0.5450953", "0.54374933", "0.54351157", "0.5434311", "0.54340494", "0.5412223", "0.54066104", "0.53944784", "0.5378964", "0.5372085", "0.53469473", "0.5346135", "0.5341989", "0.53342223", "0.53094363", "0.5308293", "0.5286778", "0.5274629", "0.52633244", "0.52630925", "0.52630925", "0.525628", "0.52559114", "0.52475774", "0.52475774", "0.524151", "0.52341634", "0.5201671", "0.5192388", "0.5189392", "0.5171313", "0.51690245", "0.5165934", "0.5155643", "0.51525253", "0.51399845", "0.513565", "0.51221496", "0.51208115", "0.5118443", "0.5116594", "0.5116191", "0.5114018", "0.5112778", "0.5108762", "0.5108327", "0.5105653", "0.51033676", "0.509594", "0.50909805", "0.508433", "0.5082152", "0.50809526", "0.5074788", "0.5069485", "0.50691074", "0.5068908", "0.5065796", "0.5061182", "0.50486094", "0.5045169", "0.5041476", "0.50404537", "0.50324446", "0.50248325", "0.5023692", "0.5022654", "0.50184506", "0.50169384", "0.50132966", "0.50103873", "0.5010347", "0.5009192", "0.49987942", "0.4990171", "0.4987406", "0.4986357", "0.49844912", "0.4983888", "0.4983356", "0.4983205", "0.49730232", "0.4971944", "0.4968653", "0.49678499", "0.49675608" ]
0.52665424
32
copyHDataFrom appends histogram data values to this object from the src. Src histogram data values will be appended according to this object's offset and divider.
func (h *Histogram) copyHDataFrom(src *Histogram) {
	if h.Divider == src.Divider && h.Offset == src.Offset {
		for i := 0; i < len(h.Hdata); i++ {
			h.Hdata[i] += src.Hdata[i]
		}
		return
	}
	hData := src.Export()
	for _, data := range hData.Data {
		h.record((data.Start+data.End)/2, int(data.Count))
	}
}
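A rough usage sketch of the accumulation logic above, exercised through the exported CopyFrom/Transfer helpers that appear in this record's negatives. The fortio.org/fortio/stats import path and the Record method are assumptions not shown in this document; NewHistogram, Transfer, and Export are taken from the surrounding snippets, so this is an illustration under those assumptions rather than documented usage.

package main

import (
	"fmt"

	"fortio.org/fortio/stats" // assumed import path for the Histogram type above
)

func main() {
	// Same offset and divider: Transfer takes the fast path where
	// copyHDataFrom adds the Hdata buckets index by index.
	a := stats.NewHistogram(0, 1)
	b := stats.NewHistogram(0, 1)
	a.Record(10) // Record is assumed; only the unexported record() appears above
	b.Record(20)
	a.Transfer(b)

	// Different scale: copyHDataFrom re-records each exported bucket's
	// mid-point using this histogram's own offset and divider.
	c := stats.NewHistogram(5, 10)
	c.Record(42)
	a.Transfer(c)

	fmt.Println(a.Export().Count) // 3 samples accumulated into a
}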
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "func (h *Histogram) CopyFrom(src *Histogram) {\n\th.Counter = src.Counter\n\th.copyHDataFrom(src)\n}", "func Copy(h *hdrhistogram.Histogram) *hdrhistogram.Histogram {\n\tdup := hdrhistogram.New(h.LowestTrackableValue(), h.HighestTrackableValue(),\n\t\tint(h.SignificantFigures()))\n\tdup.Merge(h)\n\treturn dup\n}", "func (ms HistogramDataPoint) CopyTo(dest HistogramDataPoint) {\n\tif ms.IsNil() {\n\t\t*dest.orig = nil\n\t\treturn\n\t}\n\tif dest.IsNil() {\n\t\tdest.InitEmpty()\n\t}\n\tms.LabelsMap().CopyTo(dest.LabelsMap())\n\tdest.SetStartTime(ms.StartTime())\n\tdest.SetTimestamp(ms.Timestamp())\n\tdest.SetCount(ms.Count())\n\tdest.SetSum(ms.Sum())\n\tms.Buckets().CopyTo(dest.Buckets())\n\tdest.SetExplicitBounds(ms.ExplicitBounds())\n}", "func (h *Histogram) Transfer(src *Histogram) {\n\tif src.Count == 0 {\n\t\treturn\n\t}\n\tif h.Count == 0 {\n\t\th.CopyFrom(src)\n\t\tsrc.Reset()\n\t\treturn\n\t}\n\th.copyHDataFrom(src)\n\th.Counter.Transfer(&src.Counter)\n\tsrc.Reset()\n}", "func (h *Histogram) Clone() *Histogram {\n\tcopy := NewHistogram(h.Offset, h.Divider)\n\tcopy.CopyFrom(h)\n\treturn copy\n}", "func (ms HistogramBucket) CopyTo(dest HistogramBucket) {\n\tif ms.IsNil() {\n\t\t*dest.orig = nil\n\t\treturn\n\t}\n\tif dest.IsNil() {\n\t\tdest.InitEmpty()\n\t}\n\tdest.SetCount(ms.Count())\n\tms.Exemplar().CopyTo(dest.Exemplar())\n}", "func Merge(h1 *Histogram, h2 *Histogram) *Histogram {\n\tdivider := h1.Divider\n\toffset := h1.Offset\n\tif h2.Divider > h1.Divider {\n\t\tdivider = h2.Divider\n\t}\n\tif h2.Offset < h1.Offset {\n\t\toffset = h2.Offset\n\t}\n\tnewH := NewHistogram(offset, divider)\n\tnewH.Transfer(h1)\n\tnewH.Transfer(h2)\n\treturn newH\n}", "func (ms HistogramBucketExemplar) CopyTo(dest HistogramBucketExemplar) {\n\tif ms.IsNil() {\n\t\t*dest.orig = nil\n\t\treturn\n\t}\n\tif dest.IsNil() {\n\t\tdest.InitEmpty()\n\t}\n\tdest.SetTimestamp(ms.Timestamp())\n\tdest.SetValue(ms.Value())\n\tms.Attachments().CopyTo(dest.Attachments())\n}", "func (es HistogramDataPointSlice) CopyTo(dest HistogramDataPointSlice) {\n\tnewLen := es.Len()\n\tif newLen == 0 {\n\t\t*dest.orig = []*otlpmetrics.HistogramDataPoint(nil)\n\t\treturn\n\t}\n\toldLen := dest.Len()\n\tif newLen <= oldLen {\n\t\t(*dest.orig) = (*dest.orig)[:newLen]\n\t\tfor i, el := range *es.orig {\n\t\t\tnewHistogramDataPoint(&el).CopyTo(newHistogramDataPoint(&(*dest.orig)[i]))\n\t\t}\n\t\treturn\n\t}\n\torigs := make([]otlpmetrics.HistogramDataPoint, newLen)\n\twrappers := make([]*otlpmetrics.HistogramDataPoint, newLen)\n\tfor i, el := range *es.orig {\n\t\twrappers[i] = &origs[i]\n\t\tnewHistogramDataPoint(&el).CopyTo(newHistogramDataPoint(&wrappers[i]))\n\t}\n\t*dest.orig = wrappers\n}", "func getHistogram(src [][3]int, size float64, pixels *[HistSize][3]float64, hist *[HistSize]float64) {\n\tvar ind, r, g, b, i int\n\tvar inr, ing, inb int\n\n\tfor i = range src {\n\t\tr = src[i][0]\n\t\tg = src[i][1]\n\t\tb = src[i][2]\n\n\t\tinr = r >> Shift\n\t\ting = g >> Shift\n\t\tinb = b >> Shift\n\n\t\tind = (inr << (2 * HistBits)) + (ing << HistBits) + inb\n\t\tpixels[ind][0], pixels[ind][1], pixels[ind][2] = float64(r), float64(g), float64(b)\n\t\thist[ind]++\n\t}\n\n\t// normalize weight by the number of pixels in the image\n\tfor i = 0; i < HistSize; i++ {\n\t\thist[i] /= size\n\t}\n}", "func (es HistogramBucketSlice) CopyTo(dest HistogramBucketSlice) {\n\tnewLen := es.Len()\n\tif newLen == 0 {\n\t\t*dest.orig = []*otlpmetrics.HistogramDataPoint_Bucket(nil)\n\t\treturn\n\t}\n\toldLen := dest.Len()\n\tif newLen <= oldLen 
{\n\t\t(*dest.orig) = (*dest.orig)[:newLen]\n\t\tfor i, el := range *es.orig {\n\t\t\tnewHistogramBucket(&el).CopyTo(newHistogramBucket(&(*dest.orig)[i]))\n\t\t}\n\t\treturn\n\t}\n\torigs := make([]otlpmetrics.HistogramDataPoint_Bucket, newLen)\n\twrappers := make([]*otlpmetrics.HistogramDataPoint_Bucket, newLen)\n\tfor i, el := range *es.orig {\n\t\twrappers[i] = &origs[i]\n\t\tnewHistogramBucket(&el).CopyTo(newHistogramBucket(&wrappers[i]))\n\t}\n\t*dest.orig = wrappers\n}", "func (d *Data) CopyPropertiesFrom(src datastore.DataService, fs storage.FilterSpec) error {\n\td2, ok := src.(*Data)\n\tif !ok {\n\t\treturn fmt.Errorf(\"unable to copy properties from non-labelmap data %q\", src.DataName())\n\t}\n\n\t// TODO -- Handle mutable data that could be potentially altered by filter.\n\td.MaxLabel = make(map[dvid.VersionID]uint64, len(d2.MaxLabel))\n\tfor k, v := range d2.MaxLabel {\n\t\td.MaxLabel[k] = v\n\t}\n\td.MaxRepoLabel = d2.MaxRepoLabel\n\td.NextLabel = d2.NextLabel\n\n\td.IndexedLabels = d2.IndexedLabels\n\td.MaxDownresLevel = d2.MaxDownresLevel\n\n\treturn d.Data.CopyPropertiesFrom(d2.Data, fs)\n}", "func (es HistogramDataPointSlice) MoveAndAppendTo(dest HistogramDataPointSlice) {\n\tif es.Len() == 0 {\n\t\t// Just to ensure that we always return a Slice with nil elements.\n\t\t*es.orig = nil\n\t\treturn\n\t}\n\tif dest.Len() == 0 {\n\t\t*dest.orig = *es.orig\n\t\t*es.orig = nil\n\t\treturn\n\t}\n\t*dest.orig = append(*dest.orig, *es.orig...)\n\t*es.orig = nil\n\treturn\n}", "func (f *FileHash) FillFrom(from interface {\n\tGetOffset() (value int64)\n\tGetLimit() (value int)\n\tGetHash() (value []byte)\n}) {\n\tf.Offset = from.GetOffset()\n\tf.Limit = from.GetLimit()\n\tf.Hash = from.GetHash()\n}", "func (in *Haproxy) DeepCopyInto(out *Haproxy) {\n\t*out = *in\n\treturn\n}", "func (b *Buffer) CopyDataFrom(src *Buffer, srcOffset, dstOffset, size int) error {\n\tif size == 0 {\n\t\treturn nil\n\t}\n\n\terrCode := cl.EnqueueCopyBuffer(\n\t\tb.device.cmdQueue,\n\t\tsrc.bufHandle,\n\t\tb.bufHandle,\n\t\tuint64(srcOffset),\n\t\tuint64(dstOffset),\n\t\tuint64(size),\n\t\t0,\n\t\tnil,\n\t\tnil,\n\t)\n\n\tif errCode != cl.SUCCESS {\n\t\treturn fmt.Errorf(\"opencl device(%s): error copying device data from buffer %s to buffer %s (errCode %d)\", b.device.Name, src.name, b.name, errCode)\n\t}\n\treturn nil\n}", "func (in *HDFSSpec) DeepCopyInto(out *HDFSSpec) {\n\t*out = *in\n\treturn\n}", "func (h *hash) Copy() *hash {\n\tif h == nil {\n\t\treturn nil\n\t}\n\n\th.RLock()\n\tdefer h.RUnlock()\n\n\tfresh := New()\n\tfor k, v := range h.m {\n\t\tfresh.m[k] = v.Copy()\n\t}\n\n\treturn fresh\n}", "func (h *PrometheusInstrumentHandler) Clone() model.Part {\n\th0 := *h\n\treturn &h0\n}", "func copySourceHeaders(sh http.Header) (th http.Header) {\n\tth = make(http.Header)\n\n\tif sh == nil {\n\t\treturn nil\n\t}\n\n\tfor key, values := range sh {\n\t\tif dhHeadersRe.MatchString(key) {\n\t\t\tcontinue\n\t\t}\n\n\t\tfor _, val := range values {\n\t\t\tth.Add(key, val)\n\t\t}\n\t}\n\n\treturn th\n}", "func (ctx *Context) hInto(pad scratchPad, left, right []byte,\n\tph precomputedHashes, addr address, out []byte) {\n\tbuf := pad.hBuf()\n\tencodeUint64Into(HASH_PADDING_H, buf[:ctx.p.N])\n\taddr.setKeyAndMask(0)\n\tph.prfAddrPubSeedInto(pad, addr, buf[ctx.p.N:ctx.p.N*2])\n\taddr.setKeyAndMask(1)\n\tph.prfAddrPubSeedInto(pad, addr, buf[2*ctx.p.N:3*ctx.p.N])\n\taddr.setKeyAndMask(2)\n\tph.prfAddrPubSeedInto(pad, addr, buf[3*ctx.p.N:])\n\txor.BytesSameLen(buf[2*ctx.p.N:3*ctx.p.N], left, 
buf[2*ctx.p.N:3*ctx.p.N])\n\txor.BytesSameLen(buf[3*ctx.p.N:], right, buf[3*ctx.p.N:])\n\tctx.hashInto(pad, buf, out)\n}", "func (s CpuStats) CopyTo(t CpuStats) {\n\tfor k, v := range s {\n\t\t_, ok := t[k]\n\t\tif !ok {\n\t\t\tt[k] = &CpuUsage{}\n\t\t}\n\t\tt[k].Name = v.Name\n\t\tt[k].Idle = v.Idle\n\t\tt[k].Total = v.Total\n\t}\n}", "func (in *HawtioNginx) DeepCopyInto(out *HawtioNginx) {\n\t*out = *in\n}", "func (ms SummaryValueAtPercentile) CopyTo(dest SummaryValueAtPercentile) {\n\tif ms.IsNil() {\n\t\t*dest.orig = nil\n\t\treturn\n\t}\n\tif dest.IsNil() {\n\t\tdest.InitEmpty()\n\t}\n\tdest.SetPercentile(ms.Percentile())\n\tdest.SetValue(ms.Value())\n}", "func (dd *dictDecoder) writeCopy(dist, length int) int {\n\tdstBase := dd.wrPos\n\tdstPos := dstBase\n\tsrcPos := dstPos - dist\n\tendPos := dstPos + length\n\tif endPos > len(dd.hist) {\n\t\tendPos = len(dd.hist)\n\t}\n\n\t// Copy non-overlapping section after destination position.\n\t//\n\t// This section is non-overlapping in that the copy length for this section\n\t// is always less than or equal to the backwards distance. This can occur\n\t// if a distance refers to data that wraps-around in the buffer.\n\t// Thus, a backwards copy is performed here; that is, the exact bytes in\n\t// the source prior to the copy is placed in the destination.\n\tif srcPos < 0 {\n\t\tsrcPos += len(dd.hist)\n\t\tdstPos += copy(dd.hist[dstPos:endPos], dd.hist[srcPos:])\n\t\tsrcPos = 0\n\t}\n\n\t// Copy possibly overlapping section before destination position.\n\t//\n\t// This section can overlap if the copy length for this section is larger\n\t// than the backwards distance. This is allowed by LZ77 so that repeated\n\t// strings can be succinctly represented using (dist, length) pairs.\n\t// Thus, a forwards copy is performed here; that is, the bytes copied is\n\t// possibly dependent on the resulting bytes in the destination as the copy\n\t// progresses along. 
This is functionally equivalent to the following:\n\t//\n\t//\tfor i := 0; i < endPos-dstPos; i++ {\n\t//\t\tdd.hist[dstPos+i] = dd.hist[srcPos+i]\n\t//\t}\n\t//\tdstPos = endPos\n\t//\n\tfor dstPos < endPos {\n\t\tdstPos += copy(dd.hist[dstPos:endPos], dd.hist[srcPos:dstPos])\n\t}\n\n\tdd.wrPos = dstPos\n\treturn dstPos - dstBase\n}", "func copyHeaders(dst, src http.Header) {\n\tfor key, vals := range src {\n\t\tfor _, val := range vals {\n\t\t\tdst.Add(key, val)\n\t\t}\n\t}\n}", "func (t *MockPgxW) CopyFrom(tableName pgx.Identifier, columnNames []string, rowSrc pgx.CopyFromSource) (int, error) {\n\ttmp := t.CurrentCopyFrom\n\tif t.ExpCopyFromTab == nil {\n\t\tt.ExpCopyFromTab = make([]pgx.Identifier, len(t.CopyFromErr))\n\t\tt.ExpCopyFromCol = make([][]string, len(t.CopyFromErr))\n\t}\n\tt.ExpCopyFromTab[tmp] = tableName\n\tt.ExpCopyFromCol[tmp] = columnNames\n\tt.CurrentCopyFrom++\n\treturn 0, t.CopyFromErr[tmp]\n}", "func copyHeader(dst http.Header, src http.Header) {\n\tfor k, _ := range dst {\n\t\tdst.Del(k)\n\t}\n\tfor k, vv := range src {\n\t\tfor _, v := range vv {\n\t\t\tdst.Add(k, v)\n\t\t}\n\t}\n}", "func (h *Header) Clone() *Header {\n\thc := &Header{slice: make([]string, len(h.slice))}\n\tcopy(hc.slice, h.slice)\n\treturn hc\n}", "func copyHeader(target, source http.Header) {\n\tfor k, vs := range source {\n\t\ttarget[k] = vs\n\t}\n}", "func (in *HawtioBranding) DeepCopyInto(out *HawtioBranding) {\n\t*out = *in\n}", "func copyHeader(src, dest http.Header) {\n\tfor key, val := range src {\n\t\tfor _, v := range val {\n\t\t\tdest.Add(key, v)\n\t\t}\n\t}\n}", "func (h *Histogram) Export() *HistogramData {\n\tvar res HistogramData\n\tres.Count = h.Counter.Count\n\tres.Min = h.Counter.Min\n\tres.Max = h.Counter.Max\n\tres.Sum = h.Counter.Sum\n\tres.Avg = h.Counter.Avg()\n\tres.StdDev = h.Counter.StdDev()\n\tmultiplier := h.Divider\n\toffset := h.Offset\n\t// calculate the last bucket index\n\tlastIdx := -1\n\tfor i := numBuckets - 1; i >= 0; i-- {\n\t\tif h.Hdata[i] > 0 {\n\t\t\tlastIdx = i\n\t\t\tbreak\n\t\t}\n\t}\n\tif lastIdx == -1 {\n\t\treturn &res\n\t}\n\n\t// previous bucket value:\n\tprev := histogramBucketValues[0]\n\tvar total int64\n\tctrTotal := float64(h.Count)\n\t// export the data of each bucket of the histogram\n\tfor i := 0; i <= lastIdx; i++ {\n\t\tif h.Hdata[i] == 0 {\n\t\t\t// empty bucket: skip it but update prev which is needed for next iter\n\t\t\tif i < numValues {\n\t\t\t\tprev = histogramBucketValues[i]\n\t\t\t}\n\t\t\tcontinue\n\t\t}\n\t\tvar b Bucket\n\t\ttotal += int64(h.Hdata[i])\n\t\tif len(res.Data) == 0 {\n\t\t\t// First entry, start is min\n\t\t\tb.Start = h.Min\n\t\t} else {\n\t\t\tb.Start = multiplier*float64(prev) + offset\n\t\t}\n\t\tb.Percent = 100. 
* float64(total) / ctrTotal\n\t\tif i < numValues {\n\t\t\tcur := histogramBucketValues[i]\n\t\t\tb.End = multiplier*float64(cur) + offset\n\t\t\tprev = cur\n\t\t} else {\n\t\t\t// Last Entry\n\t\t\tb.Start = multiplier*float64(prev) + offset\n\t\t\tb.End = h.Max\n\t\t}\n\t\tb.Count = int64(h.Hdata[i])\n\t\tres.Data = append(res.Data, b)\n\t}\n\tres.Data[len(res.Data)-1].End = h.Max\n\treturn &res\n}", "func (ms MetricDescriptor) CopyTo(dest MetricDescriptor) {\n\tif ms.IsNil() {\n\t\t*dest.orig = nil\n\t\treturn\n\t}\n\tif dest.IsNil() {\n\t\tdest.InitEmpty()\n\t}\n\tdest.SetName(ms.Name())\n\tdest.SetDescription(ms.Description())\n\tdest.SetUnit(ms.Unit())\n\tdest.SetType(ms.Type())\n}", "func (in *WebMetricHeader) DeepCopyInto(out *WebMetricHeader) {\n\t*out = *in\n\treturn\n}", "func UpdateHistogram(h *Histogram, feedback *QueryFeedback) *Histogram {\n\tbuckets, isNewBuckets, totalCount := splitBuckets(h, feedback)\n\tbuckets = mergeBuckets(buckets, isNewBuckets, float64(totalCount))\n\thist := buildNewHistogram(h, buckets)\n\t// Update the NDV of primary key column.\n\tif feedback.Tp == PkType {\n\t\thist.NDV = int64(hist.TotalRowCount())\n\t}\n\treturn hist\n}", "func (in *Hawtio) DeepCopyInto(out *Hawtio) {\n\t*out = *in\n\tout.TypeMeta = in.TypeMeta\n\tin.ObjectMeta.DeepCopyInto(&out.ObjectMeta)\n\tin.Spec.DeepCopyInto(&out.Spec)\n\tout.Status = in.Status\n}", "func (rc *ResponseCache) CopyFrom(e engine.Engine, originRangeID roachpb.RangeID) error {\n\tstart := engine.MVCCEncodeKey(\n\t\tkeys.ResponseCacheKey(originRangeID, roachpb.KeyMin))\n\tend := engine.MVCCEncodeKey(\n\t\tkeys.ResponseCacheKey(originRangeID, roachpb.KeyMax))\n\n\treturn e.Iterate(start, end, func(kv engine.MVCCKeyValue) (bool, error) {\n\t\t// Decode the key into a cmd, skipping on error. 
Otherwise,\n\t\t// write it to the corresponding key in the new cache.\n\t\tfamily, err := rc.decodeResponseCacheKey(kv.Key)\n\t\tif err != nil {\n\t\t\treturn false, util.Errorf(\"could not decode a response cache key %s: %s\",\n\t\t\t\troachpb.Key(kv.Key), err)\n\t\t}\n\t\tkey := keys.ResponseCacheKey(rc.rangeID, family)\n\t\tencKey := engine.MVCCEncodeKey(key)\n\t\t// Decode the value, update the checksum and re-encode.\n\t\tmeta := &engine.MVCCMetadata{}\n\t\tif err := proto.Unmarshal(kv.Value, meta); err != nil {\n\t\t\treturn false, util.Errorf(\"could not decode response cache value %s [% x]: %s\",\n\t\t\t\troachpb.Key(kv.Key), kv.Value, err)\n\t\t}\n\t\tmeta.Value.Checksum = nil\n\t\tmeta.Value.InitChecksum(key)\n\t\t_, _, err = engine.PutProto(e, encKey, meta)\n\t\treturn false, err\n\t})\n}", "func (ms SummaryDataPoint) CopyTo(dest SummaryDataPoint) {\n\tif ms.IsNil() {\n\t\t*dest.orig = nil\n\t\treturn\n\t}\n\tif dest.IsNil() {\n\t\tdest.InitEmpty()\n\t}\n\tms.LabelsMap().CopyTo(dest.LabelsMap())\n\tdest.SetStartTime(ms.StartTime())\n\tdest.SetTimestamp(ms.Timestamp())\n\tdest.SetCount(ms.Count())\n\tdest.SetSum(ms.Sum())\n\tms.ValueAtPercentiles().CopyTo(dest.ValueAtPercentiles())\n}", "func (ms InstrumentationLibraryMetrics) CopyTo(dest InstrumentationLibraryMetrics) {\n\tif ms.IsNil() {\n\t\t*dest.orig = nil\n\t\treturn\n\t}\n\tif dest.IsNil() {\n\t\tdest.InitEmpty()\n\t}\n\tms.InstrumentationLibrary().CopyTo(dest.InstrumentationLibrary())\n\tms.Metrics().CopyTo(dest.Metrics())\n}", "func (in *CodisProxyHPASpec) DeepCopyInto(out *CodisProxyHPASpec) {\n\t*out = *in\n\treturn\n}", "func copyInnerNodeHeader(dst, src *innerNodeHeader) {\n\t// Shallow copy is sufficient because prefix is an embedded array of byte\n\t// not a slice pointing to a shared array, but we can't just use = since\n\t// that would override the id and ref in nodeHeader\n\tdst.leaf = src.leaf\n\tdst.nChildren = src.nChildren\n\tdst.prefixLen = src.prefixLen\n\tdst.prefix = src.prefix\n}", "func (d *dataUsageCache) copyWithChildren(src *dataUsageCache, hash dataUsageHash, parent *dataUsageHash) {\n\tif d.Cache == nil {\n\t\td.Cache = make(map[string]dataUsageEntry, 100)\n\t}\n\te, ok := src.Cache[hash.String()]\n\tif !ok {\n\t\treturn\n\t}\n\td.Cache[hash.Key()] = e\n\tfor ch := range e.Children {\n\t\tif ch == hash.Key() {\n\t\t\tlogger.LogIf(GlobalContext, errors.New(\"dataUsageCache.copyWithChildren: Circular reference\"))\n\t\t\treturn\n\t\t}\n\t\td.copyWithChildren(src, dataUsageHash(ch), &hash)\n\t}\n\tif parent != nil {\n\t\tp := d.Cache[parent.Key()]\n\t\tp.addChild(hash)\n\t\td.Cache[parent.Key()] = p\n\t}\n}", "func copyHeader(which string, from http.Header, to http.Header) {\n\tfor _, value := range from.Values(which) {\n\t\tto.Add(which, value)\n\t}\n}", "func (p *SexpHash) CloneFrom(src *SexpHash) {\n\n\tp.TypeName = src.TypeName\n\tp.Map = *(src.CopyMap())\n\n\tp.KeyOrder = src.KeyOrder\n\tp.GoStructFactory = src.GoStructFactory\n\tp.NumKeys = src.NumKeys\n\tp.GoMethods = src.GoMethods\n\tp.GoFields = src.GoFields\n\tp.GoMethSx = src.GoMethSx\n\tp.GoFieldSx = src.GoFieldSx\n\tp.GoType = src.GoType\n\tp.NumMethod = src.NumMethod\n\tp.GoShadowStruct = src.GoShadowStruct\n\tp.GoShadowStructVa = src.GoShadowStructVa\n\tp.ShadowSet = src.ShadowSet\n\n\t// json tag name -> pointers to example values, as factories for SexpToGoStructs()\n\tp.JsonTagMap = make(map[string]*HashFieldDet)\n\tfor k, v := range src.JsonTagMap {\n\t\tp.JsonTagMap[k] = v\n\t}\n\tp.DetOrder = src.DetOrder\n\n\t// for using these 
as a scoping model\n\tp.DefnEnv = src.DefnEnv\n\tp.SuperClass = src.SuperClass\n\tp.ZMain = src.ZMain\n\tp.ZMethods = make(map[string]*SexpFunction)\n\tfor k, v := range src.ZMethods {\n\t\tp.ZMethods[k] = v\n\t}\n\tp.Env = src.Env\n}", "func mockStatsHistogram(id int64, values []types.Datum, repeat int64, tp *types.FieldType) *statistics.Histogram {\n\tndv := len(values)\n\thistogram := statistics.NewHistogram(id, int64(ndv), 0, 0, tp, ndv, 0)\n\tfor i := 0; i < ndv; i++ {\n\t\thistogram.AppendBucket(&values[i], &values[i], repeat*int64(i+1), repeat)\n\t}\n\treturn histogram\n}", "func (in *HawtioStatus) DeepCopyInto(out *HawtioStatus) {\n\t*out = *in\n}", "func (ms Metric) CopyTo(dest Metric) {\n\tif ms.IsNil() {\n\t\t*dest.orig = nil\n\t\treturn\n\t}\n\tif dest.IsNil() {\n\t\tdest.InitEmpty()\n\t}\n\tms.MetricDescriptor().CopyTo(dest.MetricDescriptor())\n\tms.Int64DataPoints().CopyTo(dest.Int64DataPoints())\n\tms.DoubleDataPoints().CopyTo(dest.DoubleDataPoints())\n\tms.HistogramDataPoints().CopyTo(dest.HistogramDataPoints())\n\tms.SummaryDataPoints().CopyTo(dest.SummaryDataPoints())\n}", "func CopyHeader(dst *LogBuffer, src *LogBuffer) {\n\tsrc.headerMU.Lock()\n\tdup, err := copystructure.Copy(src.header)\n\tdupBanner := src.AddBanner\n\tsrc.headerMU.Unlock()\n\n\tdst.headerMU.Lock()\n\tif err != nil {\n\t\tdst.header = map[string]interface{}{}\n\t} else {\n\t\tdst.header = dup.(map[string]interface{})\n\t}\n\tdst.AddBanner = dupBanner\n\tdst.headerMU.Unlock()\n}", "func (m *MockHistogram) LoadFromCheckpoint(checkpoint *vpa_types.HistogramCheckpoint) error {\n\treturn nil\n}", "func hashData(src io.Reader) (string, error) {\n\th := sha256.New()\n\tif _, err := io.Copy(h, src); err != nil {\n\t\treturn \"\", err\n\t}\n\treturn \"sha256:\" + hex.EncodeToString(h.Sum(nil)), nil\n}", "func copyTaskHealthMetrics(from []*ecstcs.TaskHealth) []*ecstcs.TaskHealth {\n\tto := make([]*ecstcs.TaskHealth, len(from))\n\tcopy(to, from)\n\treturn to\n}", "func (es HistogramBucketSlice) MoveAndAppendTo(dest HistogramBucketSlice) {\n\tif es.Len() == 0 {\n\t\t// Just to ensure that we always return a Slice with nil elements.\n\t\t*es.orig = nil\n\t\treturn\n\t}\n\tif dest.Len() == 0 {\n\t\t*dest.orig = *es.orig\n\t\t*es.orig = nil\n\t\treturn\n\t}\n\t*dest.orig = append(*dest.orig, *es.orig...)\n\t*es.orig = nil\n\treturn\n}", "func (in *Hash) DeepCopyInto(out *Hash) {\n\t*out = *in\n}", "func (h *HexColor) DeepCopyInto(out *HexColor) {\n\t*out = *h\n}", "func (dc *Float32DictConverter) Copy(out interface{}, vals []utils.IndexType) error {\n\to := out.([]float32)\n\tfor idx, val := range vals {\n\t\to[idx] = dc.dict[val]\n\t}\n\treturn nil\n}", "func (in *Hawtio) DeepCopy() *Hawtio {\n\tif in == nil {\n\t\treturn nil\n\t}\n\tout := new(Hawtio)\n\tin.DeepCopyInto(out)\n\treturn out\n}", "func (in *HttpHealthCheck) DeepCopyInto(out *HttpHealthCheck) {\n\t*out = *in\n\tif in.Headers != nil {\n\t\tin, out := &in.Headers, &out.Headers\n\t\t*out = make(map[string]string, len(*in))\n\t\tfor key, val := range *in {\n\t\t\t(*out)[key] = val\n\t\t}\n\t}\n\treturn\n}", "func (in *StatsSpec) DeepCopyInto(out *StatsSpec) {\n\t*out = *in\n\treturn\n}", "func (in *HaproxyStatus) DeepCopyInto(out *HaproxyStatus) {\n\t*out = *in\n\treturn\n}", "func (rb *RingBuffer) Clone() *RingBuffer {\n\trb.lock.RLock()\n\tdefer rb.lock.RUnlock()\n\tcp := make([]stats.Record, len(rb.data))\n\tcopy(cp, rb.data)\n\treturn &RingBuffer{seq: rb.seq, data: cp}\n}", "func (d TestSink) Histogram(c *telemetry.Context, stat string, value float64) 
{\n\td[stat] = TestMetric{\"Histogram\", value, c.Tags()}\n}", "func (in *Headers) DeepCopyInto(out *Headers) {\n\tp := proto.Clone(in).(*Headers)\n\t*out = *p\n}", "func (in *HawtioProductInfo) DeepCopyInto(out *HawtioProductInfo) {\n\t*out = *in\n}", "func (b *BaseImpl) Copy(src Base, srcOff, size, dstOff, extend int) {\n\n\t// src, ok := osrc.(*BaseImpl)\n\t// if !ok {\n\t// \tlog.Log(LOG_WARN, log.Printf(\"BaseImpl.Copy() src is only BaseImpl\"))\n\n\t// \treturn\n\t// }\n\n\tif len(b.bytes) > dstOff {\n\t\tdiff := Diff{Offset: dstOff, bytes: b.bytes[dstOff:]}\n\t\tb.Diffs = append(b.Diffs, diff)\n\t\t//b.bytes = b.bytes[:dstOff]\n\t}\n\n\tfor i, diff := range b.Diffs {\n\t\tif diff.Offset >= dstOff {\n\t\t\tdiff.Offset += extend\n\t\t}\n\t\tb.Diffs[i] = diff\n\t}\n\n\tif len(src.R(0)) > srcOff {\n\t\tnSize := len(src.R(0)[srcOff:])\n\t\tif nSize > size {\n\t\t\tnSize = size\n\t\t}\n\n\t\tdiff := Diff{Offset: dstOff, bytes: src.R(0)[srcOff : srcOff+nSize]}\n\t\tb.Diffs = append(b.Diffs, diff)\n\t}\n\tfor _, diff := range src.GetDiffs() {\n\t\tif diff.Offset >= srcOff {\n\t\t\tnDiff := diff\n\t\t\tnDiff.Offset -= srcOff\n\t\t\tnDiff.Offset += dstOff\n\t\t\tb.Diffs = append(b.Diffs, nDiff)\n\t\t}\n\t}\n\treturn\n}", "func (es SummaryValueAtPercentileSlice) CopyTo(dest SummaryValueAtPercentileSlice) {\n\tnewLen := es.Len()\n\tif newLen == 0 {\n\t\t*dest.orig = []*otlpmetrics.SummaryDataPoint_ValueAtPercentile(nil)\n\t\treturn\n\t}\n\toldLen := dest.Len()\n\tif newLen <= oldLen {\n\t\t(*dest.orig) = (*dest.orig)[:newLen]\n\t\tfor i, el := range *es.orig {\n\t\t\tnewSummaryValueAtPercentile(&el).CopyTo(newSummaryValueAtPercentile(&(*dest.orig)[i]))\n\t\t}\n\t\treturn\n\t}\n\torigs := make([]otlpmetrics.SummaryDataPoint_ValueAtPercentile, newLen)\n\twrappers := make([]*otlpmetrics.SummaryDataPoint_ValueAtPercentile, newLen)\n\tfor i, el := range *es.orig {\n\t\twrappers[i] = &origs[i]\n\t\tnewSummaryValueAtPercentile(&el).CopyTo(newSummaryValueAtPercentile(&wrappers[i]))\n\t}\n\t*dest.orig = wrappers\n}", "func (c *SpanContext) CopyFrom(ctx *SpanContext) {\n\tc.traceID = ctx.traceID\n\tc.spanID = ctx.spanID\n\tc.parentID = ctx.parentID\n\tc.samplingState = ctx.samplingState\n\tif l := len(ctx.baggage); l > 0 {\n\t\tc.baggage = make(map[string]string, l)\n\t\tfor k, v := range ctx.baggage {\n\t\t\tc.baggage[k] = v\n\t\t}\n\t} else {\n\t\tc.baggage = nil\n\t}\n}", "func copyHeaders(source http.Header, destination http.Header) {\n\tfor h, vs := range source {\n\t\tfor _, v := range vs {\n\t\t\tdestination.Set(h, v)\n\t\t}\n\t}\n}", "func (o *txSocketQueue) copyToHead(buf []byte) uint32 {\n\tm := o.getOrAllocIndex(&o.head)\n\tfree := o.head.getFree()\n\tz := uint16(minSpace(free, uint32(len(buf))))\n\tm.Append(buf[:z])\n\to.head.Inc(z, o.refSize)\n\treturn uint32(z)\n}", "func (in *SensuHandler) DeepCopyInto(out *SensuHandler) {\n\t*out = *in\n\tout.TypeMeta = in.TypeMeta\n\tin.ObjectMeta.DeepCopyInto(&out.ObjectMeta)\n\tin.Spec.DeepCopyInto(&out.Spec)\n\tout.Status = in.Status\n\treturn\n}", "func (rc *ResponseCache) CopyInto(e engine.Engine, destRangeID roachpb.RangeID) error {\n\tstart := engine.MVCCEncodeKey(\n\t\tkeys.ResponseCacheKey(rc.rangeID, roachpb.KeyMin))\n\tend := engine.MVCCEncodeKey(\n\t\tkeys.ResponseCacheKey(rc.rangeID, roachpb.KeyMax))\n\n\treturn e.Iterate(start, end, func(kv engine.MVCCKeyValue) (bool, error) {\n\t\t// Decode the key into a cmd, skipping on error. 
Otherwise,\n\t\t// write it to the corresponding key in the new cache.\n\t\tfamily, err := rc.decodeResponseCacheKey(kv.Key)\n\t\tif err != nil {\n\t\t\treturn false, util.Errorf(\"could not decode a response cache key %s: %s\",\n\t\t\t\troachpb.Key(kv.Key), err)\n\t\t}\n\t\tkey := keys.ResponseCacheKey(destRangeID, family)\n\t\tencKey := engine.MVCCEncodeKey(key)\n\t\t// Decode the value, update the checksum and re-encode.\n\t\tmeta := &engine.MVCCMetadata{}\n\t\tif err := proto.Unmarshal(kv.Value, meta); err != nil {\n\t\t\treturn false, util.Errorf(\"could not decode response cache value %s [% x]: %s\",\n\t\t\t\troachpb.Key(kv.Key), kv.Value, err)\n\t\t}\n\t\tmeta.Value.Checksum = nil\n\t\tmeta.Value.InitChecksum(key)\n\t\t_, _, err = engine.PutProto(e, encKey, meta)\n\t\treturn false, err\n\t})\n}", "func ucopy(dst, src uptr, size uintptr) {\n\telems := (*reflect.SliceHeader)(src).Len\n\tif elems == 0 {\n\t\treturn\n\t}\n\t// Access the slice's underlying data\n\tsrc = uptr((*reflect.SliceHeader)(src).Data)\n\tcopy(\n\t\t(*[math.MaxInt32]byte)(dst)[:uintptr(elems)*size],\n\t\t(*[math.MaxInt32]byte)(src)[:uintptr(elems)*size],\n\t)\n}", "func (m *MockHistogram) Merge(other Histogram) {\n\tm.Called(other)\n}", "func (in *HealthCheck) DeepCopyInto(out *HealthCheck) {\n\t*out = *in\n\tout.Data = in.Data\n}", "func (c *Chain) Copy(chain *Chain) {\n\tc.hs = make([]func(http.Handler) http.Handler, len(chain.hs))\n\n\tfor k := range chain.hs {\n\t\tc.hs[k] = chain.hs[k]\n\t}\n}", "func (fr *Frame) Copy(orig *Frame) {\n\tfr.Status = orig.Status\n\tfor y, row := range orig.Pix {\n\t\tcopy(fr.Pix[y][:], row)\n\t}\n}", "func (r *Replicator) ReplicateDataFrom(sourceObj interface{}, targetObj interface{}) error {\n\tsource := sourceObj.(*v1.Secret)\n\ttarget := targetObj.(*v1.Secret)\n\n\t// make sure replication is allowed\n\tlogger := log.\n\t\tWithField(\"kind\", r.Kind).\n\t\tWithField(\"source\", common.MustGetKey(source)).\n\t\tWithField(\"target\", common.MustGetKey(target))\n\n\tif ok, err := r.IsReplicationPermitted(&target.ObjectMeta, &source.ObjectMeta); !ok {\n\t\treturn errors.Wrapf(err, \"replication of target %s is not permitted\", common.MustGetKey(source))\n\t}\n\n\ttargetVersion, ok := target.Annotations[common.ReplicatedFromVersionAnnotation]\n\tsourceVersion := source.ResourceVersion\n\n\tif ok && targetVersion == sourceVersion {\n\t\tlogger.Debugf(\"target %s is already up-to-date\", common.MustGetKey(target))\n\t\treturn nil\n\t}\n\n\ttargetCopy := target.DeepCopy()\n\tif targetCopy.Data == nil {\n\t\ttargetCopy.Data = make(map[string][]byte)\n\t}\n\n\tprevKeys, hasPrevKeys := common.PreviouslyPresentKeys(&targetCopy.ObjectMeta)\n\treplicatedKeys := make([]string, 0)\n\n\tfor key, value := range source.Data {\n\t\tnewValue := make([]byte, len(value))\n\t\tcopy(newValue, value)\n\t\ttargetCopy.Data[key] = newValue\n\n\t\treplicatedKeys = append(replicatedKeys, key)\n\t\tdelete(prevKeys, key)\n\t}\n\n\tif hasPrevKeys {\n\t\tfor k := range prevKeys {\n\t\t\tlogger.Debugf(\"removing previously present key %s: not present in source any more\", k)\n\t\t\tdelete(targetCopy.Data, k)\n\t\t}\n\t}\n\n\tsort.Strings(replicatedKeys)\n\n\tlogger.Infof(\"updating target %s\", common.MustGetKey(target))\n\n\ttargetCopy.Annotations[common.ReplicatedAtAnnotation] = time.Now().Format(time.RFC3339)\n\ttargetCopy.Annotations[common.ReplicatedFromVersionAnnotation] = source.ResourceVersion\n\ttargetCopy.Annotations[common.ReplicatedKeysAnnotation] = strings.Join(replicatedKeys, \",\")\n\n\ts, err := 
r.Client.CoreV1().Secrets(target.Namespace).Update(context.TODO(), targetCopy, metav1.UpdateOptions{})\n\tif err != nil {\n\t\terr = errors.Wrapf(err, \"Failed updating target %s/%s\", target.Namespace, targetCopy.Name)\n\t} else if err = r.Store.Update(s); err != nil {\n\t\terr = errors.Wrapf(err, \"Failed to update cache for %s/%s: %v\", target.Namespace, targetCopy, err)\n\t}\n\treturn err\n}", "func (ms HistogramDataPoint) InitEmpty() {\n\t*ms.orig = &otlpmetrics.HistogramDataPoint{}\n}", "func (ms ResourceMetrics) CopyTo(dest ResourceMetrics) {\n\tif ms.IsNil() {\n\t\t*dest.orig = nil\n\t\treturn\n\t}\n\tif dest.IsNil() {\n\t\tdest.InitEmpty()\n\t}\n\tms.Resource().CopyTo(dest.Resource())\n\tms.InstrumentationLibraryMetrics().CopyTo(dest.InstrumentationLibraryMetrics())\n}", "func (in *HealthCheckData) DeepCopyInto(out *HealthCheckData) {\n\t*out = *in\n}", "func (s *f64) Append(src Floating) {\n\tmustSameChannels(s.Channels(), src.Channels())\n\toffset := s.Len()\n\tif s.Cap() < s.Len()+src.Len() {\n\t\ts.buffer = append(s.buffer, make([]float64, src.Len())...)\n\t} else {\n\t\ts.buffer = s.buffer[:s.Len()+src.Len()]\n\t}\n\tfor i := 0; i < src.Len(); i++ {\n\t\ts.SetSample(i+offset, src.Sample(i))\n\t}\n\talignCapacity(&s.buffer, s.Channels(), s.Cap())\n}", "func CopyColorSubTable(target uint32, start int32, x int32, y int32, width int32) {\n C.glowCopyColorSubTable(gpCopyColorSubTable, (C.GLenum)(target), (C.GLsizei)(start), (C.GLint)(x), (C.GLint)(y), (C.GLsizei)(width))\n}", "func CopyHeader(h *Header) *Header {\n\tcpy := *h\n\tif cpy.Time = new(big.Int); h.Time != nil {\n\t\tcpy.Time.Set(h.Time)\n\t}\n\tif cpy.SnailNumber = new(big.Int); h.SnailNumber != nil {\n\t\tcpy.SnailNumber.Set(h.SnailNumber)\n\t}\n\tif cpy.Number = new(big.Int); h.Number != nil {\n\t\tcpy.Number.Set(h.Number)\n\t}\n\tif len(h.Extra) > 0 {\n\t\tcpy.Extra = make([]byte, len(h.Extra))\n\t\tcopy(cpy.Extra, h.Extra)\n\t}\n\treturn &cpy\n}", "func (c *Collector) LoadFromRuntimeMetrics(h *metrics.Float64Histogram) {\n\tc.Lock()\n\tdefer c.Unlock()\n\n\tc.Buckets = make([]Bucket, len(h.Buckets)-1)\n\tc.BucketsLimit = len(h.Buckets)\n\tc.Bucket = Bucket{\n\t\tMin: h.Buckets[0],\n\t\tMax: h.Buckets[0],\n\t}\n\n\tfor i, b := range h.Buckets[1:] {\n\t\tbb := Bucket{\n\t\t\tMin: c.Bucket.Max,\n\t\t\tMax: b,\n\t\t\tCount: int(h.Counts[i]),\n\t\t}\n\n\t\tif bb.Count != 0 && !math.IsInf(b, 0) {\n\t\t\tbb.Sum = float64(bb.Count) * b\n\t\t\tc.Bucket.Sum += bb.Sum\n\t\t}\n\n\t\tc.Bucket.Count += bb.Count\n\t\tc.Bucket.Max = b\n\n\t\tc.Buckets[i] = bb\n\t}\n}", "func (g LabeledAdjacencyList) Copy() (c LabeledAdjacencyList, ma int) {\n\tc = make(LabeledAdjacencyList, len(g))\n\tfor n, to := range g {\n\t\tc[n] = append([]Half{}, to...)\n\t\tma += len(to)\n\t}\n\treturn\n}", "func (in Metric) DeepCopyInto(out *Metric) {\n\t{\n\t\tin := &in\n\t\t*out = make(Metric, len(*in))\n\t\tfor key, val := range *in {\n\t\t\t(*out)[key] = val\n\t\t}\n\t}\n}", "func getHistogramSamples(name string,\n\tlabels []*prometheus_models.LabelPair,\n\ttimestampMs int64,\n\th *prometheus_models.Histogram,\n\tentity string,\n) []Sample {\n\tsamples := make([]Sample, len(h.GetBucket())*2+2)\n\tsamples[0] = Sample{\n\t\tname: name + \"_count\",\n\t\tlabels: labels,\n\t\ttimestampMs: timestampMs,\n\t\tvalue: strconv.FormatUint(h.GetSampleCount(), 10),\n\t\tentity: entity,\n\t}\n\tsamples[1] = Sample{\n\t\tname: name + \"_sum\",\n\t\tlabels: labels,\n\t\ttimestampMs: timestampMs,\n\t\tvalue: strconv.FormatFloat(h.GetSampleSum(), 'E', -1, 
64),\n\t\tentity: entity,\n\t}\n\tfor i, b := range h.GetBucket() {\n\t\tsamples[i+2] = Sample{\n\t\t\tname: fmt.Sprintf(\"%s_bucket_%d_le\", name, i),\n\t\t\tlabels: labels,\n\t\t\ttimestampMs: timestampMs,\n\t\t\tvalue: strconv.FormatFloat(b.GetUpperBound(), 'E', -1, 64),\n\t\t\tentity: entity,\n\t\t}\n\t\tsamples[i+3] = Sample{\n\t\t\tname: fmt.Sprintf(\"%s_bucket_%d_count\", name, i),\n\t\t\tlabels: labels,\n\t\t\ttimestampMs: timestampMs,\n\t\t\tvalue: strconv.FormatUint(b.GetCumulativeCount(), 10),\n\t\t\tentity: entity,\n\t\t}\n\t}\n\treturn samples\n}", "func ImplementationWrapHashCopy(pointer unsafe.Pointer) (Hash, error) {\n\tctx := (*C.vscf_impl_t)(pointer)\n\tshallowCopy := C.vscf_impl_shallow_copy(ctx)\n\treturn ImplementationWrapHash(unsafe.Pointer(shallowCopy))\n}", "func (b *SummaryMetricsBuilder) Copy(object *SummaryMetrics) *SummaryMetricsBuilder {\n\tif object == nil {\n\t\treturn b\n\t}\n\tb.bitmap_ = object.bitmap_\n\tb.name = object.name\n\tif object.vector != nil {\n\t\tb.vector = make([]*SummarySampleBuilder, len(object.vector))\n\t\tfor i, v := range object.vector {\n\t\t\tb.vector[i] = NewSummarySample().Copy(v)\n\t\t}\n\t} else {\n\t\tb.vector = nil\n\t}\n\treturn b\n}", "func (in *BackendStatus) DeepCopyInto(out *BackendStatus) {\n\t*out = *in\n\tout.HaproxyStatus = in.HaproxyStatus\n\treturn\n}", "func (ms SummaryDataPointValueAtQuantile) CopyTo(dest SummaryDataPointValueAtQuantile) {\n\tdest.SetQuantile(ms.Quantile())\n\tdest.SetValue(ms.Value())\n}", "func CopyHeaders(dst, src http.Header) {\n\tfor k, vv := range src {\n\t\tdst[k] = append([]string{}, vv...)\n\t}\n}", "func (in *Percentage) DeepCopyInto(out *Percentage) {\n\t*out = *in\n}", "func (in *HostRuleHTTPPolicy) DeepCopyInto(out *HostRuleHTTPPolicy) {\n\t*out = *in\n\tif in.PolicySets != nil {\n\t\tin, out := &in.PolicySets, &out.PolicySets\n\t\t*out = make([]string, len(*in))\n\t\tcopy(*out, *in)\n\t}\n\treturn\n}", "func newBitmapFrom(other *bitmap, size int) *bitmap {\n\tbitmap := newBitmap(size)\n\n\tif size > other.Size {\n\t\tsize = other.Size\n\t}\n\n\tdiv := size / 8\n\n\tfor i := 0; i < div; i++ {\n\t\tbitmap.data[i] = other.data[i]\n\t}\n\n\tfor i := div * 8; i < size; i++ {\n\t\tif other.Bit(i) == 1 {\n\t\t\tbitmap.Set(i)\n\t\t}\n\t}\n\n\treturn bitmap\n}", "func (in *HighAvailabilitySpec) DeepCopyInto(out *HighAvailabilitySpec) {\n\t*out = *in\n\treturn\n}", "func (v *Values) Copy() device.Resulter {\n\tif v == nil {\n\t\treturn nil\n\t}\n\treturn &Values{\n\t\tSelectionMask: v.SelectionMask,\n\t\tValueMask: v.ValueMask}\n}", "func (in *RedisInsightSpec) DeepCopyInto(out *RedisInsightSpec) {\n\t*out = *in\n\tif in.MaxClients != nil {\n\t\tin, out := &in.MaxClients, &out.MaxClients\n\t\t*out = new(int64)\n\t\t**out = **in\n\t}\n\tif in.ConnectedClients != nil {\n\t\tin, out := &in.ConnectedClients, &out.ConnectedClients\n\t\t*out = new(int64)\n\t\t**out = **in\n\t}\n\tif in.BlockedClients != nil {\n\t\tin, out := &in.BlockedClients, &out.BlockedClients\n\t\t*out = new(int64)\n\t\t**out = **in\n\t}\n\tif in.TotalKeys != nil {\n\t\tin, out := &in.TotalKeys, &out.TotalKeys\n\t\t*out = new(int64)\n\t\t**out = **in\n\t}\n\tif in.ExpiredKeys != nil {\n\t\tin, out := &in.ExpiredKeys, &out.ExpiredKeys\n\t\t*out = new(int64)\n\t\t**out = **in\n\t}\n\tif in.EvictedKeys != nil {\n\t\tin, out := &in.EvictedKeys, &out.EvictedKeys\n\t\t*out = new(int64)\n\t\t**out = **in\n\t}\n\tif in.ReceivedConnections != nil {\n\t\tin, out := &in.ReceivedConnections, &out.ReceivedConnections\n\t\t*out = 
new(int64)\n\t\t**out = **in\n\t}\n\tif in.RejectedConnections != nil {\n\t\tin, out := &in.RejectedConnections, &out.RejectedConnections\n\t\t*out = new(int64)\n\t\t**out = **in\n\t}\n\tif in.SlowLogThresholdMicroSeconds != nil {\n\t\tin, out := &in.SlowLogThresholdMicroSeconds, &out.SlowLogThresholdMicroSeconds\n\t\t*out = new(int64)\n\t\t**out = **in\n\t}\n\tif in.SlowLogMaxLen != nil {\n\t\tin, out := &in.SlowLogMaxLen, &out.SlowLogMaxLen\n\t\t*out = new(int64)\n\t\t**out = **in\n\t}\n\treturn\n}", "func (ms Int64DataPoint) CopyTo(dest Int64DataPoint) {\n\tif ms.IsNil() {\n\t\t*dest.orig = nil\n\t\treturn\n\t}\n\tif dest.IsNil() {\n\t\tdest.InitEmpty()\n\t}\n\tms.LabelsMap().CopyTo(dest.LabelsMap())\n\tdest.SetStartTime(ms.StartTime())\n\tdest.SetTimestamp(ms.Timestamp())\n\tdest.SetValue(ms.Value())\n}", "func (in *Percent) DeepCopyInto(out *Percent) {\n\tp := proto.Clone(in).(*Percent)\n\t*out = *p\n}" ]
[ "0.7257423", "0.64692676", "0.6033145", "0.58727694", "0.56131786", "0.5535077", "0.534785", "0.53052396", "0.52886736", "0.52249086", "0.4995311", "0.49535173", "0.48267362", "0.47995722", "0.47640717", "0.47459516", "0.47377744", "0.47306165", "0.47177565", "0.46940556", "0.46884346", "0.46858898", "0.46787548", "0.46677905", "0.46291304", "0.46274927", "0.46243224", "0.45912614", "0.45878762", "0.4582347", "0.45808038", "0.45801467", "0.4567163", "0.45615855", "0.4560856", "0.45474786", "0.454583", "0.45356265", "0.45206985", "0.45098168", "0.4502977", "0.44890776", "0.4489037", "0.44858974", "0.4485426", "0.4476211", "0.4469972", "0.44567883", "0.44555724", "0.44532603", "0.44472358", "0.44423795", "0.44398147", "0.4432016", "0.44248843", "0.44196722", "0.44111925", "0.44086128", "0.4394844", "0.43866393", "0.43815345", "0.43809825", "0.43745777", "0.43727243", "0.43688747", "0.4365255", "0.43534264", "0.4348274", "0.4346236", "0.43458742", "0.43414745", "0.43404806", "0.433573", "0.43327075", "0.43298253", "0.43160355", "0.43155858", "0.43126288", "0.43104973", "0.43080196", "0.43038514", "0.4298655", "0.42943338", "0.4287763", "0.4284404", "0.42834157", "0.42797014", "0.4275763", "0.42728168", "0.4265021", "0.42602137", "0.42518738", "0.4248787", "0.42468166", "0.42437688", "0.4238404", "0.4236805", "0.42311195", "0.4222861", "0.422255" ]
0.8670082
0
Merge two different histograms with different scale parameters. The lowest offset and the highest divider value will be used as the new Histogram's scale parameters.
func Merge(h1 *Histogram, h2 *Histogram) *Histogram {
	divider := h1.Divider
	offset := h1.Offset
	if h2.Divider > h1.Divider {
		divider = h2.Divider
	}
	if h2.Offset < h1.Offset {
		offset = h2.Offset
	}
	newH := NewHistogram(offset, divider)
	newH.Transfer(h1)
	newH.Transfer(h2)
	return newH
}
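A minimal sketch of calling this Merge helper, assuming it lives in the same fortio-style stats package as Histogram (the import path and Record are again assumptions): the merged histogram takes the lowest offset and highest divider of its inputs, and Transfer empties both sources as a side effect.

package main

import (
	"fmt"

	"fortio.org/fortio/stats" // assumed import path
)

func main() {
	h1 := stats.NewHistogram(0, 1)   // offset 0, divider 1
	h2 := stats.NewHistogram(-5, 10) // offset -5, divider 10
	h1.Record(3)
	h2.Record(250)

	merged := stats.Merge(h1, h2)
	// merged was created as NewHistogram(-5, 10): lowest offset, highest divider.
	fmt.Println(merged.Export().Count) // 2
	fmt.Println(h1.Export().Count)     // 0: Transfer resets its source
}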
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "func MergeHistograms(a HistogramBuckets, b HistogramBuckets) (HistogramBuckets, error) {\n\tcounts := make(map[HistogramBucket]uint64)\n\tfor _, cur := range a {\n\t\tkey := HistogramBucket{\n\t\t\tScale: cur.Scale,\n\t\t\tLowerBound: cur.LowerBound,\n\t\t\tUpperBound: cur.UpperBound,\n\t\t}\n\t\tcounts[key] = cur.Count\n\t}\n\tfor _, cur := range b {\n\t\tkey := HistogramBucket{\n\t\t\tScale: cur.Scale,\n\t\t\tLowerBound: cur.LowerBound,\n\t\t\tUpperBound: cur.UpperBound,\n\t\t}\n\t\tprevCount, ok := counts[key]\n\t\tif !ok {\n\t\t\treturn nil, fmt.Errorf(\"[%f, %f] not found in previous counts\", cur.LowerBound, cur.UpperBound)\n\t\t}\n\t\tcounts[key] = prevCount + cur.Count\n\t}\n\ths := make(HistogramBuckets, 0, len(counts))\n\tfor k, v := range counts {\n\t\ths = append(hs, HistogramBucket{\n\t\t\tScale: k.Scale,\n\t\t\tLowerBound: k.LowerBound,\n\t\t\tUpperBound: k.UpperBound,\n\t\t\tCount: v,\n\t\t})\n\t}\n\tsort.Sort(HistogramBuckets(hs))\n\treturn hs, nil\n}", "func (m *MockHistogram) Merge(other Histogram) {\n\tm.Called(other)\n}", "func TestHistogramMatches(t *testing.T) {\n\th1 := Histogram{\n\t\tSchema: 3,\n\t\tCount: 61,\n\t\tSum: 2.7,\n\t\tZeroThreshold: 0.1,\n\t\tZeroCount: 42,\n\t\tPositiveSpans: []Span{\n\t\t\t{Offset: 0, Length: 4},\n\t\t\t{Offset: 10, Length: 3},\n\t\t},\n\t\tPositiveBuckets: []int64{1, 2, -2, 1, -1, 0, 0},\n\t\tNegativeSpans: []Span{\n\t\t\t{Offset: 0, Length: 4},\n\t\t\t{Offset: 10, Length: 3},\n\t\t},\n\t\tNegativeBuckets: []int64{1, 2, -2, 1, -1, 0, 0},\n\t}\n\n\tequals := func(h1, h2 Histogram) {\n\t\trequire.True(t, h1.Equals(&h2))\n\t\trequire.True(t, h2.Equals(&h1))\n\t\th1f, h2f := h1.ToFloat(), h2.ToFloat()\n\t\trequire.True(t, h1f.Equals(h2f))\n\t\trequire.True(t, h2f.Equals(h1f))\n\t}\n\tnotEquals := func(h1, h2 Histogram) {\n\t\trequire.False(t, h1.Equals(&h2))\n\t\trequire.False(t, h2.Equals(&h1))\n\t\th1f, h2f := h1.ToFloat(), h2.ToFloat()\n\t\trequire.False(t, h1f.Equals(h2f))\n\t\trequire.False(t, h2f.Equals(h1f))\n\t}\n\n\th2 := h1.Copy()\n\tequals(h1, *h2)\n\n\t// Changed spans but same layout.\n\th2.PositiveSpans = append(h2.PositiveSpans, Span{Offset: 5})\n\th2.NegativeSpans = append(h2.NegativeSpans, Span{Offset: 2})\n\tequals(h1, *h2)\n\t// Adding empty spans in between.\n\th2.PositiveSpans[1].Offset = 6\n\th2.PositiveSpans = []Span{\n\t\th2.PositiveSpans[0],\n\t\t{Offset: 1},\n\t\t{Offset: 3},\n\t\th2.PositiveSpans[1],\n\t\th2.PositiveSpans[2],\n\t}\n\th2.NegativeSpans[1].Offset = 5\n\th2.NegativeSpans = []Span{\n\t\th2.NegativeSpans[0],\n\t\t{Offset: 2},\n\t\t{Offset: 3},\n\t\th2.NegativeSpans[1],\n\t\th2.NegativeSpans[2],\n\t}\n\tequals(h1, *h2)\n\n\t// All mismatches.\n\tnotEquals(h1, Histogram{})\n\n\th2.Schema = 1\n\tnotEquals(h1, *h2)\n\n\th2 = h1.Copy()\n\th2.Count++\n\tnotEquals(h1, *h2)\n\n\th2 = h1.Copy()\n\th2.Sum++\n\tnotEquals(h1, *h2)\n\n\th2 = h1.Copy()\n\th2.ZeroThreshold++\n\tnotEquals(h1, *h2)\n\n\th2 = h1.Copy()\n\th2.ZeroCount++\n\tnotEquals(h1, *h2)\n\n\t// Changing value of buckets.\n\th2 = h1.Copy()\n\th2.PositiveBuckets[len(h2.PositiveBuckets)-1]++\n\tnotEquals(h1, *h2)\n\th2 = h1.Copy()\n\th2.NegativeBuckets[len(h2.NegativeBuckets)-1]++\n\tnotEquals(h1, *h2)\n\n\t// Changing bucket layout.\n\th2 = h1.Copy()\n\th2.PositiveSpans[1].Offset++\n\tnotEquals(h1, *h2)\n\th2 = h1.Copy()\n\th2.NegativeSpans[1].Offset++\n\tnotEquals(h1, *h2)\n\n\t// Adding an empty bucket.\n\th2 = h1.Copy()\n\th2.PositiveSpans[0].Offset--\n\th2.PositiveSpans[0].Length++\n\th2.PositiveBuckets = append([]int64{0}, 
h2.PositiveBuckets...)\n\tnotEquals(h1, *h2)\n\th2 = h1.Copy()\n\th2.NegativeSpans[0].Offset--\n\th2.NegativeSpans[0].Length++\n\th2.NegativeBuckets = append([]int64{0}, h2.NegativeBuckets...)\n\tnotEquals(h1, *h2)\n\n\t// Adding new bucket.\n\th2 = h1.Copy()\n\th2.PositiveSpans = append(h2.PositiveSpans, Span{\n\t\tOffset: 1,\n\t\tLength: 1,\n\t})\n\th2.PositiveBuckets = append(h2.PositiveBuckets, 1)\n\tnotEquals(h1, *h2)\n\th2 = h1.Copy()\n\th2.NegativeSpans = append(h2.NegativeSpans, Span{\n\t\tOffset: 1,\n\t\tLength: 1,\n\t})\n\th2.NegativeBuckets = append(h2.NegativeBuckets, 1)\n\tnotEquals(h1, *h2)\n}", "func UpdateHistogram(h *Histogram, feedback *QueryFeedback) *Histogram {\n\tbuckets, isNewBuckets, totalCount := splitBuckets(h, feedback)\n\tbuckets = mergeBuckets(buckets, isNewBuckets, float64(totalCount))\n\thist := buildNewHistogram(h, buckets)\n\t// Update the NDV of primary key column.\n\tif feedback.Tp == PkType {\n\t\thist.NDV = int64(hist.TotalRowCount())\n\t}\n\treturn hist\n}", "func Example_subplot() {\n\tconst npoints = 10000\n\n\t// Create a normal distribution.\n\tdist := distuv.Normal{\n\t\tMu: 0,\n\t\tSigma: 1,\n\t\tSrc: rand.New(rand.NewSource(0)),\n\t}\n\n\t// Draw some random values from the standard\n\t// normal distribution.\n\thist := hbook.NewH1D(20, -4, +4)\n\tfor i := 0; i < npoints; i++ {\n\t\tv := dist.Rand()\n\t\thist.Fill(v, 1)\n\t}\n\n\t// normalize histo\n\tarea := 0.0\n\tfor _, bin := range hist.Binning.Bins {\n\t\tarea += bin.SumW() * bin.XWidth()\n\t}\n\thist.Scale(1 / area)\n\n\t// Make a plot and set its title.\n\tp1 := hplot.New()\n\tp1.Title.Text = \"Histogram\"\n\tp1.X.Label.Text = \"X\"\n\tp1.Y.Label.Text = \"Y\"\n\n\t// Create a histogram of our values drawn\n\t// from the standard normal.\n\th := hplot.NewH1D(hist)\n\tp1.Add(h)\n\n\t// The normal distribution function\n\tnorm := hplot.NewFunction(dist.Prob)\n\tnorm.Color = color.RGBA{R: 255, A: 255}\n\tnorm.Width = vg.Points(2)\n\tp1.Add(norm)\n\n\t// draw a grid\n\tp1.Add(hplot.NewGrid())\n\n\t// make a second plot which will be diplayed in the upper-right\n\t// of the previous one\n\tp2 := hplot.New()\n\tp2.Title.Text = \"Sub plot\"\n\tp2.Add(h)\n\tp2.Add(hplot.NewGrid())\n\n\tconst (\n\t\twidth = 15 * vg.Centimeter\n\t\theight = width / math.Phi\n\t)\n\n\tc := vgimg.PngCanvas{Canvas: vgimg.New(width, height)}\n\tdc := draw.New(c)\n\tp1.Draw(dc)\n\tsub := draw.Canvas{\n\t\tCanvas: dc,\n\t\tRectangle: vg.Rectangle{\n\t\t\tMin: vg.Point{X: 0.70 * width, Y: 0.50 * height},\n\t\t\tMax: vg.Point{X: 1.00 * width, Y: 1.00 * height},\n\t\t},\n\t}\n\tp2.Draw(sub)\n\n\tf, err := os.Create(\"testdata/sub_plot.png\")\n\tif err != nil {\n\t\tlog.Fatalf(\"error: %v\\n\", err)\n\t}\n\tdefer f.Close()\n\t_, err = c.WriteTo(f)\n\tif err != nil {\n\t\tlog.Fatal(err)\n\t}\n\terr = f.Close()\n\tif err != nil {\n\t\tlog.Fatal(err)\n\t}\n\n}", "func (h *Histogram) Diff(old *Histogram) (*Histogram, error) {\n\tif h.Name != old.Name {\n\t\treturn nil, errors.Errorf(\"unmatched histogram, %s vs %s\", h.Name, old.Name)\n\t}\n\tif len(old.Buckets) > len(h.Buckets) {\n\t\treturn nil, errors.Errorf(\"old histogram has %d bucket(s), new only has %d\", len(old.Buckets), len(h.Buckets))\n\t}\n\n\tdiff := &Histogram{Name: h.Name, Sum: h.Sum - old.Sum}\n\toi := 0\n\tfor _, hb := range h.Buckets {\n\t\t// If we've already looked at all of the old buckets, copy the new bucket over.\n\t\tif oi >= len(old.Buckets) {\n\t\t\tdiff.Buckets = append(diff.Buckets, hb)\n\t\t\tcontinue\n\t\t}\n\n\t\tob := 
old.Buckets[oi]\n\n\t\tswitch {\n\t\tcase ob.Min < hb.Min:\n\t\t\t// The old histogram shouldn't contain any buckets that aren't in the new one.\n\t\t\treturn nil, errors.Errorf(\"bucket [%d,%d) is present in old histogram but not new one\", ob.Min, ob.Max)\n\t\tcase ob.Min > hb.Min:\n\t\t\t// If this bucket isn't present in the old histogram, just copy it over.\n\t\t\tif ob.Min < hb.Max {\n\t\t\t\treturn nil, errors.Errorf(\"old bucket [%d,%d) overlaps new bucket [%d,%d)\", ob.Min, ob.Max, hb.Min, hb.Max)\n\t\t\t}\n\t\t\tdiff.Buckets = append(diff.Buckets, hb)\n\t\tcase ob.Min == hb.Min:\n\t\t\t// If we're looking at the same bucket in both histograms, save the difference (if any) and move to the next old bucket.\n\t\t\tif ob.Max != hb.Max {\n\t\t\t\treturn nil, errors.Errorf(\"old bucket [%d,%d) doesn't match new bucket [%d,%d)\", ob.Min, ob.Max, hb.Min, hb.Max)\n\t\t\t}\n\t\t\tif hb.Count < ob.Count {\n\t\t\t\treturn nil, errors.Errorf(\"old bucket [%d,%d) has count %d, new only has %d\", ob.Min, ob.Max, ob.Count, hb.Count)\n\t\t\t} else if hb.Count > ob.Count {\n\t\t\t\tdiff.Buckets = append(diff.Buckets, HistogramBucket{hb.Min, hb.Max, hb.Count - ob.Count})\n\t\t\t}\n\t\t\toi++\n\t\t}\n\t}\n\treturn diff, nil\n}", "func (histogram Histogram) Map(min int, max int, width int, height int) {\n\n\t// TODO: Remove floating point operations!\n\n\tsum := 0\n\tscale := float32(max-min)/float32(width*height)\n\t\n\tbinCount := len(histogram)\n\n\tfor i := 0; i < binCount; i++ {\n\t\tsum += histogram[i]\n\t\thistogram[i] = (min + int(float32(sum)*scale))\n\t\tif histogram[i] > max {\n\t\t\thistogram[i] = max\n\t\t}\n\t}\n}", "func (c *ClusterScaling) Merge(b *ClusterScaling) *ClusterScaling {\n\tconfig := *c\n\n\tif b.Enabled {\n\t\tconfig.Enabled = b.Enabled\n\t}\n\n\tif b.MaxSize != 0 {\n\t\tconfig.MaxSize = b.MaxSize\n\t}\n\n\tif b.MinSize != 0 {\n\t\tconfig.MinSize = b.MinSize\n\t}\n\n\tif b.CoolDown != 0 {\n\t\tconfig.CoolDown = b.CoolDown\n\t}\n\n\tif b.NodeFaultTolerance != 0 {\n\t\tconfig.NodeFaultTolerance = b.NodeFaultTolerance\n\t}\n\n\tif b.AutoscalingGroup != \"\" {\n\t\tconfig.AutoscalingGroup = b.AutoscalingGroup\n\t}\n\n\treturn &config\n}", "func newBatchHistogram(desc *Desc, buckets []float64, hasSum bool) *batchHistogram {\n\t// We need to remove -Inf values. 
runtime/metrics keeps them around.\n\t// But -Inf bucket should not be allowed for prometheus histograms.\n\tif buckets[0] == math.Inf(-1) {\n\t\tbuckets = buckets[1:]\n\t}\n\th := &batchHistogram{\n\t\tdesc: desc,\n\t\tbuckets: buckets,\n\t\t// Because buckets follows runtime/metrics conventions, there's\n\t\t// 1 more value in the buckets list than there are buckets represented,\n\t\t// because in runtime/metrics, the bucket values represent *boundaries*,\n\t\t// and non-Inf boundaries are inclusive lower bounds for that bucket.\n\t\tcounts: make([]uint64, len(buckets)-1),\n\t\thasSum: hasSum,\n\t}\n\th.init(h)\n\treturn h\n}", "func Diff(dst draw.Image, a, b image.Image) image.Rectangle {\n\trect := dst.Bounds().Intersect(a.Bounds()).Intersect(b.Bounds())\n\n\t// Determine greyscale dynamic range.\n\tmin := uint16(math.MaxUint16)\n\tmax := uint16(0)\n\tfor x := rect.Min.X; x < rect.Max.X; x++ {\n\t\tfor y := rect.Min.Y; y < rect.Max.Y; y++ {\n\t\t\tp := diffColor{a.At(x, y), b.At(x, y)}\n\t\t\tg := color.Gray16Model.Convert(p).(color.Gray16)\n\t\t\tif g.Y < min {\n\t\t\t\tmin = g.Y\n\t\t\t}\n\t\t\tif g.Y > max {\n\t\t\t\tmax = g.Y\n\t\t\t}\n\t\t}\n\t}\n\n\t// Render intensity-scaled difference.\n\tfor x := rect.Min.X; x < rect.Max.X; x++ {\n\t\tfor y := rect.Min.Y; y < rect.Max.Y; y++ {\n\t\t\tdst.Set(x, y, scaledColor{\n\t\t\t\tmin: uint32(min), max: uint32(max),\n\t\t\t\tc: diffColor{a.At(x, y), b.At(x, y)},\n\t\t\t})\n\t\t}\n\t}\n\n\treturn rect\n}", "func (h *Histogram) copyHDataFrom(src *Histogram) {\n\tif h.Divider == src.Divider && h.Offset == src.Offset {\n\t\tfor i := 0; i < len(h.Hdata); i++ {\n\t\t\th.Hdata[i] += src.Hdata[i]\n\t\t}\n\t\treturn\n\t}\n\n\thData := src.Export()\n\tfor _, data := range hData.Data {\n\t\th.record((data.Start+data.End)/2, int(data.Count))\n\t}\n}", "func (h *Histogram) filterHistogram(newBuckets []Bucket) *Histogram {\n\tcheckBucketsValid(newBuckets)\n\n\ttotal := int64(0)\n\tfor _, b := range newBuckets {\n\t\ttotal += b.NumEq + b.NumRange\n\t}\n\n\tif total == 0 {\n\t\treturn &Histogram{}\n\t}\n\n\tselectivity := float64(total) / float64(h.RowCount)\n\n\t// Estimate the new DistinctCount based on the selectivity of this filter.\n\t// todo(rytaft): this could be more precise if we take into account the\n\t// null count of the original histogram. 
This could also be more precise for\n\t// the operators =, !=, in, and not in, since we know how these operators\n\t// should affect the distinct count.\n\tdistinctCount := int64(float64(h.DistinctCount) * selectivity)\n\tif distinctCount == 0 {\n\t\t// There must be at least one distinct value since RowCount > 0.\n\t\tdistinctCount++\n\t}\n\n\treturn &Histogram{\n\t\tRowCount: total,\n\t\tDistinctCount: distinctCount,\n\n\t\t// All the returned rows will be non-null for this column.\n\t\tNullCount: 0,\n\t\tBuckets: newBuckets,\n\t}\n}", "func (b *BarChart) Plot(c draw.Canvas, plt *plot.Plot) {\n\n// fmt.Println (\"Plot start \" )\n\n\ttrCat, trVal := plt.Transforms(&c)\n\tif b.Horizontal {\n\t\ttrCat, trVal = trVal, trCat\n\t}\n\n\tfor i, ht := range b.Values {\n\t\tcatVal := b.XMin + float64(i)\n\t\tcatMin := trCat(float64(catVal))\n\t\tif !b.Horizontal {\n\t\t\tif !c.ContainsX(catMin) {\n\t\t\t\tcontinue\n\t\t\t}\n\t\t} else {\n\t\t\tif !c.ContainsY(catMin) {\n\t\t\t\tcontinue\n\t\t\t}\n\t\t}\n\t\tcatMin = catMin - b.Width/2 + b.Offset\n\t\tcatMax := catMin + b.Width\n\t\tbottom := b.stackedOn.BarHeight(i)\n\n//\t\tfmt.Println (\"Plot bottom \" ,bottom )\n\n valMin := trVal(bottom+b.Values2[i])\n\n\t\tvalMax := trVal(bottom + ht)\n\n//\t\tfmt.Println (\"Plot valMin \" ,valMin )\n\n\t\tvar pts []vg.Point\n\t\tvar poly []vg.Point\n\t\tif !b.Horizontal {\n\n//\t\t fmt.Println (\"Plot type1 \" )\n\n\t\t\tpts = []vg.Point{\n\t\t\t\t{catMin, valMin},\n\t\t\t\t{catMin, valMax},\n\t\t\t\t{catMax, valMax},\n\t\t\t\t{catMax, valMin},\n\t\t\t}\n\t\t\tpoly = c.ClipPolygonY(pts)\n\t\t} else { // case XY\n\n//\t\t fmt.Println (\"Plot type2 \" )\n\n\t\t\tpts = []vg.Point{\n\t\t\t\t{valMin, catMin},\n\t\t\t\t{valMin, catMax},\n// {valMin+50, catMin},\n//\t\t\t\t{valMin+50, catMax},\n\t\t\t\t{valMax, catMax},\n\t\t\t\t{valMax, catMin},\n\t\t\t}\n\n//\t\t\tfmt.Println (\"Plot 1\" ,valMin, catMin)\n// fmt.Println (\"Plot 2\" ,valMin, catMax)\n// fmt.Println (\"Plot 3\" ,valMax, catMax)\n// fmt.Println (\"Plot 4\" ,valMax, catMax)\n\n\t\t\tpoly = c.ClipPolygonX(pts)\n\t\t}\n\n//\t\tfmt.Println (\"Plot pts \" ,pts )\n\n\t\tc.FillPolygon(b.Color, poly)\n\n\t\tvar outline [][]vg.Point\n\t\tif !b.Horizontal { // case normal\n\n\t\t\tpts = append(pts, vg.Point{X: catMin, Y: valMin})\n\t\t\toutline = c.ClipLinesY(pts)\n\t\t} else { // case vy\n\n\t\t\tpts = append(pts, vg.Point{X: valMin, Y: catMin})\n\t\t\toutline = c.ClipLinesX(pts)\n\t\t}\n\n//\t\tfmt.Println (\"Plot outline \" ,outline )\n\n\t\tc.StrokeLines(b.LineStyle, outline...)\n\t}\n\n//\tfmt.Println (\"Plot end \" ) // 繝・・ス・ス繝・・ス・ス\n\n}", "func (bm *BoundedMean) Merge(bm2 *BoundedMean) error {\n\tif err := checkMergeBoundedMean(bm, bm2); err != nil {\n\t\treturn err\n\t}\n\tbm.NormalizedSum.Merge(&bm2.NormalizedSum)\n\tbm.Count.Merge(&bm2.Count)\n\tbm2.state = merged\n\treturn nil\n}", "func Merge(metrics map[string]metricSource.MetricData, other map[string]metricSource.MetricData) map[string]metricSource.MetricData {\n\tresult := make(map[string]metricSource.MetricData, len(metrics)+len(other))\n\tfor k, v := range metrics {\n\t\tresult[k] = v\n\t}\n\tfor k, v := range other {\n\t\tresult[k] = v\n\t}\n\treturn result\n}", "func funcHistogramQuantile(vals []parser.Value, args parser.Expressions, enh *EvalNodeHelper) Vector {\n\tq := vals[0].(Vector)[0].F\n\tinVec := vals[1].(Vector)\n\n\tif enh.signatureToMetricWithBuckets == nil {\n\t\tenh.signatureToMetricWithBuckets = map[string]*metricWithBuckets{}\n\t} else {\n\t\tfor _, v := range 
enh.signatureToMetricWithBuckets {\n\t\t\tv.buckets = v.buckets[:0]\n\t\t}\n\t}\n\n\tvar histogramSamples []Sample\n\n\tfor _, sample := range inVec {\n\t\t// We are only looking for conventional buckets here. Remember\n\t\t// the histograms for later treatment.\n\t\tif sample.H != nil {\n\t\t\thistogramSamples = append(histogramSamples, sample)\n\t\t\tcontinue\n\t\t}\n\n\t\tupperBound, err := strconv.ParseFloat(\n\t\t\tsample.Metric.Get(model.BucketLabel), 64,\n\t\t)\n\t\tif err != nil {\n\t\t\t// Oops, no bucket label or malformed label value. Skip.\n\t\t\t// TODO(beorn7): Issue a warning somehow.\n\t\t\tcontinue\n\t\t}\n\t\tenh.lblBuf = sample.Metric.BytesWithoutLabels(enh.lblBuf, labels.BucketLabel)\n\t\tmb, ok := enh.signatureToMetricWithBuckets[string(enh.lblBuf)]\n\t\tif !ok {\n\t\t\tsample.Metric = labels.NewBuilder(sample.Metric).\n\t\t\t\tDel(excludedLabels...).\n\t\t\t\tLabels()\n\n\t\t\tmb = &metricWithBuckets{sample.Metric, nil}\n\t\t\tenh.signatureToMetricWithBuckets[string(enh.lblBuf)] = mb\n\t\t}\n\t\tmb.buckets = append(mb.buckets, bucket{upperBound, sample.F})\n\n\t}\n\n\t// Now deal with the histograms.\n\tfor _, sample := range histogramSamples {\n\t\t// We have to reconstruct the exact same signature as above for\n\t\t// a conventional histogram, just ignoring any le label.\n\t\tenh.lblBuf = sample.Metric.Bytes(enh.lblBuf)\n\t\tif mb, ok := enh.signatureToMetricWithBuckets[string(enh.lblBuf)]; ok && len(mb.buckets) > 0 {\n\t\t\t// At this data point, we have conventional histogram\n\t\t\t// buckets and a native histogram with the same name and\n\t\t\t// labels. Do not evaluate anything.\n\t\t\t// TODO(beorn7): Issue a warning somehow.\n\t\t\tdelete(enh.signatureToMetricWithBuckets, string(enh.lblBuf))\n\t\t\tcontinue\n\t\t}\n\n\t\tenh.Out = append(enh.Out, Sample{\n\t\t\tMetric: enh.DropMetricName(sample.Metric),\n\t\t\tF: histogramQuantile(q, sample.H),\n\t\t})\n\t}\n\n\tfor _, mb := range enh.signatureToMetricWithBuckets {\n\t\tif len(mb.buckets) > 0 {\n\t\t\tenh.Out = append(enh.Out, Sample{\n\t\t\t\tMetric: mb.metric,\n\t\t\t\tF: bucketQuantile(q, mb.buckets),\n\t\t\t})\n\t\t}\n\t}\n\n\treturn enh.Out\n}", "func (_m *Reporter) Histogram(name string, value float64, tags ...monitoring.Tag) {\n\t_va := make([]interface{}, len(tags))\n\tfor _i := range tags {\n\t\t_va[_i] = tags[_i]\n\t}\n\tvar _ca []interface{}\n\t_ca = append(_ca, name, value)\n\t_ca = append(_ca, _va...)\n\t_m.Called(_ca...)\n}", "func histogramRate(points []HPoint, isCounter bool) *histogram.FloatHistogram {\n\tprev := points[0].H\n\tlast := points[len(points)-1].H\n\tif last == nil {\n\t\treturn nil // Range contains a mix of histograms and floats.\n\t}\n\tminSchema := prev.Schema\n\tif last.Schema < minSchema {\n\t\tminSchema = last.Schema\n\t}\n\n\t// First iteration to find out two things:\n\t// - What's the smallest relevant schema?\n\t// - Are all data points histograms?\n\t// TODO(beorn7): Find a way to check that earlier, e.g. 
by handing in a\n\t// []FloatPoint and a []HistogramPoint separately.\n\tfor _, currPoint := range points[1 : len(points)-1] {\n\t\tcurr := currPoint.H\n\t\tif curr == nil {\n\t\t\treturn nil // Range contains a mix of histograms and floats.\n\t\t}\n\t\t// TODO(trevorwhitney): Check if isCounter is consistent with curr.CounterResetHint.\n\t\tif !isCounter {\n\t\t\tcontinue\n\t\t}\n\t\tif curr.Schema < minSchema {\n\t\t\tminSchema = curr.Schema\n\t\t}\n\t}\n\n\th := last.CopyToSchema(minSchema)\n\th.Sub(prev)\n\n\tif isCounter {\n\t\t// Second iteration to deal with counter resets.\n\t\tfor _, currPoint := range points[1:] {\n\t\t\tcurr := currPoint.H\n\t\t\tif curr.DetectReset(prev) {\n\t\t\t\th.Add(prev)\n\t\t\t}\n\t\t\tprev = curr\n\t\t}\n\t}\n\n\th.CounterResetHint = histogram.GaugeType\n\treturn h.Compact(0)\n}", "func (h HyperparameterV0) Merge(other interface{}) interface{} {\n\t// Never merge partial hyperparameters.\n\treturn h\n}", "func (h *Histogram) Add(value int64) {\n\tfor i := range h.labels {\n\t\tif i == len(h.labels)-1 || value <= h.cutoffs[i] {\n\t\t\th.buckets[i].Add(1)\n\t\t\th.total.Add(value)\n\t\t\tbreak\n\t\t}\n\t}\n\tif h.hook != nil {\n\t\th.hook(value)\n\t}\n\tif defaultStatsdHook.histogramHook != nil && h.name != \"\" {\n\t\tdefaultStatsdHook.histogramHook(h.name, value)\n\t}\n}", "func splitMetrics(size int, toSplit pdata.Metrics) pdata.Metrics {\n\tif toSplit.MetricCount() <= size {\n\t\treturn toSplit\n\t}\n\tcopiedMetrics := 0\n\tresult := pdata.NewMetrics()\n\tresult.ResourceMetrics().Resize(toSplit.ResourceMetrics().Len())\n\trms := toSplit.ResourceMetrics()\n\n\trmsCount := 0\n\tfor i := rms.Len() - 1; i >= 0; i-- {\n\t\trmsCount++\n\t\trm := rms.At(i)\n\t\tdestRs := result.ResourceMetrics().At(result.ResourceMetrics().Len() - 1 - i)\n\t\trm.Resource().CopyTo(destRs.Resource())\n\n\t\tdestRs.InstrumentationLibraryMetrics().Resize(rm.InstrumentationLibraryMetrics().Len())\n\n\t\tilmCount := 0\n\t\tfor j := rm.InstrumentationLibraryMetrics().Len() - 1; j >= 0; j-- {\n\t\t\tilmCount++\n\t\t\tinstMetrics := rm.InstrumentationLibraryMetrics().At(j)\n\t\t\tdestInstMetrics := destRs.InstrumentationLibraryMetrics().At(destRs.InstrumentationLibraryMetrics().Len() - 1 - j)\n\t\t\tinstMetrics.InstrumentationLibrary().CopyTo(destInstMetrics.InstrumentationLibrary())\n\n\t\t\tif size-copiedMetrics >= instMetrics.Metrics().Len() {\n\t\t\t\tdestInstMetrics.Metrics().Resize(instMetrics.Metrics().Len())\n\t\t\t} else {\n\t\t\t\tdestInstMetrics.Metrics().Resize(size - copiedMetrics)\n\t\t\t}\n\t\t\tfor k, destIdx := instMetrics.Metrics().Len()-1, 0; k >= 0 && copiedMetrics < size; k, destIdx = k-1, destIdx+1 {\n\t\t\t\tmetric := instMetrics.Metrics().At(k)\n\t\t\t\tmetric.CopyTo(destInstMetrics.Metrics().At(destIdx))\n\t\t\t\tcopiedMetrics++\n\t\t\t\t// remove metric\n\t\t\t\tinstMetrics.Metrics().Resize(instMetrics.Metrics().Len() - 1)\n\t\t\t}\n\t\t\tif instMetrics.Metrics().Len() == 0 {\n\t\t\t\trm.InstrumentationLibraryMetrics().Resize(rm.InstrumentationLibraryMetrics().Len() - 1)\n\t\t\t}\n\t\t\tif copiedMetrics == size {\n\t\t\t\tresult.ResourceMetrics().Resize(rmsCount)\n\t\t\t\treturn result\n\t\t\t}\n\t\t}\n\t\tdestRs.InstrumentationLibraryMetrics().Resize(ilmCount)\n\t\tif rm.InstrumentationLibraryMetrics().Len() == 0 {\n\t\t\trms.Resize(rms.Len() - 1)\n\t\t}\n\t}\n\tresult.ResourceMetrics().Resize(rmsCount)\n\treturn result\n}", "func getHistogram(src [][3]int, size float64, pixels *[HistSize][3]float64, hist *[HistSize]float64) {\n\tvar ind, r, g, b, i 
int\n\tvar inr, ing, inb int\n\n\tfor i = range src {\n\t\tr = src[i][0]\n\t\tg = src[i][1]\n\t\tb = src[i][2]\n\n\t\tinr = r >> Shift\n\t\ting = g >> Shift\n\t\tinb = b >> Shift\n\n\t\tind = (inr << (2 * HistBits)) + (ing << HistBits) + inb\n\t\tpixels[ind][0], pixels[ind][1], pixels[ind][2] = float64(r), float64(g), float64(b)\n\t\thist[ind]++\n\t}\n\n\t// normalize weight by the number of pixels in the image\n\tfor i = 0; i < HistSize; i++ {\n\t\thist[i] /= size\n\t}\n}", "func (d TestSink) Histogram(c *telemetry.Context, stat string, value float64) {\n\td[stat] = TestMetric{\"Histogram\", value, c.Tags()}\n}", "func NewMultiHistogram(name string, histograms ...Histogram) Histogram {\n\treturn &multiHistogram{\n\t\tname: name,\n\t\ta: histograms,\n\t}\n}", "func (bench *Stopwatch) HistogramClamp(binCount int, min, max time.Duration) *Histogram {\n\tbench.mustBeCompleted()\n\n\tdurations := make([]time.Duration, 0, len(bench.spans))\n\tfor _, span := range bench.spans {\n\t\tduration := span.Duration()\n\t\tif duration < min {\n\t\t\tdurations = append(durations, min)\n\t\t} else {\n\t\t\tdurations = append(durations, duration)\n\t\t}\n\t}\n\n\topts := defaultOptions\n\topts.BinCount = binCount\n\topts.ClampMaximum = float64(max.Nanoseconds())\n\topts.ClampPercentile = 0\n\n\treturn NewDurationHistogram(durations, &opts)\n}", "func (sc *Scene) getHistograms(galaxy *galaxy.Galaxy) {\n\tlumHist := make([]int, 256)\n\tlumSCHist := make([]int, 256)\n\tlumDfltHist := make(map[float64]int)\n//\ttypeHist := make(map[int]int)\n\tvar maxlum float64\n\n\tfor _, t := range galaxy.StarTypes.Types {\n\t//\ttypeHist[t.Type] = 0\n\t\tlumDfltHist[t.Luminosity] = 0\n\t}\n\n\t// find maximum lumen\n\tfor _, s := range galaxy.Systems {\n\t\tif s.Lum > maxlum {\n\t\t\tmaxlum = s.Lum\n\t\t}\n\n\t\t//typeHist[s.CenterObject.Type()]++\n\t\tlumDfltHist[s.CenterObject.Lum()]++\n\t}\n\n\t// Lum histogram\n\tfor _, s := range galaxy.Systems {\n\t\tlumHist[int(s.Lum*255/maxlum)]++\n\t}\n\tfmt.Println(maxlum, lumHist)\n\n\tmaxlum = 0\n\t// lum histogram in SC\n\tfor _, l := range sc.Lums {\n\t\tif l > maxlum {\n\t\t\tmaxlum = l\n\t\t}\n\t}\n\tfmt.Println(\"maxlum sc\", maxlum)\n\n\tfor _, l := range sc.Lums {\n\t\tlumSCHist[int(l*255/maxlum)]++\n\t}\n\n//\tfmt.Println(typeHist)\n\tfmt.Println(lumDfltHist)\n\tfmt.Println(maxlum, lumSCHist)\n\t// ------\n\n}", "func TestMinMaxDifferentPeriods(t *testing.T) {\n\tacc := testutil.Accumulator{}\n\tminmax, err := newMinMax()\n\trequire.NoError(t, err)\n\tminmax.Add(m1)\n\tminmax.Push(&acc)\n\texpectedFields := map[string]interface{}{\n\t\t\"a_max\": int64(1),\n\t\t\"a_min\": int64(1),\n\t\t\"b_max\": int64(1),\n\t\t\"b_min\": int64(1),\n\t\t\"c_max\": int64(1),\n\t\t\"c_min\": int64(1),\n\t\t\"d_max\": int64(1),\n\t\t\"d_min\": int64(1),\n\t\t\"e_max\": int64(1),\n\t\t\"e_min\": int64(1),\n\t\t\"f_max\": int64(2),\n\t\t\"f_min\": int64(2),\n\t\t\"g_max\": int64(2),\n\t\t\"g_min\": int64(2),\n\t\t\"h_max\": int64(2),\n\t\t\"h_min\": int64(2),\n\t\t\"i_max\": int64(2),\n\t\t\"i_min\": int64(2),\n\t\t\"j_max\": int64(3),\n\t\t\"j_min\": int64(3),\n\t}\n\texpectedTags := map[string]string{\n\t\t\"foo\": \"bar\",\n\t}\n\tacc.AssertContainsTaggedFields(t, \"m1\", expectedFields, expectedTags)\n\n\tacc.ClearMetrics()\n\tminmax.Reset()\n\tminmax.Add(m2)\n\tminmax.Push(&acc)\n\texpectedFields = map[string]interface{}{\n\t\t\"a_max\": int64(1),\n\t\t\"a_min\": int64(1),\n\t\t\"b_max\": int64(3),\n\t\t\"b_min\": int64(3),\n\t\t\"c_max\": int64(3),\n\t\t\"c_min\": 
int64(3),\n\t\t\"d_max\": int64(3),\n\t\t\"d_min\": int64(3),\n\t\t\"e_max\": int64(3),\n\t\t\"e_min\": int64(3),\n\t\t\"f_max\": int64(1),\n\t\t\"f_min\": int64(1),\n\t\t\"g_max\": int64(1),\n\t\t\"g_min\": int64(1),\n\t\t\"h_max\": int64(1),\n\t\t\"h_min\": int64(1),\n\t\t\"i_max\": int64(1),\n\t\t\"i_min\": int64(1),\n\t\t\"j_max\": int64(1),\n\t\t\"j_min\": int64(1),\n\t\t\"k_max\": int64(200),\n\t\t\"k_min\": int64(200),\n\t\t\"l_max\": int64(200),\n\t\t\"l_min\": int64(200),\n\t}\n\texpectedTags = map[string]string{\n\t\t\"foo\": \"bar\",\n\t}\n\tacc.AssertContainsTaggedFields(t, \"m1\", expectedFields, expectedTags)\n}", "func NewHistogram(Offset float64, Divider float64) *Histogram {\n\th := new(Histogram)\n\th.Offset = Offset\n\tif Divider == 0 {\n\t\treturn nil\n\t}\n\th.Divider = Divider\n\th.Hdata = make([]int32, numBuckets)\n\treturn h\n}", "func ParseHistogram(scale string, histo *dto.Histogram) (buckets HistogramBuckets, err error) {\n\tif histo == nil {\n\t\treturn nil, errors.New(\"nil Histogram\")\n\t}\n\n\ttotal := *histo.SampleCount\n\tn := len(histo.Bucket)\n\n\tbuckets = make(HistogramBuckets, n+1)\n\tbuckets[n] = HistogramBucket{\n\t\tScale: scale,\n\t\tUpperBound: math.MaxFloat64,\n\t\tCount: total,\n\t}\n\tfor idx, bv := range histo.Bucket {\n\t\tbuckets[idx] = HistogramBucket{\n\t\t\tScale: scale,\n\t\t\tUpperBound: *bv.UpperBound,\n\t\t\tCount: *bv.CumulativeCount,\n\t\t}\n\t}\n\tfor idx := n; idx > 0; idx-- { // start from last, end at second to last\n\t\t// convert cumulative count to per-bucket count\n\t\tbuckets[idx].Count = buckets[idx].Count - buckets[idx-1].Count\n\t\t// use previous bucket upper bound as lower bound\n\t\tbuckets[idx].LowerBound = buckets[idx-1].UpperBound\n\t}\n\n\tsort.Sort(HistogramBuckets(buckets))\n\treturn buckets, nil\n}", "func (cm *customMetrics) AddHistogram(\n\tnamespace, subsystem, name, help, internalKey string,\n\tconstLabels prometheus.Labels, buckets []float64) {\n\n\tcm.histograms[internalKey] = promauto.NewHistogram(prometheus.HistogramOpts{\n\t\tNamespace: namespace,\n\t\tSubsystem: subsystem,\n\t\tName: name,\n\t\tHelp: help,\n\t\tConstLabels: constLabels,\n\t\tBuckets: buckets,\n\t})\n}", "func (c *ConfigureOpener) Merge(other ConfigureOpener) {\n\tif c.ErrorThresholdPercentage == 0 {\n\t\tc.ErrorThresholdPercentage = other.ErrorThresholdPercentage\n\t}\n\tif c.RequestVolumeThreshold == 0 {\n\t\tc.RequestVolumeThreshold = other.RequestVolumeThreshold\n\t}\n\tif c.Now == nil {\n\t\tc.Now = other.Now\n\t}\n\tif c.RollingDuration == 0 {\n\t\tc.RollingDuration = other.RollingDuration\n\t}\n\tif c.NumBuckets == 0 {\n\t\tc.NumBuckets = other.NumBuckets\n\t}\n}", "func (s *Summary) merge(o *Summary) {\n\tif o.TimeElapsed > s.TimeElapsed {\n\t\ts.TimeElapsed = o.TimeElapsed\n\t}\n\ts.SuccessHistogram.Merge(o.SuccessHistogram)\n\ts.UncorrectedSuccessHistogram.Merge(o.UncorrectedSuccessHistogram)\n\ts.ErrorHistogram.Merge(o.ErrorHistogram)\n\ts.UncorrectedErrorHistogram.Merge(o.UncorrectedErrorHistogram)\n\ts.SuccessTotal += o.SuccessTotal\n\ts.ErrorTotal += o.ErrorTotal\n\ts.Throughput += o.Throughput\n\ts.RequestRate += o.RequestRate\n}", "func Copy(h *hdrhistogram.Histogram) *hdrhistogram.Histogram {\n\tdup := hdrhistogram.New(h.LowestTrackableValue(), h.HighestTrackableValue(),\n\t\tint(h.SignificantFigures()))\n\tdup.Merge(h)\n\treturn dup\n}", "func (j *JobScaling) Merge(b *JobScaling) *JobScaling {\n\tconfig := *j\n\n\tif b.Enabled {\n\t\tconfig.Enabled = b.Enabled\n\t}\n\n\tif b.ConsulToken != \"\" 
{\n\t\tconfig.ConsulToken = b.ConsulToken\n\t}\n\n\tif b.ConsulKeyLocation != \"\" {\n\t\tconfig.ConsulKeyLocation = b.ConsulKeyLocation\n\t}\n\n\treturn &config\n}", "func (m *MetricsManager) AddHistogram(name, help string, labelNames []string, buckets []float64) error {\n\tvar allLabels sort.StringSlice\n\tfor k := range m.commonLabels {\n\t\tallLabels = append(allLabels, k)\n\t}\n\tallLabels = append(allLabels, labelNames...)\n\tallLabels.Sort()\n\n\topts := prometheus.HistogramOpts{\n\t\tName: name,\n\t\tHelp: help,\n\t}\n\tif buckets != nil {\n\t\topts.Buckets = buckets\n\t}\n\n\tmetric := prometheus.NewHistogramVec(opts, allLabels)\n\tif err := prometheus.Register(metric); err != nil {\n\t\treturn err\n\t}\n\n\tpartialMetric, err := metric.CurryWith(m.commonLabels)\n\tif err != nil {\n\t\treturn err\n\t}\n\n\tm.histograms[name] = &Histogram{\n\t\tcreationTime: time.Now(),\n\t\tmetric: partialMetric,\n\t}\n\treturn nil\n}", "func mergeSeries(dst, src *influxdb.Series) {\n\tdst.Points = append(dst.Points, src.Points...)\n}", "func Blended(img1, img2 image.Image, threshold int) image.Image {\n\tt := float64(threshold)\n\n\tblend := image.NewNRGBA(img1.Bounds())\n\tif !img1.Bounds().Eq(img2.Bounds()) {\n\t\treturn blend\n\t}\n\n\tb := img1.Bounds()\n\tfor x := b.Min.X; x <= b.Max.X; x++ {\n\t\tfor y := b.Min.Y; y <= b.Max.Y; y++ {\n\t\t\tavg1 := average(img1.At(x, y))\n\t\t\tavg2 := average(img2.At(x, y))\n\t\t\tdiff := math.Abs(avg1 - avg2)\n\t\t\tvar c color.Color\n\t\t\tif diff > t {\n\t\t\t\tc = color.White\n\t\t\t} else {\n\t\t\t\tc = color.Black\n\t\t\t}\n\t\t\tblend.Set(x, y, c)\n\t\t}\n\t}\n\treturn blend\n}", "func NewGenericHistogram(name, help string, cutoffs []int64, labels []string, countLabel, totalLabel string) *Histogram {\n\tif len(cutoffs) != len(labels)-1 {\n\t\tpanic(\"mismatched cutoff and label lengths\")\n\t}\n\th := &Histogram{\n\t\tname: name,\n\t\thelp: help,\n\t\tcutoffs: cutoffs,\n\t\tlabels: labels,\n\t\tcountLabel: countLabel,\n\t\ttotalLabel: totalLabel,\n\t\tbuckets: make([]atomic.Int64, len(labels)),\n\t}\n\tif name != \"\" {\n\t\tpublish(name, h)\n\t}\n\treturn h\n}", "func (c *LoggerClient) Histogram(name string, value float64) {\n\tc.print(\"Histogram\", name, value, value)\n}", "func BenchmarkFibHeap_Merge(b *testing.B) {\r\n\theap1 := NewFloatFibHeap()\r\n\theap2 := NewFloatFibHeap()\r\n\r\n\tfor i := 0; i < b.N; i++ {\r\n\t\theap1.Enqueue(2 * 1E10 * (rand.Float64() - 0.5))\r\n\t\theap2.Enqueue(2 * 1E10 * (rand.Float64() - 0.5))\r\n\t\t_, err := heap1.Merge(&heap2)\r\n\t\tassert.NoError(b, err)\r\n\t}\r\n}", "func (h *batchHistogram) update(his *metrics.Float64Histogram, sum float64) {\n\tcounts, buckets := his.Counts, his.Buckets\n\n\th.mu.Lock()\n\tdefer h.mu.Unlock()\n\n\t// Clear buckets.\n\tfor i := range h.counts {\n\t\th.counts[i] = 0\n\t}\n\t// Copy and reduce buckets.\n\tvar j int\n\tfor i, count := range counts {\n\t\th.counts[j] += count\n\t\tif buckets[i+1] == h.buckets[j+1] {\n\t\t\tj++\n\t\t}\n\t}\n\tif h.hasSum {\n\t\th.sum = sum\n\t}\n}", "func Compare(countsMap1 map[string]interface{}, countsMap2 map[string]interface{}) map[string]interface{} {\n\tcdpFinalMap := make(map[string]interface{})\n\t//TODO: This only includes if BOTH maps have a non-zero count alignment - perhaps should mod?\n\tfor header, countStats := range countsMap1 {\n\t\tif countStats2, ok := countsMap2[header]; ok {\n\t\t\tswitch v := countStats.(type) {\n\t\t\tcase meanSe:\n\t\t\t\tcdpFinalMap[header] = compMeanSeOutput{}\n\t\t\t\tcdpFinalMap[header] = 
cdpFinalMap[header].(compMeanSeOutput).append(\n\t\t\t\t\tv.Mean,\n\t\t\t\t\tv.Se,\n\t\t\t\t\tcountStats2.(meanSe).Mean,\n\t\t\t\t\tcountStats2.(meanSe).Se)\n\t\t\tcase []float64:\n\n\t\t\t\tcdpFinalMap[header] = countsOutput{}\n\t\t\t\tpos := 0\n\t\t\t\tfor pos < len(v) {\n\t\t\t\t\tcdpFinalMap[header] = cdpFinalMap[header].(countsOutput).append(v[pos])\n\t\t\t\t\tpos++\n\t\t\t\t}\n\t\t\t\tpos = 0\n\t\t\t\tfor pos < len(countStats2.([]float64)) {\n\t\t\t\t\tcdpFinalMap[header] = cdpFinalMap[header].(countsOutput).append(countStats2.([]float64)[pos])\n\t\t\t\t\tpos++\n\t\t\t\t}\n\n\t\t\t}\n\t\t}\n\t}\n\treturn cdpFinalMap\n}", "func (h *sizeHistogram) add(size int64) {\n\t// Fetch the histogram interval corresponding\n\t// to the passed object size.\n\tfor i, interval := range ObjectsHistogramIntervals {\n\t\tif size >= interval.start && size <= interval.end {\n\t\t\th[i]++\n\t\t\tbreak\n\t\t}\n\t}\n}", "func (es HistogramBucketSlice) CopyTo(dest HistogramBucketSlice) {\n\tnewLen := es.Len()\n\tif newLen == 0 {\n\t\t*dest.orig = []*otlpmetrics.HistogramDataPoint_Bucket(nil)\n\t\treturn\n\t}\n\toldLen := dest.Len()\n\tif newLen <= oldLen {\n\t\t(*dest.orig) = (*dest.orig)[:newLen]\n\t\tfor i, el := range *es.orig {\n\t\t\tnewHistogramBucket(&el).CopyTo(newHistogramBucket(&(*dest.orig)[i]))\n\t\t}\n\t\treturn\n\t}\n\torigs := make([]otlpmetrics.HistogramDataPoint_Bucket, newLen)\n\twrappers := make([]*otlpmetrics.HistogramDataPoint_Bucket, newLen)\n\tfor i, el := range *es.orig {\n\t\twrappers[i] = &origs[i]\n\t\tnewHistogramBucket(&el).CopyTo(newHistogramBucket(&wrappers[i]))\n\t}\n\t*dest.orig = wrappers\n}", "func CompareSummary(a Summary, b Summary) (c SummaryCompare, err error) {\n\tif len(a.Histogram) != len(b.Histogram) {\n\t\treturn SummaryCompare{}, fmt.Errorf(\"len(a.Histogram) %d != len(b.Histogram) %d\", len(a.Histogram), len(b.Histogram))\n\t}\n\n\tc = SummaryCompare{\n\t\tA: a,\n\t\tB: b,\n\t}\n\n\t// e.g. \"A\" 100, \"B\" 50 == -50%\n\t// e.g. 
\"A\" 50, \"B\" 100 == 100%\n\tdeltaP50 := float64(b.P50) - float64(a.P50)\n\tdeltaP50 /= float64(a.P50)\n\tdeltaP50 *= 100.0\n\tdeltaP50 = convertInvalid(deltaP50)\n\n\tdeltaP90 := float64(b.P90) - float64(a.P90)\n\tdeltaP90 /= float64(a.P90)\n\tdeltaP90 *= 100.0\n\tdeltaP90 = convertInvalid(deltaP90)\n\n\tdeltaP99 := float64(b.P99) - float64(a.P99)\n\tdeltaP99 /= float64(a.P99)\n\tdeltaP99 *= 100.0\n\tdeltaP99 = convertInvalid(deltaP99)\n\n\tdeltaP999 := float64(b.P999) - float64(a.P999)\n\tdeltaP999 /= float64(a.P999)\n\tdeltaP999 *= 100.0\n\tdeltaP999 = convertInvalid(deltaP999)\n\n\tdeltaP9999 := float64(b.P9999) - float64(a.P9999)\n\tdeltaP9999 /= float64(a.P9999)\n\tdeltaP9999 *= 100.0\n\tdeltaP9999 = convertInvalid(deltaP9999)\n\n\tc.P50DeltaPercent = deltaP50\n\tc.P90DeltaPercent = deltaP90\n\tc.P99DeltaPercent = deltaP99\n\tc.P999DeltaPercent = deltaP999\n\tc.P9999DeltaPercent = deltaP9999\n\n\treturn c, nil\n}", "func scale(a, maxA, maxB int) float64 {\n\treturn (float64(a) / float64(maxA)) * float64(maxB) + 1\n}", "func (c *Aggregator) Histogram() (aggregation.Buckets, error) {\n\treturn aggregation.Buckets{\n\t\tBoundaries: c.boundaries,\n\t\tCounts: c.state.bucketCounts,\n\t}, nil\n}", "func fillHistograms(r, g, b *Histo, workers int) {\n\tbar, _ = barcli.New(int(tries * width * height))\n\tgo func(bar *barcli.Bar) {\n\t\tfor {\n\t\t\tif bar.Done() {\n\t\t\t\treturn\n\t\t\t}\n\t\t\tbar.Print()\n\t\t\ttime.Sleep(1000 * time.Millisecond)\n\t\t}\n\t}(bar)\n\twg := new(sync.WaitGroup)\n\twg.Add(workers)\n\tshare := int((tries)*width*height) / workers\n\tfor n := 0; n < workers; n++ {\n\t\t// Our worker channel to send our orbits on!\n\t\trng := rand7i.NewComplexRNG(int64(n + 1))\n\t\tgo arbitrary(r, g, b, &rng, share, wg)\n\t}\n\twg.Wait()\n\tbar.SetMax()\n\tbar.Print()\n}", "func (r Rectangle) Combine(r1 Rectangle) Rectangle {\n\tif r.Empty() {\n\t\treturn r1\n\t}\n\tif r1.Empty() {\n\t\treturn r\n\t}\n\tif r.Min.X > r1.Min.X {\n\t\tr.Min.X = r1.Min.X\n\t}\n\tif r.Min.Y > r1.Min.Y {\n\t\tr.Min.Y = r1.Min.Y\n\t}\n\tif r.Max.X < r1.Max.X {\n\t\tr.Max.X = r1.Max.X\n\t}\n\tif r.Max.Y < r1.Max.Y {\n\t\tr.Max.Y = r1.Max.Y\n\t}\n\treturn r\n}", "func (c *StatsClient) Histogram(name string, value float64) {\n\tif err := c.client.Histogram(name, value, c.tags, Rate); err != nil {\n\t\tc.logger().Printf(\"datadog.StatsClient.Histogram error: %s\", err)\n\t}\n}", "func (src *prometheusMetricsSource) buildHistos(name string, m *dto.Metric, now int64, tags map[string]string) []*MetricPoint {\n\tvar result []*MetricPoint\n\tfor _, b := range m.GetHistogram().Bucket {\n\t\tpoint := src.metricPoint(name+\".\"+fmt.Sprint(b.GetUpperBound()), float64(b.GetCumulativeCount()), now, src.source, tags)\n\t\tresult = src.filterAppend(result, point)\n\t}\n\tpoint := src.metricPoint(name+\".count\", float64(m.GetHistogram().GetSampleCount()), now, src.source, tags)\n\tresult = src.filterAppend(result, point)\n\tpoint = src.metricPoint(name+\".sum\", float64(m.GetHistogram().GetSampleSum()), now, src.source, tags)\n\tresult = src.filterAppend(result, point)\n\treturn result\n}", "func addScaledSlice(y, x []float64, a float64)", "func d2combineRect(rectA, rectB *d2rectT) d2rectT {\n\tvar newRect d2rectT\n\n\tfor index := 0; index < d2numDims; index++ {\n\t\tnewRect.min[index] = d2fmin(rectA.min[index], rectB.min[index])\n\t\tnewRect.max[index] = d2fmax(rectA.max[index], rectB.max[index])\n\t}\n\n\treturn newRect\n}", "func newResourceDelta(\n\ta *resource,\n\tb *resource,\n) *ackcompare.Delta {\n\tdelta := 
ackcompare.NewDelta()\n\tif (a == nil && b != nil) ||\n\t\t(a != nil && b == nil) {\n\t\tdelta.Add(\"\", a, b)\n\t\treturn delta\n\t}\n\tcustomSetDefaults(a, b)\n\n\tif ackcompare.HasNilDifference(a.ko.Spec.HyperParameterTuningJobConfig, b.ko.Spec.HyperParameterTuningJobConfig) {\n\t\tdelta.Add(\"Spec.HyperParameterTuningJobConfig\", a.ko.Spec.HyperParameterTuningJobConfig, b.ko.Spec.HyperParameterTuningJobConfig)\n\t} else if a.ko.Spec.HyperParameterTuningJobConfig != nil && b.ko.Spec.HyperParameterTuningJobConfig != nil {\n\t\tif ackcompare.HasNilDifference(a.ko.Spec.HyperParameterTuningJobConfig.HyperParameterTuningJobObjective, b.ko.Spec.HyperParameterTuningJobConfig.HyperParameterTuningJobObjective) {\n\t\t\tdelta.Add(\"Spec.HyperParameterTuningJobConfig.HyperParameterTuningJobObjective\", a.ko.Spec.HyperParameterTuningJobConfig.HyperParameterTuningJobObjective, b.ko.Spec.HyperParameterTuningJobConfig.HyperParameterTuningJobObjective)\n\t\t} else if a.ko.Spec.HyperParameterTuningJobConfig.HyperParameterTuningJobObjective != nil && b.ko.Spec.HyperParameterTuningJobConfig.HyperParameterTuningJobObjective != nil {\n\t\t\tif ackcompare.HasNilDifference(a.ko.Spec.HyperParameterTuningJobConfig.HyperParameterTuningJobObjective.MetricName, b.ko.Spec.HyperParameterTuningJobConfig.HyperParameterTuningJobObjective.MetricName) {\n\t\t\t\tdelta.Add(\"Spec.HyperParameterTuningJobConfig.HyperParameterTuningJobObjective.MetricName\", a.ko.Spec.HyperParameterTuningJobConfig.HyperParameterTuningJobObjective.MetricName, b.ko.Spec.HyperParameterTuningJobConfig.HyperParameterTuningJobObjective.MetricName)\n\t\t\t} else if a.ko.Spec.HyperParameterTuningJobConfig.HyperParameterTuningJobObjective.MetricName != nil && b.ko.Spec.HyperParameterTuningJobConfig.HyperParameterTuningJobObjective.MetricName != nil {\n\t\t\t\tif *a.ko.Spec.HyperParameterTuningJobConfig.HyperParameterTuningJobObjective.MetricName != *b.ko.Spec.HyperParameterTuningJobConfig.HyperParameterTuningJobObjective.MetricName {\n\t\t\t\t\tdelta.Add(\"Spec.HyperParameterTuningJobConfig.HyperParameterTuningJobObjective.MetricName\", a.ko.Spec.HyperParameterTuningJobConfig.HyperParameterTuningJobObjective.MetricName, b.ko.Spec.HyperParameterTuningJobConfig.HyperParameterTuningJobObjective.MetricName)\n\t\t\t\t}\n\t\t\t}\n\t\t\tif ackcompare.HasNilDifference(a.ko.Spec.HyperParameterTuningJobConfig.HyperParameterTuningJobObjective.Type, b.ko.Spec.HyperParameterTuningJobConfig.HyperParameterTuningJobObjective.Type) {\n\t\t\t\tdelta.Add(\"Spec.HyperParameterTuningJobConfig.HyperParameterTuningJobObjective.Type\", a.ko.Spec.HyperParameterTuningJobConfig.HyperParameterTuningJobObjective.Type, b.ko.Spec.HyperParameterTuningJobConfig.HyperParameterTuningJobObjective.Type)\n\t\t\t} else if a.ko.Spec.HyperParameterTuningJobConfig.HyperParameterTuningJobObjective.Type != nil && b.ko.Spec.HyperParameterTuningJobConfig.HyperParameterTuningJobObjective.Type != nil {\n\t\t\t\tif *a.ko.Spec.HyperParameterTuningJobConfig.HyperParameterTuningJobObjective.Type != *b.ko.Spec.HyperParameterTuningJobConfig.HyperParameterTuningJobObjective.Type {\n\t\t\t\t\tdelta.Add(\"Spec.HyperParameterTuningJobConfig.HyperParameterTuningJobObjective.Type\", a.ko.Spec.HyperParameterTuningJobConfig.HyperParameterTuningJobObjective.Type, b.ko.Spec.HyperParameterTuningJobConfig.HyperParameterTuningJobObjective.Type)\n\t\t\t\t}\n\t\t\t}\n\t\t}\n\t\tif ackcompare.HasNilDifference(a.ko.Spec.HyperParameterTuningJobConfig.ParameterRanges, 
b.ko.Spec.HyperParameterTuningJobConfig.ParameterRanges) {\n\t\t\tdelta.Add(\"Spec.HyperParameterTuningJobConfig.ParameterRanges\", a.ko.Spec.HyperParameterTuningJobConfig.ParameterRanges, b.ko.Spec.HyperParameterTuningJobConfig.ParameterRanges)\n\t\t} else if a.ko.Spec.HyperParameterTuningJobConfig.ParameterRanges != nil && b.ko.Spec.HyperParameterTuningJobConfig.ParameterRanges != nil {\n\t\t\tif !reflect.DeepEqual(a.ko.Spec.HyperParameterTuningJobConfig.ParameterRanges.CategoricalParameterRanges, b.ko.Spec.HyperParameterTuningJobConfig.ParameterRanges.CategoricalParameterRanges) {\n\t\t\t\tdelta.Add(\"Spec.HyperParameterTuningJobConfig.ParameterRanges.CategoricalParameterRanges\", a.ko.Spec.HyperParameterTuningJobConfig.ParameterRanges.CategoricalParameterRanges, b.ko.Spec.HyperParameterTuningJobConfig.ParameterRanges.CategoricalParameterRanges)\n\t\t\t}\n\t\t\tif !reflect.DeepEqual(a.ko.Spec.HyperParameterTuningJobConfig.ParameterRanges.ContinuousParameterRanges, b.ko.Spec.HyperParameterTuningJobConfig.ParameterRanges.ContinuousParameterRanges) {\n\t\t\t\tdelta.Add(\"Spec.HyperParameterTuningJobConfig.ParameterRanges.ContinuousParameterRanges\", a.ko.Spec.HyperParameterTuningJobConfig.ParameterRanges.ContinuousParameterRanges, b.ko.Spec.HyperParameterTuningJobConfig.ParameterRanges.ContinuousParameterRanges)\n\t\t\t}\n\t\t\tif !reflect.DeepEqual(a.ko.Spec.HyperParameterTuningJobConfig.ParameterRanges.IntegerParameterRanges, b.ko.Spec.HyperParameterTuningJobConfig.ParameterRanges.IntegerParameterRanges) {\n\t\t\t\tdelta.Add(\"Spec.HyperParameterTuningJobConfig.ParameterRanges.IntegerParameterRanges\", a.ko.Spec.HyperParameterTuningJobConfig.ParameterRanges.IntegerParameterRanges, b.ko.Spec.HyperParameterTuningJobConfig.ParameterRanges.IntegerParameterRanges)\n\t\t\t}\n\t\t}\n\t\tif ackcompare.HasNilDifference(a.ko.Spec.HyperParameterTuningJobConfig.ResourceLimits, b.ko.Spec.HyperParameterTuningJobConfig.ResourceLimits) {\n\t\t\tdelta.Add(\"Spec.HyperParameterTuningJobConfig.ResourceLimits\", a.ko.Spec.HyperParameterTuningJobConfig.ResourceLimits, b.ko.Spec.HyperParameterTuningJobConfig.ResourceLimits)\n\t\t} else if a.ko.Spec.HyperParameterTuningJobConfig.ResourceLimits != nil && b.ko.Spec.HyperParameterTuningJobConfig.ResourceLimits != nil {\n\t\t\tif ackcompare.HasNilDifference(a.ko.Spec.HyperParameterTuningJobConfig.ResourceLimits.MaxNumberOfTrainingJobs, b.ko.Spec.HyperParameterTuningJobConfig.ResourceLimits.MaxNumberOfTrainingJobs) {\n\t\t\t\tdelta.Add(\"Spec.HyperParameterTuningJobConfig.ResourceLimits.MaxNumberOfTrainingJobs\", a.ko.Spec.HyperParameterTuningJobConfig.ResourceLimits.MaxNumberOfTrainingJobs, b.ko.Spec.HyperParameterTuningJobConfig.ResourceLimits.MaxNumberOfTrainingJobs)\n\t\t\t} else if a.ko.Spec.HyperParameterTuningJobConfig.ResourceLimits.MaxNumberOfTrainingJobs != nil && b.ko.Spec.HyperParameterTuningJobConfig.ResourceLimits.MaxNumberOfTrainingJobs != nil {\n\t\t\t\tif *a.ko.Spec.HyperParameterTuningJobConfig.ResourceLimits.MaxNumberOfTrainingJobs != *b.ko.Spec.HyperParameterTuningJobConfig.ResourceLimits.MaxNumberOfTrainingJobs {\n\t\t\t\t\tdelta.Add(\"Spec.HyperParameterTuningJobConfig.ResourceLimits.MaxNumberOfTrainingJobs\", a.ko.Spec.HyperParameterTuningJobConfig.ResourceLimits.MaxNumberOfTrainingJobs, b.ko.Spec.HyperParameterTuningJobConfig.ResourceLimits.MaxNumberOfTrainingJobs)\n\t\t\t\t}\n\t\t\t}\n\t\t\tif ackcompare.HasNilDifference(a.ko.Spec.HyperParameterTuningJobConfig.ResourceLimits.MaxParallelTrainingJobs, 
b.ko.Spec.HyperParameterTuningJobConfig.ResourceLimits.MaxParallelTrainingJobs) {\n\t\t\t\tdelta.Add(\"Spec.HyperParameterTuningJobConfig.ResourceLimits.MaxParallelTrainingJobs\", a.ko.Spec.HyperParameterTuningJobConfig.ResourceLimits.MaxParallelTrainingJobs, b.ko.Spec.HyperParameterTuningJobConfig.ResourceLimits.MaxParallelTrainingJobs)\n\t\t\t} else if a.ko.Spec.HyperParameterTuningJobConfig.ResourceLimits.MaxParallelTrainingJobs != nil && b.ko.Spec.HyperParameterTuningJobConfig.ResourceLimits.MaxParallelTrainingJobs != nil {\n\t\t\t\tif *a.ko.Spec.HyperParameterTuningJobConfig.ResourceLimits.MaxParallelTrainingJobs != *b.ko.Spec.HyperParameterTuningJobConfig.ResourceLimits.MaxParallelTrainingJobs {\n\t\t\t\t\tdelta.Add(\"Spec.HyperParameterTuningJobConfig.ResourceLimits.MaxParallelTrainingJobs\", a.ko.Spec.HyperParameterTuningJobConfig.ResourceLimits.MaxParallelTrainingJobs, b.ko.Spec.HyperParameterTuningJobConfig.ResourceLimits.MaxParallelTrainingJobs)\n\t\t\t\t}\n\t\t\t}\n\t\t}\n\t\tif ackcompare.HasNilDifference(a.ko.Spec.HyperParameterTuningJobConfig.Strategy, b.ko.Spec.HyperParameterTuningJobConfig.Strategy) {\n\t\t\tdelta.Add(\"Spec.HyperParameterTuningJobConfig.Strategy\", a.ko.Spec.HyperParameterTuningJobConfig.Strategy, b.ko.Spec.HyperParameterTuningJobConfig.Strategy)\n\t\t} else if a.ko.Spec.HyperParameterTuningJobConfig.Strategy != nil && b.ko.Spec.HyperParameterTuningJobConfig.Strategy != nil {\n\t\t\tif *a.ko.Spec.HyperParameterTuningJobConfig.Strategy != *b.ko.Spec.HyperParameterTuningJobConfig.Strategy {\n\t\t\t\tdelta.Add(\"Spec.HyperParameterTuningJobConfig.Strategy\", a.ko.Spec.HyperParameterTuningJobConfig.Strategy, b.ko.Spec.HyperParameterTuningJobConfig.Strategy)\n\t\t\t}\n\t\t}\n\t\tif ackcompare.HasNilDifference(a.ko.Spec.HyperParameterTuningJobConfig.TrainingJobEarlyStoppingType, b.ko.Spec.HyperParameterTuningJobConfig.TrainingJobEarlyStoppingType) {\n\t\t\tdelta.Add(\"Spec.HyperParameterTuningJobConfig.TrainingJobEarlyStoppingType\", a.ko.Spec.HyperParameterTuningJobConfig.TrainingJobEarlyStoppingType, b.ko.Spec.HyperParameterTuningJobConfig.TrainingJobEarlyStoppingType)\n\t\t} else if a.ko.Spec.HyperParameterTuningJobConfig.TrainingJobEarlyStoppingType != nil && b.ko.Spec.HyperParameterTuningJobConfig.TrainingJobEarlyStoppingType != nil {\n\t\t\tif *a.ko.Spec.HyperParameterTuningJobConfig.TrainingJobEarlyStoppingType != *b.ko.Spec.HyperParameterTuningJobConfig.TrainingJobEarlyStoppingType {\n\t\t\t\tdelta.Add(\"Spec.HyperParameterTuningJobConfig.TrainingJobEarlyStoppingType\", a.ko.Spec.HyperParameterTuningJobConfig.TrainingJobEarlyStoppingType, b.ko.Spec.HyperParameterTuningJobConfig.TrainingJobEarlyStoppingType)\n\t\t\t}\n\t\t}\n\t\tif ackcompare.HasNilDifference(a.ko.Spec.HyperParameterTuningJobConfig.TuningJobCompletionCriteria, b.ko.Spec.HyperParameterTuningJobConfig.TuningJobCompletionCriteria) {\n\t\t\tdelta.Add(\"Spec.HyperParameterTuningJobConfig.TuningJobCompletionCriteria\", a.ko.Spec.HyperParameterTuningJobConfig.TuningJobCompletionCriteria, b.ko.Spec.HyperParameterTuningJobConfig.TuningJobCompletionCriteria)\n\t\t} else if a.ko.Spec.HyperParameterTuningJobConfig.TuningJobCompletionCriteria != nil && b.ko.Spec.HyperParameterTuningJobConfig.TuningJobCompletionCriteria != nil {\n\t\t\tif ackcompare.HasNilDifference(a.ko.Spec.HyperParameterTuningJobConfig.TuningJobCompletionCriteria.TargetObjectiveMetricValue, b.ko.Spec.HyperParameterTuningJobConfig.TuningJobCompletionCriteria.TargetObjectiveMetricValue) 
{\n\t\t\t\tdelta.Add(\"Spec.HyperParameterTuningJobConfig.TuningJobCompletionCriteria.TargetObjectiveMetricValue\", a.ko.Spec.HyperParameterTuningJobConfig.TuningJobCompletionCriteria.TargetObjectiveMetricValue, b.ko.Spec.HyperParameterTuningJobConfig.TuningJobCompletionCriteria.TargetObjectiveMetricValue)\n\t\t\t} else if a.ko.Spec.HyperParameterTuningJobConfig.TuningJobCompletionCriteria.TargetObjectiveMetricValue != nil && b.ko.Spec.HyperParameterTuningJobConfig.TuningJobCompletionCriteria.TargetObjectiveMetricValue != nil {\n\t\t\t\tif *a.ko.Spec.HyperParameterTuningJobConfig.TuningJobCompletionCriteria.TargetObjectiveMetricValue != *b.ko.Spec.HyperParameterTuningJobConfig.TuningJobCompletionCriteria.TargetObjectiveMetricValue {\n\t\t\t\t\tdelta.Add(\"Spec.HyperParameterTuningJobConfig.TuningJobCompletionCriteria.TargetObjectiveMetricValue\", a.ko.Spec.HyperParameterTuningJobConfig.TuningJobCompletionCriteria.TargetObjectiveMetricValue, b.ko.Spec.HyperParameterTuningJobConfig.TuningJobCompletionCriteria.TargetObjectiveMetricValue)\n\t\t\t\t}\n\t\t\t}\n\t\t}\n\t}\n\tif ackcompare.HasNilDifference(a.ko.Spec.HyperParameterTuningJobName, b.ko.Spec.HyperParameterTuningJobName) {\n\t\tdelta.Add(\"Spec.HyperParameterTuningJobName\", a.ko.Spec.HyperParameterTuningJobName, b.ko.Spec.HyperParameterTuningJobName)\n\t} else if a.ko.Spec.HyperParameterTuningJobName != nil && b.ko.Spec.HyperParameterTuningJobName != nil {\n\t\tif *a.ko.Spec.HyperParameterTuningJobName != *b.ko.Spec.HyperParameterTuningJobName {\n\t\t\tdelta.Add(\"Spec.HyperParameterTuningJobName\", a.ko.Spec.HyperParameterTuningJobName, b.ko.Spec.HyperParameterTuningJobName)\n\t\t}\n\t}\n\tif ackcompare.HasNilDifference(a.ko.Spec.TrainingJobDefinition, b.ko.Spec.TrainingJobDefinition) {\n\t\tdelta.Add(\"Spec.TrainingJobDefinition\", a.ko.Spec.TrainingJobDefinition, b.ko.Spec.TrainingJobDefinition)\n\t} else if a.ko.Spec.TrainingJobDefinition != nil && b.ko.Spec.TrainingJobDefinition != nil {\n\t\tif ackcompare.HasNilDifference(a.ko.Spec.TrainingJobDefinition.AlgorithmSpecification, b.ko.Spec.TrainingJobDefinition.AlgorithmSpecification) {\n\t\t\tdelta.Add(\"Spec.TrainingJobDefinition.AlgorithmSpecification\", a.ko.Spec.TrainingJobDefinition.AlgorithmSpecification, b.ko.Spec.TrainingJobDefinition.AlgorithmSpecification)\n\t\t} else if a.ko.Spec.TrainingJobDefinition.AlgorithmSpecification != nil && b.ko.Spec.TrainingJobDefinition.AlgorithmSpecification != nil {\n\t\t\tif ackcompare.HasNilDifference(a.ko.Spec.TrainingJobDefinition.AlgorithmSpecification.AlgorithmName, b.ko.Spec.TrainingJobDefinition.AlgorithmSpecification.AlgorithmName) {\n\t\t\t\tdelta.Add(\"Spec.TrainingJobDefinition.AlgorithmSpecification.AlgorithmName\", a.ko.Spec.TrainingJobDefinition.AlgorithmSpecification.AlgorithmName, b.ko.Spec.TrainingJobDefinition.AlgorithmSpecification.AlgorithmName)\n\t\t\t} else if a.ko.Spec.TrainingJobDefinition.AlgorithmSpecification.AlgorithmName != nil && b.ko.Spec.TrainingJobDefinition.AlgorithmSpecification.AlgorithmName != nil {\n\t\t\t\tif *a.ko.Spec.TrainingJobDefinition.AlgorithmSpecification.AlgorithmName != *b.ko.Spec.TrainingJobDefinition.AlgorithmSpecification.AlgorithmName {\n\t\t\t\t\tdelta.Add(\"Spec.TrainingJobDefinition.AlgorithmSpecification.AlgorithmName\", a.ko.Spec.TrainingJobDefinition.AlgorithmSpecification.AlgorithmName, b.ko.Spec.TrainingJobDefinition.AlgorithmSpecification.AlgorithmName)\n\t\t\t\t}\n\t\t\t}\n\t\t\tif !reflect.DeepEqual(a.ko.Spec.TrainingJobDefinition.AlgorithmSpecification.MetricDefinitions, 
b.ko.Spec.TrainingJobDefinition.AlgorithmSpecification.MetricDefinitions) {\n\t\t\t\tdelta.Add(\"Spec.TrainingJobDefinition.AlgorithmSpecification.MetricDefinitions\", a.ko.Spec.TrainingJobDefinition.AlgorithmSpecification.MetricDefinitions, b.ko.Spec.TrainingJobDefinition.AlgorithmSpecification.MetricDefinitions)\n\t\t\t}\n\t\t\tif ackcompare.HasNilDifference(a.ko.Spec.TrainingJobDefinition.AlgorithmSpecification.TrainingImage, b.ko.Spec.TrainingJobDefinition.AlgorithmSpecification.TrainingImage) {\n\t\t\t\tdelta.Add(\"Spec.TrainingJobDefinition.AlgorithmSpecification.TrainingImage\", a.ko.Spec.TrainingJobDefinition.AlgorithmSpecification.TrainingImage, b.ko.Spec.TrainingJobDefinition.AlgorithmSpecification.TrainingImage)\n\t\t\t} else if a.ko.Spec.TrainingJobDefinition.AlgorithmSpecification.TrainingImage != nil && b.ko.Spec.TrainingJobDefinition.AlgorithmSpecification.TrainingImage != nil {\n\t\t\t\tif *a.ko.Spec.TrainingJobDefinition.AlgorithmSpecification.TrainingImage != *b.ko.Spec.TrainingJobDefinition.AlgorithmSpecification.TrainingImage {\n\t\t\t\t\tdelta.Add(\"Spec.TrainingJobDefinition.AlgorithmSpecification.TrainingImage\", a.ko.Spec.TrainingJobDefinition.AlgorithmSpecification.TrainingImage, b.ko.Spec.TrainingJobDefinition.AlgorithmSpecification.TrainingImage)\n\t\t\t\t}\n\t\t\t}\n\t\t\tif ackcompare.HasNilDifference(a.ko.Spec.TrainingJobDefinition.AlgorithmSpecification.TrainingInputMode, b.ko.Spec.TrainingJobDefinition.AlgorithmSpecification.TrainingInputMode) {\n\t\t\t\tdelta.Add(\"Spec.TrainingJobDefinition.AlgorithmSpecification.TrainingInputMode\", a.ko.Spec.TrainingJobDefinition.AlgorithmSpecification.TrainingInputMode, b.ko.Spec.TrainingJobDefinition.AlgorithmSpecification.TrainingInputMode)\n\t\t\t} else if a.ko.Spec.TrainingJobDefinition.AlgorithmSpecification.TrainingInputMode != nil && b.ko.Spec.TrainingJobDefinition.AlgorithmSpecification.TrainingInputMode != nil {\n\t\t\t\tif *a.ko.Spec.TrainingJobDefinition.AlgorithmSpecification.TrainingInputMode != *b.ko.Spec.TrainingJobDefinition.AlgorithmSpecification.TrainingInputMode {\n\t\t\t\t\tdelta.Add(\"Spec.TrainingJobDefinition.AlgorithmSpecification.TrainingInputMode\", a.ko.Spec.TrainingJobDefinition.AlgorithmSpecification.TrainingInputMode, b.ko.Spec.TrainingJobDefinition.AlgorithmSpecification.TrainingInputMode)\n\t\t\t\t}\n\t\t\t}\n\t\t}\n\t\tif ackcompare.HasNilDifference(a.ko.Spec.TrainingJobDefinition.CheckpointConfig, b.ko.Spec.TrainingJobDefinition.CheckpointConfig) {\n\t\t\tdelta.Add(\"Spec.TrainingJobDefinition.CheckpointConfig\", a.ko.Spec.TrainingJobDefinition.CheckpointConfig, b.ko.Spec.TrainingJobDefinition.CheckpointConfig)\n\t\t} else if a.ko.Spec.TrainingJobDefinition.CheckpointConfig != nil && b.ko.Spec.TrainingJobDefinition.CheckpointConfig != nil {\n\t\t\tif ackcompare.HasNilDifference(a.ko.Spec.TrainingJobDefinition.CheckpointConfig.LocalPath, b.ko.Spec.TrainingJobDefinition.CheckpointConfig.LocalPath) {\n\t\t\t\tdelta.Add(\"Spec.TrainingJobDefinition.CheckpointConfig.LocalPath\", a.ko.Spec.TrainingJobDefinition.CheckpointConfig.LocalPath, b.ko.Spec.TrainingJobDefinition.CheckpointConfig.LocalPath)\n\t\t\t} else if a.ko.Spec.TrainingJobDefinition.CheckpointConfig.LocalPath != nil && b.ko.Spec.TrainingJobDefinition.CheckpointConfig.LocalPath != nil {\n\t\t\t\tif *a.ko.Spec.TrainingJobDefinition.CheckpointConfig.LocalPath != *b.ko.Spec.TrainingJobDefinition.CheckpointConfig.LocalPath {\n\t\t\t\t\tdelta.Add(\"Spec.TrainingJobDefinition.CheckpointConfig.LocalPath\", 
a.ko.Spec.TrainingJobDefinition.CheckpointConfig.LocalPath, b.ko.Spec.TrainingJobDefinition.CheckpointConfig.LocalPath)\n\t\t\t\t}\n\t\t\t}\n\t\t\tif ackcompare.HasNilDifference(a.ko.Spec.TrainingJobDefinition.CheckpointConfig.S3URI, b.ko.Spec.TrainingJobDefinition.CheckpointConfig.S3URI) {\n\t\t\t\tdelta.Add(\"Spec.TrainingJobDefinition.CheckpointConfig.S3URI\", a.ko.Spec.TrainingJobDefinition.CheckpointConfig.S3URI, b.ko.Spec.TrainingJobDefinition.CheckpointConfig.S3URI)\n\t\t\t} else if a.ko.Spec.TrainingJobDefinition.CheckpointConfig.S3URI != nil && b.ko.Spec.TrainingJobDefinition.CheckpointConfig.S3URI != nil {\n\t\t\t\tif *a.ko.Spec.TrainingJobDefinition.CheckpointConfig.S3URI != *b.ko.Spec.TrainingJobDefinition.CheckpointConfig.S3URI {\n\t\t\t\t\tdelta.Add(\"Spec.TrainingJobDefinition.CheckpointConfig.S3URI\", a.ko.Spec.TrainingJobDefinition.CheckpointConfig.S3URI, b.ko.Spec.TrainingJobDefinition.CheckpointConfig.S3URI)\n\t\t\t\t}\n\t\t\t}\n\t\t}\n\t\tif ackcompare.HasNilDifference(a.ko.Spec.TrainingJobDefinition.DefinitionName, b.ko.Spec.TrainingJobDefinition.DefinitionName) {\n\t\t\tdelta.Add(\"Spec.TrainingJobDefinition.DefinitionName\", a.ko.Spec.TrainingJobDefinition.DefinitionName, b.ko.Spec.TrainingJobDefinition.DefinitionName)\n\t\t} else if a.ko.Spec.TrainingJobDefinition.DefinitionName != nil && b.ko.Spec.TrainingJobDefinition.DefinitionName != nil {\n\t\t\tif *a.ko.Spec.TrainingJobDefinition.DefinitionName != *b.ko.Spec.TrainingJobDefinition.DefinitionName {\n\t\t\t\tdelta.Add(\"Spec.TrainingJobDefinition.DefinitionName\", a.ko.Spec.TrainingJobDefinition.DefinitionName, b.ko.Spec.TrainingJobDefinition.DefinitionName)\n\t\t\t}\n\t\t}\n\t\tif ackcompare.HasNilDifference(a.ko.Spec.TrainingJobDefinition.EnableInterContainerTrafficEncryption, b.ko.Spec.TrainingJobDefinition.EnableInterContainerTrafficEncryption) {\n\t\t\tdelta.Add(\"Spec.TrainingJobDefinition.EnableInterContainerTrafficEncryption\", a.ko.Spec.TrainingJobDefinition.EnableInterContainerTrafficEncryption, b.ko.Spec.TrainingJobDefinition.EnableInterContainerTrafficEncryption)\n\t\t} else if a.ko.Spec.TrainingJobDefinition.EnableInterContainerTrafficEncryption != nil && b.ko.Spec.TrainingJobDefinition.EnableInterContainerTrafficEncryption != nil {\n\t\t\tif *a.ko.Spec.TrainingJobDefinition.EnableInterContainerTrafficEncryption != *b.ko.Spec.TrainingJobDefinition.EnableInterContainerTrafficEncryption {\n\t\t\t\tdelta.Add(\"Spec.TrainingJobDefinition.EnableInterContainerTrafficEncryption\", a.ko.Spec.TrainingJobDefinition.EnableInterContainerTrafficEncryption, b.ko.Spec.TrainingJobDefinition.EnableInterContainerTrafficEncryption)\n\t\t\t}\n\t\t}\n\t\tif ackcompare.HasNilDifference(a.ko.Spec.TrainingJobDefinition.EnableManagedSpotTraining, b.ko.Spec.TrainingJobDefinition.EnableManagedSpotTraining) {\n\t\t\tdelta.Add(\"Spec.TrainingJobDefinition.EnableManagedSpotTraining\", a.ko.Spec.TrainingJobDefinition.EnableManagedSpotTraining, b.ko.Spec.TrainingJobDefinition.EnableManagedSpotTraining)\n\t\t} else if a.ko.Spec.TrainingJobDefinition.EnableManagedSpotTraining != nil && b.ko.Spec.TrainingJobDefinition.EnableManagedSpotTraining != nil {\n\t\t\tif *a.ko.Spec.TrainingJobDefinition.EnableManagedSpotTraining != *b.ko.Spec.TrainingJobDefinition.EnableManagedSpotTraining {\n\t\t\t\tdelta.Add(\"Spec.TrainingJobDefinition.EnableManagedSpotTraining\", a.ko.Spec.TrainingJobDefinition.EnableManagedSpotTraining, b.ko.Spec.TrainingJobDefinition.EnableManagedSpotTraining)\n\t\t\t}\n\t\t}\n\t\tif 
ackcompare.HasNilDifference(a.ko.Spec.TrainingJobDefinition.EnableNetworkIsolation, b.ko.Spec.TrainingJobDefinition.EnableNetworkIsolation) {\n\t\t\tdelta.Add(\"Spec.TrainingJobDefinition.EnableNetworkIsolation\", a.ko.Spec.TrainingJobDefinition.EnableNetworkIsolation, b.ko.Spec.TrainingJobDefinition.EnableNetworkIsolation)\n\t\t} else if a.ko.Spec.TrainingJobDefinition.EnableNetworkIsolation != nil && b.ko.Spec.TrainingJobDefinition.EnableNetworkIsolation != nil {\n\t\t\tif *a.ko.Spec.TrainingJobDefinition.EnableNetworkIsolation != *b.ko.Spec.TrainingJobDefinition.EnableNetworkIsolation {\n\t\t\t\tdelta.Add(\"Spec.TrainingJobDefinition.EnableNetworkIsolation\", a.ko.Spec.TrainingJobDefinition.EnableNetworkIsolation, b.ko.Spec.TrainingJobDefinition.EnableNetworkIsolation)\n\t\t\t}\n\t\t}\n\t\tif ackcompare.HasNilDifference(a.ko.Spec.TrainingJobDefinition.HyperParameterRanges, b.ko.Spec.TrainingJobDefinition.HyperParameterRanges) {\n\t\t\tdelta.Add(\"Spec.TrainingJobDefinition.HyperParameterRanges\", a.ko.Spec.TrainingJobDefinition.HyperParameterRanges, b.ko.Spec.TrainingJobDefinition.HyperParameterRanges)\n\t\t} else if a.ko.Spec.TrainingJobDefinition.HyperParameterRanges != nil && b.ko.Spec.TrainingJobDefinition.HyperParameterRanges != nil {\n\t\t\tif !reflect.DeepEqual(a.ko.Spec.TrainingJobDefinition.HyperParameterRanges.CategoricalParameterRanges, b.ko.Spec.TrainingJobDefinition.HyperParameterRanges.CategoricalParameterRanges) {\n\t\t\t\tdelta.Add(\"Spec.TrainingJobDefinition.HyperParameterRanges.CategoricalParameterRanges\", a.ko.Spec.TrainingJobDefinition.HyperParameterRanges.CategoricalParameterRanges, b.ko.Spec.TrainingJobDefinition.HyperParameterRanges.CategoricalParameterRanges)\n\t\t\t}\n\t\t\tif !reflect.DeepEqual(a.ko.Spec.TrainingJobDefinition.HyperParameterRanges.ContinuousParameterRanges, b.ko.Spec.TrainingJobDefinition.HyperParameterRanges.ContinuousParameterRanges) {\n\t\t\t\tdelta.Add(\"Spec.TrainingJobDefinition.HyperParameterRanges.ContinuousParameterRanges\", a.ko.Spec.TrainingJobDefinition.HyperParameterRanges.ContinuousParameterRanges, b.ko.Spec.TrainingJobDefinition.HyperParameterRanges.ContinuousParameterRanges)\n\t\t\t}\n\t\t\tif !reflect.DeepEqual(a.ko.Spec.TrainingJobDefinition.HyperParameterRanges.IntegerParameterRanges, b.ko.Spec.TrainingJobDefinition.HyperParameterRanges.IntegerParameterRanges) {\n\t\t\t\tdelta.Add(\"Spec.TrainingJobDefinition.HyperParameterRanges.IntegerParameterRanges\", a.ko.Spec.TrainingJobDefinition.HyperParameterRanges.IntegerParameterRanges, b.ko.Spec.TrainingJobDefinition.HyperParameterRanges.IntegerParameterRanges)\n\t\t\t}\n\t\t}\n\t\tif !reflect.DeepEqual(a.ko.Spec.TrainingJobDefinition.InputDataConfig, b.ko.Spec.TrainingJobDefinition.InputDataConfig) {\n\t\t\tdelta.Add(\"Spec.TrainingJobDefinition.InputDataConfig\", a.ko.Spec.TrainingJobDefinition.InputDataConfig, b.ko.Spec.TrainingJobDefinition.InputDataConfig)\n\t\t}\n\t\tif ackcompare.HasNilDifference(a.ko.Spec.TrainingJobDefinition.OutputDataConfig, b.ko.Spec.TrainingJobDefinition.OutputDataConfig) {\n\t\t\tdelta.Add(\"Spec.TrainingJobDefinition.OutputDataConfig\", a.ko.Spec.TrainingJobDefinition.OutputDataConfig, b.ko.Spec.TrainingJobDefinition.OutputDataConfig)\n\t\t} else if a.ko.Spec.TrainingJobDefinition.OutputDataConfig != nil && b.ko.Spec.TrainingJobDefinition.OutputDataConfig != nil {\n\t\t\tif ackcompare.HasNilDifference(a.ko.Spec.TrainingJobDefinition.OutputDataConfig.KMSKeyID, b.ko.Spec.TrainingJobDefinition.OutputDataConfig.KMSKeyID) 
{\n\t\t\t\tdelta.Add(\"Spec.TrainingJobDefinition.OutputDataConfig.KMSKeyID\", a.ko.Spec.TrainingJobDefinition.OutputDataConfig.KMSKeyID, b.ko.Spec.TrainingJobDefinition.OutputDataConfig.KMSKeyID)\n\t\t\t} else if a.ko.Spec.TrainingJobDefinition.OutputDataConfig.KMSKeyID != nil && b.ko.Spec.TrainingJobDefinition.OutputDataConfig.KMSKeyID != nil {\n\t\t\t\tif *a.ko.Spec.TrainingJobDefinition.OutputDataConfig.KMSKeyID != *b.ko.Spec.TrainingJobDefinition.OutputDataConfig.KMSKeyID {\n\t\t\t\t\tdelta.Add(\"Spec.TrainingJobDefinition.OutputDataConfig.KMSKeyID\", a.ko.Spec.TrainingJobDefinition.OutputDataConfig.KMSKeyID, b.ko.Spec.TrainingJobDefinition.OutputDataConfig.KMSKeyID)\n\t\t\t\t}\n\t\t\t}\n\t\t\tif ackcompare.HasNilDifference(a.ko.Spec.TrainingJobDefinition.OutputDataConfig.S3OutputPath, b.ko.Spec.TrainingJobDefinition.OutputDataConfig.S3OutputPath) {\n\t\t\t\tdelta.Add(\"Spec.TrainingJobDefinition.OutputDataConfig.S3OutputPath\", a.ko.Spec.TrainingJobDefinition.OutputDataConfig.S3OutputPath, b.ko.Spec.TrainingJobDefinition.OutputDataConfig.S3OutputPath)\n\t\t\t} else if a.ko.Spec.TrainingJobDefinition.OutputDataConfig.S3OutputPath != nil && b.ko.Spec.TrainingJobDefinition.OutputDataConfig.S3OutputPath != nil {\n\t\t\t\tif *a.ko.Spec.TrainingJobDefinition.OutputDataConfig.S3OutputPath != *b.ko.Spec.TrainingJobDefinition.OutputDataConfig.S3OutputPath {\n\t\t\t\t\tdelta.Add(\"Spec.TrainingJobDefinition.OutputDataConfig.S3OutputPath\", a.ko.Spec.TrainingJobDefinition.OutputDataConfig.S3OutputPath, b.ko.Spec.TrainingJobDefinition.OutputDataConfig.S3OutputPath)\n\t\t\t\t}\n\t\t\t}\n\t\t}\n\t\tif ackcompare.HasNilDifference(a.ko.Spec.TrainingJobDefinition.ResourceConfig, b.ko.Spec.TrainingJobDefinition.ResourceConfig) {\n\t\t\tdelta.Add(\"Spec.TrainingJobDefinition.ResourceConfig\", a.ko.Spec.TrainingJobDefinition.ResourceConfig, b.ko.Spec.TrainingJobDefinition.ResourceConfig)\n\t\t} else if a.ko.Spec.TrainingJobDefinition.ResourceConfig != nil && b.ko.Spec.TrainingJobDefinition.ResourceConfig != nil {\n\t\t\tif ackcompare.HasNilDifference(a.ko.Spec.TrainingJobDefinition.ResourceConfig.InstanceCount, b.ko.Spec.TrainingJobDefinition.ResourceConfig.InstanceCount) {\n\t\t\t\tdelta.Add(\"Spec.TrainingJobDefinition.ResourceConfig.InstanceCount\", a.ko.Spec.TrainingJobDefinition.ResourceConfig.InstanceCount, b.ko.Spec.TrainingJobDefinition.ResourceConfig.InstanceCount)\n\t\t\t} else if a.ko.Spec.TrainingJobDefinition.ResourceConfig.InstanceCount != nil && b.ko.Spec.TrainingJobDefinition.ResourceConfig.InstanceCount != nil {\n\t\t\t\tif *a.ko.Spec.TrainingJobDefinition.ResourceConfig.InstanceCount != *b.ko.Spec.TrainingJobDefinition.ResourceConfig.InstanceCount {\n\t\t\t\t\tdelta.Add(\"Spec.TrainingJobDefinition.ResourceConfig.InstanceCount\", a.ko.Spec.TrainingJobDefinition.ResourceConfig.InstanceCount, b.ko.Spec.TrainingJobDefinition.ResourceConfig.InstanceCount)\n\t\t\t\t}\n\t\t\t}\n\t\t\tif ackcompare.HasNilDifference(a.ko.Spec.TrainingJobDefinition.ResourceConfig.InstanceType, b.ko.Spec.TrainingJobDefinition.ResourceConfig.InstanceType) {\n\t\t\t\tdelta.Add(\"Spec.TrainingJobDefinition.ResourceConfig.InstanceType\", a.ko.Spec.TrainingJobDefinition.ResourceConfig.InstanceType, b.ko.Spec.TrainingJobDefinition.ResourceConfig.InstanceType)\n\t\t\t} else if a.ko.Spec.TrainingJobDefinition.ResourceConfig.InstanceType != nil && b.ko.Spec.TrainingJobDefinition.ResourceConfig.InstanceType != nil {\n\t\t\t\tif *a.ko.Spec.TrainingJobDefinition.ResourceConfig.InstanceType != 
*b.ko.Spec.TrainingJobDefinition.ResourceConfig.InstanceType {\n\t\t\t\t\tdelta.Add(\"Spec.TrainingJobDefinition.ResourceConfig.InstanceType\", a.ko.Spec.TrainingJobDefinition.ResourceConfig.InstanceType, b.ko.Spec.TrainingJobDefinition.ResourceConfig.InstanceType)\n\t\t\t\t}\n\t\t\t}\n\t\t\tif ackcompare.HasNilDifference(a.ko.Spec.TrainingJobDefinition.ResourceConfig.VolumeKMSKeyID, b.ko.Spec.TrainingJobDefinition.ResourceConfig.VolumeKMSKeyID) {\n\t\t\t\tdelta.Add(\"Spec.TrainingJobDefinition.ResourceConfig.VolumeKMSKeyID\", a.ko.Spec.TrainingJobDefinition.ResourceConfig.VolumeKMSKeyID, b.ko.Spec.TrainingJobDefinition.ResourceConfig.VolumeKMSKeyID)\n\t\t\t} else if a.ko.Spec.TrainingJobDefinition.ResourceConfig.VolumeKMSKeyID != nil && b.ko.Spec.TrainingJobDefinition.ResourceConfig.VolumeKMSKeyID != nil {\n\t\t\t\tif *a.ko.Spec.TrainingJobDefinition.ResourceConfig.VolumeKMSKeyID != *b.ko.Spec.TrainingJobDefinition.ResourceConfig.VolumeKMSKeyID {\n\t\t\t\t\tdelta.Add(\"Spec.TrainingJobDefinition.ResourceConfig.VolumeKMSKeyID\", a.ko.Spec.TrainingJobDefinition.ResourceConfig.VolumeKMSKeyID, b.ko.Spec.TrainingJobDefinition.ResourceConfig.VolumeKMSKeyID)\n\t\t\t\t}\n\t\t\t}\n\t\t\tif ackcompare.HasNilDifference(a.ko.Spec.TrainingJobDefinition.ResourceConfig.VolumeSizeInGB, b.ko.Spec.TrainingJobDefinition.ResourceConfig.VolumeSizeInGB) {\n\t\t\t\tdelta.Add(\"Spec.TrainingJobDefinition.ResourceConfig.VolumeSizeInGB\", a.ko.Spec.TrainingJobDefinition.ResourceConfig.VolumeSizeInGB, b.ko.Spec.TrainingJobDefinition.ResourceConfig.VolumeSizeInGB)\n\t\t\t} else if a.ko.Spec.TrainingJobDefinition.ResourceConfig.VolumeSizeInGB != nil && b.ko.Spec.TrainingJobDefinition.ResourceConfig.VolumeSizeInGB != nil {\n\t\t\t\tif *a.ko.Spec.TrainingJobDefinition.ResourceConfig.VolumeSizeInGB != *b.ko.Spec.TrainingJobDefinition.ResourceConfig.VolumeSizeInGB {\n\t\t\t\t\tdelta.Add(\"Spec.TrainingJobDefinition.ResourceConfig.VolumeSizeInGB\", a.ko.Spec.TrainingJobDefinition.ResourceConfig.VolumeSizeInGB, b.ko.Spec.TrainingJobDefinition.ResourceConfig.VolumeSizeInGB)\n\t\t\t\t}\n\t\t\t}\n\t\t}\n\t\tif ackcompare.HasNilDifference(a.ko.Spec.TrainingJobDefinition.RoleARN, b.ko.Spec.TrainingJobDefinition.RoleARN) {\n\t\t\tdelta.Add(\"Spec.TrainingJobDefinition.RoleARN\", a.ko.Spec.TrainingJobDefinition.RoleARN, b.ko.Spec.TrainingJobDefinition.RoleARN)\n\t\t} else if a.ko.Spec.TrainingJobDefinition.RoleARN != nil && b.ko.Spec.TrainingJobDefinition.RoleARN != nil {\n\t\t\tif *a.ko.Spec.TrainingJobDefinition.RoleARN != *b.ko.Spec.TrainingJobDefinition.RoleARN {\n\t\t\t\tdelta.Add(\"Spec.TrainingJobDefinition.RoleARN\", a.ko.Spec.TrainingJobDefinition.RoleARN, b.ko.Spec.TrainingJobDefinition.RoleARN)\n\t\t\t}\n\t\t}\n\t\tif ackcompare.HasNilDifference(a.ko.Spec.TrainingJobDefinition.StaticHyperParameters, b.ko.Spec.TrainingJobDefinition.StaticHyperParameters) {\n\t\t\tdelta.Add(\"Spec.TrainingJobDefinition.StaticHyperParameters\", a.ko.Spec.TrainingJobDefinition.StaticHyperParameters, b.ko.Spec.TrainingJobDefinition.StaticHyperParameters)\n\t\t} else if a.ko.Spec.TrainingJobDefinition.StaticHyperParameters != nil && b.ko.Spec.TrainingJobDefinition.StaticHyperParameters != nil {\n\t\t\tif !ackcompare.MapStringStringPEqual(a.ko.Spec.TrainingJobDefinition.StaticHyperParameters, b.ko.Spec.TrainingJobDefinition.StaticHyperParameters) {\n\t\t\t\tdelta.Add(\"Spec.TrainingJobDefinition.StaticHyperParameters\", a.ko.Spec.TrainingJobDefinition.StaticHyperParameters, 
b.ko.Spec.TrainingJobDefinition.StaticHyperParameters)\n\t\t\t}\n\t\t}\n\t\tif ackcompare.HasNilDifference(a.ko.Spec.TrainingJobDefinition.StoppingCondition, b.ko.Spec.TrainingJobDefinition.StoppingCondition) {\n\t\t\tdelta.Add(\"Spec.TrainingJobDefinition.StoppingCondition\", a.ko.Spec.TrainingJobDefinition.StoppingCondition, b.ko.Spec.TrainingJobDefinition.StoppingCondition)\n\t\t} else if a.ko.Spec.TrainingJobDefinition.StoppingCondition != nil && b.ko.Spec.TrainingJobDefinition.StoppingCondition != nil {\n\t\t\tif ackcompare.HasNilDifference(a.ko.Spec.TrainingJobDefinition.StoppingCondition.MaxRuntimeInSeconds, b.ko.Spec.TrainingJobDefinition.StoppingCondition.MaxRuntimeInSeconds) {\n\t\t\t\tdelta.Add(\"Spec.TrainingJobDefinition.StoppingCondition.MaxRuntimeInSeconds\", a.ko.Spec.TrainingJobDefinition.StoppingCondition.MaxRuntimeInSeconds, b.ko.Spec.TrainingJobDefinition.StoppingCondition.MaxRuntimeInSeconds)\n\t\t\t} else if a.ko.Spec.TrainingJobDefinition.StoppingCondition.MaxRuntimeInSeconds != nil && b.ko.Spec.TrainingJobDefinition.StoppingCondition.MaxRuntimeInSeconds != nil {\n\t\t\t\tif *a.ko.Spec.TrainingJobDefinition.StoppingCondition.MaxRuntimeInSeconds != *b.ko.Spec.TrainingJobDefinition.StoppingCondition.MaxRuntimeInSeconds {\n\t\t\t\t\tdelta.Add(\"Spec.TrainingJobDefinition.StoppingCondition.MaxRuntimeInSeconds\", a.ko.Spec.TrainingJobDefinition.StoppingCondition.MaxRuntimeInSeconds, b.ko.Spec.TrainingJobDefinition.StoppingCondition.MaxRuntimeInSeconds)\n\t\t\t\t}\n\t\t\t}\n\t\t\tif ackcompare.HasNilDifference(a.ko.Spec.TrainingJobDefinition.StoppingCondition.MaxWaitTimeInSeconds, b.ko.Spec.TrainingJobDefinition.StoppingCondition.MaxWaitTimeInSeconds) {\n\t\t\t\tdelta.Add(\"Spec.TrainingJobDefinition.StoppingCondition.MaxWaitTimeInSeconds\", a.ko.Spec.TrainingJobDefinition.StoppingCondition.MaxWaitTimeInSeconds, b.ko.Spec.TrainingJobDefinition.StoppingCondition.MaxWaitTimeInSeconds)\n\t\t\t} else if a.ko.Spec.TrainingJobDefinition.StoppingCondition.MaxWaitTimeInSeconds != nil && b.ko.Spec.TrainingJobDefinition.StoppingCondition.MaxWaitTimeInSeconds != nil {\n\t\t\t\tif *a.ko.Spec.TrainingJobDefinition.StoppingCondition.MaxWaitTimeInSeconds != *b.ko.Spec.TrainingJobDefinition.StoppingCondition.MaxWaitTimeInSeconds {\n\t\t\t\t\tdelta.Add(\"Spec.TrainingJobDefinition.StoppingCondition.MaxWaitTimeInSeconds\", a.ko.Spec.TrainingJobDefinition.StoppingCondition.MaxWaitTimeInSeconds, b.ko.Spec.TrainingJobDefinition.StoppingCondition.MaxWaitTimeInSeconds)\n\t\t\t\t}\n\t\t\t}\n\t\t}\n\t\tif ackcompare.HasNilDifference(a.ko.Spec.TrainingJobDefinition.TuningObjective, b.ko.Spec.TrainingJobDefinition.TuningObjective) {\n\t\t\tdelta.Add(\"Spec.TrainingJobDefinition.TuningObjective\", a.ko.Spec.TrainingJobDefinition.TuningObjective, b.ko.Spec.TrainingJobDefinition.TuningObjective)\n\t\t} else if a.ko.Spec.TrainingJobDefinition.TuningObjective != nil && b.ko.Spec.TrainingJobDefinition.TuningObjective != nil {\n\t\t\tif ackcompare.HasNilDifference(a.ko.Spec.TrainingJobDefinition.TuningObjective.MetricName, b.ko.Spec.TrainingJobDefinition.TuningObjective.MetricName) {\n\t\t\t\tdelta.Add(\"Spec.TrainingJobDefinition.TuningObjective.MetricName\", a.ko.Spec.TrainingJobDefinition.TuningObjective.MetricName, b.ko.Spec.TrainingJobDefinition.TuningObjective.MetricName)\n\t\t\t} else if a.ko.Spec.TrainingJobDefinition.TuningObjective.MetricName != nil && b.ko.Spec.TrainingJobDefinition.TuningObjective.MetricName != nil {\n\t\t\t\tif *a.ko.Spec.TrainingJobDefinition.TuningObjective.MetricName != 
*b.ko.Spec.TrainingJobDefinition.TuningObjective.MetricName {\n\t\t\t\t\tdelta.Add(\"Spec.TrainingJobDefinition.TuningObjective.MetricName\", a.ko.Spec.TrainingJobDefinition.TuningObjective.MetricName, b.ko.Spec.TrainingJobDefinition.TuningObjective.MetricName)\n\t\t\t\t}\n\t\t\t}\n\t\t\tif ackcompare.HasNilDifference(a.ko.Spec.TrainingJobDefinition.TuningObjective.Type, b.ko.Spec.TrainingJobDefinition.TuningObjective.Type) {\n\t\t\t\tdelta.Add(\"Spec.TrainingJobDefinition.TuningObjective.Type\", a.ko.Spec.TrainingJobDefinition.TuningObjective.Type, b.ko.Spec.TrainingJobDefinition.TuningObjective.Type)\n\t\t\t} else if a.ko.Spec.TrainingJobDefinition.TuningObjective.Type != nil && b.ko.Spec.TrainingJobDefinition.TuningObjective.Type != nil {\n\t\t\t\tif *a.ko.Spec.TrainingJobDefinition.TuningObjective.Type != *b.ko.Spec.TrainingJobDefinition.TuningObjective.Type {\n\t\t\t\t\tdelta.Add(\"Spec.TrainingJobDefinition.TuningObjective.Type\", a.ko.Spec.TrainingJobDefinition.TuningObjective.Type, b.ko.Spec.TrainingJobDefinition.TuningObjective.Type)\n\t\t\t\t}\n\t\t\t}\n\t\t}\n\t\tif ackcompare.HasNilDifference(a.ko.Spec.TrainingJobDefinition.VPCConfig, b.ko.Spec.TrainingJobDefinition.VPCConfig) {\n\t\t\tdelta.Add(\"Spec.TrainingJobDefinition.VPCConfig\", a.ko.Spec.TrainingJobDefinition.VPCConfig, b.ko.Spec.TrainingJobDefinition.VPCConfig)\n\t\t} else if a.ko.Spec.TrainingJobDefinition.VPCConfig != nil && b.ko.Spec.TrainingJobDefinition.VPCConfig != nil {\n\t\t\tif !ackcompare.SliceStringPEqual(a.ko.Spec.TrainingJobDefinition.VPCConfig.SecurityGroupIDs, b.ko.Spec.TrainingJobDefinition.VPCConfig.SecurityGroupIDs) {\n\t\t\t\tdelta.Add(\"Spec.TrainingJobDefinition.VPCConfig.SecurityGroupIDs\", a.ko.Spec.TrainingJobDefinition.VPCConfig.SecurityGroupIDs, b.ko.Spec.TrainingJobDefinition.VPCConfig.SecurityGroupIDs)\n\t\t\t}\n\t\t\tif !ackcompare.SliceStringPEqual(a.ko.Spec.TrainingJobDefinition.VPCConfig.Subnets, b.ko.Spec.TrainingJobDefinition.VPCConfig.Subnets) {\n\t\t\t\tdelta.Add(\"Spec.TrainingJobDefinition.VPCConfig.Subnets\", a.ko.Spec.TrainingJobDefinition.VPCConfig.Subnets, b.ko.Spec.TrainingJobDefinition.VPCConfig.Subnets)\n\t\t\t}\n\t\t}\n\t}\n\tif !reflect.DeepEqual(a.ko.Spec.TrainingJobDefinitions, b.ko.Spec.TrainingJobDefinitions) {\n\t\tdelta.Add(\"Spec.TrainingJobDefinitions\", a.ko.Spec.TrainingJobDefinitions, b.ko.Spec.TrainingJobDefinitions)\n\t}\n\tif ackcompare.HasNilDifference(a.ko.Spec.WarmStartConfig, b.ko.Spec.WarmStartConfig) {\n\t\tdelta.Add(\"Spec.WarmStartConfig\", a.ko.Spec.WarmStartConfig, b.ko.Spec.WarmStartConfig)\n\t} else if a.ko.Spec.WarmStartConfig != nil && b.ko.Spec.WarmStartConfig != nil {\n\t\tif !reflect.DeepEqual(a.ko.Spec.WarmStartConfig.ParentHyperParameterTuningJobs, b.ko.Spec.WarmStartConfig.ParentHyperParameterTuningJobs) {\n\t\t\tdelta.Add(\"Spec.WarmStartConfig.ParentHyperParameterTuningJobs\", a.ko.Spec.WarmStartConfig.ParentHyperParameterTuningJobs, b.ko.Spec.WarmStartConfig.ParentHyperParameterTuningJobs)\n\t\t}\n\t\tif ackcompare.HasNilDifference(a.ko.Spec.WarmStartConfig.WarmStartType, b.ko.Spec.WarmStartConfig.WarmStartType) {\n\t\t\tdelta.Add(\"Spec.WarmStartConfig.WarmStartType\", a.ko.Spec.WarmStartConfig.WarmStartType, b.ko.Spec.WarmStartConfig.WarmStartType)\n\t\t} else if a.ko.Spec.WarmStartConfig.WarmStartType != nil && b.ko.Spec.WarmStartConfig.WarmStartType != nil {\n\t\t\tif *a.ko.Spec.WarmStartConfig.WarmStartType != *b.ko.Spec.WarmStartConfig.WarmStartType {\n\t\t\t\tdelta.Add(\"Spec.WarmStartConfig.WarmStartType\", 
a.ko.Spec.WarmStartConfig.WarmStartType, b.ko.Spec.WarmStartConfig.WarmStartType)\n\t\t\t}\n\t\t}\n\t}\n\n\treturn delta\n}", "func (c *Context) Histogram(stat string, value float64) {\n\tfor _, sink := range c.sinks {\n\t\tsink.Histogram(c, stat, value)\n\t}\n}", "func createHistogram(data []float64, n int) {\n\tp, err := plot.New()\n\tif err != nil {\n\t\tpanic(err)\n\t}\n\tp.Add(plotter.NewGrid())\n\thistdata := valuer{data}\n\tp.Add(plotter.NewHist(histdata, n))\n\tp.X.Label.Text = \"time / ps\"\n\tp.Y.Label.Text = \"frequency\"\n\tp.Title.Text = fmt.Sprintf(\"Frequency of lifetime data from lifetime.txt. %v bins.\", n)\n\n\tif err := p.Save(5, 5, fmt.Sprintf(\"out/Histogram with %v bins.png\", n)); err != nil {\n\t\tpanic(err)\n\t}\n}", "func BenchmarkFibHeap_Merge(b *testing.B) {\n\theap1 := NewFloatFibHeap()\n\theap2 := NewFloatFibHeap()\n\n\tfor i := 0; i < b.N; i++ {\n\t\theap1.Enqueue(2 * 1e10 * (rand.Float64() - 0.5))\n\t\theap2.Enqueue(2 * 1e10 * (rand.Float64() - 0.5))\n\t\t_, err := heap1.Merge(&heap2)\n\t\tassert.NoError(b, err)\n\t}\n}", "func (block *Block) CalculateHistogram(x, y int) {\n\n\tblock.Histogram = make([]int, 256)\n\tblock.Histogram.Generate(block)\n\tblock.Histogram.Clip(block.Picture.ClipLimit)\n\tblock.Histogram.Map(int(block.Picture.ColorMin), int(block.Picture.ColorMax), block.Picture.BlockWidth, block.Picture.BlockHeight)\n\n\t// Notify SubBlocks waiting for this block that we're ready!\n\tfor _, subBlock := range block.Notify {\n\t\tsubBlock.WaitGroup.Done()\n\t}\n\n}", "func HistogramFixedWidth(scope *Scope, values tf.Output, value_range tf.Output, nbins tf.Output, optional ...HistogramFixedWidthAttr) (out tf.Output) {\n\tif scope.Err() != nil {\n\t\treturn\n\t}\n\tattrs := map[string]interface{}{}\n\tfor _, a := range optional {\n\t\ta(attrs)\n\t}\n\topspec := tf.OpSpec{\n\t\tType: \"HistogramFixedWidth\",\n\t\tInput: []tf.Input{\n\t\t\tvalues, value_range, nbins,\n\t\t},\n\t\tAttrs: attrs,\n\t}\n\top := scope.AddOperation(opspec)\n\treturn op.Output(0)\n}", "func (h *Histogram) Clone() *Histogram {\n\tcopy := NewHistogram(h.Offset, h.Divider)\n\tcopy.CopyFrom(h)\n\treturn copy\n}", "func getHistogramSamples(name string,\n\tlabels []*prometheus_models.LabelPair,\n\ttimestampMs int64,\n\th *prometheus_models.Histogram,\n\tentity string,\n) []Sample {\n\tsamples := make([]Sample, len(h.GetBucket())*2+2)\n\tsamples[0] = Sample{\n\t\tname: name + \"_count\",\n\t\tlabels: labels,\n\t\ttimestampMs: timestampMs,\n\t\tvalue: strconv.FormatUint(h.GetSampleCount(), 10),\n\t\tentity: entity,\n\t}\n\tsamples[1] = Sample{\n\t\tname: name + \"_sum\",\n\t\tlabels: labels,\n\t\ttimestampMs: timestampMs,\n\t\tvalue: strconv.FormatFloat(h.GetSampleSum(), 'E', -1, 64),\n\t\tentity: entity,\n\t}\n\tfor i, b := range h.GetBucket() {\n\t\tsamples[i+2] = Sample{\n\t\t\tname: fmt.Sprintf(\"%s_bucket_%d_le\", name, i),\n\t\t\tlabels: labels,\n\t\t\ttimestampMs: timestampMs,\n\t\t\tvalue: strconv.FormatFloat(b.GetUpperBound(), 'E', -1, 64),\n\t\t\tentity: entity,\n\t\t}\n\t\tsamples[i+3] = Sample{\n\t\t\tname: fmt.Sprintf(\"%s_bucket_%d_count\", name, i),\n\t\t\tlabels: labels,\n\t\t\ttimestampMs: timestampMs,\n\t\t\tvalue: strconv.FormatUint(b.GetCumulativeCount(), 10),\n\t\t\tentity: entity,\n\t\t}\n\t}\n\treturn samples\n}", "func (c *Aggregator) Merge(oa export.Aggregator, desc *metric.Descriptor) error {\n\to, _ := oa.(*Aggregator)\n\tif o == nil {\n\t\treturn aggregator.NewInconsistentAggregatorError(c, oa)\n\t}\n\tc.value.AddNumber(desc.NumberKind(), o.value)\n\treturn 
nil\n}", "func estimateHistMinMax(dp pmetric.HistogramDataPoint) (float64, float64) {\n\tbounds := dp.MExplicitBounds()\n\tcounts := dp.MBucketCounts()\n\n\t// Because we do not know the actual min and max, we estimate them based on the min and max non-empty bucket\n\tminIdx, maxIdx := -1, -1\n\tfor y := 0; y < len(counts); y++ {\n\t\tif counts[y] > 0 {\n\t\t\tif minIdx == -1 {\n\t\t\t\tminIdx = y\n\t\t\t}\n\t\t\tmaxIdx = y\n\t\t}\n\t}\n\n\tif minIdx == -1 || maxIdx == -1 {\n\t\treturn 0, 0\n\t}\n\n\tvar min, max float64\n\n\t// Use lower bound for min unless it is the first bucket which has no lower bound, then use upper\n\tif minIdx == 0 {\n\t\tmin = bounds[minIdx]\n\t} else {\n\t\tmin = bounds[minIdx-1]\n\t}\n\n\t// Use upper bound for max unless it is the last bucket which has no upper bound, then use lower\n\tif maxIdx == len(counts)-1 {\n\t\tmax = bounds[maxIdx-1]\n\t} else {\n\t\tmax = bounds[maxIdx]\n\t}\n\n\treturn min, max\n}", "func NewHistogram(nanoseconds []float64, opts *HistogramOptions) *Histogram {\n\tif opts.BinCount <= 0 {\n\t\tpanic(\"binCount must be larger than 0\")\n\t}\n\n\thist := &Histogram{}\n\thist.Width = 40\n\thist.Bins = make([]HistogramBin, opts.BinCount)\n\tif len(nanoseconds) == 0 {\n\t\treturn hist\n\t}\n\n\tnanoseconds = append(nanoseconds[:0:0], nanoseconds...)\n\tsort.Float64s(nanoseconds)\n\n\thist.Minimum = nanoseconds[0]\n\thist.Maximum = nanoseconds[len(nanoseconds)-1]\n\n\thist.Average = float64(0)\n\tfor _, x := range nanoseconds {\n\t\thist.Average += x\n\t}\n\thist.Average /= float64(len(nanoseconds))\n\n\tp := func(p float64) float64 {\n\t\ti := int(math.Round(p * float64(len(nanoseconds))))\n\t\tif i < 0 {\n\t\t\ti = 0\n\t\t}\n\t\tif i >= len(nanoseconds) {\n\t\t\ti = len(nanoseconds) - 1\n\t\t}\n\t\treturn nanoseconds[i]\n\t}\n\n\thist.P50, hist.P90, hist.P99, hist.P999, hist.P9999 = p(0.50), p(0.90), p(0.99), p(0.999), p(0.9999)\n\n\tclampMaximum := hist.Maximum\n\tif opts.ClampPercentile > 0 {\n\t\tclampMaximum = p(opts.ClampPercentile)\n\t}\n\tif opts.ClampMaximum > 0 {\n\t\tclampMaximum = opts.ClampMaximum\n\t}\n\n\tvar minimum, spacing float64\n\n\tif opts.NiceRange {\n\t\tminimum, spacing = calculateNiceSteps(hist.Minimum, clampMaximum, opts.BinCount)\n\t} else {\n\t\tminimum, spacing = calculateSteps(hist.Minimum, clampMaximum, opts.BinCount)\n\t}\n\n\tfor i := range hist.Bins {\n\t\thist.Bins[i].Start = spacing*float64(i) + minimum\n\t}\n\thist.Bins[0].Start = hist.Minimum\n\n\tfor _, x := range nanoseconds {\n\t\tk := int(float64(x-minimum) / spacing)\n\t\tif k < 0 {\n\t\t\tk = 0\n\t\t}\n\t\tif k >= opts.BinCount {\n\t\t\tk = opts.BinCount - 1\n\t\t\thist.Bins[k].andAbove = true\n\t\t}\n\t\thist.Bins[k].Count++\n\t}\n\n\tmaxBin := 0\n\tfor _, bin := range hist.Bins {\n\t\tif bin.Count > maxBin {\n\t\t\tmaxBin = bin.Count\n\t\t}\n\t}\n\n\tfor k := range hist.Bins {\n\t\tbin := &hist.Bins[k]\n\t\tbin.Width = float64(bin.Count) / float64(maxBin)\n\t}\n\n\treturn hist\n}", "func (b Bucket) splitBucket(splitPoint, lowerBound int64) (Bucket, Bucket) {\n\tupperBound := (int64)(*b.UpperBound.(*tree.DInt))\n\n\t// The bucket size calculation has a -1 because NumRange does not\n\t// include values equal to UpperBound.\n\tbucketSize := upperBound - lowerBound - 1\n\tif bucketSize <= 0 {\n\t\tpanic(\"empty bucket should have been skipped\")\n\t}\n\n\tif splitPoint >= upperBound || splitPoint <= lowerBound {\n\t\tpanic(fmt.Sprintf(\"splitPoint (%d) must be between UpperBound (%d) and lowerBound (%d)\",\n\t\t\tsplitPoint, upperBound, 
lowerBound))\n\t}\n\n\t// Make the lower bucket.\n\tlowerMatchSize := splitPoint - lowerBound - 1\n\tlowerNumRange := (int64)(float64(b.NumRange) * float64(lowerMatchSize) / float64(bucketSize))\n\tlowerNumEq := (int64)(float64(b.NumRange) / float64(bucketSize))\n\tbucLower := Bucket{NumEq: lowerNumEq, NumRange: lowerNumRange, UpperBound: makeDatum(splitPoint)}\n\n\t// Make the upper bucket.\n\tupperMatchSize := upperBound - splitPoint - 1\n\tbucUpper := b\n\tbucUpper.NumRange = (int64)(float64(b.NumRange) * float64(upperMatchSize) / float64(bucketSize))\n\n\treturn bucLower, bucUpper\n}", "func (datadog *Datadog) Histogram(name string, startTime time.Time, tags []string) error {\n\telapsedTime := time.Since(startTime).Seconds() * 1000\n\terr := datadog.client.Histogram(name, elapsedTime, tags, float64(1))\n\tif err != nil {\n\t\treturn err\n\t}\n\treturn nil\n}", "func (tt *telloTrackT) deriveScale() (scale float32) {\n\tscale = 1.0 // minimum scale value\n\tif tt.maxX > scale {\n\t\tscale = tt.maxX\n\t}\n\tif -tt.minX > scale {\n\t\tscale = -tt.minX\n\t}\n\tif tt.maxY > scale {\n\t\tscale = tt.maxY\n\t}\n\tif -tt.minY > scale {\n\t\tscale = -tt.minY\n\t}\n\tscale = float32(math.Ceil(float64(scale)))\n\treturn scale\n}", "func (h *Histogram) Export() *HistogramData {\n\tvar res HistogramData\n\tres.Count = h.Counter.Count\n\tres.Min = h.Counter.Min\n\tres.Max = h.Counter.Max\n\tres.Sum = h.Counter.Sum\n\tres.Avg = h.Counter.Avg()\n\tres.StdDev = h.Counter.StdDev()\n\tmultiplier := h.Divider\n\toffset := h.Offset\n\t// calculate the last bucket index\n\tlastIdx := -1\n\tfor i := numBuckets - 1; i >= 0; i-- {\n\t\tif h.Hdata[i] > 0 {\n\t\t\tlastIdx = i\n\t\t\tbreak\n\t\t}\n\t}\n\tif lastIdx == -1 {\n\t\treturn &res\n\t}\n\n\t// previous bucket value:\n\tprev := histogramBucketValues[0]\n\tvar total int64\n\tctrTotal := float64(h.Count)\n\t// export the data of each bucket of the histogram\n\tfor i := 0; i <= lastIdx; i++ {\n\t\tif h.Hdata[i] == 0 {\n\t\t\t// empty bucket: skip it but update prev which is needed for next iter\n\t\t\tif i < numValues {\n\t\t\t\tprev = histogramBucketValues[i]\n\t\t\t}\n\t\t\tcontinue\n\t\t}\n\t\tvar b Bucket\n\t\ttotal += int64(h.Hdata[i])\n\t\tif len(res.Data) == 0 {\n\t\t\t// First entry, start is min\n\t\t\tb.Start = h.Min\n\t\t} else {\n\t\t\tb.Start = multiplier*float64(prev) + offset\n\t\t}\n\t\tb.Percent = 100. 
* float64(total) / ctrTotal\n\t\tif i < numValues {\n\t\t\tcur := histogramBucketValues[i]\n\t\t\tb.End = multiplier*float64(cur) + offset\n\t\t\tprev = cur\n\t\t} else {\n\t\t\t// Last Entry\n\t\t\tb.Start = multiplier*float64(prev) + offset\n\t\t\tb.End = h.Max\n\t\t}\n\t\tb.Count = int64(h.Hdata[i])\n\t\tres.Data = append(res.Data, b)\n\t}\n\tres.Data[len(res.Data)-1].End = h.Max\n\treturn &res\n}", "func d19combineRect(rectA, rectB *d19rectT) d19rectT {\n\tvar newRect d19rectT\n\n\tfor index := 0; index < d19numDims; index++ {\n\t\tnewRect.min[index] = d19fmin(rectA.min[index], rectB.min[index])\n\t\tnewRect.max[index] = d19fmax(rectA.max[index], rectB.max[index])\n\t}\n\n\treturn newRect\n}", "func scale(val float64, min float64, max float64, outMin float64, outMax float64) float64 {\r\n\tdenom := 1.0\r\n\ty := 0.0\r\n\tif outMin - min != 0 {\r\n\t\tdenom = outMin - min\r\n\t\ty = (outMax - max) / denom * val - min + outMin\r\n\t} else {\r\n\t\ty = outMax / max * val - min + outMin\r\n\t}\r\n\treturn y\r\n}", "func NewHistogram(opts HistogramOptions) *Histogram {\n\tif opts.NumBuckets == 0 {\n\t\topts.NumBuckets = 32\n\t}\n\tif opts.BaseBucketSize == 0.0 {\n\t\topts.BaseBucketSize = 1.0\n\t}\n\th := Histogram{\n\t\topts: opts,\n\t\tbuckets: make([]bucketInternal, opts.NumBuckets),\n\t\tcount: newCounter(),\n\t\tsum: newCounter(),\n\t\tsumOfSquares: newCounter(),\n\t\ttracker: newTracker(),\n\n\t\tlogBaseBucketSize: math.Log(opts.BaseBucketSize),\n\t\toneOverLogOnePlusGrowthFactor: 1 / math.Log(1+opts.GrowthFactor),\n\t}\n\tm := 1.0 + opts.GrowthFactor\n\tdelta := opts.BaseBucketSize\n\th.buckets[0].lowBound = float64(opts.MinValue)\n\th.buckets[0].count = newCounter()\n\tfor i := 1; i < opts.NumBuckets; i++ {\n\t\th.buckets[i].lowBound = float64(opts.MinValue) + delta\n\t\th.buckets[i].count = newCounter()\n\t\tdelta = delta * m\n\t}\n\treturn &h\n}", "func histHelper(expFormat string, labels ...interface{}) *metrics.Histogram {\n\treturn metrics.GetOrCreateHistogram(fmt.Sprintf(expFormat, labels...))\n}", "func (bh* BinomialHeap) Merge(other *BinomialHeap) {\n bh.size += other.size\n\n for _, child := range nodeIterator(other.forest_head) {\n removeFromLinkedList(&other.forest_head, child)\n bh.insert(child)\n }\n}", "func d18combineRect(rectA, rectB *d18rectT) d18rectT {\n\tvar newRect d18rectT\n\n\tfor index := 0; index < d18numDims; index++ {\n\t\tnewRect.min[index] = d18fmin(rectA.min[index], rectB.min[index])\n\t\tnewRect.max[index] = d18fmax(rectA.max[index], rectB.max[index])\n\t}\n\n\treturn newRect\n}", "func mockStatsHistogram(id int64, values []types.Datum, repeat int64, tp *types.FieldType) *statistics.Histogram {\n\tndv := len(values)\n\thistogram := statistics.NewHistogram(id, int64(ndv), 0, 0, tp, ndv, 0)\n\tfor i := 0; i < ndv; i++ {\n\t\thistogram.AppendBucket(&values[i], &values[i], repeat*int64(i+1), repeat)\n\t}\n\treturn histogram\n}", "func (heap SkewHeap) Merge(other SkewHeap) *SkewHeap {\n\tready := make(chan bool, 2)\n\n\tvar rootA, rootB *skewNode\n\tvar sizeA, sizeB int\n\n\t// Because each heap may be used by other go routines, locking their mutexes\n\t// and copying their contents is done in another routine, and this thread\n\t// blocks on receiving a signal from the locking thread. 
This helps to avoid\n\t// unnecessary blocking by attempting to lock two mutexes serially.\n\n\tgo func() {\n\t\theap.lock()\n\t\tsizeA = heap.Size()\n\t\trootA = heap.root.copyNode()\n\t\theap.unlock()\n\t\tready <- true\n\t}()\n\n\tgo func() {\n\t\tother.lock()\n\t\tsizeB = other.Size()\n\t\trootB = other.root.copyNode()\n\t\tother.unlock()\n\t\tready <- true\n\t}()\n\n\t// Wait on copies to be made\n\t<-ready\n\t<-ready\n\n\tnewHeap := New()\n\tnewHeap.size += sizeA + sizeB\n\tnewHeap.root = rootA.merge(rootB)\n\n\treturn newHeap\n}", "func (c *Client) Histogram(stat string, value int, rate float64) error {\n\treturn c.send(stat, rate, \"%d|ms\", value)\n}", "func d14combineRect(rectA, rectB *d14rectT) d14rectT {\n\tvar newRect d14rectT\n\n\tfor index := 0; index < d14numDims; index++ {\n\t\tnewRect.min[index] = d14fmin(rectA.min[index], rectB.min[index])\n\t\tnewRect.max[index] = d14fmax(rectA.max[index], rectB.max[index])\n\t}\n\n\treturn newRect\n}", "func (b *BarChart) DataRange() (xmin, xmax, ymin, ymax float64) {\n\n// fmt.Println (\"DataRange start \" )\n\n\n\tcatMin := b.XMin\n\tcatMax := catMin + float64(len(b.Values)-1)\n\n\tvalMin := math.Inf(1)\n\tvalMax := math.Inf(-1)\n\tfor i, val := range b.Values {\n\t\tvalBot := b.stackedOn.BarHeight(i)\n\t\tvalTop := valBot + val\n\t\tvalMin = math.Min(valMin, math.Min(valBot, valTop))\n\t\tvalMax = math.Max(valMax, math.Max(valBot, valTop))\n\t}\n\tif !b.Horizontal { // case normal\n\t\treturn catMin, catMax, valMin, valMax\n\t}\n//\tfmt.Println (\"DataRange valMin\" ,valMin)\n// fmt.Println (\"DataRange valMax\" ,valMax)\n// fmt.Println (\"DataRange catMin\" ,catMin)\n// fmt.Println (\"DataRange catMax\" ,catMax)\n\n// fmt.Println (\"DataRange end \" )\n\n\treturn valMin, valMax, catMin, catMax\n}", "func (a ValueAggregation) Union(other ValueAggregation) ValueAggregation {\r\n\treturn ValueAggregation{\r\n\t\tFirstValue: a.FirstValue,\r\n\t\tLastValue: other.LastValue,\r\n\t\tSampleCount: a.SampleCount + other.SampleCount,\r\n\t\tMaximum: maxFloat(a.Maximum, other.Maximum),\r\n\t\tMinimum: minFloat(a.Minimum, other.Minimum),\r\n\t\tSum: a.Sum + other.Sum,\r\n\t\tSumSquare: a.SumSquare + other.SumSquare,\r\n\t\tBuckets: bucketMerge(a.Buckets, other.Buckets),\r\n\t}\r\n}", "func (ms HistogramBucket) CopyTo(dest HistogramBucket) {\n\tif ms.IsNil() {\n\t\t*dest.orig = nil\n\t\treturn\n\t}\n\tif dest.IsNil() {\n\t\tdest.InitEmpty()\n\t}\n\tdest.SetCount(ms.Count())\n\tms.Exemplar().CopyTo(dest.Exemplar())\n}", "func (c *Aggregator) Merge(oa aggregator.Aggregator, desc *sdkapi.Descriptor) error {\n\to, _ := oa.(*Aggregator)\n\tif o == nil {\n\t\treturn aggregator.NewInconsistentAggregatorError(c, oa)\n\t}\n\n\tc.samples = combine(c.samples, o.samples)\n\treturn nil\n}", "func (p *Provider) Histogram(name string, value float64, tags map[string]string) error {\n\treturn p.client.Histogram(name, value, p.formatTags(tags), p.rate)\n}", "func blend(c1, c2 uint8, ratio float64) uint8 {\n\treturn uint8(math.Floor((float64(c1)*(1.0-ratio) + float64(c2)*ratio) + 0.5))\n}", "func (c *Aggregator) Merge(oa export.Aggregator, desc *metric.Descriptor) error {\n\to, _ := oa.(*Aggregator)\n\tif o == nil {\n\t\treturn aggregator.NewInconsistentAggregatorError(c, oa)\n\t}\n\n\tc.state.sum.AddNumber(desc.NumberKind(), o.state.sum)\n\tc.state.count += o.state.count\n\n\tfor i := 0; i < len(c.state.bucketCounts); i++ {\n\t\tc.state.bucketCounts[i] += o.state.bucketCounts[i]\n\t}\n\treturn nil\n}", "func (s *salaryStats) merge(salary *salaryStats) 
{\n\ts.employeeCount += salary.employeeCount\n\ts.total += salary.total\n\ts.average = s.total / float64(s.employeeCount)\n\n\tif len(s.lowest) == 0 || (len(salary.lowest) > 0 && salary.lowest[0].salary < s.lowest[0].salary) {\n\t\ts.lowest = append([]*employee{}, salary.lowest...)\n\t} else if (len(s.lowest) > 0 && len(salary.lowest) > 0) && s.lowest[0].salary == salary.lowest[0].salary {\n\t\ts.lowest = append(s.lowest, salary.lowest...)\n\t}\n\n\tif len(s.biggest) == 0 || (len(salary.biggest) > 0 && salary.biggest[0].salary > s.biggest[0].salary) {\n\t\ts.biggest = append([]*employee{}, salary.biggest...)\n\t} else if (len(s.biggest) > 0 && len(salary.biggest) > 0) && s.biggest[0].salary == salary.biggest[0].salary {\n\t\ts.biggest = append(s.biggest, salary.biggest...)\n\t}\n}", "func (this *ViewScanner) MergeRange(newRange *ViewRange) {\n\tlog.Printf(\"considering new range %v\", newRange)\n\tfor _, r := range this.ranges {\n\t\t// if the new range is a subset of an existing range\n\t\t// this new ragne replaces it\n\t\tif newRange.IsSubsetOf(r) {\n\t\t\tlog.Printf(\"new is subset, shrinking\")\n\t\t\tr.Start = newRange.Start\n\t\t\tr.End = newRange.End\n\t\t\treturn\n\t\t}\n\n\t\tif r.IsSubsetOf(newRange) {\n\t\t\tlog.Printf(\"old subset, dropping\")\n\t\t\t// drop this factor\n\t\t\t// we already have a stricter range that contains it\n\t\t\treturn\n\t\t}\n\t}\n\n\tlog.Printf(\"couldn't merge, so add\")\n\t// if we got here, we add it as a new range\n\tthis.ranges = append(this.ranges, newRange)\n}", "func NewHashRange(range1 uint64, range2 uint64) HashRange {\n\thashRange := HashRange{}\n\thashRange.RangeMin = uint64(math.Min(float64(range1), float64(range2)))\n\thashRange.RangeMax = uint64(math.Max(float64(range1), float64(range2)))\n\treturn hashRange\n}", "func NewHistogram(name, help string, cutoffs []int64) *Histogram {\n\tlabels := make([]string, len(cutoffs)+1)\n\tfor i, v := range cutoffs {\n\t\tlabels[i] = fmt.Sprintf(\"%d\", v)\n\t}\n\tlabels[len(labels)-1] = \"inf\"\n\treturn NewGenericHistogram(name, help, cutoffs, labels, \"Count\", \"Total\")\n}", "func init() {\n\tval2Bucket = make([]int, maxArrayValue)\n\tmaxArrayValueIndex = -1\n\tfor i, value := range histogramBucketValues {\n\t\tif value == maxArrayValue {\n\t\t\tmaxArrayValueIndex = i\n\t\t\tbreak\n\t\t}\n\t}\n\tif maxArrayValueIndex == -1 {\n\t\tlog.Fatalf(\"Bug boundary maxArrayValue=%d not found in bucket list %v\", maxArrayValue, histogramBucketValues)\n\t}\n\tidx := 0\n\tfor i := int32(0); i < maxArrayValue; i++ {\n\t\tif i >= histogramBucketValues[idx] {\n\t\t\tidx++\n\t\t}\n\t\tval2Bucket[i] = idx\n\t}\n\t// coding bug detection (aka impossible if it works once) until 1000\n\tif idx != maxArrayValueIndex {\n\t\tlog.Fatalf(\"Bug in creating histogram index idx %d vs index %d up to %d\", idx, int(maxArrayValue), maxArrayValue)\n\t}\n}", "func (n *node) chooseSplitAxis() int { //TODO Make the code prettier\n\t//[CSA 1]\n\t//Entries sorted by Latitude\n\tS_lat := 0.000000 //used to determine the best axis to split on\n\tbestK_lat := 0 //used to determine the best distribution\n\tminOverlap_lat := -1.000000\n\tbest_area_lat := -1.000000\n\tsortByLat := make([]entry, len(n.entries)) // len(sortByLat) == len(n.entries) is needed for copy to work\n\tcopy(sortByLat, n.entries)\n\tsort.Sort(byLat(sortByLat))\n\n\t//Entries sorted by Longitude\n\tS_long := 0.000000 //used to determine the best axis to split on\n\tbestK_long := 0 //used to determine the best distribution\n\tminOverlap_long := 
-1.000000\n\tbest_area_long := -1.000000\n\tsort.Sort(byLong(n.entries))\n\n\t//For each axis: M - 2m + 2 distributions of the M+1 entries into two groups are determined\n\td := (RTree_M - (2 * RTree_m) + 2)\n\tfor k := 1; k <= d; k++ {\n\t\t//By Latitude\n\t\tLatGroup1 := make([]entry, (RTree_m - 1 + k))\n\t\tLatGroup2 := make([]entry, (RTree_M - len(LatGroup1) + 1))\n\t\tcopy(LatGroup1, sortByLat[:RTree_m-1+k])\n\t\tcopy(LatGroup2, sortByLat[RTree_m-1+k:])\n\t\tlatGoodness := marginOf(LatGroup1) + marginOf(LatGroup2)\n\t\tS_lat += latGoodness\n\t\t// test if this distribution has the best overlap value for latitude\n\t\tmbr1 := mbrOf(LatGroup1...)\n\t\tmbr2 := mbrOf(LatGroup2...)\n\t\tif o := mbr1.OverlapWith(mbr2); o <= minOverlap_lat || minOverlap_lat == -1 {\n\t\t\tif o < minOverlap_lat || minOverlap_lat == -1 {\n\t\t\t\tbestK_lat = k //we have a new best\n\t\t\t\tminOverlap_lat = o\n\t\t\t\tbest_area_lat = mbr1.Area() + mbr2.Area()\n\t\t\t} else { //tie -> keep the distribution with the least area\n\t\t\t\ta_now := mbr1.Area() + mbr2.Area()\n\t\t\t\tif a_now < best_area_lat {\n\t\t\t\t\tbestK_lat = k //we have a new best\n\t\t\t\t\tminOverlap_lat = o\n\t\t\t\t\tbest_area_lat = mbr1.Area() + mbr2.Area()\n\t\t\t\t}\n\t\t\t}\n\t\t} //else don't change the value\n\n\t\t//By Longitude\n\t\tLongGroup1 := make([]entry, (RTree_m - 1 + k))\n\t\tLongGroup2 := make([]entry, (RTree_M - len(LongGroup1) + 1))\n\t\tcopy(LongGroup1, n.entries[:RTree_m-1+k])\n\t\tcopy(LongGroup2, n.entries[RTree_m-1+k:])\n\t\tlongGoodness := marginOf(LongGroup1) + marginOf(LongGroup2)\n\t\tS_long += longGoodness\n\t\t// test if this distribution has the best overlap value for longitude\n\t\tmbr1 = mbrOf(LongGroup1...)\n\t\tmbr2 = mbrOf(LongGroup2...)\n\t\tif o := mbr1.OverlapWith(mbr2); o <= minOverlap_long || minOverlap_long == -1 {\n\t\t\tif o < minOverlap_long || minOverlap_long == -1 {\n\t\t\t\tbestK_long = k //we have a new best\n\t\t\t\tminOverlap_long = o\n\t\t\t\tbest_area_long = mbr1.Area() + mbr2.Area()\n\t\t\t} else { //tie -> keep the distribution with the least area\n\t\t\t\ta_now := mbr1.Area() + mbr2.Area()\n\t\t\t\tif a_now < best_area_long {\n\t\t\t\t\tbestK_long = k //we have a new best\n\t\t\t\t\tminOverlap_long = o\n\t\t\t\t\tbest_area_long = mbr1.Area() + mbr2.Area()\n\t\t\t\t}\n\t\t\t}\n\t\t} //else don't change the value\n\t}\n\t//CSA2: Choose the axis with the minimum S as split axis\n\tif S_lat < S_long {\n\t\tn.entries = sortByLat\n\t\treturn bestK_lat\n\t}\n\treturn bestK_long\n}", "func Merge(a, b AABB) AABB {\n\treturn AABB{\n\t\tf.Min(a.L, b.L), f.Min(a.B, b.B),\n\t\tf.Max(a.R, b.R), f.Max(a.T, b.T),\n\t}\n}", "func conferenceSizesHistogram(conferencesData []int) (conferenceSizesHistogram map[float64]uint64, sum uint64) {\n\tvar sizes = make(map[float64]uint64)\n\n\t//calculate sum for histogram\n\tsum = 0\n\tfor _, v := range conferencesData {\n\t\tsum += uint64(v)\n\t}\n\n\t//for the histgram buckets we need to omit the last field b/c the +inf bucket is added automatically\n\tconferencesData = conferencesData[:len(conferencesData)-1]\n\n\t//the bucket values have to be cumulative\n\tvar i int\n\tfor i = len(conferencesData) - 1; i >= 0; i-- {\n\t\tvar cumulative int\n\t\tvar j int\n\t\tfor j = i; j >= 0; j-- {\n\t\t\tcumulative += conferencesData[j]\n\t\t}\n\t\tconferencesData[i] = cumulative\n\t}\n\n\tfor i, v := range conferencesData {\n\t\tsizes[float64(i)] = uint64(v)\n\t}\n\n\treturn sizes, sum\n}", "func prepareSplitDescs(\n\tctx context.Context,\n\tst 
*cluster.Settings,\n\trightRangeID roachpb.RangeID,\n\tsplitKey roachpb.RKey,\n\texpiration hlc.Timestamp,\n\tleftDesc *roachpb.RangeDescriptor,\n) (*roachpb.RangeDescriptor, *roachpb.RangeDescriptor) {\n\t// Create right hand side range descriptor.\n\trightDesc := roachpb.NewRangeDescriptor(rightRangeID, splitKey, leftDesc.EndKey, leftDesc.Replicas())\n\n\t// Init updated version of existing range descriptor.\n\t{\n\t\ttmp := *leftDesc\n\t\tleftDesc = &tmp\n\t}\n\n\tleftDesc.IncrementGeneration()\n\tleftDesc.EndKey = splitKey\n\n\t// Set the generation of the right hand side descriptor to match that of the\n\t// (updated) left hand side. See the comment on the field for an explanation\n\t// of why generations are useful.\n\trightDesc.Generation = leftDesc.Generation\n\n\tsetStickyBit(rightDesc, expiration)\n\treturn leftDesc, rightDesc\n}", "func (e *dataUsageEntry) merge(other dataUsageEntry) {\n\te.Objects += other.Objects\n\te.Versions += other.Versions\n\te.Size += other.Size\n\tors := other.ReplicationStats\n\tempty := replicationStats{}\n\tif ors != nil && *ors != empty {\n\t\tif e.ReplicationStats == nil {\n\t\t\te.ReplicationStats = &replicationStats{}\n\t\t}\n\t\te.ReplicationStats.PendingSize += other.ReplicationStats.PendingSize\n\t\te.ReplicationStats.FailedSize += other.ReplicationStats.FailedSize\n\t\te.ReplicationStats.ReplicatedSize += other.ReplicationStats.ReplicatedSize\n\t\te.ReplicationStats.ReplicaSize += other.ReplicationStats.ReplicaSize\n\t\te.ReplicationStats.PendingCount += other.ReplicationStats.PendingCount\n\t\te.ReplicationStats.FailedCount += other.ReplicationStats.FailedCount\n\n\t}\n\n\tfor i, v := range other.ObjSizes[:] {\n\t\te.ObjSizes[i] += v\n\t}\n}", "func mergeUpdate(p, q *SlugResponse, t float64) SlugResponsePlusPlus {\n\t// Make of map of strings\n\t// to buses\n\tmb := map[string]Data{}\n\t// Loop through first\n\t// ping\n\tfor _, bus := range *p {\n\t\t// Map the bus ID to the\n\t\t// bus datastructure\n\t\tmb[bus.ID] = bus\n\t}\n\t// Prepare a result\n\tresult := SlugResponsePlusPlus{}\n\t// Loop through the second ping\n\tfor _, pingTwoBus := range *q {\n\t\t// Make a bus with angles and speed\n\t\td := DataPlusPlus{}\n\t\t// Add the buses' data to the bus++?\n\t\td.Data = pingTwoBus\n\t\t// Check if the current bus exists in ping one\n\t\tif pingOneBus, contains := mb[d.ID]; contains {\n\t\t\t// If it does, calculate its distance, speed , and angle\n\t\t\tdistance := geo.Dist(pingOneBus.Lat, pingOneBus.Lon, pingTwoBus.Lat, pingTwoBus.Lon)\n\t\t\td.Speed = geo.Speed(distance, t)\n\t\t\td.Angle = geo.Dir(pingOneBus.Lat, pingOneBus.Lon, pingTwoBus.Lat, pingTwoBus.Lon)\n\t\t}\n\t\t// push the bus to the result\n\t\tresult = append(result, d)\n\t}\n\treturn result\n}", "func (c *Aggregator) Update(_ context.Context, number number.Number, desc *metric.Descriptor) error {\n\tkind := desc.NumberKind()\n\tasFloat := number.CoerceToFloat64(kind)\n\n\tbucketID := len(c.boundaries)\n\tfor i, boundary := range c.boundaries {\n\t\tif asFloat < boundary {\n\t\t\tbucketID = i\n\t\t\tbreak\n\t\t}\n\t}\n\t// Note: Binary-search was compared using the benchmarks. 
The following\n\t// code is equivalent to the linear search above:\n\t//\n\t// bucketID := sort.Search(len(c.boundaries), func(i int) bool {\n\t// return asFloat < c.boundaries[i]\n\t// })\n\t//\n\t// The binary search wins for very large boundary sets, but\n\t// the linear search performs better up through arrays between\n\t// 256 and 512 elements, which is a relatively large histogram, so we\n\t// continue to prefer linear search.\n\n\tc.lock.Lock()\n\tdefer c.lock.Unlock()\n\n\tc.state.count++\n\tc.state.sum.AddNumber(kind, number)\n\tc.state.bucketCounts[bucketID]++\n\n\treturn nil\n}", "func (g *HighAvailabilityGroup) Merge(other *HighAvailabilityGroup) *HighAvailabilityGroup {\n\treturn g\n}", "func (r *JobsService) Histogram(gethistogramrequest *GetHistogramRequest) *JobsHistogramCall {\n\tc := &JobsHistogramCall{s: r.s, urlParams_: make(gensupport.URLParams)}\n\tc.gethistogramrequest = gethistogramrequest\n\treturn c\n}", "func (b *Bucket) Merge(others ...*Bucket) {\n\tfor _, other := range others {\n\t\tif b.Added < other.Added { // Find the maximum added\n\t\t\tb.Added = other.Added\n\t\t}\n\n\t\tif b.Taken < other.Taken { // Find the maximum taken\n\t\t\tb.Taken = other.Taken\n\t\t}\n\n\t\tif b.Last < other.Last { // Find the latest timestamp\n\t\t\tb.Last = other.Last\n\t\t}\n\t}\n}" ]
[ "0.57780266", "0.5698832", "0.5419318", "0.5303487", "0.5286819", "0.51774865", "0.49316067", "0.4912021", "0.48912844", "0.4807278", "0.48058608", "0.4794016", "0.47882244", "0.47707856", "0.47636208", "0.47465336", "0.474399", "0.47368336", "0.46916035", "0.4687006", "0.46804228", "0.46744207", "0.46556926", "0.46384695", "0.46229023", "0.45468", "0.45376226", "0.45306253", "0.45072436", "0.4483753", "0.44752282", "0.445892", "0.44544402", "0.44475597", "0.4439524", "0.44214937", "0.43972456", "0.43915075", "0.4391066", "0.4378954", "0.4364409", "0.4354148", "0.43520814", "0.43469125", "0.43324175", "0.43300208", "0.43203723", "0.4318115", "0.4312771", "0.4309538", "0.43002564", "0.42979574", "0.42928338", "0.42917112", "0.4285542", "0.42838374", "0.42831722", "0.4281287", "0.42806485", "0.4275839", "0.42736825", "0.4259424", "0.42580932", "0.4256719", "0.42479527", "0.42457515", "0.42431876", "0.4242414", "0.42385215", "0.42376274", "0.4228062", "0.42227858", "0.42214936", "0.42195168", "0.42177564", "0.4212716", "0.421081", "0.42016107", "0.42005593", "0.42004818", "0.42003635", "0.41992038", "0.4196498", "0.41895172", "0.4185383", "0.4183898", "0.4170834", "0.41681752", "0.4166983", "0.41586953", "0.41520125", "0.41498396", "0.4148425", "0.41381785", "0.4131248", "0.41307104", "0.41265234", "0.41263157", "0.41227534", "0.4121393" ]
0.7068375
0
Transfer merges the data from src into this Histogram and clears src.
func (h *Histogram) Transfer(src *Histogram) {
	if src.Count == 0 {
		return
	}
	if h.Count == 0 {
		h.CopyFrom(src)
		src.Reset()
		return
	}
	h.copyHDataFrom(src)
	h.Counter.Transfer(&src.Counter)
	src.Reset()
}
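A minimal usage sketch, not part of this dataset row: it assumes the Histogram type above lives in a package imported here as stats (a placeholder path) and that samples are added with a Record method; NewHistogram, Transfer, and the Count field are taken from the snippets in this record.

package main

import (
	"fmt"

	"example.com/stats" // hypothetical import path for the Histogram type above
)

func main() {
	agg := stats.NewHistogram(0, 1)    // offset 0, divider 1
	worker := stats.NewHistogram(0, 1) // e.g. a per-goroutine histogram

	worker.Record(12.5) // assumed sample-recording API
	worker.Record(42)

	// Move the worker's samples into the aggregate; Transfer clears the source.
	agg.Transfer(worker)

	fmt.Println(agg.Count, worker.Count) // prints "2 0": src is reset by Transfer
}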
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "func (h *Histogram) copyHDataFrom(src *Histogram) {\n\tif h.Divider == src.Divider && h.Offset == src.Offset {\n\t\tfor i := 0; i < len(h.Hdata); i++ {\n\t\t\th.Hdata[i] += src.Hdata[i]\n\t\t}\n\t\treturn\n\t}\n\n\thData := src.Export()\n\tfor _, data := range hData.Data {\n\t\th.record((data.Start+data.End)/2, int(data.Count))\n\t}\n}", "func (h *Histogram) CopyFrom(src *Histogram) {\n\th.Counter = src.Counter\n\th.copyHDataFrom(src)\n}", "func (c *Counter) Transfer(src *Counter) {\n\tif src.Count == 0 {\n\t\treturn // nothing to do\n\t}\n\tif c.Count == 0 {\n\t\t*c = *src // copy everything at once\n\t\tsrc.Reset()\n\t\treturn\n\t}\n\tc.Count += src.Count\n\tif src.Min < c.Min {\n\t\tc.Min = src.Min\n\t}\n\tif src.Max > c.Max {\n\t\tc.Max = src.Max\n\t}\n\tc.Sum += src.Sum\n\tc.sumOfSquares += src.sumOfSquares\n\tsrc.Reset()\n}", "func (ms HistogramBucket) CopyTo(dest HistogramBucket) {\n\tif ms.IsNil() {\n\t\t*dest.orig = nil\n\t\treturn\n\t}\n\tif dest.IsNil() {\n\t\tdest.InitEmpty()\n\t}\n\tdest.SetCount(ms.Count())\n\tms.Exemplar().CopyTo(dest.Exemplar())\n}", "func (h *Histogram) Reset() {\n\th.Counter.Reset()\n\t// Leave Offset and Divider alone\n\tfor i := 0; i < len(h.Hdata); i++ {\n\t\th.Hdata[i] = 0\n\t}\n}", "func (ms HistogramDataPoint) CopyTo(dest HistogramDataPoint) {\n\tif ms.IsNil() {\n\t\t*dest.orig = nil\n\t\treturn\n\t}\n\tif dest.IsNil() {\n\t\tdest.InitEmpty()\n\t}\n\tms.LabelsMap().CopyTo(dest.LabelsMap())\n\tdest.SetStartTime(ms.StartTime())\n\tdest.SetTimestamp(ms.Timestamp())\n\tdest.SetCount(ms.Count())\n\tdest.SetSum(ms.Sum())\n\tms.Buckets().CopyTo(dest.Buckets())\n\tdest.SetExplicitBounds(ms.ExplicitBounds())\n}", "func (s *UniformSample) Clear() {\n\ts.mutex.Lock()\n\tdefer s.mutex.Unlock()\n\ts.count = 0\n\ts.values = make([]int64, 0, s.reservoirSize)\n}", "func (o *CompartimentoHistorico) UnsetData() {\n\to.Data.Unset()\n}", "func (c *PNGCopyCounters) Clear() {\n\tc.StoredBytes = 0\n\tif c.CopiedBytes >= c.ToCopy && c.Created {\n\t\tc.Image = nil\n\t\tc.Buffer = nil\n\t}\n}", "func (tt *TtTable) Clear() {\n\t// Create new slice/array - garbage collections takes care of cleanup\n\ttt.data = make([]TtEntry, tt.maxNumberOfEntries, tt.maxNumberOfEntries)\n\ttt.numberOfEntries = 0\n\ttt.Stats = TtStats{}\n}", "func (ds *Dataset) Clear() {\n\tds.min = math.MaxFloat64\n\tds.max = math.SmallestNonzeroFloat64\n\tds.product = 1\n\tds.total = 0\n\tds.recipsum = 0\n\tds.values = ds.values[:0]\n}", "func (b *Buffer) Clear() {\n\tb.series = make(map[string]*influxdb.Series)\n\tb.size = 0\n}", "func (this *channelStruct) Clear() {\n\tthis.samples = make([]float64, 0)\n}", "func (v *Data) UClear() {\n\t*v = (*v)[:0]\n}", "func (ms HistogramBucketExemplar) CopyTo(dest HistogramBucketExemplar) {\n\tif ms.IsNil() {\n\t\t*dest.orig = nil\n\t\treturn\n\t}\n\tif dest.IsNil() {\n\t\tdest.InitEmpty()\n\t}\n\tdest.SetTimestamp(ms.Timestamp())\n\tdest.SetValue(ms.Value())\n\tms.Attachments().CopyTo(dest.Attachments())\n}", "func (es HistogramBucketSlice) CopyTo(dest HistogramBucketSlice) {\n\tnewLen := es.Len()\n\tif newLen == 0 {\n\t\t*dest.orig = []*otlpmetrics.HistogramDataPoint_Bucket(nil)\n\t\treturn\n\t}\n\toldLen := dest.Len()\n\tif newLen <= oldLen {\n\t\t(*dest.orig) = (*dest.orig)[:newLen]\n\t\tfor i, el := range *es.orig {\n\t\t\tnewHistogramBucket(&el).CopyTo(newHistogramBucket(&(*dest.orig)[i]))\n\t\t}\n\t\treturn\n\t}\n\torigs := make([]otlpmetrics.HistogramDataPoint_Bucket, newLen)\n\twrappers := make([]*otlpmetrics.HistogramDataPoint_Bucket, newLen)\n\tfor i, el 
:= range *es.orig {\n\t\twrappers[i] = &origs[i]\n\t\tnewHistogramBucket(&el).CopyTo(newHistogramBucket(&wrappers[i]))\n\t}\n\t*dest.orig = wrappers\n}", "func (v *Data) Clear() {\n\tv.Truncate(0)\n}", "func (queue *Queue) Clear() {\n\tqueue.data = queue.data[:0]\n}", "func (es HistogramDataPointSlice) CopyTo(dest HistogramDataPointSlice) {\n\tnewLen := es.Len()\n\tif newLen == 0 {\n\t\t*dest.orig = []*otlpmetrics.HistogramDataPoint(nil)\n\t\treturn\n\t}\n\toldLen := dest.Len()\n\tif newLen <= oldLen {\n\t\t(*dest.orig) = (*dest.orig)[:newLen]\n\t\tfor i, el := range *es.orig {\n\t\t\tnewHistogramDataPoint(&el).CopyTo(newHistogramDataPoint(&(*dest.orig)[i]))\n\t\t}\n\t\treturn\n\t}\n\torigs := make([]otlpmetrics.HistogramDataPoint, newLen)\n\twrappers := make([]*otlpmetrics.HistogramDataPoint, newLen)\n\tfor i, el := range *es.orig {\n\t\twrappers[i] = &origs[i]\n\t\tnewHistogramDataPoint(&el).CopyTo(newHistogramDataPoint(&wrappers[i]))\n\t}\n\t*dest.orig = wrappers\n}", "func (ht *HashTable) Clear() (err error) {\n\tif err = ht.DataFile.Clear(); err != nil {\n\t\treturn\n\t}\n\tht.calculateNumBuckets()\n\treturn\n}", "func Merge(h1 *Histogram, h2 *Histogram) *Histogram {\n\tdivider := h1.Divider\n\toffset := h1.Offset\n\tif h2.Divider > h1.Divider {\n\t\tdivider = h2.Divider\n\t}\n\tif h2.Offset < h1.Offset {\n\t\toffset = h2.Offset\n\t}\n\tnewH := NewHistogram(offset, divider)\n\tnewH.Transfer(h1)\n\tnewH.Transfer(h2)\n\treturn newH\n}", "func Copy(h *hdrhistogram.Histogram) *hdrhistogram.Histogram {\n\tdup := hdrhistogram.New(h.LowestTrackableValue(), h.HighestTrackableValue(),\n\t\tint(h.SignificantFigures()))\n\tdup.Merge(h)\n\treturn dup\n}", "func (ms HistogramDataPoint) InitEmpty() {\n\t*ms.orig = &otlpmetrics.HistogramDataPoint{}\n}", "func (c *Chunk) Clear() {\n\tc.data = nil\n}", "func (h *NormalHopper) Refill() {\n\tfor _, v := range h.Source {\n\t\tvar copiedCard Card\n\t\tcopiedCard = v // this copies???\n\t\th.Cards = append(h.Cards, copiedCard)\n\t}\n\trand.Shuffle(len(h.Cards), func(i, j int) {\n\t\th.Cards[i], h.Cards[j] = h.Cards[j], h.Cards[i]\n\t})\n}", "func (r *Ring) Clear() {\n\tr.size, r.in, r.out = 0, 0, 0\n}", "func (dst *Hosts) Merge(src Hosts) {\n\tif dst == nil || len(src) == 0 {\n\t\treturn\n\t}\n\n\tcopied := *dst\n\tcopied = append(copied, src...)\n\n\tregistry := map[string]int{}\n\tfor i := len(copied); i > 0; i-- {\n\t\tregistry[copied[i-1].Name] = i - 1\n\t}\n\tunique := copied[:0]\n\tfor i, host := range copied {\n\t\torigin := registry[host.Name]\n\t\tif i == origin {\n\t\t\tunique = append(unique, host)\n\t\t\tcontinue\n\t\t}\n\t\tunique[origin].Merge(host)\n\t}\n\n\t*dst = unique\n}", "func ClearHistogramTransferFile() error {\n\treturn clearHistogramTransferFileByName(histogramTransferFile)\n}", "func (ingest *Ingestion) Clear(start int64, end int64) error {\n\tclear := ingest.DB.DeleteRange\n\n\terr := clear(start, end, \"history_effects\", \"history_operation_id\")\n\tif err != nil {\n\t\treturn err\n\t}\n\terr = clear(start, end, \"history_operation_participants\", \"history_operation_id\")\n\tif err != nil {\n\t\treturn err\n\t}\n\terr = clear(start, end, \"history_operations\", \"id\")\n\tif err != nil {\n\t\treturn err\n\t}\n\terr = clear(start, end, \"history_transaction_participants\", \"history_transaction_id\")\n\tif err != nil {\n\t\treturn err\n\t}\n\terr = clear(start, end, \"history_transactions\", \"id\")\n\tif err != nil {\n\t\treturn err\n\t}\n\terr = clear(start, end, \"history_ledgers\", \"id\")\n\tif err != nil {\n\t\treturn 
err\n\t}\n\n\terr = clear(start, end, \"history_trades\", \"history_operation_id\")\n\tif err != nil {\n\t\treturn err\n\t}\n\n\treturn nil\n}", "func (r *PackageAggRow) ClearCount() { r.Data.Count = nil }", "func (d Data) Del(key uint32) {\n\td.mutex.Lock()\n\tcount := d.counts[key]\n\tcount -= 1\n\tif count < 1 {\n\t\tdelete(d.data, key)\n\t\tdelete(d.counts, key)\n\t} else {\n\t\td.counts[key] = count\n\t}\n\td.mutex.Unlock()\n}", "func (d *Dropping) Reset() {\n\t// We need to create a new slice because the existing one\n\t// may have already been put onto o channel or referenced\n\t// in another way.\n\td.batch = make(Batch, d.maxSize)\n\td.next = 0\n}", "func (rra *RoundRobinArchive) clear() {\n\tif len(rra.dps) > 0 {\n\t\trra.dps = make(map[int64]float64)\n\t}\n}", "func (rc *ReadCache) Clear() {\n\trc.lock.Lock()\n\trc.bins = nil\n\trc.bins = make(map[hash.Hash160][]byte)\n\trc.lock.Unlock()\n}", "func (r *Transformer) flushSource(source string, deleteSource bool) error {\n\tbatch := r.batchMap[source]\n\t// Skip flushing a combined log if the batch is empty\n\tif batch == nil {\n\t\treturn nil\n\t}\n\n\tif len(batch.entries) == 0 {\n\t\tr.removeBatch(source)\n\t\treturn nil\n\t}\n\n\t// Choose which entry we want to keep the rest of the fields from\n\tvar base *entry.Entry\n\tentries := batch.entries\n\n\tif r.overwriteWithOldest {\n\t\tbase = entries[0]\n\t} else {\n\t\tbase = entries[len(entries)-1]\n\t}\n\n\t// Set the recombined field on the entry\n\terr := base.Set(r.combineField, batch.recombined.String())\n\tif err != nil {\n\t\treturn err\n\t}\n\n\tr.Write(context.Background(), base)\n\tif deleteSource {\n\t\tr.removeBatch(source)\n\t} else {\n\t\tbatch.entries = batch.entries[:0]\n\t\tbatch.recombined.Reset()\n\t}\n\n\treturn nil\n}", "func (m *IntervalMutation) ClearDataSource() {\n\tm.cleareddata_source = true\n}", "func (es HistogramDataPointSlice) MoveAndAppendTo(dest HistogramDataPointSlice) {\n\tif es.Len() == 0 {\n\t\t// Just to ensure that we always return a Slice with nil elements.\n\t\t*es.orig = nil\n\t\treturn\n\t}\n\tif dest.Len() == 0 {\n\t\t*dest.orig = *es.orig\n\t\t*es.orig = nil\n\t\treturn\n\t}\n\t*dest.orig = append(*dest.orig, *es.orig...)\n\t*es.orig = nil\n\treturn\n}", "func (b *Buffer) Clear() {\n\tb.currentSize = 0\n\tb.contents = map[entity.Key]inventoryapi.PostDeltaBody{}\n}", "func (ss *SequenceStats) Clear() {\n\tss.Min.Clear()\n\tss.Max.Clear()\n\tss.Variance.Reset()\n}", "func (b *Buf) Reset() { b.b = b.b[:0] }", "func getHistogram(src [][3]int, size float64, pixels *[HistSize][3]float64, hist *[HistSize]float64) {\n\tvar ind, r, g, b, i int\n\tvar inr, ing, inb int\n\n\tfor i = range src {\n\t\tr = src[i][0]\n\t\tg = src[i][1]\n\t\tb = src[i][2]\n\n\t\tinr = r >> Shift\n\t\ting = g >> Shift\n\t\tinb = b >> Shift\n\n\t\tind = (inr << (2 * HistBits)) + (ing << HistBits) + inb\n\t\tpixels[ind][0], pixels[ind][1], pixels[ind][2] = float64(r), float64(g), float64(b)\n\t\thist[ind]++\n\t}\n\n\t// normalize weight by the number of pixels in the image\n\tfor i = 0; i < HistSize; i++ {\n\t\thist[i] /= size\n\t}\n}", "func (r *RunningStats) Clear() {\n\tr.n = 0\n\tr.m1 = 0.0\n\tr.m2 = 0.0\n\tr.m3 = 0.0\n\tr.m4 = 0.0\n}", "func (frac *Fractal) Clear() {\n\tfrac.R = histo.New(frac.Width, frac.Height)\n\tfrac.G = histo.New(frac.Width, frac.Height)\n\tfrac.B = histo.New(frac.Width, frac.Height)\n}", "func (d PacketData) Merge(oth PacketData) {\n\toth.pk.buf.TrimFront(int64(oth.pk.dataOffset()))\n\td.pk.buf.Merge(&oth.pk.buf)\n}", "func (fr *Frame) 
Copy(orig *Frame) {\n\tfr.Status = orig.Status\n\tfor y, row := range orig.Pix {\n\t\tcopy(fr.Pix[y][:], row)\n\t}\n}", "func clearHistogramTransferFileByName(fileName string) error {\n\tfile, err := os.OpenFile(fileName, os.O_RDWR, 0666)\n\tif os.IsNotExist(err) {\n\t\t// File doesn't exist, so it's already truncated.\n\t\treturn nil\n\t}\n\tif err != nil {\n\t\treturn errors.Wrapf(err, \"unable to open %s\", fileName)\n\t}\n\tdefer file.Close()\n\n\tif err := unix.Flock(int(file.Fd()), unix.LOCK_EX); err != nil {\n\t\treturn errors.Wrapf(err, \"unable to lock %s\", fileName)\n\t}\n\tdefer unix.Flock(int(file.Fd()), unix.LOCK_UN)\n\n\tif err := unix.Ftruncate(int(file.Fd()), 0); err != nil {\n\t\treturn errors.Wrapf(err, \"unable to truncate %s\", fileName)\n\t}\n\n\treturn nil\n}", "func (h *MemHash) Reset() {\n\th.buf = h.buf[:0]\n}", "func (e *dataUsageEntry) merge(other dataUsageEntry) {\n\te.Objects += other.Objects\n\te.Versions += other.Versions\n\te.Size += other.Size\n\tors := other.ReplicationStats\n\tempty := replicationStats{}\n\tif ors != nil && *ors != empty {\n\t\tif e.ReplicationStats == nil {\n\t\t\te.ReplicationStats = &replicationStats{}\n\t\t}\n\t\te.ReplicationStats.PendingSize += other.ReplicationStats.PendingSize\n\t\te.ReplicationStats.FailedSize += other.ReplicationStats.FailedSize\n\t\te.ReplicationStats.ReplicatedSize += other.ReplicationStats.ReplicatedSize\n\t\te.ReplicationStats.ReplicaSize += other.ReplicationStats.ReplicaSize\n\t\te.ReplicationStats.PendingCount += other.ReplicationStats.PendingCount\n\t\te.ReplicationStats.FailedCount += other.ReplicationStats.FailedCount\n\n\t}\n\n\tfor i, v := range other.ObjSizes[:] {\n\t\te.ObjSizes[i] += v\n\t}\n}", "func (r *PresampledResampler) Reset() {\n\tr.sampleAggregates = r.sampleAggregates[:0]\n}", "func (h *BasicLandHopper) Refill() {\n\tfor _, v := range h.Source {\n\t\tvar copiedCard Card\n\t\tcopiedCard = v // this copies???\n\t\th.Cards = append(h.Cards, copiedCard)\n\t}\n\t// no need to shuffle\n}", "func (r *BasicResampler) Reset() {\n\tr.sampleAggregates = r.sampleAggregates[:0]\n}", "func (rb *RingBuffer[T]) Clear() {\n\trb.mu.Lock()\n\tdefer rb.mu.Unlock()\n\trb.pos = 0\n\trb.buf = nil\n}", "func (h *FoilHopper) Refill() {\n\tfor _, v := range h.Source {\n\t\tvar copiedCard Card\n\t\tcopiedCard = v // this copies???\n\t\th.Cards = append(h.Cards, copiedCard)\n\t}\n\trand.Shuffle(len(h.Cards), func(i, j int) {\n\t\th.Cards[i], h.Cards[j] = h.Cards[j], h.Cards[i]\n\t})\n}", "func (s *segment) merge(oth *segment) {\n\ts.pkt.Data().Merge(oth.pkt.Data())\n\ts.dataMemSize = s.pkt.MemSize()\n\toth.dataMemSize = oth.pkt.MemSize()\n}", "func (tr *trooper) demerge() {\n\ttr.trash()\n\ttr.addCenter()\n\tfor _, b := range tr.bits {\n\t\tb.reset(b.box().cmax)\n\t}\n\ttr.detach()\n}", "func (s *dataSet) Reset() {\n\ts.dataPtr = len(s.buf)\n\ts.dataWritten = 0\n\ts.idxPtr = 0\n\ts.idxWritten = 0\n}", "func (t *Track) Clean() {\n\tfor i := 0; i < len(t.samples); i++ {\n\t\tt.samples[i] = nil\n\t}\n\tt.samples = t.samples[:0]\n\tt.samples = nil\n}", "func (es HistogramBucketSlice) MoveAndAppendTo(dest HistogramBucketSlice) {\n\tif es.Len() == 0 {\n\t\t// Just to ensure that we always return a Slice with nil elements.\n\t\t*es.orig = nil\n\t\treturn\n\t}\n\tif dest.Len() == 0 {\n\t\t*dest.orig = *es.orig\n\t\t*es.orig = nil\n\t\treturn\n\t}\n\t*dest.orig = append(*dest.orig, *es.orig...)\n\t*es.orig = nil\n\treturn\n}", "func (s *IntSlicer) Clear() {\n\ts.slice = []int{}\n}", "func (c *MockSource) Del(ctx *Context, 
next Next) {\n\t// source cache, like mysql/oss/rdb, etc\n\t// do not delete the data in source cache\n}", "func (b *ringBuf) clearTo(hi uint64) (removedBytes, removedEntries int32) {\n\tif b.len == 0 || hi < first(b).index(b) {\n\t\treturn\n\t}\n\tit, ok := first(b), true\n\tfirstIndex := it.index(b)\n\tfor ok && it.index(b) < hi {\n\t\tremovedBytes += int32(it.entry(b).Size())\n\t\tremovedEntries++\n\t\tit.clear(b)\n\t\tit, ok = it.next(b)\n\t}\n\toffset := int(hi - firstIndex)\n\tif offset > b.len {\n\t\toffset = b.len\n\t}\n\tb.len = b.len - offset\n\tb.head = (b.head + offset) % len(b.buf)\n\tif b.len < (len(b.buf) / shrinkThreshold) {\n\t\trealloc(b, 0, b.len)\n\t}\n\treturn\n}", "func (this *FeedableBuffer) OverwriteHead(bytes []byte) {\n\tcopy(this.Data, bytes)\n}", "func (ms HistogramBucket) InitEmpty() {\n\t*ms.orig = &otlpmetrics.HistogramDataPoint_Bucket{}\n}", "func (h *History) Clear() {\n\th.tmp = make([]string, len(h.histories))\n\tfor i := range h.histories {\n\t\th.tmp[i] = h.histories[i]\n\t}\n\th.tmp = append(h.tmp, \"\")\n\th.selected = len(h.tmp) - 1\n}", "func (c *PNGCopyCounters) Reset() {\n\tc.ToCopy = 400\n\tc.Layers = 0\n\tc.CopiedBytes = 0\n\tc.StoredBytes = 0\n\tside := int(math.Sqrt(float64(c.ToCopy)))\n\tc.Image = image.NewGray(image.Rect(0, 0, side, side))\n\tc.Image.Pix = make([]byte, c.ToCopy)\n\tc.Buffer = &bytes.Buffer{}\n\tc.Buffer.Reset()\n}", "func (dst *Proxies) Merge(src Proxies) {\n\tif dst == nil || len(src) == 0 {\n\t\treturn\n\t}\n\n\tcopied := *dst\n\tcopied = append(copied, src...)\n\n\tregistry := map[string]int{}\n\tfor i := len(copied); i > 0; i-- {\n\t\tregistry[copied[i-1].Name] = i - 1\n\t}\n\tunique := copied[:0]\n\tfor i, proxy := range copied {\n\t\torigin := registry[proxy.Name]\n\t\tif i == origin {\n\t\t\tunique = append(unique, proxy)\n\t\t\tcontinue\n\t\t}\n\t\tunique[origin].Merge(proxy)\n\t}\n\n\t*dst = unique\n}", "func (d *Driver) Clear() {\n\tfor i := 0; i < len(d.buff); i++ {\n\t\td.buff[i] = 0\n\t}\n}", "func (h *MapInt16ToInt8) Clear() {\n\tfor i, e := range h.slots {\n\t\tif e != nil {\n\t\t\tfor e != nil {\n\t\t\t\tn := e.next\n\t\t\t\th.free(e)\n\t\t\t\te = n\n\t\t\t}\n\t\t\th.slots[i] = nil\n\t\t}\n\t}\n\th.used = 0\n}", "func (dst *Proxy) Merge(src Proxy) {\n\tif dst == nil || dst.Name != src.Name {\n\t\treturn\n\t}\n\n\tif src.Enabled != nil {\n\t\tdst.Enabled = src.Enabled\n\t}\n\tdst.Hosts.Merge(src.Hosts)\n}", "func (ms SummaryDataPoint) CopyTo(dest SummaryDataPoint) {\n\tif ms.IsNil() {\n\t\t*dest.orig = nil\n\t\treturn\n\t}\n\tif dest.IsNil() {\n\t\tdest.InitEmpty()\n\t}\n\tms.LabelsMap().CopyTo(dest.LabelsMap())\n\tdest.SetStartTime(ms.StartTime())\n\tdest.SetTimestamp(ms.Timestamp())\n\tdest.SetCount(ms.Count())\n\tdest.SetSum(ms.Sum())\n\tms.ValueAtPercentiles().CopyTo(dest.ValueAtPercentiles())\n}", "func (r *Rx) Flush() {\n\tif r.head == r.tail {\n\t\treturn\n\t}\n\tr.p.writeAtomic(r.tail, slotFree|slotToPayloadSize(r.head-r.tail))\n\tr.tail = r.head\n}", "func (file *File) Redo() {\n\tif file.buffHist == nil {\n\t\treturn\n\t}\n\tbuffer, mc := file.buffHist.Next()\n\tfile.buffer.ReplaceBuffer(buffer)\n\tfile.MultiCursor.ReplaceMC(mc)\n}", "func (i *IQR) Clear() {\n\ti.quantile.Clear()\n}", "func (dst *PostgreSQL) Merge(src *PostgreSQL) {\n\tif dst == nil || src == nil {\n\t\treturn\n\t}\n\n\tif src.Enabled != nil {\n\t\tdst.Enabled = src.Enabled\n\t}\n\tif src.Version != \"\" {\n\t\tdst.Version = src.Version\n\t}\n\tif src.Size != \"\" {\n\t\tdst.Size = src.Size\n\t}\n\n\tif src.Fixtures != nil 
{\n\t\tdst.Fixtures = src.Fixtures\n\t}\n\tif src.OwnName != nil {\n\t\tdst.OwnName = src.OwnName\n\t}\n\n\tif src.DataBus != nil && dst.DataBus == nil {\n\t\tdst.DataBus = new(DataBus)\n\t}\n\tdst.DataBus.Merge(src.DataBus)\n}", "func (ms SummaryValueAtPercentile) CopyTo(dest SummaryValueAtPercentile) {\n\tif ms.IsNil() {\n\t\t*dest.orig = nil\n\t\treturn\n\t}\n\tif dest.IsNil() {\n\t\tdest.InitEmpty()\n\t}\n\tdest.SetPercentile(ms.Percentile())\n\tdest.SetValue(ms.Value())\n}", "func (m *MockHistogram) Merge(other Histogram) {\n\tm.Called(other)\n}", "func (h *MapInt16ToUint8) Clear() {\n\tfor i, e := range h.slots {\n\t\tif e != nil {\n\t\t\tfor e != nil {\n\t\t\t\tn := e.next\n\t\t\t\th.free(e)\n\t\t\t\te = n\n\t\t\t}\n\t\t\th.slots[i] = nil\n\t\t}\n\t}\n\th.used = 0\n}", "func (dst *Host) Merge(src Host) {\n\tif dst == nil || dst.Name != src.Name {\n\t\treturn\n\t}\n\n\tif src.AgentPort != 0 {\n\t\tdst.AgentPort = src.AgentPort\n\t}\n\tif src.Connections != 0 {\n\t\tdst.Connections = src.Connections\n\t}\n\tif src.MaxConns != 0 {\n\t\tdst.MaxConns = src.MaxConns\n\t}\n\tif src.Weight != 0 {\n\t\tdst.Weight = src.Weight\n\t}\n\tif src.Backup != nil {\n\t\tdst.Backup = src.Backup\n\t}\n}", "func (b *Buffer) ClearFrom(o int) {\n\tfor l := len(b.Tiles); o < l; o++ {\n\t\tb.Tiles[o] = nil\n\t}\n}", "func (s *Streams) Clear() {\n\ts.Buffer.Clear()\n}", "func (r *TimeBucketResults) Clear() {\n\tr.buckets = nil\n}", "func (np *NodePool) Clear() {\n\tassert.True(int(np.hashSize) == len(np.first), \"np.HashSize == len(np.First)\")\n\tfor idx := range np.first {\n\t\tnp.first[idx] = nullIdx\n\t}\n\tnp.nodeCount = 0\n}", "func (b *batch) Reset() {\n\tb.batch.Clear()\n\tb.size = 0\n}", "func (b *baseKVStoreBatch) Clear() {\n\tb.mutex.Lock()\n\tdefer b.mutex.Unlock()\n\tb.writeQueue = nil\n\n\tb.fillLock.Lock()\n\tdefer b.fillLock.Unlock()\n\tfor k := range b.fill {\n\t\tdelete(b.fill, k)\n\t}\n}", "func (e *fastGen) Reset() {\n\tif cap(e.hist) < allocHistory {\n\t\te.hist = make([]byte, 0, allocHistory)\n\t}\n\t// We offset current position so everything will be out of reach.\n\t// If we are above the buffer reset it will be cleared anyway since len(hist) == 0.\n\tif e.cur <= bufferReset {\n\t\te.cur += maxMatchOffset + int32(len(e.hist))\n\t}\n\te.hist = e.hist[:0]\n}", "func (h *Histogram) Clone() *Histogram {\n\tcopy := NewHistogram(h.Offset, h.Divider)\n\tcopy.CopyFrom(h)\n\treturn copy\n}", "func (b *batch) Reset() {\n\tb.writes = b.writes[:0]\n\tb.size = 0\n}", "func (a *Agent) flush() {\n\tif len(a.Buf) != 0 {\n\t\ta.Sketch.insert(agentConfig, a.Buf)\n\t\ta.Buf = nil\n\t}\n\n\tif len(a.CountBuf) != 0 {\n\t\ta.Sketch.insertCounts(agentConfig, a.CountBuf)\n\t\ta.CountBuf = nil\n\t}\n}", "func (s *SharedMemorySegment) Clear() {\n\tfor i := 0; i < len(s.data); i++ {\n\t\ts.data[i] = 0\n\t}\n}", "func (c *Concurrent) Clear() {\n\tfor {\n\t\tselect {\n\t\tcase <-c.values:\n\t\tdefault:\n\t\t\treturn\n\t\t}\n\t}\n}", "func (dst *Worker) Merge(src Worker) {\n\tif dst == nil || dst.Name != src.Name {\n\t\treturn\n\t}\n\n\tif src.Enabled != nil {\n\t\tdst.Enabled = src.Enabled\n\t}\n\tif src.Command != \"\" {\n\t\tdst.Command = src.Command\n\t}\n\tif len(src.Commands) > 0 {\n\t\tdst.Commands = strings.Unique(append(dst.Commands, src.Commands...))\n\t}\n\tif src.Replicas != 0 {\n\t\tdst.Replicas = src.Replicas\n\t}\n\tif src.LivenessProbe != \"\" {\n\t\tdst.LivenessProbe = src.LivenessProbe\n\t}\n\tif src.Size != \"\" {\n\t\tdst.Size = src.Size\n\t}\n\n\tif src.Resources != nil && dst.Resources == nil 
{\n\t\tdst.Resources = new(Resources)\n\t}\n\tdst.Resources.Merge(src.Resources)\n}", "func (d *OneToOne) Set(data GenericDataType) {\n\tidx := d.writeIndex % uint64(len(d.buffer))\n\n\tnewBucket := &bucket{\n\t\tdata: data,\n\t\tseq: d.writeIndex,\n\t}\n\td.writeIndex++\n\n\tatomic.StorePointer(&d.buffer[idx], unsafe.Pointer(newBucket))\n}", "func (h *atomicHeadTailIndex) reset() {\n\th.u.Store(0)\n}", "func (b *Buffer) ClearTo(o int) {\n\tfor o = calc.MinInt(o, len(b.Tiles)); o >= 0; o-- {\n\t\tb.Tiles[o] = nil\n\t}\n}", "func Copy(source KVStore, target KVStore) error {\n\n\tvar innerErr error\n\tif err := source.Iterate(EmptyPrefix, func(key, value Value) bool {\n\t\tif err := target.Set(key, value); err != nil {\n\t\t\tinnerErr = err\n\t\t}\n\n\t\treturn innerErr == nil\n\t}); err != nil {\n\t\treturn err\n\t}\n\n\tif innerErr != nil {\n\t\treturn innerErr\n\t}\n\n\treturn target.Flush()\n}", "func (l *List) Clear() {\n\tl.Source = nil\n}", "func (bio *BinaryIO) Copy(dst int64, src int64, count int) error {\n\tbuf := makeBuf(count)\n\tfor count > 0 {\n\t\tbuf = truncBuf(buf, count)\n\t\tbio.ReadAt(src, buf)\n\t\tbio.WriteAt(dst, buf)\n\t\tcount -= len(buf)\n\t\tsrc += int64(len(buf))\n\t\tdst += int64(len(buf))\n\t}\n\treturn nil\n}", "func (g *Grid) Clear() { g.rows = []ui.GridBufferer{} }", "func (h *MapInt16ToInt64) Clear() {\n\tfor i, e := range h.slots {\n\t\tif e != nil {\n\t\t\tfor e != nil {\n\t\t\t\tn := e.next\n\t\t\t\th.free(e)\n\t\t\t\te = n\n\t\t\t}\n\t\t\th.slots[i] = nil\n\t\t}\n\t}\n\th.used = 0\n}", "func (ms SummaryDataPoint) InitEmpty() {\n\t*ms.orig = &otlpmetrics.SummaryDataPoint{}\n}" ]
[ "0.65698063", "0.6046923", "0.56561273", "0.56340873", "0.55626523", "0.54806465", "0.54560703", "0.53647286", "0.53194284", "0.5260919", "0.524572", "0.51967084", "0.5111866", "0.51025105", "0.5102197", "0.5099957", "0.50868666", "0.5073424", "0.5062224", "0.50565904", "0.5014057", "0.5012527", "0.50008947", "0.49653497", "0.49541003", "0.49490115", "0.49425194", "0.49297556", "0.49208528", "0.4890777", "0.4873825", "0.48301157", "0.48161876", "0.48085243", "0.4806533", "0.47933188", "0.47799775", "0.47763023", "0.47757986", "0.47737634", "0.4771118", "0.47706026", "0.47698495", "0.47607693", "0.4759991", "0.47589195", "0.4757012", "0.47518703", "0.47437918", "0.47434953", "0.47382548", "0.4737894", "0.4730853", "0.47301948", "0.47231564", "0.4699262", "0.46975502", "0.46899262", "0.4685924", "0.4670318", "0.4666613", "0.46624696", "0.46533912", "0.46515048", "0.4636421", "0.46291152", "0.4628398", "0.46281105", "0.46212077", "0.46135345", "0.46070138", "0.45995227", "0.45946437", "0.45916477", "0.45887956", "0.45827413", "0.45804462", "0.45803022", "0.45791754", "0.45789406", "0.45572308", "0.4552617", "0.4551966", "0.45518613", "0.4548749", "0.45483607", "0.45430416", "0.45428", "0.45379883", "0.45366627", "0.4536632", "0.4533692", "0.45317915", "0.45261088", "0.4523242", "0.45211416", "0.45210123", "0.4519155", "0.4517922", "0.45135677" ]
0.70465165
0
ParsePercentiles extracts the percentiles from string (flag).
func ParsePercentiles(percentiles string) ([]float64, error) {
	percs := strings.Split(percentiles, ",") // will make a size 1 array for empty input!
	res := make([]float64, 0, len(percs))
	for _, pStr := range percs {
		pStr = strings.TrimSpace(pStr)
		if len(pStr) == 0 {
			continue
		}
		p, err := strconv.ParseFloat(pStr, 64)
		if err != nil {
			return res, err
		}
		res = append(res, p)
	}
	if len(res) == 0 {
		return res, errors.New("list can't be empty")
	}
	log.LogVf("Will use %v for percentiles", res)
	return res, nil
}
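A minimal usage sketch for ParsePercentiles above, shown for illustration only: it assumes the function is compiled into the same main package, and the -p flag name and its default value are invented for the example. It parses a comma-separated percentile flag, relies on the returned error to reject an empty list, and prints the resulting slice.
package main

import (
	"flag"
	"fmt"
	"os"
)

func main() {
	pFlag := flag.String("p", "50,75,90,99", "comma-separated list of percentiles")
	flag.Parse()
	// ParsePercentiles trims blanks, skips empty items, and errors on an empty result.
	pList, err := ParsePercentiles(*pFlag)
	if err != nil {
		fmt.Fprintf(os.Stderr, "bad -p value %q: %v\n", *pFlag, err)
		os.Exit(1)
	}
	fmt.Println("will report percentiles:", pList) // e.g. [50 75 90 99]
}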
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "func PercentilesFilter(percentiles []float64) Filter {\n\ts := make([]string, 0, len(percentiles))\n\tfor _, v := range percentiles {\n\t\ts = append(s, fmt.Sprintf(\"%v\", v))\n\t}\n\tj := strings.Join(s, \",\")\n\treturn Param(\"percentiles\", j)\n}", "func parsePercentage(str string, dest *float64) error {\n\tstr = strings.Replace(str, \"%\", \"\", -1)\n\treturn parseFloat64(str, dest)\n}", "func Percentile(by []string, percentile float64, input []*oproto.ValueStream) []*oproto.ValueStream {\n\toutput := []*oproto.ValueStream{{Variable: input[0].Variable}}\n\treturn output\n}", "func NewPercentiles(label string, values []float64) *Percentiles {\n\tvalues = append(values[:0:0], values...)\n\tsort.Float64s(values)\n\n\tpoints := make([]Point, 0, len(values))\n\n\tmultiplier := 1 / float64(len(values)+1)\n\tfor i, v := range values {\n\t\tvar p Point\n\t\tp.X = float64(i+1) * multiplier\n\t\tp.Y = v\n\t\tpoints = append(points, p)\n\t}\n\n\treturn &Percentiles{\n\t\tLabel: label,\n\t\tData: points,\n\t}\n}", "func (b BundledStates) Percentile(t time.Time, percentile int) time.Duration {\n\tif percentile > 100 || percentile <= 0 {\n\t\tpanic(fmt.Errorf(\"percentile %d is out of scope\", percentile))\n\t}\n\n\tages := []time.Duration{}\n\tfor _, age := range b.ages(t) {\n\t\tages = append(ages, age)\n\t}\n\n\tif len(ages) == 0 {\n\t\treturn 0\n\t}\n\n\tsort.Sort(ByDuration(ages))\n\n\tindex := int(math.Ceil(float64(percentile)*float64(len(ages))/100) - 1)\n\tif index >= len(ages) {\n\t\tpanic(fmt.Errorf(\"Index is out of range: %d/%d\", index, len(ages)))\n\t}\n\treturn ages[index]\n}", "func Percentile(input []float64, percent float64) float64 {\n\tif len(input) == 0 {\n\t\treturn 0\n\t}\n\n\treturn PercentileOfSorted(SortCopy(input), percent)\n}", "func (ms SummaryValueAtPercentile) SetPercentile(v float64) {\n\t(*ms.orig).Percentile = v\n}", "func percentile(data []float64) (p25 float64, p75 float64) {\n\n\tN := float64(len(data))\n\tloc25 := int(.25 * N)\n\tloc75 := int(.75 * N)\n\tsafeData := make([]float64, len(data))\n\tcopy(safeData, data)\n\tsort.Float64s(safeData)\n\n\treturn safeData[loc25], safeData[loc75]\n}", "func Percentile(ts []Transaction, percent float64) float64 {\n\tSort(&ts)\n\ti := int(math.Floor(float64(len(ts)) * percent))\n\tvalue := ts[i].amount\n\treturn value\n}", "func (t *TimerSnapshot) Percentiles(ps []float64) []float64 {\n\treturn t.histogram.Percentiles(ps)\n}", "func percentile(dps Series, args ...float64) (a float64) {\n\tp := args[0]\n\tvar x []float64\n\tfor _, v := range dps {\n\t\tx = append(x, float64(v))\n\t}\n\tsort.Float64s(x)\n\tif p <= 0 {\n\t\treturn x[0]\n\t}\n\tif p >= 1 {\n\t\treturn x[len(x)-1]\n\t}\n\ti := p * float64(len(x)-1)\n\ti = math.Ceil(i)\n\treturn x[int(i)]\n}", "func percentile(orderedObservations []float64, l int, p float64) float64 {\n\treturn orderedObservations[int(p*float64(l))]\n}", "func TestPercentile(t *testing.T) {\n\ttables := []struct{\n\t\tvals []int // input slice\n\t\tpercentile float64 // percentile value\n\t}{\n\t\t{[]int{10, 8, 7, 6, 9, 3, 4, 5, 1, 2},\n\t\t\t0},\n\t\t{[]int{10, 8, 7, 6, 9, 3, 4, 5, 1, 2},\n\t\t\t10},\n\t\t{[]int{10, 8, 7, 6, 9, 3, 4, 5, 1, 2},\n\t\t\t20},\n\t\t{[]int{10, 8, 7, 6, 9, 3, 4, 5, 1, 2},\n\t\t\t30},\n\t\t{[]int{10, 8, 7, 6, 9, 3, 4, 5, 1, 2},\n\t\t\t40},\n\t\t{[]int{10, 8, 7, 6, 9, 3, 4, 5, 1, 2},\n\t\t\t50},\n\t\t{[]int{10, 8, 7, 6, 9, 3, 4, 5, 1, 2},\n\t\t\t60},\n\t\t{[]int{10, 8, 7, 6, 9, 3, 4, 5, 1, 2},\n\t\t\t70},\n\t\t{[]int{10, 8, 7, 6, 9, 3, 4, 5, 1, 
2},\n\t\t\t80},\n\t\t{[]int{10, 8, 7, 6, 9, 3, 4, 5, 1, 2},\n\t\t\t90},\n\t\t{[]int{10, 8, 7, 6, 9, 3, 4, 5, 1, 2},\n\t\t\t100},\n\t\t{[]int{10, 8, 7, 6, 9, 3, 4, 5, 1, 2},\n\t\t\t92},\n\t\t{[]int{10, 8, 7, 6, 9, 3, 4, 5, 1, 2},\n\t\t\t95},\n\t\t{[]int{10, 8, 7, 6, 9, 3, 4, 5, 1, 2},\n\t\t\t99},\n\t}\n\n\tfor _, tt := range tables {\n\t\tperc, _ := Percentile(tt.vals, tt.percentile)\n\t\tfmt.Println(perc)\n\t}\n}", "func (s *ModelLatencyThreshold) SetPercentile(v string) *ModelLatencyThreshold {\n\ts.Percentile = &v\n\treturn s\n}", "func (s *UniformSample) Percentiles(ps []float64) []float64 {\n\ts.mutex.Lock()\n\tdefer s.mutex.Unlock()\n\treturn gometrics.SamplePercentiles(s.values, ps)\n}", "func (t *StandardTimer) Percentiles(ps []float64) []float64 {\n\treturn t.histogram.Percentiles(ps)\n}", "func (t *TimerSnapshot) Percentile(p float64) float64 {\n\treturn t.histogram.Percentile(p)\n}", "func (ms SummaryValueAtPercentile) Percentile() float64 {\n\treturn (*ms.orig).Percentile\n}", "func (NilTimer) Percentiles(ps []float64) []float64 {\n\treturn make([]float64, len(ps))\n}", "func NewPercentileTransform(levels int) *PercentileTransform {\n\tbase := math.Pow(0.1, float64(levels))\n\treturn &PercentileTransform{\n\t\tlevels: levels,\n\t\tbase: base,\n\t\tmulbase: 1 / math.Log(base),\n\t}\n}", "func (e *HistogramData) CalcPercentiles(percentiles []float64) *HistogramData {\n\tif e.Count == 0 {\n\t\treturn e\n\t}\n\tfor _, p := range percentiles {\n\t\te.Percentiles = append(e.Percentiles, Percentile{p, e.CalcPercentile(p)})\n\t}\n\treturn e\n}", "func (c *Collector) Percentile(percent float64) float64 {\n\tc.Lock()\n\tdefer c.Unlock()\n\ttargetCount := int(percent * float64(c.Count) / 100)\n\n\tcount := 0\n\tfor _, b := range c.Buckets {\n\t\tcount += b.Count\n\t\tif count >= targetCount {\n\t\t\treturn b.Max\n\t\t}\n\t}\n\n\treturn c.Max\n}", "func (t *StandardTimer) Percentile(p float64) float64 {\n\treturn t.histogram.Percentile(p)\n}", "func PercentileOfSorted(sortedInput []float64, percent float64) float64 {\n\tindex := (percent / 100.0) * float64(len(sortedInput))\n\tpercentile := float64(0)\n\ti := int(math.RoundToEven(index))\n\tif index == float64(int64(index)) {\n\t\tpercentile = (sortedInput[i-1] + sortedInput[i]) / 2.0\n\t} else {\n\t\tpercentile = sortedInput[i-1]\n\t}\n\n\treturn percentile\n}", "func (m *MockHistogram) Percentile(percentile float64) float64 {\n\targs := m.Called(percentile)\n\treturn args.Get(0).(float64)\n}", "func ReducePercentile(percentile float64) ReduceFunc {\n\treturn func(values []interface{}) interface{} {\n\t\tvar allValues []float64\n\n\t\tfor _, v := range values {\n\t\t\tif v == nil {\n\t\t\t\tcontinue\n\t\t\t}\n\n\t\t\tvals := v.([]interface{})\n\t\t\tfor _, v := range vals {\n\t\t\t\tswitch v.(type) {\n\t\t\t\tcase int64:\n\t\t\t\t\tallValues = append(allValues, float64(v.(int64)))\n\t\t\t\tcase float64:\n\t\t\t\t\tallValues = append(allValues, v.(float64))\n\t\t\t\t}\n\t\t\t}\n\t\t}\n\n\t\tsort.Float64s(allValues)\n\t\tlength := len(allValues)\n\t\tindex := int(math.Floor(float64(length)*percentile/100.0+0.5)) - 1\n\n\t\tif index < 0 || index >= len(allValues) {\n\t\t\treturn nil\n\t\t}\n\n\t\treturn allValues[index]\n\t}\n}", "func Percentile(k float64, arrays ...float64) (float64, error) {\n\n\t// Check if k is a Non Numeric\n\tif math.IsNaN(k) {\n\t\treturn 0.0, errors.New(\"#VALUE!\t-\tOccurred because the supplied value of k is non-numeric\")\n\t}\n\n\tif len(arrays) == 0 || (k < 0 || k > 100) {\n\t\treturn 0.0, 
errors.New(\"#NUM!\t-\tOccurred because the supplied value of k is less than 0 or greater than 100 or the array is empty\")\n\t}\n\n\t// Reorder them from the smallest to the largest\n\tsort.Sort(SmallNums(arrays))\n\n\t// n = \\left \\lceil \\frac{P}{100} \\times N \\right \\rceil\n\n\tn := (k / 100) * float64(len(arrays))\n\n\tmathlib.Round(n, 0)\n\n\treturn arrays[int(n)], nil\n}", "func (preferences *ProductPreferences) GetPercentile(val float32) float32 {\r\n\t//TODO: change to bin search.\r\n\tfor i := len(preferences.Percentiles) - 1; i >= 0; i-- {\r\n\t\tif val > preferences.Percentiles[i] {\r\n\t\t\treturn float32(i+1) / float32(DistributionSize+1)\r\n\t\t}\r\n\t}\r\n\treturn 0\r\n}", "func (e *HistogramData) CalcPercentile(percentile float64) float64 {\n\tif len(e.Data) == 0 {\n\t\tlog.Errf(\"Unexpected call to CalcPercentile(%g) with no data\", percentile)\n\t\treturn 0\n\t}\n\tif percentile >= 100 {\n\t\treturn e.Max\n\t}\n\t// We assume Min is at least a single point so at least covers 1/Count %\n\tpp := 100. / float64(e.Count) // previous percentile\n\tif percentile <= pp {\n\t\treturn e.Min\n\t}\n\tfor _, cur := range e.Data {\n\t\tif percentile <= cur.Percent {\n\t\t\treturn cur.Start + (percentile-pp)/(cur.Percent-pp)*(cur.End-cur.Start)\n\t\t}\n\t\tpp = cur.Percent\n\t}\n\treturn e.Max // not reached\n}", "func (p PercentParser) Parse(str string) error {\n\tif err := p.Float64.Parse(str); err != nil {\n\t\treturn err\n\t}\n\n\t// get value\n\tv := *p.Pointer\n\n\t// do validations\n\tif v < 0 || v > 100 {\n\t\treturn errors.New(\"percent is out of bounds\")\n\t}\n\n\treturn nil\n}", "func (s Sample) Percentile(pctile float64) float64 {\n\tif len(s.Xs) == 0 {\n\t\treturn math.NaN()\n\t} else if pctile <= 0 {\n\t\tmin, _ := s.Bounds()\n\t\treturn min\n\t} else if pctile >= 1 {\n\t\t_, max := s.Bounds()\n\t\treturn max\n\t}\n\n\tif !s.Sorted {\n\t\ts = *s.Copy().Sort()\n\t}\n\n\tN := float64(len(s.Xs))\n\tn := 1/3.0 + pctile*(N+1/3.0) // R8\n\tkf, frac := math.Modf(n)\n\tk := int(kf)\n\tif k <= 0 {\n\t\treturn s.Xs[0]\n\t} else if k >= len(s.Xs) {\n\t\treturn s.Xs[len(s.Xs)-1]\n\t}\n\treturn s.Xs[k-1] + frac*(s.Xs[k]-s.Xs[k-1])\n}", "func (h *PCPHistogram) Percentile(p float64) int64 { return h.h.ValueAtQuantile(p) }", "func parsePagination(pagination string) (Selector, error) {\n\tparts := strings.Split(pagination, \",\")\n\tsort.StringSlice(parts).Sort()\n\tvar page, perPage uint64\n\tpage = 0\n\tperPage = 0\n\tvar err error\n\n\tklog.Infof(\"parse pagination:%s\", pagination)\n\n\tif len(pagination) == 0 {\n\t\treturn nil, nil\n\t}\n\n\tfor _, part := range parts {\n\t\tif part == \"\" {\n\t\t\tcontinue\n\t\t}\n\t\tif lhs, rhs, ok := try(part, \"==\"); ok {\n\t\t\tif lhs == pageKeyName {\n\t\t\t\tpage, err = strconv.ParseUint(rhs, 10, 64)\n\t\t\t} else if lhs == perPageKeyName {\n\t\t\t\tperPage, err = strconv.ParseUint(rhs, 10, 64)\n\t\t\t} else {\n\t\t\t\treturn nil, fmt.Errorf(\"invalid pagination: '%s'; can't understand '%s'\", pagination, part)\n\t\t\t}\n\t\t} else if lhs, rhs, ok := try(part, \"=\"); ok {\n\t\t\tif lhs == pageKeyName {\n\t\t\t\tpage, err = strconv.ParseUint(rhs, 10, 64)\n\t\t\t} else if lhs == perPageKeyName {\n\t\t\t\tperPage, err = strconv.ParseUint(rhs, 10, 64)\n\t\t\t} else {\n\t\t\t\treturn nil, fmt.Errorf(\"invalid pagination: '%s'; can't understand '%s'\", pagination, part)\n\t\t\t}\n\t\t} else {\n\t\t\treturn nil, fmt.Errorf(\"invalid pagination: '%s'; can't understand '%s'\", pagination, part)\n\t\t}\n\n\t\tif err != nil {\n\t\t\treturn 
nil, fmt.Errorf(\"invalid pagination: '%s'; can't understand '%s'\", pagination, part)\n\t\t}\n\t}\n\n\tif page <= 0 || perPage <= 0 {\n\t\treturn nil, fmt.Errorf(\"invalid pagination: '%s'; can't understand the value of page or perPage\", pagination)\n\t}\n\n\t// if perPage <= 0 {\n\t// \tperPage = perPageDefault\n\t// }\n\n\treturn &hasPage{\n\t\titemTotal: 0,\n\t\trequirePagination: [...]uint64{page, perPage},\n\t}, nil\n}", "func makeQuantiles(m *dto.Metric) map[string]interface{} {\n\tfields := make(map[string]interface{})\n\tfor _, q := range m.GetSummary().Quantile {\n\t\tif !math.IsNaN(q.GetValue()) {\n\t\t\t//nolint:unconvert // Conversion may be needed for float64 https://github.com/mdempsky/unconvert/issues/40\n\t\t\tfields[fmt.Sprint(q.GetQuantile())] = float64(q.GetValue())\n\t\t}\n\t}\n\treturn fields\n}", "func ParseHistogram(scale string, histo *dto.Histogram) (buckets HistogramBuckets, err error) {\n\tif histo == nil {\n\t\treturn nil, errors.New(\"nil Histogram\")\n\t}\n\n\ttotal := *histo.SampleCount\n\tn := len(histo.Bucket)\n\n\tbuckets = make(HistogramBuckets, n+1)\n\tbuckets[n] = HistogramBucket{\n\t\tScale: scale,\n\t\tUpperBound: math.MaxFloat64,\n\t\tCount: total,\n\t}\n\tfor idx, bv := range histo.Bucket {\n\t\tbuckets[idx] = HistogramBucket{\n\t\t\tScale: scale,\n\t\t\tUpperBound: *bv.UpperBound,\n\t\t\tCount: *bv.CumulativeCount,\n\t\t}\n\t}\n\tfor idx := n; idx > 0; idx-- { // start from last, end at second to last\n\t\t// convert cumulative count to per-bucket count\n\t\tbuckets[idx].Count = buckets[idx].Count - buckets[idx-1].Count\n\t\t// use previous bucket upper bound as lower bound\n\t\tbuckets[idx].LowerBound = buckets[idx-1].UpperBound\n\t}\n\n\tsort.Sort(HistogramBuckets(buckets))\n\treturn buckets, nil\n}", "func ParseStrings(in ...string) ([]float64, error) {\n\tresult := make([]float64, len(in))\n\tfor i, s := range in {\n\t\tr, err := Parse(s)\n\t\tif err != nil {\n\t\t\treturn []float64{}, err\n\t\t}\n\t\tresult[i] = r\n\t}\n\treturn result, nil\n}", "func (NilTimer) Percentile(p float64) float64 { return 0.0 }", "func ParseMetricString(metricString string) (*Metric, error) {\n\tvar metric = new(Metric)\n\tsampleRate := float64(1.0)\n\n\tdelimeters := [4]rune{\n\t\tRuneColon, // \":\"\n\t\tRunePipe, // \"|\"\n\t\tRunePipe, // \"|\"\n\t\tRuneAt, // \"@\"\n\t}\n\n\tvar metricStrings [5][MaximumMetricLength]rune\n\tdelimeterCount := 0\n\tmetricCount := 0\n\tvar metricRuneCount [5]int\n\tfor _, ch := range metricString {\n\t\tif ch == RuneSpace || ch == RuneNull {\n\t\t\tcontinue\n\t\t}\n\t\tif delimeterCount < len(delimeters) &&\n\t\t\tmetricCount < len(metricStrings)-1 &&\n\t\t\tch == delimeters[delimeterCount] {\n\t\t\tdelimeterCount++\n\t\t\tmetricCount++\n\t\t} else {\n\t\t\tmetricStrings[metricCount][metricRuneCount[metricCount]] = ch\n\t\t\tmetricRuneCount[metricCount]++\n\t\t}\n\t\tif metricRuneCount[metricCount] == MaximumMetricLength {\n\t\t\tbreak\n\t\t}\n\t}\n\n\tif metricRuneCount[0] == 0 ||\n\t\tmetricRuneCount[1] == 0 ||\n\t\tmetricRuneCount[2] == 0 {\n\t\treturn metric, fmt.Errorf(\"Invalid data string, missing elements: '%s'\", metricString)\n\t}\n\n\tvalue, valueErr := strconv.ParseFloat(string(metricStrings[1][:metricRuneCount[1]]), 32)\n\tif valueErr != nil {\n\t\treturn metric, fmt.Errorf(\"Invalid data string, bad value: '%s'\", metricString)\n\t}\n\n\tmetricType, err := getMetricType(string(metricStrings[2][:metricRuneCount[2]]))\n\tif err != nil {\n\t\treturn metric, fmt.Errorf(\"Invalid data string, bad type: '%s'\", 
metricString)\n\t}\n\n\tsample, rateErr := strconv.ParseFloat(string(metricStrings[4][:metricRuneCount[4]]), 32)\n\tif rateErr == nil && sample > float64(0) && sample <= float64(1) {\n\t\tsampleRate = sample\n\t}\n\n\t// If a sample rate was applied, we inflate the hit count to extrapolate\n\t// the actual rate.\n\tif sampleRate < float64(1.0) {\n\t\tmetric.TotalHits = float64(1.0) / sampleRate\n\t} else {\n\t\tmetric.TotalHits = float64(1.0)\n\t}\n\n\t// The string was successfully parsed. Convert to a Metric structure.\n\tmetric.Key = string(metricStrings[0][:metricRuneCount[0]])\n\tmetric.MetricType = metricType\n\tmetric.LastValue = value\n\tmetric.AllValues = append(metric.AllValues, metric.LastValue)\n\tmetric.SampleRate = sampleRate\n\n\treturn metric, nil\n}", "func (src *prometheusMetricsSource) buildQuantiles(name string, m *dto.Metric, now int64, tags map[string]string) []*MetricPoint {\n\tvar result []*MetricPoint\n\tfor _, q := range m.GetSummary().Quantile {\n\t\tif !math.IsNaN(q.GetValue()) {\n\t\t\tpoint := src.metricPoint(name+\".\"+fmt.Sprint(q.GetQuantile()), float64(q.GetValue()), now, src.source, tags)\n\t\t\tresult = src.filterAppend(result, point)\n\t\t}\n\t}\n\tpoint := src.metricPoint(name+\".count\", float64(m.GetSummary().GetSampleCount()), now, src.source, tags)\n\tresult = src.filterAppend(result, point)\n\tpoint = src.metricPoint(name+\".sum\", float64(m.GetSummary().GetSampleSum()), now, src.source, tags)\n\tresult = src.filterAppend(result, point)\n\n\treturn result\n}", "func ParseSorts(request *http.Request, filterMapping map[string]string) []SortInfo {\n\tvar sortMap = make([]SortInfo, 0)\n\tquery := request.URL.Query().Get(\"sorts\")\n\tif query == \"\" {\n\t\treturn sortMap\n\t}\n\tsorts := strings.Split(query, \",\")\n\tfor _, sort := range sorts {\n\t\tif strings.HasPrefix(sort, \"-\") {\n\t\t\tvar column = strings.TrimPrefix(sort, \"-\")\n\t\t\tif v, ok := filterMapping[column]; ok {\n\t\t\t\tcolumn = v\n\t\t\t}\n\t\t\tsortMap = append(sortMap, SortInfo{\n\t\t\t\tKey: column,\n\t\t\t\tSort: Desc,\n\t\t\t})\n\t\t} else {\n\t\t\tvar column = strings.TrimPrefix(sort, \"+\")\n\t\t\tif v, ok := filterMapping[column]; ok {\n\t\t\t\tcolumn = v\n\t\t\t}\n\t\t\tsortMap = append(sortMap, SortInfo{\n\t\t\t\tKey: column,\n\t\t\t\tSort: Asc,\n\t\t\t})\n\t\t}\n\t}\n\treturn sortMap\n}", "func parsePacket(\n\tpacket string,\n) ([]Metric, error) {\n\tbits := strings.SplitN(packet, \":\", 2)\n\tif len(bits) != 2 {\n\t\tlog.Infof(\"Error: splitting ':', Unable to parse metric: %s\", packet)\n\t\treturn nil, fmt.Errorf(\"Error Parsing statsd packet\")\n\t}\n\n\tname := bits[0]\n\tmetadata := bits[1]\n\n\tdata := []string{}\n\tvar partialDatum string\n\tbrokensplit := strings.Split(metadata, \":\")\n\t// We need to fix the tag groups that got broken by the : split\n\tfor _, token := range brokensplit {\n\t\tif partialDatum == \"\" {\n\t\t\tpartialDatum = token\n\t\t} else if !strings.Contains(token, \"|\") || (strings.Contains(token, \"@\") && len(strings.Split(token, \"|\")) == 2) {\n\t\t\tpartialDatum += \":\" + token\n\t\t} else {\n\t\t\tdata = append(data, partialDatum)\n\t\t\tpartialDatum = token\n\t\t}\n\t}\n\tdata = append(data, partialDatum)\n\n\tmetrics := []Metric{}\n\tfor _, datum := range data {\n\t\tm := Metric{}\n\t\tm.Name = name\n\t\t// Validate splitting the bit on \"|\"\n\t\tpipesplit := strings.Split(datum, \"|\")\n\t\tif len(pipesplit) < 2 {\n\t\t\tlog.Infof(\"Error: splitting '|', Unable to parse metric: %s\", packet)\n\t\t\treturn nil, 
fmt.Errorf(\"Error parsing statsd packet\")\n\t\t}\n\n\t\t// Set allows value of strings.\n\t\tif pipesplit[1] != \"s\" {\n\t\t\tvalue, err := strconv.ParseFloat(pipesplit[0], 64)\n\t\t\tif err != nil {\n\t\t\t\treturn nil, fmt.Errorf(\"Error parsing value from packet %s: %s\", packet, err.Error())\n\t\t\t}\n\t\t\tm.Value = value\n\t\t}\n\n\t\t// Validate metric type\n\t\tswitch pipesplit[1] {\n\t\tcase \"c\":\n\t\t\tm.Type = \"counter\"\n\t\tcase \"g\":\n\t\t\tm.Type = \"bucketgauge\"\n\t\tcase \"s\":\n\t\t\tm.Type = \"set\"\n\t\t\tm.Value = util.Hash(pipesplit[0])\n\t\tcase \"ms\", \"h\":\n\t\t\tm.Type = \"histogram\"\n\t\tdefault:\n\t\t\tlog.Infof(\"Error: Statsd Metric type %s unsupported\", pipesplit[1])\n\t\t\treturn nil, fmt.Errorf(\"Error Parsing statsd line\")\n\t\t}\n\n\t\tfor _, segment := range pipesplit {\n\t\t\tif strings.Contains(segment, \"@\") && len(segment) > 1 {\n\t\t\t\tsamplerate, err := strconv.ParseFloat(segment[1:], 64)\n\t\t\t\tif err != nil || (samplerate < 0 || samplerate > 1) {\n\t\t\t\t\treturn nil, fmt.Errorf(\"Error: parsing sample rate, %s, it must be in format like: \"+\n\t\t\t\t\t\t\"@0.1, @0.5, etc. Ignoring sample rate for packet: %s\", err.Error(), packet)\n\t\t\t\t}\n\n\t\t\t\t// sample rate successfully parsed\n\t\t\t\tm.Samplerate = samplerate\n\t\t\t} else if len(segment) > 0 && segment[0] == '#' {\n\t\t\t\ttags := strings.Split(segment[1:], \",\")\n\t\t\t\tm.Hostname, m.DeviceName, m.Tags = extractMagicTags(tags)\n\t\t\t}\n\t\t}\n\n\t\tmetrics = append(metrics, m)\n\t}\n\n\treturn metrics, nil\n}", "func (fn *formulaFuncs) PERCENTILE(argsList *list.List) formulaArg {\n\tif argsList.Len() != 2 {\n\t\treturn newErrorFormulaArg(formulaErrorVALUE, \"PERCENTILE requires 2 arguments\")\n\t}\n\tarray := argsList.Front().Value.(formulaArg).ToList()\n\tk := argsList.Back().Value.(formulaArg).ToNumber()\n\tif k.Type != ArgNumber {\n\t\treturn k\n\t}\n\tif k.Number < 0 || k.Number > 1 {\n\t\treturn newErrorFormulaArg(formulaErrorNA, formulaErrorNA)\n\t}\n\tvar numbers []float64\n\tfor _, arg := range array {\n\t\tif arg.Type == ArgError {\n\t\t\treturn arg\n\t\t}\n\t\tif arg.Type == ArgNumber {\n\t\t\tnumbers = append(numbers, arg.Number)\n\t\t}\n\t}\n\tcnt := len(numbers)\n\tsort.Float64s(numbers)\n\tidx := k.Number * (float64(cnt) - 1)\n\tbase := math.Floor(idx)\n\tif idx == base {\n\t\treturn newNumberFormulaArg(numbers[int(idx)])\n\t}\n\tnext := base + 1\n\tproportion := math.Nextafter(idx, idx) - base\n\treturn newNumberFormulaArg(numbers[int(base)] + ((numbers[int(next)] - numbers[int(base)]) * proportion))\n}", "func funcHistogramQuantile(vals []parser.Value, args parser.Expressions, enh *EvalNodeHelper) Vector {\n\tq := vals[0].(Vector)[0].F\n\tinVec := vals[1].(Vector)\n\n\tif enh.signatureToMetricWithBuckets == nil {\n\t\tenh.signatureToMetricWithBuckets = map[string]*metricWithBuckets{}\n\t} else {\n\t\tfor _, v := range enh.signatureToMetricWithBuckets {\n\t\t\tv.buckets = v.buckets[:0]\n\t\t}\n\t}\n\n\tvar histogramSamples []Sample\n\n\tfor _, sample := range inVec {\n\t\t// We are only looking for conventional buckets here. Remember\n\t\t// the histograms for later treatment.\n\t\tif sample.H != nil {\n\t\t\thistogramSamples = append(histogramSamples, sample)\n\t\t\tcontinue\n\t\t}\n\n\t\tupperBound, err := strconv.ParseFloat(\n\t\t\tsample.Metric.Get(model.BucketLabel), 64,\n\t\t)\n\t\tif err != nil {\n\t\t\t// Oops, no bucket label or malformed label value. 
Skip.\n\t\t\t// TODO(beorn7): Issue a warning somehow.\n\t\t\tcontinue\n\t\t}\n\t\tenh.lblBuf = sample.Metric.BytesWithoutLabels(enh.lblBuf, labels.BucketLabel)\n\t\tmb, ok := enh.signatureToMetricWithBuckets[string(enh.lblBuf)]\n\t\tif !ok {\n\t\t\tsample.Metric = labels.NewBuilder(sample.Metric).\n\t\t\t\tDel(excludedLabels...).\n\t\t\t\tLabels()\n\n\t\t\tmb = &metricWithBuckets{sample.Metric, nil}\n\t\t\tenh.signatureToMetricWithBuckets[string(enh.lblBuf)] = mb\n\t\t}\n\t\tmb.buckets = append(mb.buckets, bucket{upperBound, sample.F})\n\n\t}\n\n\t// Now deal with the histograms.\n\tfor _, sample := range histogramSamples {\n\t\t// We have to reconstruct the exact same signature as above for\n\t\t// a conventional histogram, just ignoring any le label.\n\t\tenh.lblBuf = sample.Metric.Bytes(enh.lblBuf)\n\t\tif mb, ok := enh.signatureToMetricWithBuckets[string(enh.lblBuf)]; ok && len(mb.buckets) > 0 {\n\t\t\t// At this data point, we have conventional histogram\n\t\t\t// buckets and a native histogram with the same name and\n\t\t\t// labels. Do not evaluate anything.\n\t\t\t// TODO(beorn7): Issue a warning somehow.\n\t\t\tdelete(enh.signatureToMetricWithBuckets, string(enh.lblBuf))\n\t\t\tcontinue\n\t\t}\n\n\t\tenh.Out = append(enh.Out, Sample{\n\t\t\tMetric: enh.DropMetricName(sample.Metric),\n\t\t\tF: histogramQuantile(q, sample.H),\n\t\t})\n\t}\n\n\tfor _, mb := range enh.signatureToMetricWithBuckets {\n\t\tif len(mb.buckets) > 0 {\n\t\t\tenh.Out = append(enh.Out, Sample{\n\t\t\t\tMetric: mb.metric,\n\t\t\t\tF: bucketQuantile(q, mb.buckets),\n\t\t\t})\n\t\t}\n\t}\n\n\treturn enh.Out\n}", "func (s *UniformSample) Percentile(p float64) float64 {\n\ts.mutex.Lock()\n\tdefer s.mutex.Unlock()\n\treturn gometrics.SamplePercentile(s.values, p)\n}", "func NewPercentilesAxis() *Axis {\n\taxis := NewAxis()\n\taxis.Transform = NewPercentileTransform(5)\n\taxis.Ticks = ManualTicks{\n\t\t{Value: 0, Label: \"0\"},\n\t\t{Value: 0.25, Label: \"25\"},\n\t\t{Value: 0.5, Label: \"50\"},\n\t\t{Value: 0.75, Label: \"75\"},\n\t\t{Value: 0.9, Label: \"90\"},\n\t\t{Value: 0.99, Label: \"99\"},\n\t\t{Value: 0.999, Label: \"99.9\"},\n\t\t{Value: 0.9999, Label: \"99.99\"},\n\t\t{Value: 0.99999, Label: \"99.999\"}}\n\treturn axis\n}", "func defaultQuantileTypeStringFn(quantile float64) []byte {\n\tstr := strconv.FormatFloat(quantile*100, 'f', -1, 64)\n\tidx := strings.Index(str, \".\")\n\tif idx != -1 {\n\t\tstr = str[:idx] + str[idx+1:]\n\t}\n\treturn []byte(\"p\" + str)\n}", "func (app *AppCtx) ConvertPercentageStringToFloat(value string) (float64, error) {\n\tvar f float64\n\tvar err error\n\n\tif value[len(value)-1:] != \"%\" {\n\t\treturn -1.0, fmt.Errorf(\"expected PERCENTAGE value but got %s that did not end with '%%' character\", value)\n\t}\n\tif f, err = strconv.ParseFloat(value[0:len(value)-1], 64); err != nil {\n\t\treturn -1.0, fmt.Errorf(\"expected PERCENTAGE value but got %s with a non-numerical value before the %%\", value)\n\t}\n\tif f < 0 {\n\t\treturn -1.0, fmt.Errorf(\"expected PERCENTAGE value but got %s with a negative percentage\", value)\n\t}\n\tif f > 100 {\n\t\treturn -1.0, fmt.Errorf(\"expected PERCENTAGE value but got %s which is over 100%%\", value)\n\t}\n\treturn f, err\n}", "func HandlePercentile(w http.ResponseWriter, req *http.Request) {\n\tdecoder := json.NewDecoder(req.Body)\n\n\tvar numbers Numbers\n\n\terr := decoder.Decode(&numbers)\n\n\tif err != nil {\n\t\tpanic(err)\n\t}\n\n\t// Prevent division by zero\n\tperc := Percentile(numbers)\n\n\tw.Header().Set(\"Content-Type\", 
\"application/json\")\n\tw.WriteHeader(http.StatusOK)\n\n\tjsonRes, _ := json.Marshal(&Response{Result: perc})\n\n\tw.Write(jsonRes)\n}", "func parseRangeString(r string) (start, end, total int64) {\n\tfmt.Sscanf(r, \"bytes %d-%d/%d\", &start, &end, &total)\n\n\tif total != 0 && end > total {\n\t\tend = total\n\t}\n\tif start >= end {\n\t\tstart = 0\n\t\tend = 0\n\t}\n\n\treturn\n}", "func parseRangeSet(rs string) []Range {\n\tset := make([]Range, 0)\n\tfor _, s := range strings.Split(rs, \",\") {\n\t\tif len(s) == 0 {\n\t\t\treturn nil\n\t\t}\n\t\tp := strings.Split(s, \":\")\n\t\tif len(p) > 2 {\n\t\t\treturn nil\n\t\t}\n\t\tstart, err := strconv.ParseUint(p[0], 10, 32)\n\t\tif err != nil {\n\t\t\treturn nil\n\t\t}\n\t\tend := uint64(0)\n\t\tinfinite := false\n\t\tif len(p) > 1 {\n\t\t\tif p[1] == \"*\" {\n\t\t\t\tinfinite = true\n\t\t\t} else {\n\t\t\t\tend, err = strconv.ParseUint(p[1], 10, 32)\n\t\t\t\tif err != nil {\n\t\t\t\t\treturn nil\n\t\t\t\t}\n\t\t\t}\n\t\t}\n\t\tset = append(set, Range{uint32(start), uint32(end), infinite})\n\t}\n\treturn set\n}", "func parseMetricValue(value string) []*metricSample {\n\tsamples := make([]*metricSample, 0)\n\n\t// Checks for unit-less numerical value\n\tif n, err := parseNumber(value); err == nil {\n\t\tf := big.NewFloat(n)\n\n\t\tif f.IsInt() {\n\t\t\ti, _ := f.Int64()\n\t\t\tsamples = append(samples, &metricSample{Value: int(i)})\n\t\t} else {\n\t\t\tsamples = append(samples, &metricSample{Value: n})\n\t\t}\n\n\t\treturn samples\n\t}\n\n\tmatches := numWithUnitRegex.FindAllStringSubmatch(value, -1)\n\n\t// Checks if value doesn't match a numerical value w/ unit\n\tif len(matches) == 0 {\n\t\tsamples = append(samples, &metricSample{Value: value})\n\n\t\treturn samples\n\t}\n\n\tpv, _ := parseNumber(matches[0][1])\n\tpUnit := matches[0][3]\n\n\tsamples = append(samples, &metricSample{Value: pv, Unit: &pUnit})\n\n\tnv, nUnit := normalizeStorageSize(pv, pUnit)\n\n\tif nUnit != pUnit {\n\t\tsamples = append(samples, &metricSample{Value: nv, Unit: &nUnit})\n\t}\n\n\treturn samples\n}", "func PercentFromString(percentStr string) (percent float64, err error) {\n\tarray := strings.Fields(percentStr)\n\tif len(array) < 1 {\n\t\treturn percent, fmt.Errorf(\"Failed to parse %s\", percentStr)\n\t}\n\n\tpercent, err = strconv.ParseFloat(array[0], 64)\n\n\treturn\n}", "func (m *SnmpMetricCfg) GetMultiStringTagFieldMap() ([]*MetricMultiMap, error) {\n\tvar retval []*MetricMultiMap\n\n\titems := strings.Split(m.FieldName, \",\")\n\n\tfor _, v := range items {\n\t\titcfg := strings.Split(v, \"|\")\n\t\t// checklength\n\t\tiType := itcfg[0] // T/F\n\t\tif iType != \"T\" && iType != \"F\" {\n\t\t\tstr := fmt.Sprintf(\"MultiString Parse Config error on Metric %s Type %s is not of type (T=Tag ) or (F=Field)\", m.ID, itcfg)\n\t\t\tlog.Warnf(str)\n\t\t\treturn nil, errors.New(str)\n\t\t}\n\t\t// Name\n\t\tiName := itcfg[1] // name\n\t\t// Default Conversions\n\t\tvar iConv string\n\t\tif iType == \"T\" {\n\t\t\tiConv = \"STR\"\n\t\t}\n\t\tif iType == \"F\" {\n\t\t\tiConv = \"INT\"\n\t\t}\n\t\tif len(itcfg) > 2 && iType == \"F\" {\n\t\t\tswitch itcfg[2] {\n\t\t\tcase \"STR\":\n\t\t\tcase \"BL\":\n\t\t\tcase \"INT\":\n\t\t\tcase \"FP\":\n\t\t\tdefault:\n\t\t\t\tstr := fmt.Sprintf(\"MultiString Parse Config error on Metric %s Conversion Type (%s) for FIELD should be of type STR|INT|FP|BL\", m.ID, itcfg[2])\n\t\t\t\tlog.Errorf(str)\n\t\t\t\treturn nil, errors.New(str)\n\t\t\t}\n\t\t\tiConv = itcfg[2]\n\t\t}\n\t\tif len(itcfg) > 2 && iType == \"T\" {\n\t\t\tswitch 
itcfg[2] {\n\t\t\tcase \"STR\":\n\t\t\tdefault:\n\t\t\t\tstr := fmt.Sprintf(\"MultiString Parse Config error on Metric %s Conversion Type (%s) for TAG should be of type STR\", m.ID, itcfg[2])\n\t\t\t\tlog.Errorf(str)\n\t\t\t\treturn nil, errors.New(str)\n\t\t\t}\n\t\t\tiConv = itcfg[2]\n\t\t}\n\t\tretval = append(retval, &MetricMultiMap{IName: iName, IType: iType, IConv: iConv})\n\t\t// Could be null on Tag type\n\t}\n\treturn retval, nil\n}", "func extractPriority(rule *Rule) {\n\tcomponents := strings.Split(rule.Filter, \",\")\n\trule.Priority = 32768 // Default rule priority.\n\tfor _, component := range components {\n\t\tkeyvalue := strings.SplitN(component, \"=\", 2)\n\t\tif len(keyvalue) == 2 {\n\t\t\tkey := keyvalue[0]\n\t\t\tvalue := keyvalue[1]\n\t\t\tswitch key {\n\t\t\tcase \"priority\":\n\t\t\t\tpriority, err := strconv.ParseInt(value, 10, 32)\n\t\t\t\tif err == nil {\n\t\t\t\t\trule.Priority = int(priority)\n\t\t\t\t} else {\n\t\t\t\t\tlogging.GetLogger().Errorf(\"Error while parsing priority of rule: %s\", err.Error())\n\t\t\t\t}\n\t\t\t}\n\t\t}\n\t}\n}", "func computePercentile(array []float64, targetPercentile int) float64 {\n\tif len(array) == 0 {\n\t\tpanic(\"Cannot compute the percentile of an empty array\")\n\t}\n\tsort.Float64s(array)\n\tif targetPercentile == 100 {\n\t\treturn array[len(array)-1]\n\t}\n\tindexAsFloat, fractional := math.Modf((float64(len(array)) - 1.0) * (0.01 * float64(targetPercentile)))\n\tindex := int(indexAsFloat)\n\tif len(array) == index+1 {\n\t\treturn array[index]\n\t}\n\treturn array[index]*(1-fractional) + array[index+1]*fractional\n}", "func parseInput(input string) []float64 {\n\tinput = strings.ReplaceAll(input, \"\\n\", \"\")\n\tinput = strings.ReplaceAll(input, \" \", \"\")\n\tinput = strings.ReplaceAll(input, \"\\t\", \"\")\n\targs := strings.Split(input, \",\")\n\n\tr := make([]float64, 0)\n\n\tfor _, arg := range args {\n\t\tif len(arg) > 0 {\n\t\t\tnum, err := strconv.ParseFloat(arg, 64)\n\t\t\tif err != nil {\n\t\t\t\tfmt.Printf(\"there was an error converting %s to a number: %s...\\n\", arg, err.Error())\n\t\t\t}\n\t\t\tr = append(r, num)\n\t\t}\n\t}\n\treturn r\n}", "func NewPercentParser(ptr *float64, def float64) PercentParser {\n\treturn PercentParser{\n\t\tFloat64: parsers.NewFloat64(ptr, def),\n\t}\n}", "func parseField(s string, min, max int) (field, error) {\n\ts = strings.TrimSpace(s)\n\tif s == \"\" {\n\t\treturn 0, errors.New(\"empty field\")\n\t}\n\n\tif s == \"*\" || s == \"?\" {\n\t\treturn star, nil\n\t}\n\n\tvar f field\n\t// parse single element or parse range (ex: '2' '1-5' '*/5' '1-30/2' )\n\t// determine start, end and interval. 
Construct bitmap by traversing from start-end with interval.\n\tfor _, part := range strings.Split(s, \",\") {\n\t\tvar (\n\t\t\terr error\n\t\t\tinterval = 1\n\t\t\tstartInterval, endInterval = min, max\n\t\t)\n\n\t\t// parse interval (ex: '*/5' '1-30/2') if exists\n\t\tif i := strings.IndexByte(part, '/'); i >= 0 {\n\t\t\tif r := part[:i]; r != \"*\" && r != \"?\" && strings.IndexByte(r, '-') < 0 {\n\t\t\t\treturn 0, fmt.Errorf(\"step given without range, expression %q\", s)\n\t\t\t}\n\n\t\t\tstep := part[i+1:]\n\t\t\tinterval, err = strconv.Atoi(step)\n\t\t\tif err != nil {\n\t\t\t\treturn 0, fmt.Errorf(\"failed parsing interval expression %q: %s\", step, err)\n\t\t\t}\n\t\t\tpart = part[:i]\n\t\t}\n\n\t\tstart, end := part, part\n\t\t// parse range if exist\n\t\tif i := strings.IndexByte(part, '-'); i >= 0 {\n\t\t\tstart, end = part[:i], part[i+1:]\n\t\t}\n\n\t\t// determine start & end, some cron format use '?' instead of '*'\n\t\tif start != \"*\" && start != \"?\" {\n\t\t\tstartInterval, err = strconv.Atoi(start)\n\t\t\tif err != nil {\n\t\t\t\treturn 0, fmt.Errorf(\"failed parsing expression %q: %s\", s, err)\n\t\t\t}\n\n\t\t\t// parse end interval if exists, else it will be same as start (single value)\n\t\t\tif end != \"\" {\n\t\t\t\tendInterval, err = strconv.Atoi(end)\n\t\t\t\tif err != nil {\n\t\t\t\t\treturn 0, fmt.Errorf(\"failed parsing expression %q: %s\", s, err)\n\t\t\t\t}\n\t\t\t}\n\t\t}\n\n\t\tif startInterval < min || endInterval > max || startInterval > endInterval {\n\t\t\treturn 0, fmt.Errorf(\"value out of range (%d - %d): %s\", min, max, part)\n\t\t}\n\n\t\t// at this point we get the start, end, interval. Construct bitmap that represents possible values\n\t\tfor i := startInterval; i <= endInterval; i += interval {\n\t\t\tf |= 1 << uint64(i)\n\t\t}\n\t}\n\n\treturn f, nil\n}", "func splitIntoFloats(items []string) ([]float64, error) {\n\n\tvar floatList []float64\n\tfor _, item := range items {\n\t\tval, err := strconv.ParseFloat(strings.TrimSpace(item), 64)\n\t\tif err != nil {\n\t\t\treturn nil, err\n\t\t}\n\t\tfloatList = append(floatList, val)\n\t}\n\treturn floatList, nil\n}", "func (v *Visibility) ParseVisibility(input []string) (tokensused int) {\n\tinputstring := strings.Join(input, \" \")\n\tmetric := regexp.MustCompile(`^(P|M)?(\\d{4})(\\s|$)((\\d{4})(NE|SE|NW|SW|N|E|S|W))?`)\n\t// In US and CA sector visibility reported in the remarks section. 
(as VIS NW-SE 1/2; VIS NE 2 1/2 etc)\n\timperial := regexp.MustCompile(`^(P|M)?(\\d{1,2}|\\d(\\s)?)?((\\d)/(\\d))?SM`)\n\n\tswitch {\n\tcase metric.MatchString(inputstring):\n\t\ttokensused = 1\n\t\tv.Distance.Unit = M\n\t\tmatches := metric.FindStringSubmatch(inputstring)\n\t\tv.BelowMin = matches[1] == \"M\"\n\t\tv.AboveMax = matches[1] == \"P\"\n\t\tv.Distance.Value, _ = strconv.Atoi(matches[2])\n\t\tif matches[4] != \"\" {\n\t\t\tv.LowerDistance.Value, _ = strconv.Atoi(matches[5])\n\t\t\tv.LowerDistance.Unit = M\n\t\t\tv.LowerDirection = matches[6]\n\t\t\ttokensused++\n\t\t}\n\tcase imperial.MatchString(inputstring):\n\t\ttokensused = 1\n\t\tmatches := imperial.FindStringSubmatch(inputstring)\n\t\tv.BelowMin = matches[1] == \"M\"\n\t\tv.AboveMax = matches[1] == \"P\"\n\t\tif matches[2] != \"\" {\n\t\t\tv.Distance.Value, _ = strconv.Atoi(strings.TrimSpace(matches[2]))\n\t\t}\n\t\tif matches[5] != \"\" && matches[6] != \"\" {\n\t\t\tnumerator, _ := strconv.Atoi(matches[5])\n\t\t\tdenominator, _ := strconv.Atoi(matches[6])\n\t\t\tif denominator != 0 {\n\t\t\t\tv.Distance.FractionValue += float64(numerator) / float64(denominator)\n\t\t\t}\n\t\t}\n\t\tv.Distance.Unit = SM\n\t\tif matches[3] == \" \" {\n\t\t\ttokensused++\n\t\t}\n\tdefault:\n\t\treturn\n\t}\n\treturn\n}", "func TestStat(t *testing.T) {\n\tcases := []Case{\n\t\t// metrics 2.0 cases with equals\n\t\t{\"foo.bar.unit=yes.baz\", \"prefix.\", \"\", \"ignored\", \"foo.bar.unit=yes.baz.stat=max_90\"},\n\t\t{\"foo.bar.unit=yes\", \"prefix.\", \"our=prefix.\", \"ignored\", \"our=prefix.foo.bar.unit=yes.stat=max_90\"},\n\t\t{\"unit=yes.foo.bar\", \"prefix.\", \"\", \"ignored\", \"unit=yes.foo.bar.stat=max_90\"},\n\t\t{\"mtype=count.foo.unit=ok.bar\", \"prefix.\", \"\", \"ignored\", \"mtype=count.foo.unit=ok.bar.stat=max_90\"},\n\t\t// metrics 2.0 cases without equals\n\t\t{\"foo.bar.unit_is_yes.baz\", \"prefix.\", \"ignored\", \"\", \"foo.bar.unit_is_yes.baz.stat_is_max_90\"},\n\t\t{\"foo.bar.unit_is_yes\", \"prefix.\", \"ignored\", \"our_is_prefix.\", \"our_is_prefix.foo.bar.unit_is_yes.stat_is_max_90\"},\n\t\t{\"unit_is_yes.foo.bar\", \"prefix.\", \"ignored\", \"\", \"unit_is_yes.foo.bar.stat_is_max_90\"},\n\t\t{\"mtype_is_count.foo.unit_is_ok.bar\", \"prefix.\", \"ignored\", \"\", \"mtype_is_count.foo.unit_is_ok.bar.stat_is_max_90\"},\n\t}\n\tfor _, c := range cases {\n\t\tassert.Equal(t, Max(c.in, c.p1, c.p2, c.p2ne, \"90\", \"\"), c.out)\n\t}\n\t// same but without percentile\n\tfor i, c := range cases {\n\t\tcases[i].out = strings.Replace(c.out, \"max_90\", \"max\", 1)\n\t}\n\tfor _, c := range cases {\n\t\tassert.Equal(t, Max(c.in, c.p1, c.p2, c.p2ne, \"\", \"\"), c.out)\n\t}\n}", "func parseMetricPathParameter(request *restful.Request) *dataselect.MetricQuery {\n\tmetricNamesParam := request.QueryParameter(\"metricNames\")\n\tvar metricNames []string\n\tif metricNamesParam != \"\" {\n\t\tmetricNames = strings.Split(metricNamesParam, \",\")\n\t} else {\n\t\tmetricNames = nil\n\t}\n\taggregationsParam := request.QueryParameter(\"aggregations\")\n\tvar rawAggregations []string\n\tif aggregationsParam != \"\" {\n\t\trawAggregations = strings.Split(aggregationsParam, \",\")\n\t} else {\n\t\trawAggregations = nil\n\t}\n\taggregationModes := metricapi.AggregationModes{}\n\tfor _, e := range rawAggregations {\n\t\taggregationModes = append(aggregationModes, metricapi.AggregationMode(e))\n\t}\n\treturn dataselect.NewMetricQuery(metricNames, aggregationModes)\n\n}", "func ParseSortParameter(sortval string) ([]string, []string) 
{\n\n\tsortNames := make([]string, 0)\n\tsortOrder := make([]string, 0)\n\n\tif len(sortval) == 0 {\n\t\treturn sortNames, sortOrder\n\t}\n\n\tvalues := strings.Split(sortval, \",\")\n\n\tfor _, v := range values {\n\n\t\tif len(v) == 0 {\n\t\t\tcontinue\n\t\t}\n\n\t\tvar name string\n\t\tvar order string\n\n\t\tif v[0] == '-' {\n\t\t\tname = v[1:]\n\t\t\torder = \"desc\"\n\t\t} else {\n\t\t\tname = v\n\t\t\torder = \"asc\"\n\t\t}\n\n\t\tsortNames = append(sortNames, name)\n\t\tsortOrder = append(sortOrder, order)\n\t}\n\n\treturn sortNames, sortOrder\n}", "func (m MigProfile) Parse() (int, int, int, error) {\n\terr := m.AssertValid()\n\tif err != nil {\n\t\treturn -1, -1, -1, fmt.Errorf(\"invalid MigProfile: %v\", err)\n\t}\n\n\tvar c, g, gb int\n\tn, _ := fmt.Sscanf(string(m), \"%dc.%dg.%dgb\", &c, &g, &gb)\n\tif n == 3 {\n\t\treturn c, g, gb, nil\n\t}\n\n\tn, _ = fmt.Sscanf(string(m), \"%dg.%dgb\", &g, &gb)\n\tif n == 2 {\n\t\treturn g, g, gb, nil\n\t}\n\n\treturn -1, -1, -1, fmt.Errorf(\"parsed wrong number of values, expected 2 or 3\")\n}", "func ParseFeatureMap(str string) (FeatureMap, error) {\n\t// Create the feature map we will be returning.\n\tres := make(FeatureMap, len(defaultFeatureMap))\n\t// Set all features to their default status.\n\tfor feature, status := range defaultFeatureMap {\n\t\tres[feature] = status\n\t}\n\t// Split the provided string by \",\" in order to obtain all the \"key=value\" pairs.\n\tkvs := strings.Split(str, \",\")\n\t// Iterate over all the \"key=value\" pairs and set the status of the corresponding feature in the feature map.\n\tfor _, kv := range kvs {\n\t\t// Skip \"empty\" key/value pairs.\n\t\tif len(kv) == 0 {\n\t\t\tcontinue\n\t\t}\n\t\t// Split the key/value pair by \"=\".\n\t\tp := strings.Split(kv, \"=\")\n\t\tif len(p) != 2 {\n\t\t\treturn nil, fmt.Errorf(\"invalid key/value pair: %q\", kv)\n\t\t}\n\t\t// Grab the key and its value.\n\t\tk, v := p[0], p[1]\n\t\t// Make sure the feature corresponding to the key exists.\n\t\tif _, exists := defaultFeatureMap[Feature(k)]; !exists {\n\t\t\treturn nil, fmt.Errorf(\"invalid feature key: %q\", k)\n\t\t}\n\t\t// Attempt to parse the value as a boolean.\n\t\tb, err := strconv.ParseBool(v)\n\t\tif err != nil {\n\t\t\treturn nil, fmt.Errorf(\"failed to parse %q as a boolean value\", v)\n\t\t}\n\t\t// Set the feature's status in the feature map.\n\t\tres[Feature(k)] = b\n\t}\n\t// Return the feature map.\n\treturn res, nil\n}", "func parseFloats(str string) ([]float64, error) {\n\tvar (\n\t\tfields = strings.Fields(str)\n\t\tfloat_parameters = make([]float64, len(fields))\n\t\terr error\n\t)\n\tfor i, v := range fields {\n\t\tfloat_parameters[i], err = strconv.ParseFloat(v, 64)\n\t\tif err != nil {\n\t\t\treturn nil, err\n\t\t}\n\t}\n\treturn float_parameters, nil\n}", "func (f *CronField) parsePossibleValues(s string) (vals []int, retErr error) {\n\tdefer func() {\n\t\tif retErr == nil && len(vals) == 0 {\n\t\t\tretErr = fmt.Errorf(\"No possible values\")\n\t\t}\n\t}()\n\n\tcommaParts := strings.Split(s, \",\")\n\tfor _, part := range commaParts {\n\t\tif ma := rangRe.FindStringSubmatch(part); len(ma) == 4 || len(ma) == 6 {\n\t\t\tif ma[1] == \"*\" { // \"*\" means first-last\n\t\t\t\tma[2] = strconv.Itoa(f.Min)\n\t\t\t\tma[3] = strconv.Itoa(f.Max)\n\t\t\t}\n\t\t\tstep := \"\"\n\t\t\tif len(ma) == 6 {\n\t\t\t\tstep = ma[5]\n\t\t\t}\n\n\t\t\tv, err := f.parseRange(ma[2], ma[3], step)\n\t\t\tif err != nil {\n\t\t\t\tretErr = err\n\t\t\t\treturn\n\t\t\t}\n\t\t\tvals = append(vals, 
v...)\n\t\t\tcontinue\n\t\t}\n\n\t\t// Single digit without range\n\t\tresult, err := f.parseNumeric(part)\n\t\tif err != nil {\n\t\t\tretErr = err\n\t\t\treturn\n\t\t}\n\n\t\tvals = append(vals, result)\n\t}\n\n\treturn\n}", "func PercentilesPlot() {\n\tfmt.Println(\"Percentiles Plot (percentiles.svg)\")\n\n\tbench := hrtime.NewBenchmark(N)\n\tfor bench.Next() {\n\t\ttime.Sleep(5000 * time.Nanosecond)\n\t}\n\n\tseconds := plot.DurationToSeconds(bench.Laps())\n\n\tp := plot.New()\n\tp.Margin = plot.R(5, 0, 0, 5)\n\tp.X = plot.NewPercentilesAxis()\n\tp.AddGroup(\n\t\tplot.NewGrid(),\n\t\tplot.NewGizmo(),\n\t\tplot.NewPercentiles(\"\", seconds),\n\t\tplot.NewTickLabels(),\n\t)\n\n\tsvg := plot.NewSVG(800, 300)\n\tp.Draw(svg)\n\tioutil.WriteFile(\"percentiles.svg\", svg.Bytes(), 0755)\n}", "func NewGrouping(arg string) *Grouping {\n\tsplit := strings.Split(arg, \",\")\n\tvar grouping *Grouping\n\tif len(split) > 0 {\n\t\tgrouping = &Grouping{\n\t\t\tName: split[0],\n\t\t}\n\t}\n\tif len(split) >= 2 {\n\t\tindex, _ := strconv.ParseInt(split[1], 0, 64)\n\t\tgrouping.Index = int(index)\n\t}\n\tif len(split) >= 3 {\n\t\tduration, _ := time.ParseDuration(split[2])\n\t\tgrouping.Max = duration\n\t}\n\treturn grouping\n}", "func parseLimitString(limitString string) (limit ipamTypes.Limits, err error) {\n\tintSlice := make([]int, 3)\n\tstringSlice := strings.Split(strings.ReplaceAll(limitString, \" \", \"\"), \",\")\n\tif len(stringSlice) != 3 {\n\t\treturn limit, fmt.Errorf(\"invalid limit value\")\n\t}\n\tfor i, s := range stringSlice {\n\t\tintLimit, err := strconv.Atoi(s)\n\t\tif err != nil {\n\t\t\treturn limit, err\n\t\t}\n\t\tintSlice[i] = intLimit\n\t}\n\treturn ipamTypes.Limits{Adapters: intSlice[0], IPv4: intSlice[1], IPv6: intSlice[2]}, nil\n}", "func ParseFloat(s string, base int, prec uint, mode big.RoundingMode,) (*big.Float, int, error)", "func (m *Metric) Parse(output string) {\n\tmessage := \"\"\n\tparts := strings.SplitN(output, \"|\", 4)\n\ttags := make([]string, 0)\n\tif len(parts) > 2 {\n\t\tmessage = strings.TrimSpace(parts[2])\n\t\ttags = strings.SplitAfter(parts[3], \" \")\n\t}\n\tvalue, _ := strconv.ParseFloat(strings.TrimSpace(parts[1]), 64)\n\tm.Type = strings.TrimSpace(parts[0])\n\tm.Value = value\n\tm.Message = message\n\tm.Tags = tags\n}", "func NewSummaryValueAtPercentileSlice() SummaryValueAtPercentileSlice {\n\torig := []*otlpmetrics.SummaryDataPoint_ValueAtPercentile(nil)\n\treturn SummaryValueAtPercentileSlice{&orig}\n}", "func (m *redisMetric) parseMetric(strVal string, t *timeBundle) (pdata.Metric, error) {\n\tvar err error\n\tpdm := pdata.NewMetric()\n\tswitch m.pdType {\n\tcase pdata.MetricDataTypeIntSum:\n\t\tvar val int64\n\t\tval, err = strToInt64Point(strVal)\n\t\tif err != nil {\n\t\t\treturn pdm, err\n\t\t}\n\t\tinitIntMetric(m, val, t, pdm)\n\tcase pdata.MetricDataTypeIntGauge:\n\t\tvar val int64\n\t\tval, err = strToInt64Point(strVal)\n\t\tif err != nil {\n\t\t\treturn pdm, err\n\t\t}\n\t\tinitIntMetric(m, val, t, pdm)\n\tcase pdata.MetricDataTypeDoubleSum:\n\t\tvar val float64\n\t\tval, err = strToDoublePoint(strVal)\n\t\tif err != nil {\n\t\t\treturn pdm, err\n\t\t}\n\t\tinitDoubleMetric(m, val, t, pdm)\n\tcase pdata.MetricDataTypeDoubleGauge:\n\t\tvar val float64\n\t\tval, err = strToDoublePoint(strVal)\n\t\tif err != nil {\n\t\t\treturn pdm, err\n\t\t}\n\t\tinitDoubleMetric(m, val, t, pdm)\n\t}\n\treturn pdm, nil\n}", "func parseMetricPathParameter(c *gin.Context) *dataselect.MetricQuery {\n\tmetricNamesParam := c.Query(\"metricNames\")\n\tvar 
metricNames []string\n\tif metricNamesParam != \"\" {\n\t\tmetricNames = strings.Split(metricNamesParam, \",\")\n\t} else {\n\t\tmetricNames = nil\n\t}\n\taggregationsParam := c.Query(\"aggregations\")\n\tvar rawAggregations []string\n\tif aggregationsParam != \"\" {\n\t\trawAggregations = strings.Split(aggregationsParam, \",\")\n\t} else {\n\t\trawAggregations = nil\n\t}\n\taggregationModes := metricapi.AggregationModes{}\n\tfor _, e := range rawAggregations {\n\t\taggregationModes = append(aggregationModes, metricapi.AggregationMode(e))\n\t}\n\treturn dataselect.NewMetricQuery(metricNames, aggregationModes)\n\n}", "func ParseKubeletMetrics(metricsBlob string) ([]KubeletMetric, error) {\n\tmetric := make([]KubeletMetric, 0)\n\tfor _, line := range strings.Split(metricsBlob, \"\\n\") {\n\n\t\t// A kubelet stats line starts with the KubeletSubsystem marker, followed by a stat name, followed by fields\n\t\t// that vary by stat described on a case by case basis below.\n\t\t// TODO: String parsing is such a hack, but getting our rest client/proxy to cooperate with prometheus\n\t\t// client is weird, we should eventually invest some time in doing this the right way.\n\t\tif !strings.HasPrefix(line, fmt.Sprintf(\"%v_\", metrics.KubeletSubsystem)) {\n\t\t\tcontinue\n\t\t}\n\t\tkeyVal := strings.Split(line, \" \")\n\t\tif len(keyVal) != 2 {\n\t\t\treturn nil, fmt.Errorf(\"Error parsing metric %q\", line)\n\t\t}\n\t\tkeyElems := strings.Split(line, \"\\\"\")\n\n\t\tlatency, err := strconv.ParseFloat(keyVal[1], 64)\n\t\tif err != nil {\n\t\t\tcontinue\n\t\t}\n\n\t\tmethodLine := strings.Split(keyElems[0], \"{\")\n\t\tmethodList := strings.Split(methodLine[0], \"_\")\n\t\tif len(methodLine) != 2 || len(methodList) == 1 {\n\t\t\tcontinue\n\t\t}\n\t\tmethod := strings.Join(methodList[1:], \"_\")\n\n\t\tvar operation, rawQuantile string\n\t\tvar quantile float64\n\n\t\tswitch method {\n\t\tcase metrics.PodWorkerLatencyKey:\n\t\t\t// eg: kubelet_pod_worker_latency_microseconds{operation_type=\"create\",pod_name=\"foopause3_default\",quantile=\"0.99\"} 1344\n\t\t\tif len(keyElems) != 7 {\n\t\t\t\tcontinue\n\t\t\t}\n\t\t\toperation = keyElems[1]\n\t\t\trawQuantile = keyElems[5]\n\t\t\tbreak\n\n\t\tcase metrics.PodWorkerStartLatencyKey:\n\t\t\t// eg: kubelet_pod_worker_start_latency_microseconds{quantile=\"0.99\"} 12\n\t\t\tfallthrough\n\n\t\tcase metrics.SyncPodsLatencyKey:\n\t\t\t// eg: kubelet_sync_pods_latency_microseconds{quantile=\"0.5\"} 9949\n\t\t\tfallthrough\n\n\t\tcase metrics.PodStartLatencyKey:\n\t\t\t// eg: kubelet_pod_start_latency_microseconds{quantile=\"0.5\"} 123\n\t\t\tfallthrough\n\n\t\tcase metrics.PodStatusLatencyKey:\n\t\t\t// eg: kubelet_generate_pod_status_latency_microseconds{quantile=\"0.5\"} 12715\n\t\t\tif len(keyElems) != 3 {\n\t\t\t\tcontinue\n\t\t\t}\n\t\t\toperation = \"\"\n\t\t\trawQuantile = keyElems[1]\n\t\t\tbreak\n\n\t\tcase metrics.ContainerManagerOperationsKey:\n\t\t\t// eg: kubelet_container_manager_latency_microseconds{operation_type=\"SyncPod\",quantile=\"0.5\"} 6705\n\t\t\tfallthrough\n\n\t\tcase metrics.DockerOperationsKey:\n\t\t\t// eg: kubelet_docker_operations_latency_microseconds{operation_type=\"info\",quantile=\"0.5\"} 31590\n\t\t\tif len(keyElems) != 5 {\n\t\t\t\tcontinue\n\t\t\t}\n\t\t\toperation = keyElems[1]\n\t\t\trawQuantile = keyElems[3]\n\t\t\tbreak\n\n\t\tcase metrics.DockerErrorsKey:\n\t\t\tLogf(\"ERROR %v\", line)\n\n\t\tdefault:\n\t\t\tcontinue\n\t\t}\n\t\tquantile, err = strconv.ParseFloat(rawQuantile, 64)\n\t\tif err != nil 
{\n\t\t\tcontinue\n\t\t}\n\t\tmetric = append(metric, KubeletMetric{operation, method, quantile, time.Duration(int64(latency)) * time.Microsecond})\n\t}\n\treturn metric, nil\n}", "func BenchmarkParser(b *testing.B) {\n\texpr := \"n%10==1&&n%100!=11?0:n%10>=2&&n%10<=4&&(n%100<10||n%100>=20)?1:2\"\n\tfor i := 0; i < b.N; i++ {\n\t\t_, err := parse(expr)\n\t\tif err != nil {\n\t\t\tb.Fatal(err)\n\t\t}\n\t}\n}", "func FloatParse(z *big.Float, s string, base int) (*big.Float, int, error)", "func SplitPercent(p int) SplitOption {\n\treturn splitOption(func(opts *options) error {\n\t\tif min, max := 0, 100; p <= min || p >= max {\n\t\t\treturn fmt.Errorf(\"invalid split percentage %d, must be in range %d < p < %d\", p, min, max)\n\t\t}\n\t\topts.splitPercent = p\n\t\treturn nil\n\t})\n}", "func ParseAggregator(aggregator string) (AggregatorFunc, error) {\n\tswitch aggregator {\n\tcase \"avg\":\n\t\treturn Average, nil\n\tcase \"min\":\n\t\treturn Minimum, nil\n\tcase \"max\":\n\t\treturn Maximum, nil\n\tcase \"sum\":\n\t\treturn Sum, nil\n\tdefault:\n\t\treturn nil, fmt.Errorf(\"aggregator function: %s is unknown\", aggregator)\n\t}\n}", "func ProcessMetric(metric *Metric, flushDuration time.Duration, quantiles []int, logger Logger) {\n\tflushInterval := flushDuration / time.Second\n\n\tsort.Sort(metric.AllValues)\n\tswitch metric.MetricType {\n\tcase MetricTypeCounter:\n\t\tmetric.ValuesPerSecond = metric.LastValue / float64(flushInterval)\n\tcase MetricTypeGauge:\n\t\tmetric.MedianValue = metric.AllValues.Median()\n\t\tmetric.MeanValue = metric.AllValues.Mean()\n\tcase MetricTypeSet:\n\t\tmetric.LastValue = float64(metric.AllValues.UniqueCount())\n\tcase MetricTypeTimer:\n\t\tmetric.MinValue, metric.MaxValue, _ = metric.AllValues.Minmax()\n\t\tmetric.MedianValue = metric.AllValues.Median()\n\t\tmetric.MeanValue = metric.AllValues.Mean()\n\t\tmetric.ValuesPerSecond = metric.TotalHits / float64(flushInterval)\n\n\t\tmetric.Quantiles = make([]MetricQuantile, 0)\n\t\tfor _, q := range quantiles {\n\t\t\tpercentile := float64(q) / float64(100)\n\t\t\tquantile := new(MetricQuantile)\n\t\t\tquantile.Quantile = q\n\n\t\t\t// Make calculations based on the desired quantile.\n\t\t\tquantile.Boundary = metric.AllValues.Quantile(percentile)\n\t\t\tfor _, value := range metric.AllValues {\n\t\t\t\tif value > quantile.Boundary {\n\t\t\t\t\tbreak\n\t\t\t\t}\n\t\t\t\tquantile.AllValues = append(quantile.AllValues, value)\n\t\t\t}\n\t\t\t_, quantile.Max, _ = quantile.AllValues.Minmax()\n\t\t\tquantile.Mean = quantile.AllValues.Mean()\n\t\t\tquantile.Median = quantile.AllValues.Median()\n\t\t\tquantile.Sum = quantile.AllValues.Sum()\n\t\t\tmetric.Quantiles = append(metric.Quantiles, *quantile)\n\t\t}\n\t}\n}", "func BenchmarkParse(b *testing.B) {\n\tfor i := 0; i < b.N; i++ {\n\t\tParse(i)\n\t}\n}", "func quantile(vals []float64, q float64) float64 {\n\tlength := len(vals)\n\tif length == 0 {\n\t\treturn math.NaN()\n\t}\n\tindex := int(q * float64(length-1))\n\treturn vals[index]\n}", "func parseRange(s string, size int64) ([]httpRange, error) {\n\tif s == \"\" {\n\t\treturn nil, nil // header not present\n\t}\n\tconst b = \"bytes=\"\n\tif !strings.HasPrefix(s, b) {\n\t\treturn nil, errors.New(\"invalid range\")\n\t}\n\tvar ranges []httpRange\n\tfor _, ra := range strings.Split(s[len(b):], \",\") {\n\t\tra = strings.TrimSpace(ra)\n\t\tif ra == \"\" {\n\t\t\tcontinue\n\t\t}\n\t\ti := strings.Index(ra, \"-\")\n\t\tif i < 0 {\n\t\t\treturn nil, errors.New(\"invalid range\")\n\t\t}\n\t\tstart, end := 
strings.TrimSpace(ra[:i]), strings.TrimSpace(ra[i+1:])\n\t\tvar r httpRange\n\t\tif start == \"\" {\n\t\t\t// If no start is specified, end specifies the\n\t\t\t// range start relative to the end of the file.\n\t\t\ti, err := strconv.ParseInt(end, 10, 64)\n\t\t\tif err != nil {\n\t\t\t\treturn nil, errors.New(\"invalid range\")\n\t\t\t}\n\t\t\tif i > size {\n\t\t\t\ti = size\n\t\t\t}\n\t\t\tr.start = size - i\n\t\t\tr.length = size - r.start\n\t\t} else {\n\t\t\ti, err := strconv.ParseInt(start, 10, 64)\n\t\t\tif err != nil || i > size || i < 0 {\n\t\t\t\treturn nil, errors.New(\"invalid range\")\n\t\t\t}\n\t\t\tr.start = i\n\t\t\tif end == \"\" {\n\t\t\t\t// If no end is specified, range extends to end of the file.\n\t\t\t\tr.length = size - r.start\n\t\t\t} else {\n\t\t\t\ti, err := strconv.ParseInt(end, 10, 64)\n\t\t\t\tif err != nil || r.start > i {\n\t\t\t\t\treturn nil, errors.New(\"invalid range\")\n\t\t\t\t}\n\t\t\t\tif i >= size {\n\t\t\t\t\ti = size - 1\n\t\t\t\t}\n\t\t\t\tr.length = i - r.start + 1\n\t\t\t}\n\t\t}\n\t\tranges = append(ranges, r)\n\t}\n\treturn ranges, nil\n}", "func poolMetricParser(stdout []byte) *poolfields {\n\tif len(string(stdout)) == 0 {\n\t\tpool := poolfields{\n\t\t\tname: os.Getenv(\"HOSTNAME\"),\n\t\t\tzpoolLastSyncTime: zpool.ZpoolLastSyncCommandErrorOrUnknownUnset,\n\t\t\tzpoolLastSyncTimeCommandError: zpool.ZpoolLastSyncCommandErrorOrUnknownUnset,\n\t\t\tzpoolStateUnknown: zpool.ZpoolLastSyncCommandErrorOrUnknownSet,\n\t\t}\n\t\treturn &pool\n\t}\n\n\tpools := strings.Split(string(stdout), \"\\n\")\n\tf := strings.Fields(pools[0])\n\tif len(f) < 2 {\n\t\treturn nil\n\t}\n\n\tpool := poolfields{\n\t\tname: f[0],\n\t\tzpoolLastSyncTime: poolSyncTimeParseFloat64(f[2]),\n\t\tzpoolStateUnknown: zpool.ZpoolLastSyncCommandErrorOrUnknownUnset,\n\t\tzpoolLastSyncTimeCommandError: zpool.ZpoolLastSyncCommandErrorOrUnknownUnset,\n\t}\n\n\treturn &pool\n}", "func parseFloats(in []string) ([]float64, error) {\n\tout := make([]float64, len(in))\n\tfor i := range in {\n\t\tval, err := strconv.ParseFloat(in[i], 64)\n\t\tif err != nil {\n\t\t\treturn nil, err\n\t\t}\n\t\tout[i] = val\n\t}\n\treturn out, nil\n}", "func ParseMetric(rawMetric string) Metric {\n\tmetricNameAndRest := strings.SplitN(rawMetric, \":\", 2)\n\tname := metricNameAndRest[0]\n\tvalueAndTags := strings.SplitN(metricNameAndRest[1], \"#\", 2)\n\tvalue := valueAndTags[0]\n\ttags := \"\"\n\n\tif len(valueAndTags) > 1 {\n\t\ttags = valueAndTags[1]\n\t\ttags = strings.Replace(tags, \",\", \" \", -1)\n\t}\n\n\treturn Metric{Name: name, Value: value, Tags: tags}\n}", "func parse(s string, unit map[string]float64) (int64, error) {\n\tvar size int64\n\tvar ok bool\n\tfor {\n\t\ts = strings.TrimSpace(s)\n\t\tm := sizeRE.FindStringSubmatch(s)\n\t\tif m == nil {\n\t\t\tbreak\n\t\t}\n\t\tv, err := strconv.ParseFloat(m[1], 64)\n\t\tif err != nil {\n\t\t\treturn 0, fmt.Errorf(\"sizeflag: invalid size %q\", m[0])\n\t\t\t// Should not be structurally possible, though.\n\t\t}\n\t\tif mul, ok := unit[strings.ToLower(m[2])]; ok {\n\t\t\tv *= mul\n\t\t} else {\n\t\t\treturn size, fmt.Errorf(\"sizeflag: invalid unit %q\", m[2])\n\t\t}\n\t\tsize += int64(v)\n\t\ts = s[len(m[0]):]\n\t\tok = true\n\t}\n\tif s = strings.TrimSpace(s); s != \"\" {\n\t\tv, err := strconv.ParseInt(s, 10, 64)\n\t\tif err != nil {\n\t\t\treturn 0, fmt.Errorf(\"sizeflag: invalid size %q\", s)\n\t\t}\n\t\tsize += v\n\t} else if !ok {\n\t\treturn 0, fmt.Errorf(\"sizeflag: invalid size %q\", s)\n\t}\n\treturn size, nil\n}", "func decodeSortArgs(s 
string) []string {\n\t// FIXME parse $natural key\n\treturnValue := []string{}\n\tglob := new(map[string]interface{})\n\tif len(s) != 0 {\n\t\terr := json.Unmarshal([]byte(s), glob)\n\t\tif err != nil {\n\t\t\treturn []string{\"\"}\n\t\t}\n\t\tfor k, v := range *glob {\n\t\t\ti := v.(float64)\n\t\t\tif int(i) < 0 {\n\t\t\t\tk := \"-\" + k\n\t\t\t\treturnValue = append(returnValue, k)\n\t\t\t} else {\n\t\t\t\treturnValue = append(returnValue, k)\n\t\t\t}\n\t\t}\n\t}\n\treturn returnValue\n}", "func ProgressStringToValue(progressPercentage string) int64 {\n\tvalue := int64(0)\n\tr := regexp.MustCompile(`(\\d+)\\%`)\n\tmatched := r.FindStringSubmatch(progressPercentage)\n\tif len(matched) == 2 {\n\t\tif v, err := strconv.ParseInt(matched[1], 10, 64); err == nil {\n\t\t\tvalue = v\n\t\t}\n\t}\n\treturn value\n}", "func (ms SummaryDataPoint) ValueAtPercentiles() SummaryValueAtPercentileSlice {\n\treturn newSummaryValueAtPercentileSlice(&(*ms.orig).PercentileValues)\n}", "func parseRange(s string, size int64) ([]httpRange, error) {\n if s == \"\" {\n return nil, nil // header not present\n }\n const b = \"bytes=\"\n if !strings.HasPrefix(s, b) {\n return nil, errors.New(\"invalid range\")\n }\n var ranges []httpRange\n for _, ra := range strings.Split(s[len(b):], \",\") {\n ra = strings.TrimSpace(ra)\n if ra == \"\" {\n continue\n }\n i := strings.Index(ra, \"-\")\n if i < 0 {\n return nil, errors.New(\"invalid range\")\n }\n start, end := strings.TrimSpace(ra[:i]), strings.TrimSpace(ra[i+1:])\n var r httpRange\n if start == \"\" {\n // If no start is specified, end specifies the\n // range start relative to the end of the file.\n i, err := strconv.ParseInt(end, 10, 64)\n if err != nil {\n return nil, errors.New(\"invalid range\")\n }\n if i > size {\n i = size\n }\n r.start = size - i\n r.length = size - r.start\n } else {\n i, err := strconv.ParseInt(start, 10, 64)\n if err != nil || i > size || i < 0 {\n return nil, errors.New(\"invalid range\")\n }\n r.start = i\n if end == \"\" {\n // If no end is specified, range extends to end of the file.\n r.length = size - r.start\n } else {\n i, err := strconv.ParseInt(end, 10, 64)\n if err != nil || r.start > i {\n return nil, errors.New(\"invalid range\")\n }\n if i >= size {\n i = size - 1\n }\n r.length = i - r.start + 1\n }\n }\n ranges = append(ranges, r)\n }\n return ranges, nil\n}", "func parsePrimeListCSV(primeChan chan int) {\n\tdefer close(primeChan)\n\t//open file logic\n\topenFile, err := os.Open(\"list/list.prime\")\n\tcheckError(\"Failed to open prime list file. 
\", err)\n\tdefer openFile.Close()\n\n\treader := csv.NewReader(bufio.NewReader(openFile))\n\tfor {\n\t\tstringSlice, error := reader.Read()\n\t\tif error == io.EOF {\n\t\t\tbreak\n\t\t}\n\t\tfor i := 0; i < len(stringSlice); i++ {\n\t\t\tprime, _ := strconv.Atoi(stringSlice[i])\n\t\t\tprimeChan <- prime\n\t\t}\n\t}\n}", "func promGroupTags(args []parse.Node) (parse.Tags, error) {\n\ttags := make(parse.Tags)\n\tcsvTags := strings.Split(args[1].(*parse.StringNode).Text, \",\")\n\tfor _, k := range csvTags {\n\t\ttags[k] = struct{}{}\n\t}\n\treturn tags, nil\n}", "func QuartileRange(values []float64) (float64, float64) {\n\tsorted := sortFloat64s(values)\n\treturn percentile_(sorted, 25.0), percentile_(sorted, 75.0)\n}", "func ParseQueryString(values map[string][]string) *NgGrid {\n\tg := &NgGrid{}\n\tif value := values[QS_SORT_DIRECTION]; len(value) != 0 {\n\t\tg.SortDirection = value[0]\n\t}\n\tif value := values[QS_SORT_FIELD]; len(value) != 0 {\n\t\tg.SortField = value[0]\n\t}\n\tif value := values[QS_QUERY]; len(value) != 0 {\n\t\tg.Query = value[0]\n\t}\n\tif value := values[QS_PAGE_NUMBER]; len(value) != 0 {\n\t\tpn, err := strconv.Atoi(value[0])\n\t\tif err == nil {\n\t\t\tg.PageNumber = int64(pn)\n\t\t}\n\t}\n\tif value := values[QS_PAGE_SIZE]; len(value) != 0 {\n\t\tpn, err := strconv.Atoi(value[0])\n\t\tif err == nil {\n\t\t\tg.PageSize = int64(pn)\n\t\t}\n\t}\n\n\tif g.PageNumber < 1 {\n\t\tg.PageNumber = 1\n\t}\n\n\treturn g\n}", "func parseFilterLength(length interface{}) uint {\n\tvar (\n\t\tret uint\n\t\tdefaultLength = 8*1024*1024*500 - 1 // 500MB\n\t)\n\tswitch length.(type) {\n\tcase int:\n\t\tif (length.(int)) <= 0 {\n\t\t\tret = uint(defaultLength)\n\t\t} else {\n\t\t\tret = uint(length.(int))\n\t\t}\n\tcase string:\n\t\tstr := length.(string)\n\t\tstrlen := len(str)\n\t\tneg := str[strlen-2:]\n\t\tstr = str[:strlen-2]\n\t\tif n, err := strconv.Atoi(str); err != nil {\n\t\t\tret = uint(defaultLength)\n\t\t} else {\n\t\t\tswitch neg {\n\t\t\tcase \"kb\":\n\t\t\t\tret = uint(8 * 1024 * n)\n\t\t\tcase \"KB\":\n\t\t\t\tret = uint(8 * 1024 * n)\n\t\t\tcase \"mb\":\n\t\t\t\tret = uint(8 * 1024 * 1024 * n)\n\t\t\tcase \"MB\":\n\t\t\t\tret = uint(8 * 1024 * 1024 * n)\n\t\t\tcase \"gb\":\n\t\t\t\tret = uint(8 * 1024 * 1024 * 1024 * n)\n\t\t\tcase \"GB\":\n\t\t\t\tret = uint(8 * 1024 * 1024 * 1024 * n)\n\t\t\tdefault:\n\t\t\t\tret = uint(defaultLength)\n\t\t\t}\n\t\t}\n\tdefault:\n\t\tret = uint(defaultLength)\n\t}\n\treturn ret\n}", "func (sh *StatsHandler) parseStatsFilter(vals url.Values) error {\n\tvar err error\n\n\terr = sh.ParseCommonFilter(vals)\n\tif err != nil {\n\t\treturn err\n\t}\n\n\tsh.filter.GroupBy, err = sh.readGroupBy(vals.Get(\"group_by\"))\n\tif err != nil {\n\t\treturn err\n\t}\n\n\tsh.filter.Limit, err = sh.readInt(vals.Get(\"limit\"), 1, statsAPIMaxLimit, statsAPIMaxLimit)\n\tif err != nil {\n\t\treturn gimlet.ErrorResponse{\n\t\t\tMessage: errors.Wrap(err, \"invalid limit\").Error(),\n\t\t\tStatusCode: http.StatusBadRequest,\n\t\t}\n\t}\n\t// Add 1 for pagination\n\tsh.filter.Limit++\n\n\tsh.filter.Tasks = sh.readStringList(vals[\"tasks\"])\n\tif len(sh.filter.Tasks) > statsAPIMaxNumTasks {\n\t\treturn gimlet.ErrorResponse{\n\t\t\tMessage: fmt.Sprintf(\"number of tasks given must not exceed %d\", statsAPIMaxNumTasks),\n\t\t\tStatusCode: http.StatusBadRequest,\n\t\t}\n\t}\n\n\tbeforeDate := vals.Get(\"before_date\")\n\tif beforeDate == \"\" {\n\t\treturn gimlet.ErrorResponse{\n\t\t\tMessage: \"missing 'before' date\",\n\t\t\tStatusCode: 
http.StatusBadRequest,\n\t\t}\n\t}\n\tsh.filter.BeforeDate, err = time.ParseInLocation(statsAPIDateFormat, beforeDate, time.UTC)\n\tif err != nil {\n\t\treturn gimlet.ErrorResponse{\n\t\t\tMessage: errors.Wrapf(err, \"parsing 'before' date in expected format (%s)\", statsAPIDateFormat).Error(),\n\t\t\tStatusCode: http.StatusBadRequest,\n\t\t}\n\t}\n\n\tafterDate := vals.Get(\"after_date\")\n\tif afterDate == \"\" {\n\t\treturn gimlet.ErrorResponse{\n\t\t\tMessage: \"missing 'after' date\",\n\t\t\tStatusCode: http.StatusBadRequest,\n\t\t}\n\t}\n\tsh.filter.AfterDate, err = time.ParseInLocation(statsAPIDateFormat, afterDate, time.UTC)\n\tif err != nil {\n\t\treturn gimlet.ErrorResponse{\n\t\t\tMessage: errors.Wrapf(err, \"parsing 'after' date in expected format (%s)\", statsAPIDateFormat).Error(),\n\t\t\tStatusCode: http.StatusBadRequest,\n\t\t}\n\t}\n\n\tsh.filter.Sort, err = sh.readSort(vals.Get(\"sort\"))\n\tif err != nil {\n\t\treturn gimlet.ErrorResponse{\n\t\t\tMessage: errors.Wrap(err, \"invalid sort\").Error(),\n\t\t\tStatusCode: http.StatusBadRequest,\n\t\t}\n\t}\n\n\treturn err\n}", "func parseFloatParameterRange(input string) (*FloatParameterRange, error) {\n\tvar start, end float64\n\n\tvar err error\n\tif strings.Index(input, \"-\") >= 0 {\n\t\tarray := strings.Split(input, \"-\")\n\t\tif len(array) != 2 {\n\t\t\terr = errors.New(\"Failed to split the string type\")\n\t\t\treturn nil, err\n\t\t}\n\n\t\tif start, err = strconv.ParseFloat(array[0], 64); err != nil {\n\t\t\t// negative values must be dropped here\n\t\t\treturn nil, err\n\t\t}\n\t\tif end, err = strconv.ParseFloat(array[1], 64); err != nil {\n\t\t\treturn nil, err\n\t\t}\n\t} else {\n\t\tif start, err = strconv.ParseFloat(input, 64); err != nil {\n\t\t\treturn nil, err\n\t\t}\n\t\tend = start\n\t}\n\n\tif start > end {\n\t\terr = errors.New(\"The 'max-config-values' attributes MUST be greater or equal to their counterpart in 'min-config-values' attributes.\")\n\t\treturn nil, err\n\t}\n\n\treturn &FloatParameterRange{\n\t\tstart: start,\n\t\tend: end,\n\t}, nil\n}", "func PERCENTILEOPT(wrapped interface{}, percentile interface{}) Expr {\n\tvar expr Expr\n\tswitch t := wrapped.(type) {\n\tcase *ptileOptimized:\n\t\texpr = &t.ptile\n\tdefault:\n\t\texpr = wrapped.(*ptile)\n\t}\n\treturn &ptileOptimized{Wrapped: expr, ptile: *expr.(*ptile), Percentile: exprFor(percentile)}\n}" ]
[ "0.5571344", "0.5541506", "0.5427508", "0.5255528", "0.5161027", "0.50960594", "0.50464016", "0.48835668", "0.4880122", "0.48634067", "0.4857322", "0.4846261", "0.4840661", "0.48108837", "0.47884768", "0.47824305", "0.4720432", "0.46898043", "0.46795207", "0.45831752", "0.4573173", "0.45584735", "0.45448777", "0.4516561", "0.45035195", "0.4502554", "0.44977725", "0.44900644", "0.44849324", "0.44790357", "0.44757488", "0.44755176", "0.44456723", "0.4444602", "0.44436535", "0.44417107", "0.44299978", "0.44211704", "0.44206437", "0.44099873", "0.4398919", "0.43912137", "0.43816286", "0.43604615", "0.4353458", "0.43432933", "0.43354303", "0.43324766", "0.43265277", "0.43264145", "0.43093264", "0.43030098", "0.428234", "0.42662737", "0.42563847", "0.42472768", "0.4244796", "0.42447448", "0.423722", "0.42241696", "0.4218331", "0.42007414", "0.4177539", "0.41748098", "0.41674188", "0.41648522", "0.41643748", "0.41605723", "0.4157685", "0.4143064", "0.414287", "0.4139854", "0.41375032", "0.41304237", "0.41295612", "0.40802366", "0.40695965", "0.40688834", "0.4044775", "0.40441582", "0.4037919", "0.4034258", "0.40286493", "0.40240544", "0.4016468", "0.4015031", "0.4013011", "0.4012393", "0.40072966", "0.400667", "0.39999172", "0.39943194", "0.39933994", "0.3984865", "0.39835224", "0.3966177", "0.39614648", "0.39493594", "0.39456102", "0.3938828" ]
0.745123
0
RoundToDigits rounds the input to digits number of digits after decimal point. Note this incorrectly rounds the last digit of negative numbers.
func RoundToDigits(v float64, digits int) float64 { p := math.Pow(10, float64(digits)) return math.Floor(v*p+0.5) / p }
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "func roundTo(n float64, decimals uint32) float64 {\n\treturn math.Round(n*float64(decimals)) / float64(decimals)\n}", "func Round(val float64, roundOn float64, places int) (newVal float64) {\n\tvar round float64\n\tpow := math.Pow(10, float64(places))\n\tdigit := pow * val\n\t_, div := math.Modf(digit)\n\tif div >= roundOn {\n\t\tround = math.Ceil(digit)\n\t} else {\n\t\tround = math.Floor(digit)\n\t}\n\tnewVal = round / pow\n\treturn\n}", "func Round(val float64, roundOn float64, places int) (newVal float64) {\n\tvar round float64\n\tpow := math.Pow(10, float64(places))\n\tdigit := pow * val\n\t_, div := math.Modf(digit)\n\tif div >= roundOn {\n\t\tround = math.Ceil(digit)\n\t} else {\n\t\tround = math.Floor(digit)\n\t}\n\tnewVal = round / pow\n\treturn\n}", "func Round(val float64, roundOn float64, places int) (newVal float64) {\n\tvar round float64\n\tpow := math.Pow(10, float64(places))\n\tdigit := pow * val\n\t_, div := math.Modf(digit)\n\tif div >= roundOn {\n\t\tround = math.Ceil(digit)\n\t} else {\n\t\tround = math.Floor(digit)\n\t}\n\tnewVal = round / pow\n\treturn\n}", "func (d Decimal) Round(places int32) Decimal {\n\t// truncate to places + 1\n\tret := d.rescale(-places - 1)\n\n\t// add sign(d) * 0.5\n\tif ret.value.Sign() < 0 {\n\t\tret.value.Sub(ret.value, fiveInt)\n\t} else {\n\t\tret.value.Add(ret.value, fiveInt)\n\t}\n\n\t// floor for positive numbers, ceil for negative numbers\n\t_, m := ret.value.DivMod(ret.value, tenInt, new(big.Int))\n\tret.exp++\n\tif ret.value.Sign() < 0 && m.Cmp(zeroInt) != 0 {\n\t\tret.value.Add(ret.value, oneInt)\n\t}\n\n\treturn ret\n}", "func Round(num, precision float64) float64 {\n\tshift := math.Pow(10, precision)\n\treturn roundInt(num * shift)\n}", "func (fn *formulaFuncs) round(number, digits float64, mode roundMode) float64 {\n\tvar significance float64\n\tif digits > 0 {\n\t\tsignificance = math.Pow(1/10.0, digits)\n\t} else {\n\t\tsignificance = math.Pow(10.0, -digits)\n\t}\n\tval, res := math.Modf(number / significance)\n\tswitch mode {\n\tcase closest:\n\t\tconst eps = 0.499999999\n\t\tif res >= eps {\n\t\t\tval++\n\t\t} else if res <= -eps {\n\t\t\tval--\n\t\t}\n\tcase down:\n\tcase up:\n\t\tif res > 0 {\n\t\t\tval++\n\t\t} else if res < 0 {\n\t\t\tval--\n\t\t}\n\t}\n\treturn val * significance\n}", "func Round(v float64) float64 {\n\treturn RoundToDigits(v, 4)\n}", "func (d *MyDecimal) Round(to *MyDecimal, frac int, roundMode RoundMode) (err error) {\n\t// wordsFracTo is the number of fraction words in buffer.\n\twordsFracTo := (frac + 1) / digitsPerWord\n\tif frac > 0 {\n\t\twordsFracTo = digitsToWords(frac)\n\t}\n\twordsFrac := digitsToWords(int(d.digitsFrac))\n\twordsInt := digitsToWords(int(d.digitsInt))\n\n\troundDigit := int32(roundMode)\n\t/* TODO - fix this code as it won't work for CEILING mode */\n\n\tif wordsInt+wordsFracTo > wordBufLen {\n\t\twordsFracTo = wordBufLen - wordsInt\n\t\tfrac = wordsFracTo * digitsPerWord\n\t\terr = ErrTruncated\n\t}\n\tif int(d.digitsInt)+frac < 0 {\n\t\t*to = zeroMyDecimal\n\t\treturn nil\n\t}\n\tif to != d {\n\t\tcopy(to.wordBuf[:], d.wordBuf[:])\n\t\tto.negative = d.negative\n\t\tto.digitsInt = int8(mathutil.Min(wordsInt, wordBufLen) * digitsPerWord)\n\t}\n\tif wordsFracTo > wordsFrac {\n\t\tidx := wordsInt + wordsFrac\n\t\tfor wordsFracTo > wordsFrac {\n\t\t\twordsFracTo--\n\t\t\tto.wordBuf[idx] = 0\n\t\t\tidx++\n\t\t}\n\t\tto.digitsFrac = int8(frac)\n\t\tto.resultFrac = to.digitsFrac\n\t\treturn\n\t}\n\tif frac >= int(d.digitsFrac) {\n\t\tto.digitsFrac = int8(frac)\n\t\tto.resultFrac 
= to.digitsFrac\n\t\treturn\n\t}\n\n\t// Do increment.\n\ttoIdx := wordsInt + wordsFracTo - 1\n\tif frac == wordsFracTo*digitsPerWord {\n\t\tdoInc := false\n\t\tswitch roundMode {\n\t\t// Notice: No support for ceiling mode now.\n\t\tcase ModeCeiling:\n\t\t\t// If any word after scale is not zero, do increment.\n\t\t\t// e.g ceiling 3.0001 to scale 1, gets 3.1\n\t\t\tidx := toIdx + (wordsFrac - wordsFracTo)\n\t\t\tfor idx > toIdx {\n\t\t\t\tif d.wordBuf[idx] != 0 {\n\t\t\t\t\tdoInc = true\n\t\t\t\t\tbreak\n\t\t\t\t}\n\t\t\t\tidx--\n\t\t\t}\n\t\tcase ModeHalfUp:\n\t\t\tdigAfterScale := d.wordBuf[toIdx+1] / digMask // the first digit after scale.\n\t\t\t// If first digit after scale is equal to or greater than 5, do increment.\n\t\t\tdoInc = digAfterScale >= 5\n\t\tcase ModeTruncate:\n\t\t\t// Never round, just truncate.\n\t\t\tdoInc = false\n\t\t}\n\t\tif doInc {\n\t\t\tif toIdx >= 0 {\n\t\t\t\tto.wordBuf[toIdx]++\n\t\t\t} else {\n\t\t\t\ttoIdx++\n\t\t\t\tto.wordBuf[toIdx] = wordBase\n\t\t\t}\n\t\t} else if wordsInt+wordsFracTo == 0 {\n\t\t\t*to = zeroMyDecimal\n\t\t\treturn nil\n\t\t}\n\t} else {\n\t\t/* TODO - fix this code as it won't work for CEILING mode */\n\t\tpos := wordsFracTo*digitsPerWord - frac - 1\n\t\tshiftedNumber := to.wordBuf[toIdx] / powers10[pos]\n\t\tdigAfterScale := shiftedNumber % 10\n\t\tif digAfterScale > roundDigit || (roundDigit == 5 && digAfterScale == 5) {\n\t\t\tshiftedNumber += 10\n\t\t}\n\t\tto.wordBuf[toIdx] = powers10[pos] * (shiftedNumber - digAfterScale)\n\t}\n\t/*\n\t In case we're rounding e.g. 1.5e9 to 2.0e9, the decimal words inside\n\t the buffer are as follows.\n\n\t Before <1, 5e8>\n\t After <2, 5e8>\n\n\t Hence we need to set the 2nd field to 0.\n\t The same holds if we round 1.5e-9 to 2e-9.\n\t*/\n\tif wordsFracTo < wordsFrac {\n\t\tidx := wordsInt + wordsFracTo\n\t\tif frac == 0 && wordsInt == 0 {\n\t\t\tidx = 1\n\t\t}\n\t\tfor idx < wordBufLen {\n\t\t\tto.wordBuf[idx] = 0\n\t\t\tidx++\n\t\t}\n\t}\n\n\t// Handle carry.\n\tvar carry int32\n\tif to.wordBuf[toIdx] >= wordBase {\n\t\tcarry = 1\n\t\tto.wordBuf[toIdx] -= wordBase\n\t\tfor carry == 1 && toIdx > 0 {\n\t\t\ttoIdx--\n\t\t\tto.wordBuf[toIdx], carry = add(to.wordBuf[toIdx], 0, carry)\n\t\t}\n\t\tif carry > 0 {\n\t\t\tif wordsInt+wordsFracTo >= wordBufLen {\n\t\t\t\twordsFracTo--\n\t\t\t\tfrac = wordsFracTo * digitsPerWord\n\t\t\t\terr = ErrTruncated\n\t\t\t}\n\t\t\tfor toIdx = wordsInt + mathutil.Max(wordsFracTo, 0); toIdx > 0; toIdx-- {\n\t\t\t\tif toIdx < wordBufLen {\n\t\t\t\t\tto.wordBuf[toIdx] = to.wordBuf[toIdx-1]\n\t\t\t\t} else {\n\t\t\t\t\terr = ErrOverflow\n\t\t\t\t}\n\t\t\t}\n\t\t\tto.wordBuf[toIdx] = 1\n\t\t\t/* We cannot have more than 9 * 9 = 81 digits. 
*/\n\t\t\tif int(to.digitsInt) < digitsPerWord*wordBufLen {\n\t\t\t\tto.digitsInt++\n\t\t\t} else {\n\t\t\t\terr = ErrOverflow\n\t\t\t}\n\t\t}\n\t} else {\n\t\tfor {\n\t\t\tif to.wordBuf[toIdx] != 0 {\n\t\t\t\tbreak\n\t\t\t}\n\t\t\tif toIdx == 0 {\n\t\t\t\t/* making 'zero' with the proper scale */\n\t\t\t\tidx := wordsFracTo + 1\n\t\t\t\tto.digitsInt = 1\n\t\t\t\tto.digitsFrac = int8(mathutil.Max(frac, 0))\n\t\t\t\tto.negative = false\n\t\t\t\tfor toIdx < idx {\n\t\t\t\t\tto.wordBuf[toIdx] = 0\n\t\t\t\t\ttoIdx++\n\t\t\t\t}\n\t\t\t\tto.resultFrac = to.digitsFrac\n\t\t\t\treturn nil\n\t\t\t}\n\t\t\ttoIdx--\n\t\t}\n\t}\n\t/* Here we check 999.9 -> 1000 case when we need to increase intDigCnt */\n\tfirstDig := mod9[to.digitsInt]\n\tif firstDig > 0 && to.wordBuf[toIdx] >= powers10[firstDig] {\n\t\tto.digitsInt++\n\t}\n\tif frac < 0 {\n\t\tfrac = 0\n\t}\n\tto.digitsFrac = int8(frac)\n\tto.resultFrac = to.digitsFrac\n\treturn\n}", "func (d Decimal) Round(places int32) Decimal {\n\tif d.exp == -places {\n\t\treturn d\n\t}\n\t// truncate to places + 1\n\tret := d.rescale(-places - 1)\n\n\t// add sign(d) * 0.5\n\tif ret.value.Sign() < 0 {\n\t\tret.value.Sub(ret.value, fiveInt)\n\t} else {\n\t\tret.value.Add(ret.value, fiveInt)\n\t}\n\n\t// floor for positive numbers, ceil for negative numbers\n\t_, m := ret.value.DivMod(ret.value, tenInt, new(big.Int))\n\tret.exp++\n\tif ret.value.Sign() < 0 && m.Cmp(zeroInt) != 0 {\n\t\tret.value.Add(ret.value, oneInt)\n\t}\n\n\treturn ret\n}", "func round(val float64, roundOn float64, places int ) (newVal float64) {\n\tvar round float64\n\tpow := math.Pow(10, float64(places))\n\tdigit := pow * val\n\t_, div := math.Modf(digit)\n\t_div := math.Copysign(div, val)\n\t_roundOn := math.Copysign(roundOn, val)\n\tif _div >= _roundOn {\n\t\tround = math.Ceil(digit)\n\t} else {\n\t\tround = math.Floor(digit)\n\t}\n\tnewVal = round / pow\n\treturn\n}", "func (i *Number) Round(precision Number) *Number {\n\tfmtBuf := bytes.NewBuffer([]byte{})\n\tfmtBuf.WriteString(\"%.\")\n\tfmtBuf.WriteString(precision.AsString(*NewNumber(0)))\n\tfmtBuf.WriteString(\"f\")\n\treturn NewNumber(fmt.Sprintf(fmtBuf.String(), i.value))\n}", "func Round(n float64, precision int) float64 {\n\tm := math.Pow10(precision)\n\treturn math.Round(n*m) / m\n}", "func round(num float64, precision int) float64 {\n\toutput := math.Pow(10, float64(precision))\n\tproduct := num * output\n\tsigned := int(product + math.Copysign(0.5, product))\n\t\n\treturn float64(signed) / output\n}", "func Round(f float64, places int) float64 {\n\tshift := math.Pow(10, float64(places))\n\treturn math.Floor(f*shift+.5) / shift\n}", "func RoundNumber(number float64, decimals int) float64 {\n\tprecision := 1.0\n\tfor i := 0; i < decimals; i++ {\n\t\tprecision = precision * 10\n\t}\n\troundedNum := math.Round(number*precision) / precision\n\n\treturn roundedNum\n}", "func (bc ByteCount) ConvertRound(unit ByteCount, precision int) float64 {\n\tp := math.Pow(10, float64(precision))\n\tv := math.Round(p*float64(bc)/float64(unit)) / p\n\treturn v\n}", "func Round(input float64) float64 {\n\tif input < 0 {\n\t\treturn math.Ceil(input - 0.5)\n\t}\n\treturn math.Floor(input + 0.5)\n}", "func (d *MyDecimal) Round(to *MyDecimal, frac int, roundMode RoundMode) (err error) {\n\ttrace_util_0.Count(_mydecimal_00000, 195)\n\t// wordsFracTo is the number of fraction words in buffer.\n\twordsFracTo := (frac + 1) / digitsPerWord\n\tif frac > 0 {\n\t\ttrace_util_0.Count(_mydecimal_00000, 207)\n\t\twordsFracTo = 
digitsToWords(frac)\n\t}\n\ttrace_util_0.Count(_mydecimal_00000, 196)\n\twordsFrac := digitsToWords(int(d.digitsFrac))\n\twordsInt := digitsToWords(int(d.digitsInt))\n\n\troundDigit := int32(roundMode)\n\t/* TODO - fix this code as it won't work for CEILING mode */\n\n\tif wordsInt+wordsFracTo > wordBufLen {\n\t\ttrace_util_0.Count(_mydecimal_00000, 208)\n\t\twordsFracTo = wordBufLen - wordsInt\n\t\tfrac = wordsFracTo * digitsPerWord\n\t\terr = ErrTruncated\n\t}\n\ttrace_util_0.Count(_mydecimal_00000, 197)\n\tif int(d.digitsInt)+frac < 0 {\n\t\ttrace_util_0.Count(_mydecimal_00000, 209)\n\t\t*to = zeroMyDecimal\n\t\treturn nil\n\t}\n\ttrace_util_0.Count(_mydecimal_00000, 198)\n\tif to != d {\n\t\ttrace_util_0.Count(_mydecimal_00000, 210)\n\t\tcopy(to.wordBuf[:], d.wordBuf[:])\n\t\tto.negative = d.negative\n\t\tto.digitsInt = int8(myMin(wordsInt, wordBufLen) * digitsPerWord)\n\t}\n\ttrace_util_0.Count(_mydecimal_00000, 199)\n\tif wordsFracTo > wordsFrac {\n\t\ttrace_util_0.Count(_mydecimal_00000, 211)\n\t\tidx := wordsInt + wordsFrac\n\t\tfor wordsFracTo > wordsFrac {\n\t\t\ttrace_util_0.Count(_mydecimal_00000, 213)\n\t\t\twordsFracTo--\n\t\t\tto.wordBuf[idx] = 0\n\t\t\tidx++\n\t\t}\n\t\ttrace_util_0.Count(_mydecimal_00000, 212)\n\t\tto.digitsFrac = int8(frac)\n\t\tto.resultFrac = to.digitsFrac\n\t\treturn\n\t}\n\ttrace_util_0.Count(_mydecimal_00000, 200)\n\tif frac >= int(d.digitsFrac) {\n\t\ttrace_util_0.Count(_mydecimal_00000, 214)\n\t\tto.digitsFrac = int8(frac)\n\t\tto.resultFrac = to.digitsFrac\n\t\treturn\n\t}\n\n\t// Do increment.\n\ttrace_util_0.Count(_mydecimal_00000, 201)\n\ttoIdx := wordsInt + wordsFracTo - 1\n\tif frac == wordsFracTo*digitsPerWord {\n\t\ttrace_util_0.Count(_mydecimal_00000, 215)\n\t\tdoInc := false\n\t\tswitch roundMode {\n\t\t// Notice: No support for ceiling mode now.\n\t\tcase modeCeiling:\n\t\t\ttrace_util_0.Count(_mydecimal_00000, 217)\n\t\t\t// If any word after scale is not zero, do increment.\n\t\t\t// e.g ceiling 3.0001 to scale 1, gets 3.1\n\t\t\tidx := toIdx + (wordsFrac - wordsFracTo)\n\t\t\tfor idx > toIdx {\n\t\t\t\ttrace_util_0.Count(_mydecimal_00000, 220)\n\t\t\t\tif d.wordBuf[idx] != 0 {\n\t\t\t\t\ttrace_util_0.Count(_mydecimal_00000, 222)\n\t\t\t\t\tdoInc = true\n\t\t\t\t\tbreak\n\t\t\t\t}\n\t\t\t\ttrace_util_0.Count(_mydecimal_00000, 221)\n\t\t\t\tidx--\n\t\t\t}\n\t\tcase ModeHalfEven:\n\t\t\ttrace_util_0.Count(_mydecimal_00000, 218)\n\t\t\tdigAfterScale := d.wordBuf[toIdx+1] / digMask // the first digit after scale.\n\t\t\t// If first digit after scale is 5 and round even, do increment if digit at scale is odd.\n\t\t\tdoInc = (digAfterScale > 5) || (digAfterScale == 5)\n\t\tcase ModeTruncate:\n\t\t\ttrace_util_0.Count(_mydecimal_00000, 219)\n\t\t\t// Never round, just truncate.\n\t\t\tdoInc = false\n\t\t}\n\t\ttrace_util_0.Count(_mydecimal_00000, 216)\n\t\tif doInc {\n\t\t\ttrace_util_0.Count(_mydecimal_00000, 223)\n\t\t\tif toIdx >= 0 {\n\t\t\t\ttrace_util_0.Count(_mydecimal_00000, 224)\n\t\t\t\tto.wordBuf[toIdx]++\n\t\t\t} else {\n\t\t\t\ttrace_util_0.Count(_mydecimal_00000, 225)\n\t\t\t\t{\n\t\t\t\t\ttoIdx++\n\t\t\t\t\tto.wordBuf[toIdx] = wordBase\n\t\t\t\t}\n\t\t\t}\n\t\t} else {\n\t\t\ttrace_util_0.Count(_mydecimal_00000, 226)\n\t\t\tif wordsInt+wordsFracTo == 0 {\n\t\t\t\ttrace_util_0.Count(_mydecimal_00000, 227)\n\t\t\t\t*to = zeroMyDecimal\n\t\t\t\treturn nil\n\t\t\t}\n\t\t}\n\t} else {\n\t\ttrace_util_0.Count(_mydecimal_00000, 228)\n\t\t{\n\t\t\t/* TODO - fix this code as it won't work for CEILING mode */\n\t\t\tpos := 
wordsFracTo*digitsPerWord - frac - 1\n\t\t\tshiftedNumber := to.wordBuf[toIdx] / powers10[pos]\n\t\t\tdigAfterScale := shiftedNumber % 10\n\t\t\tif digAfterScale > roundDigit || (roundDigit == 5 && digAfterScale == 5) {\n\t\t\t\ttrace_util_0.Count(_mydecimal_00000, 230)\n\t\t\t\tshiftedNumber += 10\n\t\t\t}\n\t\t\ttrace_util_0.Count(_mydecimal_00000, 229)\n\t\t\tto.wordBuf[toIdx] = powers10[pos] * (shiftedNumber - digAfterScale)\n\t\t}\n\t}\n\t/*\n\t In case we're rounding e.g. 1.5e9 to 2.0e9, the decimal words inside\n\t the buffer are as follows.\n\n\t Before <1, 5e8>\n\t After <2, 5e8>\n\n\t Hence we need to set the 2nd field to 0.\n\t The same holds if we round 1.5e-9 to 2e-9.\n\t*/\n\ttrace_util_0.Count(_mydecimal_00000, 202)\n\tif wordsFracTo < wordsFrac {\n\t\ttrace_util_0.Count(_mydecimal_00000, 231)\n\t\tidx := wordsInt + wordsFracTo\n\t\tif frac == 0 && wordsInt == 0 {\n\t\t\ttrace_util_0.Count(_mydecimal_00000, 233)\n\t\t\tidx = 1\n\t\t}\n\t\ttrace_util_0.Count(_mydecimal_00000, 232)\n\t\tfor idx < wordBufLen {\n\t\t\ttrace_util_0.Count(_mydecimal_00000, 234)\n\t\t\tto.wordBuf[idx] = 0\n\t\t\tidx++\n\t\t}\n\t}\n\n\t// Handle carry.\n\ttrace_util_0.Count(_mydecimal_00000, 203)\n\tvar carry int32\n\tif to.wordBuf[toIdx] >= wordBase {\n\t\ttrace_util_0.Count(_mydecimal_00000, 235)\n\t\tcarry = 1\n\t\tto.wordBuf[toIdx] -= wordBase\n\t\tfor carry == 1 && toIdx > 0 {\n\t\t\ttrace_util_0.Count(_mydecimal_00000, 237)\n\t\t\ttoIdx--\n\t\t\tto.wordBuf[toIdx], carry = add(to.wordBuf[toIdx], 0, carry)\n\t\t}\n\t\ttrace_util_0.Count(_mydecimal_00000, 236)\n\t\tif carry > 0 {\n\t\t\ttrace_util_0.Count(_mydecimal_00000, 238)\n\t\t\tif wordsInt+wordsFracTo >= wordBufLen {\n\t\t\t\ttrace_util_0.Count(_mydecimal_00000, 241)\n\t\t\t\twordsFracTo--\n\t\t\t\tfrac = wordsFracTo * digitsPerWord\n\t\t\t\terr = ErrTruncated\n\t\t\t}\n\t\t\ttrace_util_0.Count(_mydecimal_00000, 239)\n\t\t\tfor toIdx = wordsInt + myMax(wordsFracTo, 0); toIdx > 0; toIdx-- {\n\t\t\t\ttrace_util_0.Count(_mydecimal_00000, 242)\n\t\t\t\tif toIdx < wordBufLen {\n\t\t\t\t\ttrace_util_0.Count(_mydecimal_00000, 243)\n\t\t\t\t\tto.wordBuf[toIdx] = to.wordBuf[toIdx-1]\n\t\t\t\t} else {\n\t\t\t\t\ttrace_util_0.Count(_mydecimal_00000, 244)\n\t\t\t\t\t{\n\t\t\t\t\t\terr = ErrOverflow\n\t\t\t\t\t}\n\t\t\t\t}\n\t\t\t}\n\t\t\ttrace_util_0.Count(_mydecimal_00000, 240)\n\t\t\tto.wordBuf[toIdx] = 1\n\t\t\t/* We cannot have more than 9 * 9 = 81 digits. 
*/\n\t\t\tif int(to.digitsInt) < digitsPerWord*wordBufLen {\n\t\t\t\ttrace_util_0.Count(_mydecimal_00000, 245)\n\t\t\t\tto.digitsInt++\n\t\t\t} else {\n\t\t\t\ttrace_util_0.Count(_mydecimal_00000, 246)\n\t\t\t\t{\n\t\t\t\t\terr = ErrOverflow\n\t\t\t\t}\n\t\t\t}\n\t\t}\n\t} else {\n\t\ttrace_util_0.Count(_mydecimal_00000, 247)\n\t\t{\n\t\t\tfor {\n\t\t\t\ttrace_util_0.Count(_mydecimal_00000, 248)\n\t\t\t\tif to.wordBuf[toIdx] != 0 {\n\t\t\t\t\ttrace_util_0.Count(_mydecimal_00000, 251)\n\t\t\t\t\tbreak\n\t\t\t\t}\n\t\t\t\ttrace_util_0.Count(_mydecimal_00000, 249)\n\t\t\t\tif toIdx == 0 {\n\t\t\t\t\ttrace_util_0.Count(_mydecimal_00000, 252)\n\t\t\t\t\t/* making 'zero' with the proper scale */\n\t\t\t\t\tidx := wordsFracTo + 1\n\t\t\t\t\tto.digitsInt = 1\n\t\t\t\t\tto.digitsFrac = int8(myMax(frac, 0))\n\t\t\t\t\tto.negative = false\n\t\t\t\t\tfor toIdx < idx {\n\t\t\t\t\t\ttrace_util_0.Count(_mydecimal_00000, 254)\n\t\t\t\t\t\tto.wordBuf[toIdx] = 0\n\t\t\t\t\t\ttoIdx++\n\t\t\t\t\t}\n\t\t\t\t\ttrace_util_0.Count(_mydecimal_00000, 253)\n\t\t\t\t\tto.resultFrac = to.digitsFrac\n\t\t\t\t\treturn nil\n\t\t\t\t}\n\t\t\t\ttrace_util_0.Count(_mydecimal_00000, 250)\n\t\t\t\ttoIdx--\n\t\t\t}\n\t\t}\n\t}\n\t/* Here we check 999.9 -> 1000 case when we need to increase intDigCnt */\n\ttrace_util_0.Count(_mydecimal_00000, 204)\n\tfirstDig := mod9[to.digitsInt]\n\tif firstDig > 0 && to.wordBuf[toIdx] >= powers10[firstDig] {\n\t\ttrace_util_0.Count(_mydecimal_00000, 255)\n\t\tto.digitsInt++\n\t}\n\ttrace_util_0.Count(_mydecimal_00000, 205)\n\tif frac < 0 {\n\t\ttrace_util_0.Count(_mydecimal_00000, 256)\n\t\tfrac = 0\n\t}\n\ttrace_util_0.Count(_mydecimal_00000, 206)\n\tto.digitsFrac = int8(frac)\n\tto.resultFrac = to.digitsFrac\n\treturn\n}", "func round(n float64) float64 {\n\treturn math.Trunc(n)\n}", "func RoundDown(input float64, places int) (newVal float64) {\n\tvar round float64\n\tpow := math.Pow(10, float64(places))\n\tdigit := pow * input\n\tround = math.Floor(digit)\n\tnewVal = round / pow\n\treturn\n}", "func Round(value float64, precision int) float64 {\n\tmultiplier := math.Pow10(precision)\n\tinterim := math.Floor(value*multiplier + 0.5)\n\treturn interim / multiplier\n}", "func Round(v float64) int {\n\tx := strconv.FormatFloat(v, 'f', 0, 64)\n\tr, err := strconv.Atoi(x)\n\tif err != nil {\n\t\t// Shouldn't happen.\n\t\tlog.Fatalf(\"failed to parse float %f with 0 decimal values as int: %err\", x, err)\n\t}\n\treturn r\n}", "func (f Fixed) Round(n int) Fixed {\n\tif f.IsNaN() {\n\t\treturn NaN\n\t}\n\n\tfraction := f.fp % scale\n\tf0 := fraction / int64(math.Pow10(nPlaces-n-1))\n\tdigit := abs(f0 % 10)\n\tf0 = (f0 / 10)\n\tif digit >= 5 {\n\t\tf0 += 1 * sign(f.fp)\n\t}\n\tf0 = f0 * int64(math.Pow10(nPlaces-n))\n\n\tintpart := f.fp - fraction\n\tfp := intpart + f0\n\n\treturn Fixed{fp: fp}\n}", "func Round(arg float64) float64 {\n\treturn math.Round(arg)\n}", "func (d Decimal) NumDigits() int {\n\t// Note(mwoss): It can be optimized, unnecessary cast of big.Int to string\n\tif d.IsNegative() {\n\t\treturn len(d.value.String()) - 1\n\t}\n\treturn len(d.value.String())\n}", "func roundNumber(number int) int {\n\trounded := ((number / 1000) * 1000) + 1000\n\treturn rounded\n}", "func NumDigits(x int) int {\n\n\tcountDigits := 0\n\tfor x != 0 {\n\t\tx = x / 10\n\t\tcountDigits++\n\t}\n\treturn countDigits\n\n}", "func (a *decimal) Round(nd int) {\n\tif nd < 0 || nd >= a.nd {\n\t\treturn\n\t}\n\tif shouldRoundUp(a, nd) {\n\t\ta.RoundUp(nd)\n\t} else {\n\t\ta.RoundDown(nd)\n\t}\n}", "func (a *decimal) 
RoundUp(nd int) {\n\tif nd < 0 || nd >= a.nd {\n\t\treturn\n\t}\n\n\t// round up\n\tfor i := nd - 1; i >= 0; i-- {\n\t\tc := a.d[i]\n\t\tif c < '9' { // can stop after this digit\n\t\t\ta.d[i]++\n\t\t\ta.nd = i + 1\n\t\t\treturn\n\t\t}\n\t}\n\n\t// Number is all 9s.\n\t// Change to single 1 with adjusted decimal point.\n\ta.d[0] = '1'\n\ta.nd = 1\n\ta.dp++\n}", "func Round(val float64, place int) float64 {\n\tshift := math.Pow(10, float64(place))\n\treturn math.Floor(val*shift+.5) / shift\n}", "func GetDigits(n int) (digits []int) {\n\tbackwards := []int{}\n\tfor n != 0 {\n\t\trem := int(math.Mod(float64(n), float64(10)))\n\t\tbackwards = append(backwards, rem)\n\t\tn /= 10\n\t}\n\n\tdigits = make([]int, len(backwards))\n\tfor i := len(backwards); i > 0; i-- {\n\t\tdigits[len(backwards)-i] = backwards[i-1]\n\t}\n\treturn digits\n}", "func sumDigits(input int) int {\n\n\tsum := 0\n\n\tfor {\n\n\t\tif input < 10 && (sum+input) >= 10 {\n\t\t\tsum += input\n\t\t\tinput = sum\n\t\t\tsum = 0\n\t\t\tcontinue\n\t\t}\n\n\t\tif input < 10 && (sum+input) < 10 {\n\t\t\treturn sum + input\n\t\t}\n\n\t\tdigit := input % 10\n\t\tsum += digit\n\t\tinput = input / 10\n\t}\n\n}", "func (d Decimal) DivRound(d2 Decimal, precision int32) Decimal {\n\t// QuoRem already checks initialization\n\tq, r := d.QuoRem(d2, precision)\n\t// the actual rounding decision is based on comparing r*10^precision and d2/2\n\t// instead compare 2 r 10 ^precision and d2\n\tvar rv2 big.Int\n\trv2.Abs(r.value)\n\trv2.Lsh(&rv2, 1)\n\t// now rv2 = abs(r.value) * 2\n\tr2 := Decimal{value: &rv2, exp: r.exp + precision}\n\t// r2 is now 2 * r * 10 ^ precision\n\tvar c = r2.Cmp(d2.Abs())\n\n\tif c < 0 {\n\t\treturn q\n\t}\n\n\tif d.value.Sign()*d2.value.Sign() < 0 {\n\t\treturn q.Sub(New(1, -precision))\n\t}\n\n\treturn q.Add(New(1, -precision))\n}", "func sumDigits(x *big.Int) *big.Int {\n\tten := big.NewInt(10)\n\tsum := big.NewInt(0)\n\tmod := big.NewInt(0)\n\n\tfor ten.Cmp(x) < 0 {\n\t\tsum.Add(sum, mod.Mod(x, ten))\n\t\tx.Div(x, ten)\n\t}\n\tsum.Add(sum, x)\n\treturn sum\n}", "func (d Decimal) DivRound(d2 Decimal, precision int32) Decimal {\n\t// QuoRem already checks initialization\n\tq, r := d.QuoRem(d2, precision)\n\t// the actual rounding decision is based on comparing r*10^precision and d2/2\n\t// instead compare 2 r 10 ^precision and d2\n\tvar rv2 big.Int\n\trv2.Abs(r.value)\n\trv2.Lsh(&rv2, 1)\n\t// now rv2 = abs(r.value) * 2\n\tr2 := Decimal{value: &rv2, exp: r.exp + precision}\n\t// r2 is now 2 * r * 10 ^ precision\n\tvar c = r2.Cmp(d2.Abs())\n\n\tif c < 0 {\n\t\treturn q\n\t}\n\n\tif d.value.Sign()*d2.value.Sign() < 0 {\n\t\treturn q.Sub(NewDec(1, -precision))\n\t}\n\n\treturn q.Add(NewDec(1, -precision))\n}", "func SumDigits(num int) int {\n\tdigits := []rune(strconv.Itoa(num))\n\n\tfor len(digits) > 1 {\n\t\tres := 0\n\t\tfor _, v := range digits {\n\t\t\tdigit, _ := strconv.Atoi(string(v))\n\t\t\tres += digit\n\t\t}\n\n\t\tdigits = []rune(strconv.Itoa(res))\n\t}\n\n\tnum, _ = strconv.Atoi(string(digits[0]))\n\treturn num\n}", "func (fn *formulaFuncs) ROUND(argsList *list.List) formulaArg {\n\tif argsList.Len() != 2 {\n\t\treturn newErrorFormulaArg(formulaErrorVALUE, \"ROUND requires 2 numeric arguments\")\n\t}\n\tnumber := argsList.Front().Value.(formulaArg).ToNumber()\n\tif number.Type == ArgError {\n\t\treturn number\n\t}\n\tdigits := argsList.Back().Value.(formulaArg).ToNumber()\n\tif digits.Type == ArgError {\n\t\treturn digits\n\t}\n\treturn newNumberFormulaArg(fn.round(number.Number, digits.Number, closest))\n}", 
"func round(num float64) int {\n\treturn int(num + math.Copysign(0.5, num))\n}", "func round(num float64) int {\n\treturn int(num + math.Copysign(0.5, num))\n}", "func round(num float64) int {\n\treturn int(num + math.Copysign(0.5, num))\n}", "func getSumOfDigits(n int64) int64 {\n\tvar sum int64\n\tfor n > 0 {\n\t\tsum += n % 10\n\t\tn /= 10\n\t}\n\treturn sum\n}", "func (a *decimal) RoundedInteger() uint64 {\n\tif a.dp > 20 {\n\t\treturn 0xFFFFFFFFFFFFFFFF\n\t}\n\tvar i int\n\tn := uint64(0)\n\tfor i = 0; i < a.dp && i < a.nd; i++ {\n\t\tn = n*10 + uint64(a.d[i]-'0')\n\t}\n\tfor ; i < a.dp; i++ {\n\t\tn *= 10\n\t}\n\tif shouldRoundUp(a, a.dp) {\n\t\tn++\n\t}\n\treturn n\n}", "func Round(x Decimal, unit Decimal, mode RoundingMode) Decimal {\n\tprec := unit.Exponent() * -1\n\n\tswitch mode {\n\tcase RoundDown:\n\t\trounded := x.RoundDown(prec)\n\t\treturn rounded.Sub(rounded.Mod(unit)).Truncate(prec)\n\tcase RoundUp:\n\t\trounded := x.RoundUp(prec)\n\t\treturn rounded.Add(rounded.Mod(unit)).Truncate(prec)\n\tcase RoundToNearest:\n\t\treturn x.RoundNearest(unit).Truncate(prec)\n\t}\n\treturn Decimal{}\n}", "func Round(f float64) int {\n\tif math.Abs(f) < 0.5 {\n\t\treturn 0\n\t}\n\treturn int(f + math.Copysign(0.5, f))\n}", "func (d *MyDecimal) GetDigitsFrac() int8 {\n\ttrace_util_0.Count(_mydecimal_00000, 35)\n\treturn d.digitsFrac\n}", "func digitsToInt(digits ...int) int {\n\tn := 0\n\tfor i := range digits {\n\t\tn += digits[i] * int(math.Pow10(len(digits)-i-1))\n\t}\n\treturn n\n}", "func (a *decimal) RoundDown(nd int) {\n\tif nd < 0 || nd >= a.nd {\n\t\treturn\n\t}\n\ta.nd = nd\n\ttrim(a)\n}", "func roundValue(initialValue float64, floor float64) float64 {\n\tb := []byte(strings.Trim(fmt.Sprintf(\"%f\", initialValue), \"0\"))\n\tvalue := initialValue\n\tfor i := len(b)-1; i >= 0; i-- {\n\t\tif b[i] != '.' 
{\n\t\t\tb[i] = '0'\n\t\t\tround, e := strconv.ParseFloat(string(b), 64)\n\t\t\tif e != nil || round <= floor {\n\t\t\t\treturn value\n\t\t\t}\n\t\t\tvalue = round\n\t\t}\n\t}\n\treturn value\n}", "func (m mathUtil) RoundDown(value, roundTo float64) float64 {\n\td1 := math.Floor(value / roundTo)\n\treturn d1 * roundTo\n}", "func CountNumDigits(x int) int {\n\tcount := 1\n\tfor x/10 != 0 {\n\t\tcount++\n\t\tx /= 10\n\t}\n\treturn count\n}", "func Round(f float64) float64 {\n\tif f > 0 {\n\t\treturn math.Floor(f + 0.5)\n\t}\n\treturn math.Ceil(f - 0.5)\n}", "func round(x float64) float64 {\n\tt := math.Trunc(x)\n\tif math.Abs(x-t) >= 0.5 {\n\t\treturn t + math.Copysign(1, x)\n\t}\n\treturn t\n}", "func sumSquareDigits(input int, sum int) int {\n\tif input <= 0 {\n\t\treturn sum\n\t} else {\n\t\tones := input % 10\n\t\tsquared := ones * ones\n\t\treturn sumSquareDigits(input / 10, sum + squared)\n\t}\n}", "func Round64(x float64, n int) float64 {\n\tif math.IsNaN(x) {\n\t\treturn x\n\t}\n\tshift := math.Pow(10, float64(n))\n\treturn math.Floor(x*shift+0.5) / shift\n}", "func round(val float64) int {\n\tif val < 0 {\n\t\treturn int(val - 0.5)\n\t}\n\treturn int(val + 0.5)\n}", "func RoundFloat64(number float64) int {\n\t// Truncate the float and use this to get the decimal component\n\ttruncNum := math.Trunc(number)\n\tdecimalComp := number - truncNum\n\n\tvar result float64\n\tif decimalComp >= 0.5 {\n\t\t// Use Ceil\n\t\tresult = math.Ceil(number)\n\t} else {\n\t\t// Use Floor\n\t\tresult = math.Floor(number)\n\t}\n\n\treturn int(result)\n}", "func (f Fixed) Round(n int) Fixed {\n\tif f.IsNaN() {\n\t\treturn NaN\n\t}\n\n\tround := .5\n\n\tf0 := f.Frac()\n\tf0 = f0*math.Pow10(n) + round\n\tf0 = float64(int(f0)) / math.Pow10(n)\n\n\treturn NewFromFloat(float64(f.UInt()) + f0)\n}", "func Round(x float64) float64 {\n\n\treturn math.Floor(x + 0.5)\n}", "func FindDigits(n int32) int32 {\n\tvar copy, divisor int32 = n, 0\n\n\tfor copy != 0 {\n\t\tdigit := copy % int32(10)\n\t\tif digit != 0 {\n\t\t\tif n%digit == 0 {\n\t\t\t\tdivisor++\n\t\t\t}\n\t\t}\n\t\tcopy = copy / 10\n\t}\n\treturn divisor\n}", "func Log10(z *inf.Dec, x *inf.Dec, s inf.Scale) (*inf.Dec, error) {\n\tz, err := Log(z, x, s)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\treturn z.QuoRound(z, decimalLog10, s, inf.RoundHalfUp), nil\n}", "func Round(f float64) float64 {\n\treturn math.Round(f*1000000000) / 1000000000\n}", "func ExampleRound() {\n\tds := []time.Duration{\n\t\ttime.Hour + time.Second + 123*time.Millisecond, // 1h0m1.123s\n\t\ttime.Hour + time.Second + time.Microsecond, // 1h0m1.000001s\n\t\t123456789 * time.Nanosecond, // 123.456789ms\n\t\t123456 * time.Nanosecond, // 123.456µs\n\t\t123 * time.Nanosecond, // 123ns\n\t}\n\n\tfmt.Println(\"Duration |0 digits |1 digit |2 digits |3 digits |\")\n\tfmt.Println(\"-------------------------------------------------------------------\")\n\tfor _, d := range ds {\n\t\tfmt.Printf(\"%-14v|\", d)\n\t\tfor digits := 0; digits <= 3; digits++ {\n\t\t\tfmt.Printf(\"%-12v|\", Round(d, digits))\n\t\t}\n\t\tfmt.Println()\n\t}\n\n\t// Output:\n\t// Duration |0 digits |1 digit |2 digits |3 digits |\n\t// -------------------------------------------------------------------\n\t// 1h0m1.123s |1h0m1s |1h0m1.1s |1h0m1.12s |1h0m1.123s |\n\t// 1h0m1.000001s |1h0m1s |1h0m1s |1h0m1s |1h0m1s |\n\t// 123.456789ms |123ms |123.5ms |123.46ms |123.457ms |\n\t// 123.456µs |123µs |123.5µs |123.46µs |123.456µs |\n\t// 123ns |123ns |123ns |123ns |123ns |\n}", "func round(val float64) float64 {\n\treturn 
math.Round(val*100) / 100\n}", "func Round(x float64) float64 {\n\treturn math.Floor(x + 0.5)\n}", "func RoundToInt(f float64) int {\n\tif math.Abs(f) < 0.5 {\n\t\treturn 0\n\t}\n\treturn int(f + math.Copysign(0.5, f))\n}", "func round(f float64) int {\n\tif math.Abs(f) < 0.5 {\n\t\treturn 0\n\t}\n\treturn int(f + math.Copysign(0.5, f))\n}", "func (d *MyDecimal) GetDigitsInt() int8 {\n\treturn d.digitsInt\n}", "func RoundDown(d decimal.Decimal, precision int32) decimal.Decimal {\n\n\treturn d.Truncate(precision)\n}", "func round(f float64) int {\n\tif f < 0 {\n\t\treturn int(math.Ceil(f - 0.5))\n\t}\n\treturn int(math.Floor(f + 0.5))\n}", "func decimals(dec int64) int {\n\tif dec < 1 {\n\t\treturn 0\n\t}\n\treturn int(math.Floor(math.Log10(float64(dec))))\n}", "func roundFloat(x float64, prec int) float64 {\n\tvar rounder float64\n\tpow := math.Pow(10, float64(prec))\n\tintermed := x * pow\n\t_, frac := math.Modf(intermed)\n\tx = .5\n\tif frac < 0.0 {\n\t\tx = -.5\n\t}\n\tif frac >= x {\n\t\trounder = math.Ceil(intermed)\n\t} else {\n\t\trounder = math.Floor(intermed)\n\t}\n\n\treturn rounder / pow\n}", "func DigitSum(n int) int {\n\tif n < 0 {\n\t\treturn -1\n\t}\n\n\tres := 0\n\n\tfor n > 0 {\n\t\tres += n % 10\n\t\tn /= 10\n\t}\n\n\treturn res\n}", "func (d Decimal) RoundDown(places int32) Decimal {\n\tif d.exp >= -places {\n\t\treturn d\n\t}\n\n\trescaled := d.rescale(-places)\n\tif d.Equal(rescaled) {\n\t\treturn d\n\t}\n\treturn rescaled\n}", "func ToFixed(num float64, precision int) float64 {\n\toutput := math.Pow(10, float64(precision))\n\treturn float64(round(num*output)) / output\n}", "func ToFixed(num float64, precision int) float64 {\n\toutput := math.Pow(10, float64(precision))\n\treturn float64(round(num*output)) / output\n}", "func (d Decimal) RoundFloor(places int32) Decimal {\n\tif d.exp >= -places {\n\t\treturn d\n\t}\n\n\trescaled := d.rescale(-places)\n\tif d.Equal(rescaled) {\n\t\treturn d\n\t}\n\n\tif d.value.Sign() < 0 {\n\t\trescaled.value.Sub(rescaled.value, oneInt)\n\t}\n\n\treturn rescaled\n}", "func (buf *Buffer) NDigits(n, i, d int, pad byte) {\n\tj := n - 1\n\tfor ; j >= 0 && d > 0; j-- {\n\t\tbuf.Tmp[i+j] = digits[d%10]\n\t\td /= 10\n\t}\n\tfor ; j >= 0; j-- {\n\t\tbuf.Tmp[i+j] = pad\n\t}\n}", "func RoundFixedDecimal(x float64, precision int) float64 {\n\trounded, _ := d.NewFromFloat(x).Round(int32(precision)).Float64()\n\treturn rounded\n}", "func (d *MyDecimal) GetDigitsFrac() int8 {\n\treturn d.digitsFrac\n}", "func RoundUp(input float64, places int) (newVal float64) {\n\tvar round float64\n\tpow := math.Pow(10, float64(places))\n\tdigit := pow * input\n\tround = math.Ceil(digit)\n\tnewVal = round / pow\n\treturn\n}", "func (d LegacyDec) RoundInt64() int64 {\n\tchopped := chopPrecisionAndRoundNonMutative(d.i)\n\tif !chopped.IsInt64() {\n\t\tpanic(\"Int64() out of bound\")\n\t}\n\treturn chopped.Int64()\n}", "func chopPrecisionAndRound(d *big.Int) *big.Int {\n\t// remove the negative and add it back when returning\n\tif d.Sign() == -1 {\n\t\t// make d positive, compute chopped value, and then un-mutate d\n\t\td = d.Neg(d)\n\t\td = chopPrecisionAndRound(d)\n\t\td = d.Neg(d)\n\t\treturn d\n\t}\n\n\t// get the truncated quotient and remainder\n\tquo, rem := d, big.NewInt(0)\n\tquo, rem = quo.QuoRem(d, precisionReuse, rem)\n\n\tif rem.Sign() == 0 { // remainder is zero\n\t\treturn quo\n\t}\n\n\tswitch rem.Cmp(fivePrecision) {\n\tcase -1:\n\t\treturn quo\n\tcase 1:\n\t\treturn quo.Add(quo, oneInt)\n\tdefault: // bankers rounding must take place\n\t\t// always 
round to an even number\n\t\tif quo.Bit(0) == 0 {\n\t\t\treturn quo\n\t\t}\n\t\treturn quo.Add(quo, oneInt)\n\t}\n}", "func (buf *buffer) nDigits(n, i, d int, pad byte) {\n\tj := n - 1\n\tfor ; j >= 0 && d > 0; j-- {\n\t\tbuf.tmp[i+j] = digits[d%10]\n\t\td /= 10\n\t}\n\tfor ; j >= 0; j-- {\n\t\tbuf.tmp[i+j] = pad\n\t}\n}", "func DropletPrecisionToDivisor(precision uint8) uint64 {\n\tif precision > droplet.Exponent {\n\t\tpanic(\"precision must be <= droplet.Exponent\")\n\t}\n\n\tn := droplet.Exponent - precision\n\tvar i uint64 = 1\n\tfor k := uint8(0); k < n; k++ {\n\t\ti = i * 10\n\t}\n\treturn i\n}", "func getNumDigits(number int) int {\n\tcount := 0\n\tfor {\n\t\tcount += 1\n\t\tnumber /= 10\n\t\tif number == 0 {\n\t\t\treturn count\n\t\t}\n\t}\n}", "func RoundCharacters(mask Mask) Mask {\n\tsize := mask.CharacterSize()\n\tsize.X <<= 1\n\tsize.Y <<= 1\n\tbounds := mask.Bounds()\n\tbounds.Min.X <<= 1\n\tbounds.Min.Y <<= 1\n\tbounds.Max.X <<= 1\n\tbounds.Max.Y <<= 1\n\treturn filterCharacterRounding{\n\t\tmask: mask,\n\t\tsize: size,\n\t\tbounds: bounds,\n\t}\n}", "func (x Vector64) Round(n int) Vector64 {\n\tfor i := 0; i < len(x); i++ {\n\t\tx[i] = Round64(x[i], n)\n\t}\n\treturn x\n}", "func TestRound(t *testing.T) {\n\t//TODO change to bigint\n\tres := round(9, int64(1000))\n\tassert.Equal(t, res, int64(1000))\n\tres = round(999, 1000)\n\tassert.Equal(t, res, int64(1000))\n\tres = round(0, 1000)\n\tassert.Equal(t, res, int64(1000))\n\tres = round(1000, 1000)\n\tassert.Equal(t, res, int64(2000))\n\tres = round(1001, 1000)\n\tassert.Equal(t, res, int64(2000))\n\tres = round(1999, 1000)\n\tassert.Equal(t, res, int64(2000))\n\tres = round(2000, 1000)\n\tassert.Equal(t, res, int64(3000))\n\tres = round(2001, 1000)\n\tassert.Equal(t, res, int64(3000))\n}", "func toFixed(num float64, precision int) float64 {\n\toutput := math.Pow(10, float64(precision))\n\treturn float64(round(num*output)) / output\n}", "func RoundSignificant(x float64, precision int) float64 {\n\tvar negative bool\n\tif x == 0 {\n\t\treturn x\n\t} else if x < 0 {\n\t\tx = -x\n\t\tnegative = true\n\t}\n\n\tvar rounded float64\n\tif pow := int(math.Floor(math.Log10(x))); pow < 0 && precision >= 0 {\n\t\trounded = math.Round(x*math.Pow10(precision-pow)) * math.Pow10(pow-precision)\n\t} else {\n\t\trounded = math.Round(x*math.Pow10(precision)) * math.Pow10(-precision)\n\t}\n\n\tif negative {\n\t\trounded *= -1\n\t}\n\n\treturn rounded\n}", "func compareDigits(num int, condition func(int, int) bool, successResult bool, lastCondition func() bool) bool {\n\tvar remain int\n\tprevRemain := num % 10\n\tfor num > 1 {\n\t\tnum /= 10\n\t\tremain = num % 10\n\t\tif condition(prevRemain, remain) {\n\t\t\treturn successResult\n\t\t}\n\t\tprevRemain = remain\n\t}\n\treturn lastCondition()\n}", "func round(value float64) float64 {\n\treturn math.Floor(value + .5)\n}", "func RoundFloat(num float64) int {\n\treturn int(num + math.Copysign(0.5, num))\n}", "func DigitsWithSize(size int) string {\n\treturn stringWithSize(size, digits)\n}", "func digits(n int) []int {\n\td := make([]int, 0, 18)\n\tfor n > 9 {\n\t\td = append(d, n%10)\n\t\tn /= 10\n\t}\n\treturn d\n}", "func FloatRound(f float64, n int) float64 {\r\n\tpow10_n := math.Pow10(n)\r\n\toffset := 0.5\r\n\tif f < 0 {\r\n\t\toffset = -offset\r\n\t}\r\n\treturn math.Trunc((f+offset/pow10_n)*pow10_n) / pow10_n\r\n}", "func originalDigits(s string) string {\n\n}", "func Rnd(r int64, trunc float64) int64 {\n\tif trunc > 0 {\n\t\tif trunc >= Round {\n\t\t\tr++\n\t\t}\n\t} else {\n\t\tif trunc 
< Roundn {\n\t\t\tr--\n\t\t}\n\t}\n\treturn r\n}", "func RoundFloat64(valor float64, dec int) float64 {\n\tdec = ReturnIf(dec <= 0, 1, dec).(int)\n\tformat := fmt.Sprintf(\".%df\", dec)\n\tformat = \"%\" + format\n\tvalstr := fmt.Sprintf(format, valor)\n\tval, err := StrToFloat64(valstr)\n\tif err != nil {\n\t\treturn 0.00\n\t}\n\treturn val\n\n}" ]
[ "0.63414496", "0.6030092", "0.6030092", "0.6030092", "0.60244167", "0.60103554", "0.58290493", "0.5823246", "0.58036333", "0.57871675", "0.57697266", "0.57455796", "0.56449634", "0.56388664", "0.55538213", "0.5522588", "0.5522047", "0.55094457", "0.55077565", "0.546716", "0.54583156", "0.5432235", "0.5431226", "0.54238707", "0.54236037", "0.541931", "0.5378886", "0.5285738", "0.5233789", "0.5218592", "0.5194653", "0.51730317", "0.51656413", "0.5163744", "0.51046103", "0.5093215", "0.506534", "0.5053174", "0.50505096", "0.50505096", "0.50505096", "0.50182533", "0.50170535", "0.5005112", "0.500495", "0.49893486", "0.49654672", "0.4956813", "0.49557588", "0.49529544", "0.49249", "0.49229297", "0.4891188", "0.4886714", "0.48866898", "0.4872971", "0.48502886", "0.48495665", "0.48222387", "0.48198378", "0.4797122", "0.47916123", "0.47906294", "0.4774842", "0.47660992", "0.47544616", "0.46885255", "0.46845365", "0.4669286", "0.46517068", "0.46470025", "0.4618678", "0.46132222", "0.46129066", "0.45973578", "0.45973578", "0.45932412", "0.45574528", "0.45515546", "0.45328555", "0.45296612", "0.45261002", "0.45076528", "0.4505723", "0.4489092", "0.4485851", "0.44846442", "0.44763774", "0.44741452", "0.44723836", "0.44690362", "0.44401908", "0.44367385", "0.4432222", "0.44316792", "0.44145763", "0.44025782", "0.43895692", "0.43832752", "0.43787333" ]
0.78922504
0
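A minimal, hand-written sketch (an editorial aside, not one of the dataset rows): the entry above pairs the RoundToDigits docstring with its implementation, so the short Go program below exercises that exact definition to show both the ordinary positive case and the caveat the docstring notes for negative inputs. The RoundToDigits body is copied from the document field above; the package, import, and main scaffolding are added here only to make the sketch runnable.

package main

import (
	"fmt"
	"math"
)

// RoundToDigits, as given in the entry above: scale, add 0.5, floor, rescale.
// This is round-half-up (toward +Inf), not round-half-away-from-zero, which is
// why the last digit of negative inputs can come out "incorrectly" rounded.
func RoundToDigits(v float64, digits int) float64 {
	p := math.Pow(10, float64(digits))
	return math.Floor(v*p+0.5) / p
}

func main() {
	fmt.Println(RoundToDigits(3.14159, 4)) // 3.1416, as expected for a positive input
	fmt.Println(RoundToDigits(-2.5, 0))    // -2, whereas math.Round(-2.5) gives -3: the negative-number caveat
}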
Round rounds to 4 digits after the decimal point.
func Round(v float64) float64 { return RoundToDigits(v, 4) }
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "func round(v float64) float64 {\n\treturn math.Floor(v + 0.5)\n}", "func Round(x float64) float64 {\n\treturn math.Floor(x + 0.5)\n}", "func Round(value float64, precision int) float64 {\n\tmultiplier := math.Pow10(precision)\n\tinterim := math.Floor(value*multiplier + 0.5)\n\treturn interim / multiplier\n}", "func Round(x float64) float64 {\n\n\treturn math.Floor(x + 0.5)\n}", "func round(value float64) float64 {\n\treturn math.Floor(value + .5)\n}", "func Round(f float64) float64 {\n\tif f > 0 {\n\t\treturn math.Floor(f + 0.5)\n\t}\n\treturn math.Ceil(f - 0.5)\n}", "func Round(input float64) float64 {\n\tif input < 0 {\n\t\treturn math.Ceil(input - 0.5)\n\t}\n\treturn math.Floor(input + 0.5)\n}", "func round(val float64) float64 {\n\treturn math.Round(val*100) / 100\n}", "func round(num float64) int {\n\treturn int(num + math.Copysign(0.5, num))\n}", "func round(num float64) int {\n\treturn int(num + math.Copysign(0.5, num))\n}", "func round(num float64) int {\n\treturn int(num + math.Copysign(0.5, num))\n}", "func (i *Number) Round(precision Number) *Number {\n\tfmtBuf := bytes.NewBuffer([]byte{})\n\tfmtBuf.WriteString(\"%.\")\n\tfmtBuf.WriteString(precision.AsString(*NewNumber(0)))\n\tfmtBuf.WriteString(\"f\")\n\treturn NewNumber(fmt.Sprintf(fmtBuf.String(), i.value))\n}", "func round(f float64) int {\n\tif f < 0 {\n\t\treturn int(math.Ceil(f - 0.5))\n\t}\n\treturn int(math.Floor(f + 0.5))\n}", "func round(n float64) float64 {\n\treturn math.Trunc(n)\n}", "func round(num float64, precision int) float64 {\n\toutput := math.Pow(10, float64(precision))\n\tproduct := num * output\n\tsigned := int(product + math.Copysign(0.5, product))\n\t\n\treturn float64(signed) / output\n}", "func roundTo(n float64, decimals uint32) float64 {\n\treturn math.Round(n*float64(decimals)) / float64(decimals)\n}", "func float64Round(v float64) float64 {\n\treturn math.Round(v*100) / 100\n}", "func round(val float64) int {\n\tif val < 0 {\n\t\treturn int(val - 0.5)\n\t}\n\treturn int(val + 0.5)\n}", "func Round(f float64, places int) float64 {\n\tshift := math.Pow(10, float64(places))\n\treturn math.Floor(f*shift+.5) / shift\n}", "func Round(val float64, place int) float64 {\n\tshift := math.Pow(10, float64(place))\n\treturn math.Floor(val*shift+.5) / shift\n}", "func round(x float64) float64 {\n\tt := math.Trunc(x)\n\tif math.Abs(x-t) >= 0.5 {\n\t\treturn t + math.Copysign(1, x)\n\t}\n\treturn t\n}", "func Round(arg float64) float64 {\n\treturn math.Round(arg)\n}", "func roundFloat(x float64, prec int) float64 {\n\tvar rounder float64\n\tpow := math.Pow(10, float64(prec))\n\tintermed := x * pow\n\t_, frac := math.Modf(intermed)\n\tx = .5\n\tif frac < 0.0 {\n\t\tx = -.5\n\t}\n\tif frac >= x {\n\t\trounder = math.Ceil(intermed)\n\t} else {\n\t\trounder = math.Floor(intermed)\n\t}\n\n\treturn rounder / pow\n}", "func round(f float64) int {\n\tif math.Abs(f) < 0.5 {\n\t\treturn 0\n\t}\n\treturn int(f + math.Copysign(0.5, f))\n}", "func Round(f float64) int {\n\tif math.Abs(f) < 0.5 {\n\t\treturn 0\n\t}\n\treturn int(f + math.Copysign(0.5, f))\n}", "func Round(num, precision float64) float64 {\n\tshift := math.Pow(10, precision)\n\treturn roundInt(num * shift)\n}", "func (d Decimal) Round(places int32) Decimal {\n\t// truncate to places + 1\n\tret := d.rescale(-places - 1)\n\n\t// add sign(d) * 0.5\n\tif ret.value.Sign() < 0 {\n\t\tret.value.Sub(ret.value, fiveInt)\n\t} else {\n\t\tret.value.Add(ret.value, fiveInt)\n\t}\n\n\t// floor for positive numbers, ceil for negative numbers\n\t_, m := 
ret.value.DivMod(ret.value, tenInt, new(big.Int))\n\tret.exp++\n\tif ret.value.Sign() < 0 && m.Cmp(zeroInt) != 0 {\n\t\tret.value.Add(ret.value, oneInt)\n\t}\n\n\treturn ret\n}", "func (fn *formulaFuncs) round(number, digits float64, mode roundMode) float64 {\n\tvar significance float64\n\tif digits > 0 {\n\t\tsignificance = math.Pow(1/10.0, digits)\n\t} else {\n\t\tsignificance = math.Pow(10.0, -digits)\n\t}\n\tval, res := math.Modf(number / significance)\n\tswitch mode {\n\tcase closest:\n\t\tconst eps = 0.499999999\n\t\tif res >= eps {\n\t\t\tval++\n\t\t} else if res <= -eps {\n\t\t\tval--\n\t\t}\n\tcase down:\n\tcase up:\n\t\tif res > 0 {\n\t\t\tval++\n\t\t} else if res < 0 {\n\t\t\tval--\n\t\t}\n\t}\n\treturn val * significance\n}", "func Round(f float64) float64 {\n\treturn math.Round(f*1000000000) / 1000000000\n}", "func RoundToDigits(v float64, digits int) float64 {\n\tp := math.Pow(10, float64(digits))\n\treturn math.Floor(v*p+0.5) / p\n}", "func Round(n float64, precision int) float64 {\n\tm := math.Pow10(precision)\n\treturn math.Round(n*m) / m\n}", "func round(r float32) int32 {\n\tif r >= 0 {\n\t\treturn int32(r + 0.5)\n\t}\n\n\treturn int32(r - 0.5)\n}", "func Round(x Decimal, unit Decimal, mode RoundingMode) Decimal {\n\tprec := unit.Exponent() * -1\n\n\tswitch mode {\n\tcase RoundDown:\n\t\trounded := x.RoundDown(prec)\n\t\treturn rounded.Sub(rounded.Mod(unit)).Truncate(prec)\n\tcase RoundUp:\n\t\trounded := x.RoundUp(prec)\n\t\treturn rounded.Add(rounded.Mod(unit)).Truncate(prec)\n\tcase RoundToNearest:\n\t\treturn x.RoundNearest(unit).Truncate(prec)\n\t}\n\treturn Decimal{}\n}", "func (d Decimal) Round(places int32) Decimal {\n\tif d.exp == -places {\n\t\treturn d\n\t}\n\t// truncate to places + 1\n\tret := d.rescale(-places - 1)\n\n\t// add sign(d) * 0.5\n\tif ret.value.Sign() < 0 {\n\t\tret.value.Sub(ret.value, fiveInt)\n\t} else {\n\t\tret.value.Add(ret.value, fiveInt)\n\t}\n\n\t// floor for positive numbers, ceil for negative numbers\n\t_, m := ret.value.DivMod(ret.value, tenInt, new(big.Int))\n\tret.exp++\n\tif ret.value.Sign() < 0 && m.Cmp(zeroInt) != 0 {\n\t\tret.value.Add(ret.value, oneInt)\n\t}\n\n\treturn ret\n}", "func Round(val float64, roundOn float64, places int) (newVal float64) {\n\tvar round float64\n\tpow := math.Pow(10, float64(places))\n\tdigit := pow * val\n\t_, div := math.Modf(digit)\n\tif div >= roundOn {\n\t\tround = math.Ceil(digit)\n\t} else {\n\t\tround = math.Floor(digit)\n\t}\n\tnewVal = round / pow\n\treturn\n}", "func Round(val float64, roundOn float64, places int) (newVal float64) {\n\tvar round float64\n\tpow := math.Pow(10, float64(places))\n\tdigit := pow * val\n\t_, div := math.Modf(digit)\n\tif div >= roundOn {\n\t\tround = math.Ceil(digit)\n\t} else {\n\t\tround = math.Floor(digit)\n\t}\n\tnewVal = round / pow\n\treturn\n}", "func Round(val float64, roundOn float64, places int) (newVal float64) {\n\tvar round float64\n\tpow := math.Pow(10, float64(places))\n\tdigit := pow * val\n\t_, div := math.Modf(digit)\n\tif div >= roundOn {\n\t\tround = math.Ceil(digit)\n\t} else {\n\t\tround = math.Floor(digit)\n\t}\n\tnewVal = round / pow\n\treturn\n}", "func (g *G) Round() int {\n\t/* Fill in this Function */\n\treturn 1\n}", "func (f Fixed) Round(n int) Fixed {\n\tif f.IsNaN() {\n\t\treturn NaN\n\t}\n\n\tfraction := f.fp % scale\n\tf0 := fraction / int64(math.Pow10(nPlaces-n-1))\n\tdigit := abs(f0 % 10)\n\tf0 = (f0 / 10)\n\tif digit >= 5 {\n\t\tf0 += 1 * sign(f.fp)\n\t}\n\tf0 = f0 * int64(math.Pow10(nPlaces-n))\n\n\tintpart := f.fp - 
fraction\n\tfp := intpart + f0\n\n\treturn Fixed{fp: fp}\n}", "func round(val float64, roundOn float64, places int ) (newVal float64) {\n\tvar round float64\n\tpow := math.Pow(10, float64(places))\n\tdigit := pow * val\n\t_, div := math.Modf(digit)\n\t_div := math.Copysign(div, val)\n\t_roundOn := math.Copysign(roundOn, val)\n\tif _div >= _roundOn {\n\t\tround = math.Ceil(digit)\n\t} else {\n\t\tround = math.Floor(digit)\n\t}\n\tnewVal = round / pow\n\treturn\n}", "func (a *decimal) Round(nd int) {\n\tif nd < 0 || nd >= a.nd {\n\t\treturn\n\t}\n\tif shouldRoundUp(a, nd) {\n\t\ta.RoundUp(nd)\n\t} else {\n\t\ta.RoundDown(nd)\n\t}\n}", "func ExampleRound() {\n\tds := []time.Duration{\n\t\ttime.Hour + time.Second + 123*time.Millisecond, // 1h0m1.123s\n\t\ttime.Hour + time.Second + time.Microsecond, // 1h0m1.000001s\n\t\t123456789 * time.Nanosecond, // 123.456789ms\n\t\t123456 * time.Nanosecond, // 123.456µs\n\t\t123 * time.Nanosecond, // 123ns\n\t}\n\n\tfmt.Println(\"Duration |0 digits |1 digit |2 digits |3 digits |\")\n\tfmt.Println(\"-------------------------------------------------------------------\")\n\tfor _, d := range ds {\n\t\tfmt.Printf(\"%-14v|\", d)\n\t\tfor digits := 0; digits <= 3; digits++ {\n\t\t\tfmt.Printf(\"%-12v|\", Round(d, digits))\n\t\t}\n\t\tfmt.Println()\n\t}\n\n\t// Output:\n\t// Duration |0 digits |1 digit |2 digits |3 digits |\n\t// -------------------------------------------------------------------\n\t// 1h0m1.123s |1h0m1s |1h0m1.1s |1h0m1.12s |1h0m1.123s |\n\t// 1h0m1.000001s |1h0m1s |1h0m1s |1h0m1s |1h0m1s |\n\t// 123.456789ms |123ms |123.5ms |123.46ms |123.457ms |\n\t// 123.456µs |123µs |123.5µs |123.46µs |123.456µs |\n\t// 123ns |123ns |123ns |123ns |123ns |\n}", "func ROUNDPD(i, mx, x operand.Op) { ctx.ROUNDPD(i, mx, x) }", "func Round(v float64) int {\n\tx := strconv.FormatFloat(v, 'f', 0, 64)\n\tr, err := strconv.Atoi(x)\n\tif err != nil {\n\t\t// Shouldn't happen.\n\t\tlog.Fatalf(\"failed to parse float %f with 0 decimal values as int: %err\", x, err)\n\t}\n\treturn r\n}", "func round(val float64) int32 {\n\tif val < 0 {\n\t\treturn int32(val - 0.5)\n\t}\n\treturn int32(val + 0.5)\n}", "func roundValue(initialValue float64, floor float64) float64 {\n\tb := []byte(strings.Trim(fmt.Sprintf(\"%f\", initialValue), \"0\"))\n\tvalue := initialValue\n\tfor i := len(b)-1; i >= 0; i-- {\n\t\tif b[i] != '.' 
{\n\t\t\tb[i] = '0'\n\t\t\tround, e := strconv.ParseFloat(string(b), 64)\n\t\t\tif e != nil || round <= floor {\n\t\t\t\treturn value\n\t\t\t}\n\t\t\tvalue = round\n\t\t}\n\t}\n\treturn value\n}", "func RoundFloat64(valor float64, dec int) float64 {\n\tdec = ReturnIf(dec <= 0, 1, dec).(int)\n\tformat := fmt.Sprintf(\".%df\", dec)\n\tformat = \"%\" + format\n\tvalstr := fmt.Sprintf(format, valor)\n\tval, err := StrToFloat64(valstr)\n\tif err != nil {\n\t\treturn 0.00\n\t}\n\treturn val\n\n}", "func (f Fixed) Round(n int) Fixed {\n\tif f.IsNaN() {\n\t\treturn NaN\n\t}\n\n\tround := .5\n\n\tf0 := f.Frac()\n\tf0 = f0*math.Pow10(n) + round\n\tf0 = float64(int(f0)) / math.Pow10(n)\n\n\treturn NewFromFloat(float64(f.UInt()) + f0)\n}", "func (h *hinter) round(x f26dot6) f26dot6 {\n\tif h.roundPeriod == 0 {\n\t\treturn x\n\t}\n\tneg := x < 0\n\tx -= h.roundPhase\n\tx += h.roundThreshold\n\tif x >= 0 {\n\t\tx = (x / h.roundPeriod) * h.roundPeriod\n\t} else {\n\t\tx -= h.roundPeriod\n\t\tx += 1\n\t\tx = (x / h.roundPeriod) * h.roundPeriod\n\t}\n\tx += h.roundPhase\n\tif neg {\n\t\tif x >= 0 {\n\t\t\tx = h.roundPhase - h.roundPeriod\n\t\t}\n\t} else if x < 0 {\n\t\tx = h.roundPhase\n\t}\n\treturn x\n}", "func RoundFixedDecimal(x float64, precision int) float64 {\n\trounded, _ := d.NewFromFloat(x).Round(int32(precision)).Float64()\n\treturn rounded\n}", "func ROUNDSD(i, mx, x operand.Op) { ctx.ROUNDSD(i, mx, x) }", "func (bc ByteCount) ConvertRound(unit ByteCount, precision int) float64 {\n\tp := math.Pow(10, float64(precision))\n\tv := math.Round(p*float64(bc)/float64(unit)) / p\n\treturn v\n}", "func Round64(x float64, n int) float64 {\n\tif math.IsNaN(x) {\n\t\treturn x\n\t}\n\tshift := math.Pow(10, float64(n))\n\treturn math.Floor(x*shift+0.5) / shift\n}", "func (d Duration) Round(m Duration) Duration {\n\tt := time.Duration(d * 1000).Round(time.Duration(m * 1000))\n\treturn Duration(t / 1000)\n}", "func (d *MyDecimal) Round(to *MyDecimal, frac int, roundMode RoundMode) (err error) {\n\t// wordsFracTo is the number of fraction words in buffer.\n\twordsFracTo := (frac + 1) / digitsPerWord\n\tif frac > 0 {\n\t\twordsFracTo = digitsToWords(frac)\n\t}\n\twordsFrac := digitsToWords(int(d.digitsFrac))\n\twordsInt := digitsToWords(int(d.digitsInt))\n\n\troundDigit := int32(roundMode)\n\t/* TODO - fix this code as it won't work for CEILING mode */\n\n\tif wordsInt+wordsFracTo > wordBufLen {\n\t\twordsFracTo = wordBufLen - wordsInt\n\t\tfrac = wordsFracTo * digitsPerWord\n\t\terr = ErrTruncated\n\t}\n\tif int(d.digitsInt)+frac < 0 {\n\t\t*to = zeroMyDecimal\n\t\treturn nil\n\t}\n\tif to != d {\n\t\tcopy(to.wordBuf[:], d.wordBuf[:])\n\t\tto.negative = d.negative\n\t\tto.digitsInt = int8(mathutil.Min(wordsInt, wordBufLen) * digitsPerWord)\n\t}\n\tif wordsFracTo > wordsFrac {\n\t\tidx := wordsInt + wordsFrac\n\t\tfor wordsFracTo > wordsFrac {\n\t\t\twordsFracTo--\n\t\t\tto.wordBuf[idx] = 0\n\t\t\tidx++\n\t\t}\n\t\tto.digitsFrac = int8(frac)\n\t\tto.resultFrac = to.digitsFrac\n\t\treturn\n\t}\n\tif frac >= int(d.digitsFrac) {\n\t\tto.digitsFrac = int8(frac)\n\t\tto.resultFrac = to.digitsFrac\n\t\treturn\n\t}\n\n\t// Do increment.\n\ttoIdx := wordsInt + wordsFracTo - 1\n\tif frac == wordsFracTo*digitsPerWord {\n\t\tdoInc := false\n\t\tswitch roundMode {\n\t\t// Notice: No support for ceiling mode now.\n\t\tcase ModeCeiling:\n\t\t\t// If any word after scale is not zero, do increment.\n\t\t\t// e.g ceiling 3.0001 to scale 1, gets 3.1\n\t\t\tidx := toIdx + (wordsFrac - wordsFracTo)\n\t\t\tfor idx > toIdx 
{\n\t\t\t\tif d.wordBuf[idx] != 0 {\n\t\t\t\t\tdoInc = true\n\t\t\t\t\tbreak\n\t\t\t\t}\n\t\t\t\tidx--\n\t\t\t}\n\t\tcase ModeHalfUp:\n\t\t\tdigAfterScale := d.wordBuf[toIdx+1] / digMask // the first digit after scale.\n\t\t\t// If first digit after scale is equal to or greater than 5, do increment.\n\t\t\tdoInc = digAfterScale >= 5\n\t\tcase ModeTruncate:\n\t\t\t// Never round, just truncate.\n\t\t\tdoInc = false\n\t\t}\n\t\tif doInc {\n\t\t\tif toIdx >= 0 {\n\t\t\t\tto.wordBuf[toIdx]++\n\t\t\t} else {\n\t\t\t\ttoIdx++\n\t\t\t\tto.wordBuf[toIdx] = wordBase\n\t\t\t}\n\t\t} else if wordsInt+wordsFracTo == 0 {\n\t\t\t*to = zeroMyDecimal\n\t\t\treturn nil\n\t\t}\n\t} else {\n\t\t/* TODO - fix this code as it won't work for CEILING mode */\n\t\tpos := wordsFracTo*digitsPerWord - frac - 1\n\t\tshiftedNumber := to.wordBuf[toIdx] / powers10[pos]\n\t\tdigAfterScale := shiftedNumber % 10\n\t\tif digAfterScale > roundDigit || (roundDigit == 5 && digAfterScale == 5) {\n\t\t\tshiftedNumber += 10\n\t\t}\n\t\tto.wordBuf[toIdx] = powers10[pos] * (shiftedNumber - digAfterScale)\n\t}\n\t/*\n\t In case we're rounding e.g. 1.5e9 to 2.0e9, the decimal words inside\n\t the buffer are as follows.\n\n\t Before <1, 5e8>\n\t After <2, 5e8>\n\n\t Hence we need to set the 2nd field to 0.\n\t The same holds if we round 1.5e-9 to 2e-9.\n\t*/\n\tif wordsFracTo < wordsFrac {\n\t\tidx := wordsInt + wordsFracTo\n\t\tif frac == 0 && wordsInt == 0 {\n\t\t\tidx = 1\n\t\t}\n\t\tfor idx < wordBufLen {\n\t\t\tto.wordBuf[idx] = 0\n\t\t\tidx++\n\t\t}\n\t}\n\n\t// Handle carry.\n\tvar carry int32\n\tif to.wordBuf[toIdx] >= wordBase {\n\t\tcarry = 1\n\t\tto.wordBuf[toIdx] -= wordBase\n\t\tfor carry == 1 && toIdx > 0 {\n\t\t\ttoIdx--\n\t\t\tto.wordBuf[toIdx], carry = add(to.wordBuf[toIdx], 0, carry)\n\t\t}\n\t\tif carry > 0 {\n\t\t\tif wordsInt+wordsFracTo >= wordBufLen {\n\t\t\t\twordsFracTo--\n\t\t\t\tfrac = wordsFracTo * digitsPerWord\n\t\t\t\terr = ErrTruncated\n\t\t\t}\n\t\t\tfor toIdx = wordsInt + mathutil.Max(wordsFracTo, 0); toIdx > 0; toIdx-- {\n\t\t\t\tif toIdx < wordBufLen {\n\t\t\t\t\tto.wordBuf[toIdx] = to.wordBuf[toIdx-1]\n\t\t\t\t} else {\n\t\t\t\t\terr = ErrOverflow\n\t\t\t\t}\n\t\t\t}\n\t\t\tto.wordBuf[toIdx] = 1\n\t\t\t/* We cannot have more than 9 * 9 = 81 digits. 
*/\n\t\t\tif int(to.digitsInt) < digitsPerWord*wordBufLen {\n\t\t\t\tto.digitsInt++\n\t\t\t} else {\n\t\t\t\terr = ErrOverflow\n\t\t\t}\n\t\t}\n\t} else {\n\t\tfor {\n\t\t\tif to.wordBuf[toIdx] != 0 {\n\t\t\t\tbreak\n\t\t\t}\n\t\t\tif toIdx == 0 {\n\t\t\t\t/* making 'zero' with the proper scale */\n\t\t\t\tidx := wordsFracTo + 1\n\t\t\t\tto.digitsInt = 1\n\t\t\t\tto.digitsFrac = int8(mathutil.Max(frac, 0))\n\t\t\t\tto.negative = false\n\t\t\t\tfor toIdx < idx {\n\t\t\t\t\tto.wordBuf[toIdx] = 0\n\t\t\t\t\ttoIdx++\n\t\t\t\t}\n\t\t\t\tto.resultFrac = to.digitsFrac\n\t\t\t\treturn nil\n\t\t\t}\n\t\t\ttoIdx--\n\t\t}\n\t}\n\t/* Here we check 999.9 -> 1000 case when we need to increase intDigCnt */\n\tfirstDig := mod9[to.digitsInt]\n\tif firstDig > 0 && to.wordBuf[toIdx] >= powers10[firstDig] {\n\t\tto.digitsInt++\n\t}\n\tif frac < 0 {\n\t\tfrac = 0\n\t}\n\tto.digitsFrac = int8(frac)\n\tto.resultFrac = to.digitsFrac\n\treturn\n}", "func RoundFloat(num float64) int {\n\treturn int(num + math.Copysign(0.5, num))\n}", "func RoundFloat(f float64) float64 {\n\tif math.Remainder(f, 1.0) < 0 {\n\t\treturn math.Ceil(f)\n\t}\n\treturn math.Floor(f)\n}", "func RoundP(x float64, p int) float64 {\n\tk := math.Pow10(p)\n\treturn math.Floor(x*k+0.5) / k\n}", "func roundNumber(number int) int {\n\trounded := ((number / 1000) * 1000) + 1000\n\treturn rounded\n}", "func (g I915Backend) Round(value int) int {\n\tconst i915Alignment = 16\n\t// Inspired by Chromium's base/bits.h:Align() function.\n\treturn (value + i915Alignment - 1) & ^(i915Alignment - 1)\n}", "func (g GenericBackend) Round(value int) int {\n\tconst genericAlignment = 16\n\t// Inspired by Chromium's base/bits.h:Align() function.\n\treturn (value + genericAlignment - 1) & ^(genericAlignment - 1)\n}", "func FloatRound(f float64, n int) float64 {\r\n\tpow10_n := math.Pow10(n)\r\n\toffset := 0.5\r\n\tif f < 0 {\r\n\t\toffset = -offset\r\n\t}\r\n\treturn math.Trunc((f+offset/pow10_n)*pow10_n) / pow10_n\r\n}", "func (d *MyDecimal) Round(to *MyDecimal, frac int, roundMode RoundMode) (err error) {\n\ttrace_util_0.Count(_mydecimal_00000, 195)\n\t// wordsFracTo is the number of fraction words in buffer.\n\twordsFracTo := (frac + 1) / digitsPerWord\n\tif frac > 0 {\n\t\ttrace_util_0.Count(_mydecimal_00000, 207)\n\t\twordsFracTo = digitsToWords(frac)\n\t}\n\ttrace_util_0.Count(_mydecimal_00000, 196)\n\twordsFrac := digitsToWords(int(d.digitsFrac))\n\twordsInt := digitsToWords(int(d.digitsInt))\n\n\troundDigit := int32(roundMode)\n\t/* TODO - fix this code as it won't work for CEILING mode */\n\n\tif wordsInt+wordsFracTo > wordBufLen {\n\t\ttrace_util_0.Count(_mydecimal_00000, 208)\n\t\twordsFracTo = wordBufLen - wordsInt\n\t\tfrac = wordsFracTo * digitsPerWord\n\t\terr = ErrTruncated\n\t}\n\ttrace_util_0.Count(_mydecimal_00000, 197)\n\tif int(d.digitsInt)+frac < 0 {\n\t\ttrace_util_0.Count(_mydecimal_00000, 209)\n\t\t*to = zeroMyDecimal\n\t\treturn nil\n\t}\n\ttrace_util_0.Count(_mydecimal_00000, 198)\n\tif to != d {\n\t\ttrace_util_0.Count(_mydecimal_00000, 210)\n\t\tcopy(to.wordBuf[:], d.wordBuf[:])\n\t\tto.negative = d.negative\n\t\tto.digitsInt = int8(myMin(wordsInt, wordBufLen) * digitsPerWord)\n\t}\n\ttrace_util_0.Count(_mydecimal_00000, 199)\n\tif wordsFracTo > wordsFrac {\n\t\ttrace_util_0.Count(_mydecimal_00000, 211)\n\t\tidx := wordsInt + wordsFrac\n\t\tfor wordsFracTo > wordsFrac {\n\t\t\ttrace_util_0.Count(_mydecimal_00000, 213)\n\t\t\twordsFracTo--\n\t\t\tto.wordBuf[idx] = 0\n\t\t\tidx++\n\t\t}\n\t\ttrace_util_0.Count(_mydecimal_00000, 
212)\n\t\tto.digitsFrac = int8(frac)\n\t\tto.resultFrac = to.digitsFrac\n\t\treturn\n\t}\n\ttrace_util_0.Count(_mydecimal_00000, 200)\n\tif frac >= int(d.digitsFrac) {\n\t\ttrace_util_0.Count(_mydecimal_00000, 214)\n\t\tto.digitsFrac = int8(frac)\n\t\tto.resultFrac = to.digitsFrac\n\t\treturn\n\t}\n\n\t// Do increment.\n\ttrace_util_0.Count(_mydecimal_00000, 201)\n\ttoIdx := wordsInt + wordsFracTo - 1\n\tif frac == wordsFracTo*digitsPerWord {\n\t\ttrace_util_0.Count(_mydecimal_00000, 215)\n\t\tdoInc := false\n\t\tswitch roundMode {\n\t\t// Notice: No support for ceiling mode now.\n\t\tcase modeCeiling:\n\t\t\ttrace_util_0.Count(_mydecimal_00000, 217)\n\t\t\t// If any word after scale is not zero, do increment.\n\t\t\t// e.g ceiling 3.0001 to scale 1, gets 3.1\n\t\t\tidx := toIdx + (wordsFrac - wordsFracTo)\n\t\t\tfor idx > toIdx {\n\t\t\t\ttrace_util_0.Count(_mydecimal_00000, 220)\n\t\t\t\tif d.wordBuf[idx] != 0 {\n\t\t\t\t\ttrace_util_0.Count(_mydecimal_00000, 222)\n\t\t\t\t\tdoInc = true\n\t\t\t\t\tbreak\n\t\t\t\t}\n\t\t\t\ttrace_util_0.Count(_mydecimal_00000, 221)\n\t\t\t\tidx--\n\t\t\t}\n\t\tcase ModeHalfEven:\n\t\t\ttrace_util_0.Count(_mydecimal_00000, 218)\n\t\t\tdigAfterScale := d.wordBuf[toIdx+1] / digMask // the first digit after scale.\n\t\t\t// If first digit after scale is 5 and round even, do increment if digit at scale is odd.\n\t\t\tdoInc = (digAfterScale > 5) || (digAfterScale == 5)\n\t\tcase ModeTruncate:\n\t\t\ttrace_util_0.Count(_mydecimal_00000, 219)\n\t\t\t// Never round, just truncate.\n\t\t\tdoInc = false\n\t\t}\n\t\ttrace_util_0.Count(_mydecimal_00000, 216)\n\t\tif doInc {\n\t\t\ttrace_util_0.Count(_mydecimal_00000, 223)\n\t\t\tif toIdx >= 0 {\n\t\t\t\ttrace_util_0.Count(_mydecimal_00000, 224)\n\t\t\t\tto.wordBuf[toIdx]++\n\t\t\t} else {\n\t\t\t\ttrace_util_0.Count(_mydecimal_00000, 225)\n\t\t\t\t{\n\t\t\t\t\ttoIdx++\n\t\t\t\t\tto.wordBuf[toIdx] = wordBase\n\t\t\t\t}\n\t\t\t}\n\t\t} else {\n\t\t\ttrace_util_0.Count(_mydecimal_00000, 226)\n\t\t\tif wordsInt+wordsFracTo == 0 {\n\t\t\t\ttrace_util_0.Count(_mydecimal_00000, 227)\n\t\t\t\t*to = zeroMyDecimal\n\t\t\t\treturn nil\n\t\t\t}\n\t\t}\n\t} else {\n\t\ttrace_util_0.Count(_mydecimal_00000, 228)\n\t\t{\n\t\t\t/* TODO - fix this code as it won't work for CEILING mode */\n\t\t\tpos := wordsFracTo*digitsPerWord - frac - 1\n\t\t\tshiftedNumber := to.wordBuf[toIdx] / powers10[pos]\n\t\t\tdigAfterScale := shiftedNumber % 10\n\t\t\tif digAfterScale > roundDigit || (roundDigit == 5 && digAfterScale == 5) {\n\t\t\t\ttrace_util_0.Count(_mydecimal_00000, 230)\n\t\t\t\tshiftedNumber += 10\n\t\t\t}\n\t\t\ttrace_util_0.Count(_mydecimal_00000, 229)\n\t\t\tto.wordBuf[toIdx] = powers10[pos] * (shiftedNumber - digAfterScale)\n\t\t}\n\t}\n\t/*\n\t In case we're rounding e.g. 
1.5e9 to 2.0e9, the decimal words inside\n\t the buffer are as follows.\n\n\t Before <1, 5e8>\n\t After <2, 5e8>\n\n\t Hence we need to set the 2nd field to 0.\n\t The same holds if we round 1.5e-9 to 2e-9.\n\t*/\n\ttrace_util_0.Count(_mydecimal_00000, 202)\n\tif wordsFracTo < wordsFrac {\n\t\ttrace_util_0.Count(_mydecimal_00000, 231)\n\t\tidx := wordsInt + wordsFracTo\n\t\tif frac == 0 && wordsInt == 0 {\n\t\t\ttrace_util_0.Count(_mydecimal_00000, 233)\n\t\t\tidx = 1\n\t\t}\n\t\ttrace_util_0.Count(_mydecimal_00000, 232)\n\t\tfor idx < wordBufLen {\n\t\t\ttrace_util_0.Count(_mydecimal_00000, 234)\n\t\t\tto.wordBuf[idx] = 0\n\t\t\tidx++\n\t\t}\n\t}\n\n\t// Handle carry.\n\ttrace_util_0.Count(_mydecimal_00000, 203)\n\tvar carry int32\n\tif to.wordBuf[toIdx] >= wordBase {\n\t\ttrace_util_0.Count(_mydecimal_00000, 235)\n\t\tcarry = 1\n\t\tto.wordBuf[toIdx] -= wordBase\n\t\tfor carry == 1 && toIdx > 0 {\n\t\t\ttrace_util_0.Count(_mydecimal_00000, 237)\n\t\t\ttoIdx--\n\t\t\tto.wordBuf[toIdx], carry = add(to.wordBuf[toIdx], 0, carry)\n\t\t}\n\t\ttrace_util_0.Count(_mydecimal_00000, 236)\n\t\tif carry > 0 {\n\t\t\ttrace_util_0.Count(_mydecimal_00000, 238)\n\t\t\tif wordsInt+wordsFracTo >= wordBufLen {\n\t\t\t\ttrace_util_0.Count(_mydecimal_00000, 241)\n\t\t\t\twordsFracTo--\n\t\t\t\tfrac = wordsFracTo * digitsPerWord\n\t\t\t\terr = ErrTruncated\n\t\t\t}\n\t\t\ttrace_util_0.Count(_mydecimal_00000, 239)\n\t\t\tfor toIdx = wordsInt + myMax(wordsFracTo, 0); toIdx > 0; toIdx-- {\n\t\t\t\ttrace_util_0.Count(_mydecimal_00000, 242)\n\t\t\t\tif toIdx < wordBufLen {\n\t\t\t\t\ttrace_util_0.Count(_mydecimal_00000, 243)\n\t\t\t\t\tto.wordBuf[toIdx] = to.wordBuf[toIdx-1]\n\t\t\t\t} else {\n\t\t\t\t\ttrace_util_0.Count(_mydecimal_00000, 244)\n\t\t\t\t\t{\n\t\t\t\t\t\terr = ErrOverflow\n\t\t\t\t\t}\n\t\t\t\t}\n\t\t\t}\n\t\t\ttrace_util_0.Count(_mydecimal_00000, 240)\n\t\t\tto.wordBuf[toIdx] = 1\n\t\t\t/* We cannot have more than 9 * 9 = 81 digits. 
*/\n\t\t\tif int(to.digitsInt) < digitsPerWord*wordBufLen {\n\t\t\t\ttrace_util_0.Count(_mydecimal_00000, 245)\n\t\t\t\tto.digitsInt++\n\t\t\t} else {\n\t\t\t\ttrace_util_0.Count(_mydecimal_00000, 246)\n\t\t\t\t{\n\t\t\t\t\terr = ErrOverflow\n\t\t\t\t}\n\t\t\t}\n\t\t}\n\t} else {\n\t\ttrace_util_0.Count(_mydecimal_00000, 247)\n\t\t{\n\t\t\tfor {\n\t\t\t\ttrace_util_0.Count(_mydecimal_00000, 248)\n\t\t\t\tif to.wordBuf[toIdx] != 0 {\n\t\t\t\t\ttrace_util_0.Count(_mydecimal_00000, 251)\n\t\t\t\t\tbreak\n\t\t\t\t}\n\t\t\t\ttrace_util_0.Count(_mydecimal_00000, 249)\n\t\t\t\tif toIdx == 0 {\n\t\t\t\t\ttrace_util_0.Count(_mydecimal_00000, 252)\n\t\t\t\t\t/* making 'zero' with the proper scale */\n\t\t\t\t\tidx := wordsFracTo + 1\n\t\t\t\t\tto.digitsInt = 1\n\t\t\t\t\tto.digitsFrac = int8(myMax(frac, 0))\n\t\t\t\t\tto.negative = false\n\t\t\t\t\tfor toIdx < idx {\n\t\t\t\t\t\ttrace_util_0.Count(_mydecimal_00000, 254)\n\t\t\t\t\t\tto.wordBuf[toIdx] = 0\n\t\t\t\t\t\ttoIdx++\n\t\t\t\t\t}\n\t\t\t\t\ttrace_util_0.Count(_mydecimal_00000, 253)\n\t\t\t\t\tto.resultFrac = to.digitsFrac\n\t\t\t\t\treturn nil\n\t\t\t\t}\n\t\t\t\ttrace_util_0.Count(_mydecimal_00000, 250)\n\t\t\t\ttoIdx--\n\t\t\t}\n\t\t}\n\t}\n\t/* Here we check 999.9 -> 1000 case when we need to increase intDigCnt */\n\ttrace_util_0.Count(_mydecimal_00000, 204)\n\tfirstDig := mod9[to.digitsInt]\n\tif firstDig > 0 && to.wordBuf[toIdx] >= powers10[firstDig] {\n\t\ttrace_util_0.Count(_mydecimal_00000, 255)\n\t\tto.digitsInt++\n\t}\n\ttrace_util_0.Count(_mydecimal_00000, 205)\n\tif frac < 0 {\n\t\ttrace_util_0.Count(_mydecimal_00000, 256)\n\t\tfrac = 0\n\t}\n\ttrace_util_0.Count(_mydecimal_00000, 206)\n\tto.digitsFrac = int8(frac)\n\tto.resultFrac = to.digitsFrac\n\treturn\n}", "func (self *State)Round(a any)any{\n self.IncOperations(self.coeff[\"round\"]+self.off[\"round\"])\n return wrap1(a,math.Round)\n}", "func ROUNDPS(i, mx, x operand.Op) { ctx.ROUNDPS(i, mx, x) }", "func round(input float64, duration float64) float64 {\n\treturn math.Sin(math.Pi * input / duration)\n}", "func (i ID) Round() uint64 {\n\treturn i.Raw() >> (64 - BitwidthRound)\n}", "func (s Size) Round() Size {\n\tfor _, unit := range allUnits {\n\t\tif s >= unit {\n\t\t\treturn Size(math.Round(float64(s)/float64(unit))) * unit\n\t\t}\n\t}\n\treturn s\n}", "func RoundFloat64(number float64) int {\n\t// Truncate the float and use this to get the decimal component\n\ttruncNum := math.Trunc(number)\n\tdecimalComp := number - truncNum\n\n\tvar result float64\n\tif decimalComp >= 0.5 {\n\t\t// Use Ceil\n\t\tresult = math.Ceil(number)\n\t} else {\n\t\t// Use Floor\n\t\tresult = math.Floor(number)\n\t}\n\n\treturn int(result)\n}", "func (vrfs *VRFShare) GetRoundNumber() int64 {\n\treturn vrfs.Round\n}", "func RoundNumber(number float64, decimals int) float64 {\n\tprecision := 1.0\n\tfor i := 0; i < decimals; i++ {\n\t\tprecision = precision * 10\n\t}\n\troundedNum := math.Round(number*precision) / precision\n\n\treturn roundedNum\n}", "func (t *timeDataType) Round(d time.Duration) *timeDataType {\n\treturn t.Formatter(func(t time.Time) time.Time {\n\t\treturn t.Round(d)\n\t})\n}", "func (x Bits) round(prec uint, mode RoundingMode) *Float {\n\tx = x.norm()\n\n\t// determine range\n\tvar min, max int\n\tfor i, b := range x {\n\t\tif i == 0 || b < min {\n\t\t\tmin = b\n\t\t}\n\t\tif i == 0 || b > max {\n\t\t\tmax = b\n\t\t}\n\t}\n\tprec0 := uint(max + 1 - min)\n\tif prec >= prec0 {\n\t\treturn x.Float()\n\t}\n\t// prec < prec0\n\n\t// determine bit 0, rounding, and sticky bit, and 
result bits z\n\tvar bit0, rbit, sbit uint\n\tvar z Bits\n\tr := max - int(prec)\n\tfor _, b := range x {\n\t\tswitch {\n\t\tcase b == r:\n\t\t\trbit = 1\n\t\tcase b < r:\n\t\t\tsbit = 1\n\t\tdefault:\n\t\t\t// b > r\n\t\t\tif b == r+1 {\n\t\t\t\tbit0 = 1\n\t\t\t}\n\t\t\tz = append(z, b)\n\t\t}\n\t}\n\n\t// round\n\tf := z.Float() // rounded to zero\n\tif mode == ToNearestAway {\n\t\tpanic(\"not yet implemented\")\n\t}\n\tif mode == ToNearestEven && rbit == 1 && (sbit == 1 || sbit == 0 && bit0 != 0) || mode == AwayFromZero {\n\t\t// round away from zero\n\t\tf.SetMode(ToZero).SetPrec(prec)\n\t\tf.Add(f, Bits{int(r) + 1}.Float())\n\t}\n\treturn f\n}", "func (d Decimal) DivRound(d2 Decimal, precision int32) Decimal {\n\t// QuoRem already checks initialization\n\tq, r := d.QuoRem(d2, precision)\n\t// the actual rounding decision is based on comparing r*10^precision and d2/2\n\t// instead compare 2 r 10 ^precision and d2\n\tvar rv2 big.Int\n\trv2.Abs(r.value)\n\trv2.Lsh(&rv2, 1)\n\t// now rv2 = abs(r.value) * 2\n\tr2 := Decimal{value: &rv2, exp: r.exp + precision}\n\t// r2 is now 2 * r * 10 ^ precision\n\tvar c = r2.Cmp(d2.Abs())\n\n\tif c < 0 {\n\t\treturn q\n\t}\n\n\tif d.value.Sign()*d2.value.Sign() < 0 {\n\t\treturn q.Sub(New(1, -precision))\n\t}\n\n\treturn q.Add(New(1, -precision))\n}", "func Root4(in *big.Float) *big.Float {\n\treturn Root(in, 4)\n}", "func (d Decimal) DivRound(d2 Decimal, precision int32) Decimal {\n\t// QuoRem already checks initialization\n\tq, r := d.QuoRem(d2, precision)\n\t// the actual rounding decision is based on comparing r*10^precision and d2/2\n\t// instead compare 2 r 10 ^precision and d2\n\tvar rv2 big.Int\n\trv2.Abs(r.value)\n\trv2.Lsh(&rv2, 1)\n\t// now rv2 = abs(r.value) * 2\n\tr2 := Decimal{value: &rv2, exp: r.exp + precision}\n\t// r2 is now 2 * r * 10 ^ precision\n\tvar c = r2.Cmp(d2.Abs())\n\n\tif c < 0 {\n\t\treturn q\n\t}\n\n\tif d.value.Sign()*d2.value.Sign() < 0 {\n\t\treturn q.Sub(NewDec(1, -precision))\n\t}\n\n\treturn q.Add(NewDec(1, -precision))\n}", "func roundTheTimestamp(timestamp, precision int64) int64 {\n\tfloor := math.Floor(float64(timestamp) / float64(precision))\n\treturn int64(floor) * precision\n}", "func (a *decimal) RoundDown(nd int) {\n\tif nd < 0 || nd >= a.nd {\n\t\treturn\n\t}\n\ta.nd = nd\n\ttrim(a)\n}", "func (x Vector64) Round(n int) Vector64 {\n\tfor i := 0; i < len(x); i++ {\n\t\tx[i] = Round64(x[i], n)\n\t}\n\treturn x\n}", "func (commit *Commit) Round() int {\n\tif len(commit.Precommits) == 0 {\n\t\treturn 0\n\t}\n\treturn commit.FirstPrecommit().Vote.Round\n}", "func (m mathUtil) RoundDown(value, roundTo float64) float64 {\n\td1 := math.Floor(value / roundTo)\n\treturn d1 * roundTo\n}", "func (fn *formulaFuncs) ROUND(argsList *list.List) formulaArg {\n\tif argsList.Len() != 2 {\n\t\treturn newErrorFormulaArg(formulaErrorVALUE, \"ROUND requires 2 numeric arguments\")\n\t}\n\tnumber := argsList.Front().Value.(formulaArg).ToNumber()\n\tif number.Type == ArgError {\n\t\treturn number\n\t}\n\tdigits := argsList.Back().Value.(formulaArg).ToNumber()\n\tif digits.Type == ArgError {\n\t\treturn digits\n\t}\n\treturn newNumberFormulaArg(fn.round(number.Number, digits.Number, closest))\n}", "func (m mathUtil) RoundUp(value, roundTo float64) float64 {\n\td1 := math.Ceil(value / roundTo)\n\treturn d1 * roundTo\n}", "func (c *Context) ROUNDPD(i, mx, x operand.Op) {\n\tc.addinstruction(x86.ROUNDPD(i, mx, x))\n}", "func (a *decimal) RoundUp(nd int) {\n\tif nd < 0 || nd >= a.nd {\n\t\treturn\n\t}\n\n\t// round up\n\tfor i := nd - 1; 
i >= 0; i-- {\n\t\tc := a.d[i]\n\t\tif c < '9' { // can stop after this digit\n\t\t\ta.d[i]++\n\t\t\ta.nd = i + 1\n\t\t\treturn\n\t\t}\n\t}\n\n\t// Number is all 9s.\n\t// Change to single 1 with adjusted decimal point.\n\ta.d[0] = '1'\n\ta.nd = 1\n\ta.dp++\n}", "func Round(scope *Scope, x tf.Output) (y tf.Output) {\n\tif scope.Err() != nil {\n\t\treturn\n\t}\n\topspec := tf.OpSpec{\n\t\tType: \"Round\",\n\t\tInput: []tf.Input{\n\t\t\tx,\n\t\t},\n\t}\n\top := scope.AddOperation(opspec)\n\treturn op.Output(0)\n}", "func (h fractionalHex) Round() hex {\n\n\troundToInt := func(a float64) int {\n\t\tif a < 0 {\n\t\t\treturn int(a - 0.5)\n\t\t}\n\t\treturn int(a + 0.5)\n\t}\n\n\tq := roundToInt(h.q)\n\tr := roundToInt(h.r)\n\ts := roundToInt(h.s)\n\n\tq_diff := math.Abs(float64(q) - h.q)\n\tr_diff := math.Abs(float64(r) - h.r)\n\ts_diff := math.Abs(float64(s) - h.s)\n\n\tif q_diff > r_diff && q_diff > s_diff {\n\t\tq = -r - s\n\t} else if r_diff > s_diff {\n\t\tr = -q - s\n\t} else {\n\t\ts = -q - r\n\t}\n\treturn hex{q, r, s}\n\n}", "func RoundDown(d decimal.Decimal, precision int32) decimal.Decimal {\n\n\treturn d.Truncate(precision)\n}", "func (commit *Commit) GetRound() int32 {\n\treturn commit.Round\n}", "func Iround(v float64) int {\n\tif v >= 0 {\n\t\treturn int(v + 0.5)\n\t}\n\treturn int(v - 0.5)\n}", "func RoundFloat(x float32) float32 {\r\n\treturn float32(math.Round(float64(x)))\r\n}", "func (d Decimal) RoundFloor(places int32) Decimal {\n\tif d.exp >= -places {\n\t\treturn d\n\t}\n\n\trescaled := d.rescale(-places)\n\tif d.Equal(rescaled) {\n\t\treturn d\n\t}\n\n\tif d.value.Sign() < 0 {\n\t\trescaled.value.Sub(rescaled.value, oneInt)\n\t}\n\n\treturn rescaled\n}", "func TestFloat64(t *testing.T) {\n\tvalor := 12.34661\n\tt.Logf(\"valor:[%f]\", valor)\n\tvalor = utl.RoundFloat64(valor, 2)\n\tt.Logf(\"valor:[%f]\", valor)\n}", "func Pow4(in *big.Float) *big.Float {\n\treturn Pow(in, 4)\n}", "func ROUNDSS(i, mx, x operand.Op) { ctx.ROUNDSS(i, mx, x) }", "func (mm *BytesMonitor) roundSize(sz int64) int64 {\n\tconst maxRoundSize = 4 << 20 // 4 MB\n\tif sz >= maxRoundSize {\n\t\t// Don't round the size up if the allocation is large. This also avoids\n\t\t// edge cases in the math below if sz == math.MaxInt64.\n\t\treturn sz\n\t}\n\tchunks := (sz + mm.poolAllocationSize - 1) / mm.poolAllocationSize\n\treturn chunks * mm.poolAllocationSize\n}", "func roundDuration(d time.Duration) time.Duration {\n\trd := time.Duration(d.Minutes()) * time.Minute\n\tif rd < d {\n\t\trd += time.Minute\n\t}\n\treturn rd\n}", "func (d Decimal) RoundCeil(places int32) Decimal {\n\tif d.exp >= -places {\n\t\treturn d\n\t}\n\n\trescaled := d.rescale(-places)\n\tif d.Equal(rescaled) {\n\t\treturn d\n\t}\n\n\tif d.value.Sign() > 0 {\n\t\trescaled.value.Add(rescaled.value, oneInt)\n\t}\n\n\treturn rescaled\n}", "func roundRandom(f float64, rng *rand.Rand) int {\n\tr := f - math.Floor(f)\n\tp := rng.Float64()\n\tif p > r {\n\t\treturn int(f)\n\t}\n\treturn int(f) + 1\n}", "func truncate(num float64) float64 {\n\tvar truncated = math.Round(num*100) / 100\n\treturn truncated\n}" ]
[ "0.69121134", "0.67635113", "0.67551464", "0.67481613", "0.6728861", "0.66506904", "0.64836895", "0.6469782", "0.64390737", "0.64390737", "0.64390737", "0.6347989", "0.63399285", "0.6334366", "0.63302547", "0.63215435", "0.6301803", "0.6291642", "0.6265966", "0.6265745", "0.6244176", "0.62431294", "0.615218", "0.6133822", "0.60772234", "0.6023781", "0.60136366", "0.59982777", "0.5997826", "0.5993685", "0.5962617", "0.5937107", "0.5936066", "0.58615166", "0.5834773", "0.5834773", "0.5834773", "0.577966", "0.57754433", "0.57730234", "0.57673734", "0.57467026", "0.57051456", "0.56809545", "0.5654619", "0.56498444", "0.5636154", "0.56335026", "0.55923444", "0.55194294", "0.54637194", "0.54299855", "0.5428534", "0.5427941", "0.54145575", "0.53842765", "0.53635", "0.52984446", "0.52885747", "0.52711046", "0.5270728", "0.5259221", "0.5245844", "0.5242691", "0.5175556", "0.51753926", "0.51729083", "0.5166301", "0.5151412", "0.5133493", "0.5120978", "0.5108568", "0.50983196", "0.5076042", "0.5074418", "0.50676316", "0.50648606", "0.50492185", "0.50483453", "0.50071585", "0.500672", "0.49986792", "0.49962643", "0.49950716", "0.49636945", "0.49305576", "0.49085346", "0.48937237", "0.48924348", "0.48778263", "0.4872846", "0.48593223", "0.48515397", "0.4851181", "0.48314676", "0.4822408", "0.48219073", "0.48151258", "0.47861964", "0.47809577" ]
0.7828499
0
Does the necessary housekeeping to fork a document.
func (d *Doc) PreCopy() { d.Ancestors = append(d.Ancestors, d.DocId) d.DocId = bson.NewObjectId() }
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "func (ctx *PlikContext) Fork(name string) (fork *PlikContext) {\n\tfork = new(PlikContext)\n\tfork.Context = ctx.Context.Fork(name)\n\tfork.Logger = ctx.Logger.Copy()\n\treturn fork\n}", "func ForkParent(loggedInUser, dbOwner, dbFolder, dbName string) (parentOwner, parentFolder, parentDBName string, err error) {\n\tdbQuery := `\n\t\tSELECT users.user_name, db.folder, db.db_name, db.public, db.db_id, db.forked_from, db.is_deleted\n\t\tFROM sqlite_databases AS db, users\n\t\tWHERE db.root_database = (\n\t\t\t\tSELECT root_database\n\t\t\t\tFROM sqlite_databases\n\t\t\t\tWHERE user_id = (\n\t\t\t\t\t\tSELECT user_id\n\t\t\t\t\t\tFROM users\n\t\t\t\t\t\tWHERE lower(user_name) = lower($1)\n\t\t\t\t\t)\n\t\t\t\t\tAND folder = $2\n\t\t\t\t\tAND db_name = $3\n\t\t\t\t)\n\t\t\tAND db.user_id = users.user_id\n\t\tORDER BY db.forked_from NULLS FIRST`\n\trows, err := pdb.Query(dbQuery, dbOwner, dbFolder, dbName)\n\tif err != nil {\n\t\tlog.Printf(\"Database query failed: %v\\n\", err)\n\t\treturn\n\t}\n\tdefer rows.Close()\n\tdbList := make(map[int]ForkEntry)\n\tfor rows.Next() {\n\t\tvar frk pgx.NullInt64\n\t\tvar oneRow ForkEntry\n\t\terr = rows.Scan(&oneRow.Owner, &oneRow.Folder, &oneRow.DBName, &oneRow.Public, &oneRow.ID, &frk, &oneRow.Deleted)\n\t\tif err != nil {\n\t\t\tlog.Printf(\"Error retrieving fork parent for '%s%s%s': %v\\n\", dbOwner, dbFolder, dbName,\n\t\t\t\terr)\n\t\t\treturn\n\t\t}\n\t\tif frk.Valid {\n\t\t\toneRow.ForkedFrom = int(frk.Int64)\n\t\t}\n\t\tdbList[oneRow.ID] = oneRow\n\t}\n\n\t// Safety check\n\tnumResults := len(dbList)\n\tif numResults == 0 {\n\t\terr = fmt.Errorf(\"Empty list returned instead of fork tree. This shouldn't happen\")\n\t\treturn\n\t}\n\n\t// Get the ID of the database being called\n\tdbID, err := databaseID(dbOwner, dbFolder, dbName)\n\tif err != nil {\n\t\treturn\n\t}\n\n\t// Find the closest (not-deleted) parent for the database\n\tdbEntry, ok := dbList[dbID]\n\tif !ok {\n\t\t// The database itself wasn't found in the list. This shouldn't happen\n\t\terr = fmt.Errorf(\"Internal error when retrieving fork parent info. This shouldn't happen.\")\n\t\treturn\n\t}\n\tfor dbEntry.ForkedFrom != 0 {\n\t\tdbEntry, ok = dbList[dbEntry.ForkedFrom]\n\t\tif !ok {\n\t\t\t// Parent database entry wasn't found in the list. This shouldn't happen either\n\t\t\terr = fmt.Errorf(\"Internal error when retrieving fork parent info (#2). This shouldn't happen.\")\n\t\t\treturn\n\t\t}\n\t\tif !dbEntry.Deleted {\n\t\t\t// Found a parent (that's not deleted). 
We'll use this and stop looping\n\t\t\tparentOwner = dbEntry.Owner\n\t\t\tparentFolder = dbEntry.Folder\n\t\t\tparentDBName = dbEntry.DBName\n\t\t\tbreak\n\t\t}\n\t}\n\treturn\n}", "func ForkDatabase(srcOwner, dbFolder, dbName, dstOwner string) (newForkCount int, err error) {\n\t// Copy the main database entry\n\tdbQuery := `\n\t\tWITH dst_u AS (\n\t\t\tSELECT user_id\n\t\t\tFROM users\n\t\t\tWHERE lower(user_name) = lower($1)\n\t\t)\n\t\tINSERT INTO sqlite_databases (user_id, folder, db_name, public, forks, one_line_description, full_description,\n\t\t\tbranches, contributors, root_database, default_table, source_url, commit_list, branch_heads, tags,\n\t\t\tdefault_branch, forked_from)\n\t\tSELECT dst_u.user_id, folder, db_name, public, 0, one_line_description, full_description, branches,\n\t\t\tcontributors, root_database, default_table, source_url, commit_list, branch_heads, tags, default_branch,\n\t\t\tdb_id\n\t\tFROM sqlite_databases, dst_u\n\t\tWHERE sqlite_databases.user_id = (\n\t\t\t\tSELECT user_id\n\t\t\t\tFROM users\n\t\t\t\tWHERE lower(user_name) = lower($2)\n\t\t\t)\n\t\t\tAND folder = $3\n\t\t\tAND db_name = $4`\n\tcommandTag, err := pdb.Exec(dbQuery, dstOwner, srcOwner, dbFolder, dbName)\n\tif err != nil {\n\t\tlog.Printf(\"Forking database '%s%s%s' in PostgreSQL failed: %v\\n\", srcOwner, dbFolder, dbName, err)\n\t\treturn 0, err\n\t}\n\tif numRows := commandTag.RowsAffected(); numRows != 1 {\n\t\tlog.Printf(\"Wrong number of rows affected (%d) when forking main database entry: \"+\n\t\t\t\"'%s%s%s' to '%s%s%s'\\n\", numRows, srcOwner, dbFolder, dbName, dstOwner, dbFolder, dbName)\n\t}\n\n\t// Update the fork count for the root database\n\tdbQuery = `\n\t\tWITH root_db AS (\n\t\t\tSELECT root_database AS id\n\t\t\tFROM sqlite_databases\n\t\t\tWHERE user_id = (\n\t\t\t\t\tSELECT user_id\n\t\t\t\t\tFROM users\n\t\t\t\t\tWHERE lower(user_name) = lower($1)\n\t\t\t\t)\n\t\t\t\tAND folder = $2\n\t\t\t\tAND db_name = $3\n\t\t), new_count AS (\n\t\t\tSELECT count(*) AS forks\n\t\t\tFROM sqlite_databases AS db, root_db\n\t\t\tWHERE db.root_database = root_db.id\n\t\t\tAND db.is_deleted = false\n\t\t)\n\t\tUPDATE sqlite_databases\n\t\tSET forks = new_count.forks - 1\n\t\tFROM new_count, root_db\n\t\tWHERE sqlite_databases.db_id = root_db.id\n\t\tRETURNING new_count.forks - 1`\n\terr = pdb.QueryRow(dbQuery, dstOwner, dbFolder, dbName).Scan(&newForkCount)\n\tif err != nil {\n\t\tlog.Printf(\"Updating fork count in PostgreSQL failed: %v\\n\", err)\n\t\treturn 0, err\n\t}\n\treturn newForkCount, nil\n}", "func Fork(ctr *container.Container, args []string, detach bool) error {\n\tctr.SetHostname()\n\t// set network\n\tunset, err := ctr.SetNetworkNamespace()\n\tif err != nil {\n\t\treturn errors.Wrap(err, \"can't set network namespace\")\n\t}\n\tdefer unset()\n\n\tif err := ctr.LoadCGroups(); err != nil {\n\t\treturn errors.Wrap(err, \"can't initialize cgroups\")\n\t}\n\tif err := changeRoot(ctr.RootFS, ctr.Config.WorkingDir); err != nil {\n\t\treturn err\n\t}\n\n\t// Mount necessaries\n\tmountPoints := []filesystem.MountOption{\n\t\t{Source: \"proc\", Target: \"proc\", Type: \"proc\"},\n\t\t{Source: \"sysfs\", Target: \"sys\", Type: \"sysfs\"},\n\t}\n\tunmount, err := filesystem.Mount(mountPoints...)\n\tif err != nil {\n\t\treturn err\n\t}\n\tdefer unmount()\n\n\tcommand, argv := cmdAndArgs(ctr.Config.Cmd)\n\tif len(args) > 0 {\n\t\tcommand, argv = cmdAndArgs(args)\n\t}\n\tnewCmd := exec.Command(command, argv...)\n\tnewCmd.Stdin = os.Stdin\n\tnewCmd.Stdout = os.Stdout\n\tnewCmd.Stderr = 
os.Stderr\n\tnewCmd.Env = ctr.Config.Env\n\treturn runCommand(newCmd, detach)\n}", "func (n nexter) Fork(id string, x Fork, options ...*Option) (left, right Builder) {\n\topt := &Option{\n\t\tBufferSize: intP(0),\n\t}\n\n\tif len(options) > 0 {\n\t\topt = opt.merge(options...)\n\t}\n\n\tnext := &node{}\n\n\tleftEdge := newEdge(opt.BufferSize)\n\trightEdge := newEdge(opt.BufferSize)\n\n\tnext.vertex = vertex{\n\t\tid: id,\n\t\tvertexType: \"fork\",\n\t\tmetrics: createMetrics(id, \"fork\"),\n\t\toption: opt,\n\t\thandler: func(payload []*Packet) {\n\t\t\tlpayload, rpayload := x(payload)\n\t\t\tleftEdge.channel <- lpayload\n\t\t\trightEdge.channel <- rpayload\n\t\t},\n\t\tconnector: func(ctx context.Context, b *builder) error {\n\t\t\tif next.left == nil || next.right == nil {\n\t\t\t\treturn fmt.Errorf(\"non-terminated fork\")\n\t\t\t} else if err := next.left.cascade(ctx, b, leftEdge); err != nil {\n\t\t\t\treturn err\n\t\t\t} else if err := next.right.cascade(ctx, b, rightEdge); err != nil {\n\t\t\t\treturn err\n\t\t\t}\n\n\t\t\treturn nil\n\t\t},\n\t}\n\n\tnext = n(next)\n\n\treturn nexter(func(n *node) *node {\n\t\t\tnext.left = n\n\t\t\treturn n\n\t\t}), nexter(func(n *node) *node {\n\t\t\tnext.right = n\n\t\t\treturn n\n\t\t})\n}", "func (p *Port) fork() *Port {\n\treturn &Port{p.File, p.Chan, false, false}\n}", "func (pc *ProcessCacheEntry) Fork(childEntry *ProcessCacheEntry) {\n\tchildEntry.PPid = pc.Pid\n\tchildEntry.TTYName = pc.TTYName\n\tchildEntry.Comm = pc.Comm\n\tchildEntry.FileEvent = pc.FileEvent\n\tchildEntry.ContainerID = pc.ContainerID\n\tchildEntry.ExecTime = pc.ExecTime\n\tchildEntry.Credentials = pc.Credentials\n\tchildEntry.Cookie = pc.Cookie\n\n\tchildEntry.SetParent(pc)\n}", "func (ec *EvalCtx) fork(newContext string) *EvalCtx {\n\tnewPorts := make([]*Port, len(ec.ports))\n\tfor i, p := range ec.ports {\n\t\tnewPorts[i] = p.Fork()\n\t}\n\treturn &EvalCtx{\n\t\tec.Evaler,\n\t\tec.name, ec.text, newContext,\n\t\tec.local, ec.up,\n\t\tnewPorts, ec.begin, ec.end,\n\t}\n}", "func (context *Context) Fork(name string) Context {\n\tcopy := *context\n\tcopy.Context = context\n\tcopy.Name = name\n\tcopy.Logger = copy.Logger.Fork(name)\n\treturn copy\n}", "func (s *Basegff3Listener) EnterDocument(ctx *DocumentContext) {}", "func (d *Document) Parent() Parent { return d.Folder }", "func (ec *EvalCtx) fork(newContext string) *EvalCtx {\n\tnewPorts := make([]*Port, len(ec.ports))\n\tfor i, p := range ec.ports {\n\t\tnewPorts[i] = p.Fork()\n\t}\n\treturn &EvalCtx{\n\t\tec.Evaler,\n\t\tec.name, ec.text, newContext,\n\t\tec.local, ec.up,\n\t\tnewPorts,\n\t}\n}", "func forkImage(w http.ResponseWriter, r *http.Request) {\r\n\t//\tuid, _ := strconv.ParseInt(parms[\"uid\"], 10, 64)\r\n\t//\tuname, _ := parms[\"uname\"]\r\n\tvar data forkData\r\n\tif err := json.NewDecoder(r.Body).Decode(&data); err != nil {\r\n\t\tlogger.Warnf(\"error decoding image: %s\", err)\r\n\t\thttp.Error(w, err.Error(), http.StatusInternalServerError)\r\n\t\treturn\r\n\t}\r\n\t//can't fork one's own image\r\n\tif data.Uid == data.Image.UserId {\r\n\t\thttp.Error(w, \"Can not fork your own image\", http.StatusInternalServerError)\r\n\t\treturn\r\n\t}\r\n\terr := data.Image.UpdateFork(data.Uid, data.Uname)\r\n\tif err != nil {\r\n\t\tlogger.Warnf(\"error forking image: %s\", err)\r\n\t\thttp.Error(w, err.Error(), http.StatusInternalServerError)\r\n\t\treturn\r\n\t}\r\n\tw.Header().Set(\"content-type\", \"application/json\")\r\n\tw.WriteHeader(http.StatusCreated)\r\n}", "func (stack *Stack) Fork() *Stack {\n\ts := 
NewStack(stack.root)\n\t//s.height = stack.height\n\ts.tos = stack.tos\n\ts.tos.pathcnt++\n\treturn s\n}", "func (b *ArgumentBuilder) Fork(target CommandNode, modifier RedirectModifier) *ArgumentBuilder {\n\treturn b.Forward(target, modifier, true)\n}", "func Fork(t *kernel.Task, sysno uintptr, args arch.SyscallArguments) (uintptr, *kernel.SyscallControl, error) {\n\t// \"A call to fork() is equivalent to a call to clone(2) specifying flags\n\t// as just SIGCHLD.\" - fork(2)\n\treturn clone(t, int(linux.SIGCHLD), 0, 0, 0, 0)\n}", "func (b *RequiredArgumentBuilder) Fork(target CommandNode, modifier RedirectModifier) ArgumentNodeBuilder {\n\tb.ArgumentBuilder.Fork(target, modifier)\n\treturn b\n}", "func (d *Daemon) child() {\n\tos.Chdir(\"/\")\n\n\t// Setsid in the exec.Cmd.SysProcAttr.Setsid\n\t//syscall.Setsid()\n\n\td.setupPidfile()\n}", "func (c *OneClient) Fork(ctx context.Context, servicePath string, serviceMethod string, args interface{}, reply interface{}) error {\n\tc.mu.RLock()\n\txclient := c.xclients[servicePath]\n\tc.mu.RUnlock()\n\n\tif xclient == nil {\n\t\tvar err error\n\t\tc.mu.Lock()\n\t\txclient = c.xclients[servicePath]\n\t\tif xclient == nil {\n\t\t\txclient, err = c.newXClient(servicePath)\n\t\t\tc.xclients[servicePath] = xclient\n\t\t}\n\t\tc.mu.Unlock()\n\t\tif err != nil {\n\t\t\treturn err\n\t\t}\n\t}\n\n\treturn xclient.Fork(ctx, serviceMethod, args, reply)\n}", "func ForkTree(loggedInUser, dbOwner, dbFolder, dbName string) (outputList []ForkEntry, err error) {\n\tdbQuery := `\n\t\tSELECT users.user_name, db.folder, db.db_name, db.public, db.db_id, db.forked_from, db.is_deleted\n\t\tFROM sqlite_databases AS db, users\n\t\tWHERE db.root_database = (\n\t\t\t\tSELECT root_database\n\t\t\t\tFROM sqlite_databases\n\t\t\t\tWHERE user_id = (\n\t\t\t\t\t\tSELECT user_id\n\t\t\t\t\t\tFROM users\n\t\t\t\t\t\tWHERE lower(user_name) = lower($1)\n\t\t\t\t\t)\n\t\t\t\t\tAND folder = $2\n\t\t\t\t\tAND db_name = $3\n\t\t\t\t)\n\t\t\tAND db.user_id = users.user_id\n\t\tORDER BY db.forked_from NULLS FIRST`\n\trows, err := pdb.Query(dbQuery, dbOwner, dbFolder, dbName)\n\tif err != nil {\n\t\tlog.Printf(\"Database query failed: %v\\n\", err)\n\t\treturn nil, err\n\t}\n\tdefer rows.Close()\n\tvar dbList []ForkEntry\n\tfor rows.Next() {\n\t\tvar frk pgx.NullInt64\n\t\tvar oneRow ForkEntry\n\t\terr = rows.Scan(&oneRow.Owner, &oneRow.Folder, &oneRow.DBName, &oneRow.Public, &oneRow.ID, &frk, &oneRow.Deleted)\n\t\tif err != nil {\n\t\t\tlog.Printf(\"Error retrieving fork list for '%s%s%s': %v\\n\", dbOwner, dbFolder, dbName,\n\t\t\t\terr)\n\t\t\treturn nil, err\n\t\t}\n\t\tif frk.Valid {\n\t\t\toneRow.ForkedFrom = int(frk.Int64)\n\t\t}\n\t\tdbList = append(dbList, oneRow)\n\t}\n\n\t// Safety checks\n\tnumResults := len(dbList)\n\tif numResults == 0 {\n\t\treturn nil, errors.New(\"Empty list returned instead of fork tree. This shouldn't happen\")\n\t}\n\tif dbList[0].ForkedFrom != 0 {\n\t\t// The first entry has a non-zero forked_from field, indicating it's not the root entry. 
That\n\t\t// shouldn't happen, so return an error.\n\t\treturn nil, errors.New(\"Incorrect root entry data in retrieved database list\")\n\t}\n\n\t// * Process the root entry *\n\n\tvar iconDepth int\n\tvar forkTrail []int\n\n\t// Set the root database ID\n\trootID := dbList[0].ID\n\n\t// Set the icon list for display in the browser\n\tdbList[0].IconList = append(dbList[0].IconList, ROOT)\n\n\t// If the root database is no longer public, then use placeholder details instead\n\tif !dbList[0].Public && (strings.ToLower(dbList[0].Owner) != strings.ToLower(loggedInUser)) {\n\t\tdbList[0].DBName = \"private database\"\n\t}\n\n\t// If the root database is deleted, use a placeholder indicating that instead\n\tif dbList[0].Deleted {\n\t\tdbList[0].DBName = \"deleted database\"\n\t}\n\n\t// Append this completed database line to the output list\n\toutputList = append(outputList, dbList[0])\n\n\t// Append the root database ID to the fork trail\n\tforkTrail = append(forkTrail, rootID)\n\n\t// Mark the root database entry as processed\n\tdbList[0].Processed = true\n\n\t// Increment the icon depth\n\ticonDepth = 1\n\n\t// * Sort the remaining entries for correct display *\n\tnumUnprocessedEntries := numResults - 1\n\tfor numUnprocessedEntries > 0 {\n\t\tvar forkFound bool\n\t\toutputList, forkTrail, forkFound = nextChild(loggedInUser, &dbList, &outputList, &forkTrail, iconDepth)\n\t\tif forkFound {\n\t\t\tnumUnprocessedEntries--\n\t\t\ticonDepth++\n\n\t\t\t// Add stems and branches to the output icon list\n\t\t\tnumOutput := len(outputList)\n\n\t\t\tmyID := outputList[numOutput-1].ID\n\t\t\tmyForkedFrom := outputList[numOutput-1].ForkedFrom\n\n\t\t\t// Scan through the earlier output list for any sibling entries\n\t\t\tvar siblingFound bool\n\t\t\tfor i := numOutput; i > 0 && siblingFound == false; i-- {\n\t\t\t\tthisID := outputList[i-1].ID\n\t\t\t\tthisForkedFrom := outputList[i-1].ForkedFrom\n\n\t\t\t\tif thisForkedFrom == myForkedFrom && thisID != myID {\n\t\t\t\t\t// Sibling entry found\n\t\t\t\t\tsiblingFound = true\n\t\t\t\t\tsibling := outputList[i-1]\n\n\t\t\t\t\t// Change the last sibling icon to a branch icon\n\t\t\t\t\tsibling.IconList[iconDepth-1] = BRANCH\n\n\t\t\t\t\t// Change appropriate spaces to stems in the output icon list\n\t\t\t\t\tfor l := numOutput - 1; l > i; l-- {\n\t\t\t\t\t\tthisEntry := outputList[l-1]\n\t\t\t\t\t\tif thisEntry.IconList[iconDepth-1] == SPACE {\n\t\t\t\t\t\t\tthisEntry.IconList[iconDepth-1] = STEM\n\t\t\t\t\t\t}\n\t\t\t\t\t}\n\t\t\t\t}\n\t\t\t}\n\t\t} else {\n\t\t\t// No child was found, so remove an entry from the fork trail then continue looping\n\t\t\tforkTrail = forkTrail[:len(forkTrail)-1]\n\n\t\t\ticonDepth--\n\t\t}\n\t}\n\n\treturn outputList, nil\n}", "func ForkedFrom(dbOwner, dbFolder, dbName string) (forkOwn, forkFol, forkDB string, forkDel bool, err error) {\n\t// Check if the database was forked from another\n\tvar dbID, forkedFrom pgx.NullInt64\n\tdbQuery := `\n\t\tSELECT db_id, forked_from\n\t\tFROM sqlite_databases\n\t\tWHERE user_id = (\n\t\t\t\tSELECT user_id\n\t\t\t\tFROM users\n\t\t\t\tWHERE lower(user_name) = lower($1))\n\t\t\tAND folder = $2\n\t\t\tAND db_name = $3`\n\terr = pdb.QueryRow(dbQuery, dbOwner, dbFolder, dbName).Scan(&dbID, &forkedFrom)\n\tif err != nil {\n\t\tlog.Printf(\"Error checking if database was forked from another '%s%s%s'. 
Error: %v\\n\", dbOwner,\n\t\t\tdbFolder, dbName, err)\n\t\treturn \"\", \"\", \"\", false, err\n\t}\n\tif !forkedFrom.Valid {\n\t\t// The database wasn't forked, so return empty strings\n\t\treturn \"\", \"\", \"\", false, nil\n\t}\n\n\t// Return the details of the database this one was forked from\n\tdbQuery = `\n\t\tSELECT u.user_name, db.folder, db.db_name, db.is_deleted\n\t\tFROM users AS u, sqlite_databases AS db\n\t\tWHERE db.db_id = $1\n\t\t\tAND u.user_id = db.user_id`\n\terr = pdb.QueryRow(dbQuery, forkedFrom).Scan(&forkOwn, &forkFol, &forkDB, &forkDel)\n\tif err != nil {\n\t\tlog.Printf(\"Error retrieving forked database information for '%s%s%s'. Error: %v\\n\", dbOwner,\n\t\t\tdbFolder, dbName, err)\n\t\treturn \"\", \"\", \"\", false, err\n\t}\n\n\t// If the database this one was forked from has been deleted, indicate that and clear the database name value\n\tif forkDel {\n\t\tforkDB = \"\"\n\t}\n\treturn forkOwn, forkFol, forkDB, forkDel, nil\n}", "func Fork() (int, error) {\n\targs := os.Args[1:]\n\tcmd := exec.Command(os.Args[0], args...)\n\tcmd.Env = os.Environ()\n\tcmd.Stdin = nil\n\tcmd.Stdout = nil\n\tcmd.Stderr = nil\n\tcmd.ExtraFiles = nil\n\t// setsid is used to detach the process from the parent (normally a shell)\n\tcmd.SysProcAttr = &syscall.SysProcAttr{\n\t\tSetsid: true,\n\t}\n\tif err := cmd.Start(); err != nil {\n\t\treturn 0, err\n\t}\n\treturn cmd.Process.Pid, nil\n}", "func (tdb *TyposDatasetBuilder) Fork(n int) []core.PipelineItem {\n\treturn core.ForkSamePipelineItem(tdb, n)\n}", "func forkAndExecInChild(argv0 *byte, argv []*byte, envv []envItem, dir *byte, attr *ProcAttr, pipe int, rflag int) (pid int, err error) {\n\t// Declare all variables at top in case any\n\t// declarations require heap allocation (e.g., errbuf).\n\tvar (\n\t\tr1 uintptr\n\t\tnextfd int\n\t\ti int\n\t\tclearenv int\n\t\tenvfd int\n\t\terrbuf [ERRMAX]byte\n\t\tstatbuf [STATMAX]byte\n\t\tdupdevfd int\n\t)\n\n\t// Guard against side effects of shuffling fds below.\n\t// Make sure that nextfd is beyond any currently open files so\n\t// that we can't run the risk of overwriting any of them.\n\tfd := make([]int, len(attr.Files))\n\tnextfd = len(attr.Files)\n\tfor i, ufd := range attr.Files {\n\t\tif nextfd < int(ufd) {\n\t\t\tnextfd = int(ufd)\n\t\t}\n\t\tfd[i] = int(ufd)\n\t}\n\tnextfd++\n\n\tif envv != nil {\n\t\tclearenv = RFCENVG\n\t}\n\n\t// About to call fork.\n\t// No more allocation or calls of non-assembly functions.\n\tr1, _, _ = RawSyscall(SYS_RFORK, uintptr(RFPROC|RFFDG|RFREND|clearenv|rflag), 0, 0)\n\n\tif r1 != 0 {\n\t\tif int32(r1) == -1 {\n\t\t\treturn 0, NewError(errstr())\n\t\t}\n\t\t// parent; return PID\n\t\treturn int(r1), nil\n\t}\n\n\t// Fork succeeded, now in child.\n\n\t// Close fds we don't need.\n\tr1, _, _ = RawSyscall(SYS_OPEN, uintptr(unsafe.Pointer(dupdev)), uintptr(O_RDONLY), 0)\n\tdupdevfd = int(r1)\n\tif dupdevfd == -1 {\n\t\tgoto childerror\n\t}\ndirloop:\n\tfor {\n\t\tr1, _, _ = RawSyscall6(SYS_PREAD, uintptr(dupdevfd), uintptr(unsafe.Pointer(&statbuf[0])), uintptr(len(statbuf)), ^uintptr(0), ^uintptr(0), 0)\n\t\tn := int(r1)\n\t\tswitch n {\n\t\tcase -1:\n\t\t\tgoto childerror\n\t\tcase 0:\n\t\t\tbreak dirloop\n\t\t}\n\t\tfor b := statbuf[:n]; len(b) > 0; {\n\t\t\tvar s []byte\n\t\t\ts, b = gdirname(b)\n\t\t\tif s == nil {\n\t\t\t\tcopy(errbuf[:], ErrBadStat.Error())\n\t\t\t\tgoto childerror1\n\t\t\t}\n\t\t\tif s[len(s)-1] == 'l' {\n\t\t\t\t// control file for descriptor <N> is named <N>ctl\n\t\t\t\tcontinue\n\t\t\t}\n\t\t\tcloseFdExcept(int(atoi(s)), 
pipe, dupdevfd, fd)\n\t\t}\n\t}\n\tRawSyscall(SYS_CLOSE, uintptr(dupdevfd), 0, 0)\n\n\t// Write new environment variables.\n\tif envv != nil {\n\t\tfor i = 0; i < len(envv); i++ {\n\t\t\tr1, _, _ = RawSyscall(SYS_CREATE, uintptr(unsafe.Pointer(envv[i].name)), uintptr(O_WRONLY), uintptr(0666))\n\n\t\t\tif int32(r1) == -1 {\n\t\t\t\tgoto childerror\n\t\t\t}\n\n\t\t\tenvfd = int(r1)\n\n\t\t\tr1, _, _ = RawSyscall6(SYS_PWRITE, uintptr(envfd), uintptr(unsafe.Pointer(envv[i].value)), uintptr(envv[i].nvalue),\n\t\t\t\t^uintptr(0), ^uintptr(0), 0)\n\n\t\t\tif int32(r1) == -1 || int(r1) != envv[i].nvalue {\n\t\t\t\tgoto childerror\n\t\t\t}\n\n\t\t\tr1, _, _ = RawSyscall(SYS_CLOSE, uintptr(envfd), 0, 0)\n\n\t\t\tif int32(r1) == -1 {\n\t\t\t\tgoto childerror\n\t\t\t}\n\t\t}\n\t}\n\n\t// Chdir\n\tif dir != nil {\n\t\tr1, _, _ = RawSyscall(SYS_CHDIR, uintptr(unsafe.Pointer(dir)), 0, 0)\n\t\tif int32(r1) == -1 {\n\t\t\tgoto childerror\n\t\t}\n\t}\n\n\t// Pass 1: look for fd[i] < i and move those up above len(fd)\n\t// so that pass 2 won't stomp on an fd it needs later.\n\tif pipe < nextfd {\n\t\tr1, _, _ = RawSyscall(SYS_DUP, uintptr(pipe), uintptr(nextfd), 0)\n\t\tif int32(r1) == -1 {\n\t\t\tgoto childerror\n\t\t}\n\t\tpipe = nextfd\n\t\tnextfd++\n\t}\n\tfor i = 0; i < len(fd); i++ {\n\t\tif fd[i] >= 0 && fd[i] < int(i) {\n\t\t\tif nextfd == pipe { // don't stomp on pipe\n\t\t\t\tnextfd++\n\t\t\t}\n\t\t\tr1, _, _ = RawSyscall(SYS_DUP, uintptr(fd[i]), uintptr(nextfd), 0)\n\t\t\tif int32(r1) == -1 {\n\t\t\t\tgoto childerror\n\t\t\t}\n\n\t\t\tfd[i] = nextfd\n\t\t\tnextfd++\n\t\t}\n\t}\n\n\t// Pass 2: dup fd[i] down onto i.\n\tfor i = 0; i < len(fd); i++ {\n\t\tif fd[i] == -1 {\n\t\t\tRawSyscall(SYS_CLOSE, uintptr(i), 0, 0)\n\t\t\tcontinue\n\t\t}\n\t\tif fd[i] == int(i) {\n\t\t\tcontinue\n\t\t}\n\t\tr1, _, _ = RawSyscall(SYS_DUP, uintptr(fd[i]), uintptr(i), 0)\n\t\tif int32(r1) == -1 {\n\t\t\tgoto childerror\n\t\t}\n\t}\n\n\t// Pass 3: close fd[i] if it was moved in the previous pass.\n\tfor i = 0; i < len(fd); i++ {\n\t\tif fd[i] >= 0 && fd[i] != int(i) {\n\t\t\tRawSyscall(SYS_CLOSE, uintptr(fd[i]), 0, 0)\n\t\t}\n\t}\n\n\t// Time to exec.\n\tr1, _, _ = RawSyscall(SYS_EXEC,\n\t\tuintptr(unsafe.Pointer(argv0)),\n\t\tuintptr(unsafe.Pointer(&argv[0])), 0)\n\nchilderror:\n\t// send error string on pipe\n\tRawSyscall(SYS_ERRSTR, uintptr(unsafe.Pointer(&errbuf[0])), uintptr(len(errbuf)), 0)\nchilderror1:\n\terrbuf[len(errbuf)-1] = 0\n\ti = 0\n\tfor i < len(errbuf) && errbuf[i] != 0 {\n\t\ti++\n\t}\n\n\tRawSyscall6(SYS_PWRITE, uintptr(pipe), uintptr(unsafe.Pointer(&errbuf[0])), uintptr(i),\n\t\t^uintptr(0), ^uintptr(0), 0)\n\n\tfor {\n\t\tRawSyscall(SYS_EXITS, 0, 0, 0)\n\t}\n}", "func partitionText(inputFile string, pageNum int) {\n\tfilePath := inputFile\n\tfmt.Printf(\"working with file %s\\n\", filePath)\n\tpdfReader, f, err := model.NewPdfReaderFromFile(filePath, nil)\n\tdefer f.Close()\n\tif err != nil {\n\t\tpanic(err)\n\t}\n\tpage, err := pdfReader.GetPage(pageNum)\n\tif err != nil {\n\t\tpanic(err)\n\t}\n\tex, err := extractor.New(page)\n\tif err != nil {\n\t\tpanic(err)\n\t}\n\tpageText, _, _, err := ex.ExtractPageText()\n\tif err != nil {\n\t\tpanic(err)\n\t}\n\n\ttext := pageText.Text()\n\tvar prevTable *extractor.TextTable\n\tcount := 0\n\tindexes := [][]int{}\n\tindex := []int{}\n\tfor _, tMark := range pageText.Marks().Elements() {\n\t\tcurrentTable, _ := tMark.TableInfo()\n\t\tif prevTable == nil && currentTable != nil {\n\t\t\t// new table here\n\t\t\tindex = append(index, count)\n\t\t} else if 
prevTable != nil && currentTable == nil {\n\t\t\t// end of table here\n\t\t\tindex = append(index, count)\n\t\t\tindexes = append(indexes, index)\n\t\t\tindex = []int{}\n\t\t}\n\t\tprevTable = currentTable\n\t\tcount += len(tMark.Text)\n\t}\n\tbeg := 0\n\tfor i, idx := range indexes {\n\t\tidx1 := idx[0]\n\t\tidx2 := idx[1]\n\t\tfmt.Printf(\"\\n------------------- outside table begins ---------------\\n\")\n\t\tfmt.Print(text[beg:idx1])\n\t\tfmt.Printf(\"\\n------------------- outside table ends ------------------\\n\")\n\t\tfmt.Printf(\"\\n------------------- inside table begins -----------------\\n\")\n\t\tfmt.Print(text[idx1:idx2])\n\t\tfmt.Printf(\"\\n------------------- inside table ends -------------------\\n\")\n\t\tbeg = idx2\n\t\tif i == len(indexes)-1 {\n\t\t\tfmt.Printf(\"\\n--------------- out side table begins --------------\\n\")\n\t\t\tfmt.Print(text[beg:])\n\t\t\tfmt.Printf(\"\\n--------------- outside table ends ------------------\\n\")\n\t\t}\n\t}\n}", "func (b *LiteralArgumentBuilder) Fork(target CommandNode, modifier RedirectModifier) LiteralNodeBuilder {\n\tb.ArgumentBuilder.Fork(target, modifier)\n\treturn b\n}", "func prepDocument(doc *goquery.Document) {\n\t// Remove all style tags in head\n\tdoc.Find(\"style\").Remove()\n\n\t// Replace all br\n\treplaceBrs(doc)\n\n\t// Replace font tags to span\n\tdoc.Find(\"font\").Each(func(_ int, font *goquery.Selection) {\n\t\thtml, _ := font.Html()\n\t\tfont.ReplaceWithHtml(\"<span>\" + html + \"</span>\")\n\t})\n}", "func godoc(s selection, args []string) {\n\tfmt.Println(runWithStdin(s.archive(), \"gogetdoc\", \"-modified\", \"-pos\", s.pos()))\n}", "func daemonise() {\n\t// Drop privileges by switching to nobody user and group\n\tif _, _, err := syscall.Syscall(syscall.SYS_SETGID, 65534, 0, 0); err != 0 {\n\t\tos.Exit(1)\n\t}\n\tif _, _, err := syscall.Syscall(syscall.SYS_SETUID, 65534, 0, 0); err != 0 {\n\t\tos.Exit(1)\n\t}\n\n\t// Do first fork\n\tpid, _, _ := syscall.Syscall(syscall.SYS_FORK, 0, 0, 0)\n\n\t// Exit in parent process\n\tswitch pid {\n\tcase 0:\n\t\t// Child process, carry on\n\t\tbreak\n\tdefault:\n\t\t// Parent process, exit cleanly\n\t\tos.Exit(0)\n\t}\n\n\t// Call setsid\n\t_, err := syscall.Setsid()\n\tif err != nil {\n\t\tos.Exit(1)\n\t}\n\n\t// Fork again\n\tpid, _, _ = syscall.Syscall(syscall.SYS_FORK, 0, 0, 0)\n\n\t// Exit in parent again\n\tswitch pid {\n\tcase 0:\n\t\t// Child process, carry on\n\t\tbreak\n\tdefault:\n\t\t// Parent process, exit cleanly\n\t\tos.Exit(0)\n\t}\n\n\t// Clear umask\n\tsyscall.Umask(0)\n\n\t// Change working directory\n\terr = syscall.Chdir(\"/\")\n\tif err != nil {\n\t\tos.Exit(1)\n\t}\n\n\t// Duplicate /dev/null to stdin, stdout and stderr\n\tnullFile, err := os.OpenFile(\"/dev/null\", os.O_RDWR, 0)\n\tif err != nil {\n\t\tos.Exit(1)\n\t}\n\tnullFd := nullFile.Fd()\n\tsyscall.Dup2(int(nullFd), int(os.Stdin.Fd()))\n\tsyscall.Dup2(int(nullFd), int(os.Stdout.Fd()))\n\tsyscall.Dup2(int(nullFd), int(os.Stderr.Fd()))\n\n}", "func main() {\n\tfmt.Println(\"*** START ***\")\n\n pathGen, _ := os.Getwd()\n pathRoo := strings.Replace(pathGen, \"/_gen\", \"\", -1)\n\tpathSrc := path.Join(pathRoo, \"/_src\")\n\tpathPst := path.Join(pathSrc, \"/posts\")\n\tpathTmp := path.Join(pathSrc, \"/templates\")\n\n\tfileTmp := GetFile(path.Join(pathTmp, \"post.html\"))\n\n\tProcessClean(pathRoo)\n\tProcessPosts(fileTmp, pathPst, pathRoo)\n\n\tfmt.Println(\"*** END ***\")\n}", "func (self *Fork) Run() {\n var taken = false\n\n for {\n if taken {\n <- self.Release\n taken = false\n } else {\n 
<- self.Take\n taken = true\n }\n }\n}", "func (w *Worker) startWorker() {\n\tw.wg.Add(1)\n\tdefer w.wg.Done()\n\n\tfor p := range w.InPage {\n\t\tlog.Println(\"processing title: \", p.Title)\n\n\t\t// Skip redirect titles, which have no text that needs parsing\n\t\tif strings.HasPrefix(p.Revision.Text.Text, \"#REDIRECT\") {\n\t\t\toutput, err := xml.Marshal(p)\n\t\t\tif err != nil {\n\t\t\t\tpanic(err)\n\t\t\t}\n\t\t\tw.OutText <- output\n\t\t\tcontinue\n\t\t}\n\n\t\t// We will temporarily swap the URL link symbols so we don't parse that\n\t\tp.Revision.Text.Text = strings.ReplaceAll(p.Revision.Text.Text, \"[[\", `<SPEC_START>`)\n\t\tp.Revision.Text.Text = strings.ReplaceAll(p.Revision.Text.Text, `]]`, `<SPEC_END>`)\n\n\t\tcmd := exec.Command(w.ParseScript)\n\n\t\tvar b bytes.Buffer\n\t\tb.Write([]byte(p.Revision.Text.Text))\n\n\t\tcmd.Stdin = &b\n\n\t\tclean, err := cmd.CombinedOutput()\n\t\tif err != nil {\n\t\t\tlog.Printf(\"error parsing title %s. Skipping\", p.Title)\n\t\t\tcontinue\n\t\t}\n\n\t\t// Reverse the url text changes\n\t\tnew := strings.ReplaceAll(string(clean), `<SPEC_START>`, `[[`)\n\t\tnew = strings.ReplaceAll(new, `<SPEC_END>`, `]]`)\n\t\tp.Revision.Text.Text = new\n\n\t\toutput, err := xml.MarshalIndent(p, \" \", \" \")\n\t\tif err != nil {\n\t\t\tpanic(err)\n\t\t}\n\t\tw.OutText <- output\n\t}\n\n\tlog.Println(\"exiting xml worker\")\n}", "func setParent(child, parent string) error {\n\tlogrus.Debugln(\"child:\", child, \"parent:\", parent)\n\tpath := METAPATH + child\n\tlogrus.Debugln(\"path:\", path)\n\n\t// if parent meta already exit, do nothing\n\t//if exit, err := isParentMetaExist(path); exit {\n\t//\t// fixme: fix op not permitted error\n\t//\tif err != nil {\n\t//\t\tlogrus.Warnln(\"check exit get error:\", err)\n\t//\t}\n\t//\treturn nil\n\t//}\n\n\t// set relationship\n\t// fixme: perm mode may not be correct\n\tos.Mkdir(path, 0777)\n\tfd, err := os.Create(path + \"/parent\")\n\tif err != nil {\n\t\treturn err\n\t}\n\tdefer fd.Close()\n\n\tfd.WriteString(parent)\n\treturn nil\n}", "func forkAndExec(l net.Conn) error {\n\targv0, err := lookPath()\n\tif nil != err {\n\t\treturn err\n\t}\n\twd, err := os.Getwd()\n\tif nil != err {\n\t\treturn err\n\t}\n\tfd, err := setEnvs(l)\n\tif nil != err {\n\t\treturn err\n\t}\n\tif err := os.Setenv(\"RESTART_PID\", \"\"); nil != err {\n\t\treturn err\n\t}\n\tif err := os.Setenv(\n\t\t\"RESTART_PPID\",\n\t\tfmt.Sprint(syscall.Getpid()),\n\t); nil != err {\n\t\treturn err\n\t}\n\tfiles := make([]*os.File, fd+1)\n\tfiles[syscall.Stdin] = os.Stdin\n\tfiles[syscall.Stdout] = os.Stdout\n\tfiles[syscall.Stderr] = os.Stderr\n\taddr := l.RemoteAddr()\n\tfiles[fd] = os.NewFile(\n\t\tfd,\n\t\tfmt.Sprintf(\"%s:%s->\", addr.Network(), addr.String()),\n\t)\n\tp, err := os.StartProcess(argv0, os.Args, &os.ProcAttr{\n\t\tDir: wd,\n\t\tEnv: os.Environ(),\n\t\tFiles: files,\n\t\tSys: &syscall.SysProcAttr{},\n\t})\n\tif nil != err {\n\t\treturn err\n\t}\n\tlog.Infoln(\"Spawned new GRAIN child (pid: \", p.Pid, \")\")\n\tif err = os.Setenv(\"RESTART_PID\", fmt.Sprint(p.Pid)); nil != err {\n\t\treturn err\n\t}\n\treturn nil\n}", "func newDoc(c *gin.Context) {\n\tkey := uuid.New()\n\tres := saveDocument(key, c)\n\tif res.Ok == false {\n\t\tif res.Message == \"file exists\" {\n\t\t\tc.JSON(fileExistsErr, res)\n\t\t} else {\n\t\t\tlog.Printf(\"Error saving document: %s\", res.Error)\n\t\t\tc.JSON(statusErr, res)\n\t\t}\n\t} else {\n\t\tc.JSON(statusOk, res)\n\t}\n}", "func forkRestartProcess(ctx context.Context, newExeFilePath ...string) error 
{\n\tvar (\n\t\tpath = os.Args[0]\n\t)\n\tif len(newExeFilePath) > 0 {\n\t\tpath = newExeFilePath[0]\n\t}\n\tif err := os.Unsetenv(adminActionReloadEnvKey); err != nil {\n\t\tintlog.Errorf(ctx, `%+v`, err)\n\t}\n\tenv := os.Environ()\n\tenv = append(env, adminActionRestartEnvKey+\"=1\")\n\tp := gproc.NewProcess(path, os.Args, env)\n\tif _, err := p.Start(ctx); err != nil {\n\t\tglog.Errorf(\n\t\t\tctx,\n\t\t\t`%d: fork process failed, error:%s, are you running using \"go run\"?`,\n\t\t\tgproc.Pid(), err.Error(),\n\t\t)\n\t\treturn err\n\t}\n\treturn nil\n}", "func createFork() Fork {\n return Fork { make(chan bool), make(chan bool)}\n}", "func (client *Client) processDriveFiles(env *util.Env, baseSlug string, parentID string, pages *pages) {\n\tr, err := client.Service.Files.List().\n\t\tPageSize(1000). // OK for now. Right?\n\t\tQ(\"'\" + parentID + \"' in parents and trashed=false\").\n\t\tDo()\n\n\tif err != nil {\n\t\tlog.Printf(\"Unable to retrieve files from google drive: %v\\n\", err)\n\t\treturn\n\t}\n\n\tif len(r.Files) > 0 {\n\t\tfor _, i := range r.Files {\n\t\t\t// Grab the sort order and title from formatted title names.\n\t\t\tparts, err := getPartsFromTitle(i.Name)\n\t\t\tif err != nil {\n\t\t\t\tlog.Printf(\"Skipping document. There was an issue getting parts from title: %s\\n\", err.Error())\n\t\t\t\tcontinue\n\t\t\t}\n\t\t\t// If the format was incorrect an empty struct will be returned.\n\t\t\tif parts.Title == \"\" {\n\t\t\t\tlog.Printf(\"Skipping document because of format: %s\\n\", i.Name)\n\t\t\t\tcontinue\n\t\t\t}\n\n\t\t\t// Define the page that will be saved.\n\t\t\tnewPage := &model.Page{}\n\t\t\tnewPage.Name = parts.Title\n\t\t\tnewPage.DocID = i.Id\n\t\t\tnewPage.Order = parts.Order\n\t\t\tnewPage.Created = i.CreatedTime\n\t\t\tnewPage.Updated = i.ModifiedTime\n\n\t\t\t// Switch depending on type of ducment.\n\t\t\tswitch mime := i.MimeType; mime {\n\t\t\tcase \"application/vnd.google-apps.document\":\n\t\t\t\thtmlBytes, err := client.getContents(i.Id, \"text/html\")\n\n\t\t\t\tif err != nil {\n\t\t\t\t\tlog.Printf(\"Skipping. 
There was an error grabbing the contents for a document: %s\", err.Error())\n\t\t\t\t\tcontinue\n\t\t\t\t}\n\n\t\t\t\tmd, err := MarshalMarkdownFromHTML(bytes.NewReader(htmlBytes))\n\t\t\t\tif err != nil {\n\t\t\t\t\tlog.Printf(\"There was a problem parsing html to markdown: %s\", err.Error())\n\t\t\t\t\tcontinue\n\t\t\t\t}\n\n\t\t\t\tnewPage.Md = md\n\t\t\t\tnewPage.HTML = string(blackfriday.Run(\n\t\t\t\t\t[]byte(md),\n\t\t\t\t\tblackfriday.WithExtensions(\n\t\t\t\t\t\tblackfriday.Tables|blackfriday.AutoHeadingIDs|blackfriday.FencedCode,\n\t\t\t\t\t),\n\t\t\t\t))\n\n\t\t\t\tnewPage.Type = \"file\"\n\n\t\t\t\tif parts.Order == 0 {\n\t\t\t\t\t// If the order is 0, always take on the same path as the directory.\n\t\t\t\t\tnewPage.Slug = baseSlug\n\t\t\t\t} else {\n\t\t\t\t\tif baseSlug != \"\" {\n\t\t\t\t\t\tnewPage.Slug = baseSlug + \"/\" + util.MarshalSlug(parts.Title)\n\t\t\t\t\t} else {\n\t\t\t\t\t\tnewPage.Slug = baseSlug + util.MarshalSlug(parts.Title)\n\t\t\t\t\t}\n\t\t\t\t}\n\n\t\t\t\tlog.Printf(\"Saving page \\\"%s\\\" with slug \\\"%s\\\".\\n\", newPage.Name, newPage.Slug)\n\t\t\t\tpages.appendPage(newPage)\n\n\t\t\tcase \"application/vnd.google-apps.folder\":\n\t\t\t\tvar dirBaseSlug string\n\n\t\t\t\tif baseSlug != \"\" {\n\t\t\t\t\tdirBaseSlug = baseSlug + \"/\" + util.MarshalSlug(parts.Title)\n\t\t\t\t} else {\n\t\t\t\t\tdirBaseSlug = util.MarshalSlug(parts.Title)\n\t\t\t\t}\n\t\t\t\tnewPage.Type = \"dir\"\n\t\t\t\tnewPage.Slug = dirBaseSlug\n\t\t\t\tlog.Printf(\"Saving directory \\\"%s\\\" with slug \\\"%s\\\".\\n\", newPage.Name, newPage.Slug)\n\t\t\t\tpages.appendPage(newPage)\n\n\t\t\t\tlog.Printf(\"Submerging deeper into %s\\n\", i.Name)\n\t\t\t\tclient.processDriveFiles(env, dirBaseSlug, i.Id, pages)\n\t\t\tdefault:\n\t\t\t\tlog.Printf(\"Unknown filetype in drive directory: %s\\n\", mime)\n\t\t\t}\n\t\t}\n\t} else {\n\t\tlog.Println(\"No files found.\")\n\t}\n}", "func getFile(doc *job.Document, c chan<- *job.Document, e chan<- error) {\n\tdata, err := doc.Get()\n\tif err != nil {\n\t\te <- fmt.Errorf(\"%v: %v\", doc.Key, err)\n\t\treturn\n\t}\n\tpath := doc.LocalPath()\n\terr = ioutil.WriteFile(path, data, 0644)\n\tif err != nil {\n\t\te <- fmt.Errorf(\"%v: %v\", doc.Key, err)\n\t\treturn\n\t}\n\terr = doc.GetMetadata(cpdf.New(path, doc.Id()))\n\tif err != nil {\n\t\te <- fmt.Errorf(\"while extracting metadata from '%v': %v\", doc.Key, err)\n\t\treturn\n\t}\n\tc <- doc\n}", "func Fork() (int, error) {\n\targs := os.Args[1:]\n\tcmd := exec.Command(os.Args[0], args...)\n\tcmd.Env = os.Environ()\n\tcmd.Stdin = nil\n\tcmd.Stdout = nil\n\tcmd.Stderr = nil\n\tcmd.ExtraFiles = nil\n\tcmd.SysProcAttr = &syscall.SysProcAttr{\n\t\t// Setsid is used to detach the process from the parent (normally a shell)\n\t\t//\n\t\t// The disowning of a child process is accomplished by executing the system call\n\t\t// setpgrp() or setsid(), (both of which have the same functionality) as soon as\n\t\t// the child is forked. These calls create a new process session group, make the\n\t\t// child process the session leader, and set the process group ID to the process\n\t\t// ID of the child. 
https://bsdmag.org/unix-kernel-system-calls/\n\t\tSetsid: true,\n\t}\n\tif err := cmd.Start(); err != nil {\n\t\treturn 0, err\n\t}\n\treturn cmd.Process.Pid, nil\n}", "func Fork(envName string, fd *os.File, args []string) (int, error) {\n\tcmd := exec.Command(os.Args[0], args...)\n\tval := os.Getenv(envName)\n\tif val == \"\" { //若未设置则为空字符串\n\t\t//为子进程设置特殊的环境变量标识\n\t\tos.Setenv(envName, \"daemon\")\n\t}\n\tcmd.Env = os.Environ()\n\tcmd.Stdin = nil\n\t//为捕获执行程序的输出,非设置新进程的os.Stdout 不要理解错\n\t//新进程的os.Stdout.Name()值还是默认值,但输出到/dev/stdout的这边能获取到\n\t//这边必须设置,否则新进程内的错误可能捕获不到\n\t// 用 os.NewFile(uintptr(syscall.Stderr), \"/dev/stderr\").WriteString(\"test\\n\") 复现\n\tcmd.Stdout = fd\n\tcmd.Stderr = fd\n\tcmd.ExtraFiles = nil\n\tif err := cmd.Start(); err != nil {\n\t\treturn 0, err\n\t}\n\treturn cmd.Process.Pid, nil\n}", "func CallParentPrerun(cmd *cobra.Command, args []string) error {\n\t// if the executing subcommand has no PersistentPreRunE,\n\t// the most recent parent.PersistentPreRunE will execute.\n\t// therefore, we should skip the first parent's PersistentPreRunE\n\t// as it is the currently executing PersistentPreRunE\n\tskipFirstPreRun := cmd.PersistentPreRunE == nil\n\n\tfor parent := cmd.Parent(); parent != nil; parent = parent.Parent() {\n\t\tif parent.PersistentPreRunE != nil {\n\t\t\tif skipFirstPreRun {\n\t\t\t\tskipFirstPreRun = false\n\t\t\t\tcontinue\n\t\t\t}\n\t\t\tif err := parent.PersistentPreRunE(parent, args); err != nil {\n\t\t\t\treturn err\n\t\t\t}\n\t\t}\n\t}\n\n\treturn nil\n}", "func (tangle *Tangle) Fork(transactionID transaction.ID, conflictingInputs []transaction.OutputID) (forked bool, finalized bool, err error) {\n\tcachedTransaction := tangle.Transaction(transactionID)\n\tcachedTransactionMetadata := tangle.TransactionMetadata(transactionID)\n\tdefer cachedTransaction.Release()\n\tdefer cachedTransactionMetadata.Release()\n\n\ttx := cachedTransaction.Unwrap()\n\tif tx == nil {\n\t\terr = fmt.Errorf(\"failed to load transaction '%s'\", transactionID)\n\n\t\treturn\n\t}\n\ttxMetadata := cachedTransactionMetadata.Unwrap()\n\tif txMetadata == nil {\n\t\terr = fmt.Errorf(\"failed to load metadata of transaction '%s'\", transactionID)\n\n\t\treturn\n\t}\n\n\t// abort if this transaction was finalized already\n\tif txMetadata.Finalized() {\n\t\tfinalized = true\n\n\t\treturn\n\t}\n\n\t// update / create new branch\n\tnewBranchID := branchmanager.NewBranchID(tx.ID())\n\tcachedTargetBranch, newBranchCreated := tangle.branchManager.Fork(newBranchID, []branchmanager.BranchID{txMetadata.BranchID()}, conflictingInputs)\n\tdefer cachedTargetBranch.Release()\n\n\t// set branch to be preferred if the underlying transaction was marked as preferred\n\tif txMetadata.Preferred() {\n\t\tif _, err = tangle.branchManager.SetBranchPreferred(newBranchID, true); err != nil {\n\t\t\treturn\n\t\t}\n\t}\n\n\t// abort if the branch existed already\n\tif !newBranchCreated {\n\t\treturn\n\t}\n\n\t// move transactions to new branch\n\tif err = tangle.moveTransactionToBranch(cachedTransaction.Retain(), cachedTransactionMetadata.Retain(), cachedTargetBranch.Retain()); err != nil {\n\t\treturn\n\t}\n\n\t// trigger events + set result\n\ttangle.Events.Fork.Trigger(cachedTransaction, cachedTransactionMetadata)\n\tforked = true\n\n\treturn\n}", "func (contract SideFluenceContract) DisputeFork(block1 SideBlock, block2 SideBlock) {\n if block1.PrevBlockHash == block2.PrevBlockHash {\n\n // violation! 
let's punish producers signed both blocks!\n var m = make(map[PublicKey]bool)\n for _, seal := range block1.Signatures {\n m[seal.PublicKey] = true\n }\n\n for _, seal := range block2.Signatures {\n if m[seal.PublicKey] {\n contract.base.NodesDeposits[seal.PublicKey] = 0\n }\n }\n }\n}", "func (c APIClient) StartCommitParent(repoName string, branch string, parentCommit string) (*pfs.Commit, error) {\n\tcommit, err := c.PfsAPIClient.StartCommit(\n\t\tc.Ctx(),\n\t\t&pfs.StartCommitRequest{\n\t\t\tParent: &pfs.Commit{\n\t\t\t\tRepo: &pfs.Repo{\n\t\t\t\t\tName: repoName,\n\t\t\t\t},\n\t\t\t\tID: parentCommit,\n\t\t\t},\n\t\t\tBranch: branch,\n\t\t},\n\t)\n\tif err != nil {\n\t\treturn nil, grpcutil.ScrubGRPC(err)\n\t}\n\treturn commit, nil\n}", "func (l *Unforker) maybeSplitMultidocYaml(ctx context.Context, localPath string) error {\n\treturn util.MaybeSplitMultidocYaml(ctx, l.FS, localPath)\n}", "func (blk *Block) drawToPage(page *model.PdfPage) error {\n\n\t// TODO(gunnsth): Appears very wasteful to do this all the time.\n\t// Possibly create another wrapper around model.PdfPage (creator.page) which can keep track of whether\n\t// this has already been done.\n\n\t// Check if Page contents are wrapped - if not wrap it.\n\tcontent, err := page.GetAllContentStreams()\n\tif err != nil {\n\t\treturn err\n\t}\n\n\tcontentParser := contentstream.NewContentStreamParser(content)\n\tops, err := contentParser.Parse()\n\tif err != nil {\n\t\treturn err\n\t}\n\tops.WrapIfNeeded()\n\n\t// Ensure resource dictionaries are available.\n\tif page.Resources == nil {\n\t\tpage.Resources = model.NewPdfPageResources()\n\t}\n\n\t// Merge the contents into ops.\n\terr = mergeContents(ops, page.Resources, blk.contents, blk.resources)\n\tif err != nil {\n\t\treturn err\n\t}\n\n\t// Merge resources for blocks which were created from pages.\n\t// Necessary for adding resources which do not appear in the block contents.\n\tif err = mergeResources(blk.resources, page.Resources); err != nil {\n\t\treturn err\n\t}\n\n\terr = page.SetContentStreams([]string{string(ops.Bytes())}, core.NewFlateEncoder())\n\tif err != nil {\n\t\treturn err\n\t}\n\n\t// Add block annotations to the page.\n\tfor _, annotation := range blk.annotations {\n\t\tpage.AddAnnotation(annotation)\n\t}\n\n\treturn nil\n}", "func (obj *Doc) Resume(ctx context.Context) error {\n\terr := obj.RPC(ctx, \"Resume\", nil)\n\treturn err\n}", "func (ra *RenameAnalysis) Fork(n int) []core.PipelineItem {\n\treturn core.ForkSamePipelineItem(ra, n)\n}", "func (c *client) EnsureFork(forkingUser, org, repo string) (string, error) {\n\t// Fork repo if it doesn't exist.\n\tfork := forkingUser + \"/\" + repo\n\trepos, err := c.GetRepos(forkingUser, true)\n\tif err != nil {\n\t\treturn repo, fmt.Errorf(\"could not fetch all existing repos: %w\", err)\n\t}\n\t// if the repo does not exist, or it does, but is not a fork of the repo we want\n\tif forkedRepo := getFork(fork, repos); forkedRepo == nil || forkedRepo.Parent.FullName != fmt.Sprintf(\"%s/%s\", org, repo) {\n\t\tif name, err := c.CreateFork(org, repo); err != nil {\n\t\t\treturn repo, fmt.Errorf(\"cannot fork %s/%s: %w\", org, repo, err)\n\t\t} else {\n\t\t\t// we got a fork but it may be named differently\n\t\t\trepo = name\n\t\t}\n\t\tif err := c.waitForRepo(forkingUser, repo); err != nil {\n\t\t\treturn repo, fmt.Errorf(\"fork of %s/%s cannot show up on GitHub: %w\", org, repo, err)\n\t\t}\n\t}\n\treturn repo, nil\n\n}", "func createNewProposal(t *testing.T, id *identity.FullIdentity, files []www.File) 
*www.NewProposal {\n\tt.Helper()\n\n\tif len(files) == 0 {\n\t\tt.Fatalf(\"no files found\")\n\t}\n\n\t// Compute merkle\n\tdigests := make([]*[sha256.Size]byte, 0, len(files))\n\tfor _, f := range files {\n\t\td, ok := util.ConvertDigest(f.Digest)\n\t\tif !ok {\n\t\t\tt.Fatalf(\"could not convert digest %v\", f.Digest)\n\t\t}\n\t\tdigests = append(digests, &d)\n\t}\n\troot := hex.EncodeToString(merkle.Root(digests)[:])\n\n\t// Sign merkle\n\tsig := id.SignMessage([]byte(root))\n\n\treturn &www.NewProposal{\n\t\tFiles: files,\n\t\tPublicKey: hex.EncodeToString(id.Public.Key[:]),\n\t\tSignature: hex.EncodeToString(sig[:]),\n\t}\n}", "func (c *client) CreateFork(owner, repo string) (string, error) {\n\tdurationLogger := c.log(\"CreateFork\", owner, repo)\n\tdefer durationLogger()\n\n\tresp := struct {\n\t\tName string `json:\"name\"`\n\t}{}\n\n\t_, err := c.request(&request{\n\t\tmethod: http.MethodPost,\n\t\tpath: fmt.Sprintf(\"/repos/%s/%s/forks\", owner, repo),\n\t\torg: owner,\n\t\texitCodes: []int{202},\n\t}, &resp)\n\n\t// there are many reasons why GitHub may end up forking the\n\t// repo under a different name -- the repo got re-named, the\n\t// bot account already has a fork with that name, etc\n\treturn resp.Name, err\n}", "func (s *Basegff3Listener) ExitDocument(ctx *DocumentContext) {}", "func (s *BasePlSqlParserListener) EnterReference_partitioning(ctx *Reference_partitioningContext) {}", "func PubmedXML(xmlPaths *[]string, stdin *[]byte, outfn string, keywords *[]string, thread int) {\n\tif len(*xmlPaths) == 1 {\n\t\tthread = 1\n\t}\n\tif len(*stdin) > 0 {\n\t\t*xmlPaths = append(*xmlPaths, \"ParsePubmedXMLStdin\")\n\t}\n\tsem := make(chan bool, thread)\n\n\t//|os.O_APPEND\n\tvar of *os.File\n\tif outfn == \"\" {\n\t\tof = os.Stdout\n\t} else {\n\t\tof, err := os.OpenFile(outfn, os.O_CREATE|os.O_WRONLY, 0664)\n\t\tif err != nil {\n\t\t\tlog.Fatal(err)\n\t\t}\n\t\tdefer of.Close()\n\t}\n\n\tvar err error\n\tfor i, xmlPath := range *xmlPaths {\n\t\tsem <- true\n\t\tgo func(xmlPath string, i int) {\n\t\t\tdefer func() {\n\t\t\t\t<-sem\n\t\t\t}()\n\t\t\tvar pubmed = PubmedArticleSet{}\n\t\t\tif xmlPath != \"ParsePubmedXMLStdin\" {\n\t\t\t\txmlData, err := os.ReadFile(xmlPath)\n\t\t\t\tif err != nil {\n\t\t\t\t\tlog.Warnln(err)\n\t\t\t\t}\n\t\t\t\terr = xml.Unmarshal(xmlData, &pubmed)\n\t\t\t} else if xmlPath == \"ParsePubmedXMLStdin\" && len(*stdin) > 0 {\n\t\t\t\terr = xml.Unmarshal(*stdin, &pubmed)\n\t\t\t}\n\t\t\tif err != nil {\n\t\t\t\tlog.Warnf(\"%v\", err)\n\t\t\t\treturn\n\t\t\t}\n\t\t\tjsonData, _ := json.MarshalIndent(pubmed.PubmedArticle, \"\", \" \")\n\t\t\tio.Copy(of, bytes.NewBuffer(jsonData))\n\t\t}(xmlPath, i)\n\t}\n\tfor i := 0; i < cap(sem); i++ {\n\t\tsem <- true\n\t}\n}", "func (j *Job) prelink() {\n\tfor _, tg := range j.Groups {\n\t\ttg.job = j\n\t\ttg.prelink()\n\t}\n}", "func parent(node *Node, isBuffered bool, udata userdata, push func(*Node, uint32),\n\tpushBuf func(*Node, interface{}, uint32)) error {\n\t//\n\tp := node.Parent()\n\tserial := udata.serial\n\tif p != nil {\n\t\tpush(p, serial) // forward parent node to next pipeline stage\n\t}\n\treturn nil\n}", "func PruneFile(parentFileName, childFileName string, ops FileHandlingOperations) error {\n\tchildFInfo, err := os.Stat(childFileName)\n\tif err != nil {\n\t\treturn errors.Wrapf(err, \"failed to get file info of child file %v\", childFileName)\n\t}\n\tparentFInfo, err := os.Stat(parentFileName)\n\tif err != nil {\n\t\treturn errors.Wrapf(err, \"failed to get file info of parent file %v\", 
parentFileName)\n\t}\n\n\t// ensure no directory\n\tif childFInfo.IsDir() || parentFInfo.IsDir() {\n\t\treturn fmt.Errorf(\"at least one file is directory, not a normal file\")\n\t}\n\n\t// may be caused by the expansion\n\tif childFInfo.Size() != parentFInfo.Size() {\n\t\tif childFInfo.Size() < parentFInfo.Size() {\n\t\t\treturn fmt.Errorf(\"file sizes are not equal and the parent file is larger than the child file\")\n\t\t}\n\t\tif err := os.Truncate(parentFileName, childFInfo.Size()); err != nil {\n\t\t\treturn errors.Wrap(err, \"failed to expand the parent file size before pruning\")\n\t\t}\n\t}\n\n\t// open child and parent files\n\tchildFileIo, err := NewDirectFileIoProcessor(childFileName, os.O_RDONLY, 0)\n\tif err != nil {\n\t\treturn errors.Wrapf(err, \"failed to open child file %v\", childFileName)\n\t}\n\tdefer childFileIo.Close()\n\n\tparentFileIo, err := NewDirectFileIoProcessor(parentFileName, os.O_RDWR, 0)\n\tif err != nil {\n\t\treturn errors.Wrapf(err, \"failed to open parent file %v\", parentFileName)\n\t}\n\tdefer parentFileIo.Close()\n\n\treturn prune(parentFileIo, childFileIo, childFInfo.Size(), ops)\n}", "func (s *server) crawlDoc(ctx context.Context, source string, importPath string, pdoc *doc.Package, hasSubdirs bool, nextCrawl time.Time) (*doc.Package, error) {\n\tmessage := []interface{}{source}\n\tdefer func() {\n\t\tmessage = append(message, importPath)\n\t\tlog.Println(message...)\n\t}()\n\n\tif !nextCrawl.IsZero() {\n\t\td := time.Since(nextCrawl) / time.Hour\n\t\tif d > 0 {\n\t\t\tmessage = append(message, \"late:\", int64(d))\n\t\t}\n\t}\n\n\tetag := \"\"\n\tif pdoc != nil {\n\t\tetag = pdoc.Etag\n\t\tmessage = append(message, \"etag:\", etag)\n\t}\n\n\tstart := time.Now()\n\tvar err error\n\tif strings.HasPrefix(importPath, \"code.google.com/p/go.\") {\n\t\t// Old import path for Go sub-repository.\n\t\tpdoc = nil\n\t\terr = gosrc.NotFoundError{Message: \"old Go sub-repo\", Redirect: \"golang.org/x/\" + importPath[len(\"code.google.com/p/go.\"):]}\n\t} else if blocked, e := s.db.IsBlocked(importPath); blocked && e == nil {\n\t\tpdoc = nil\n\t\terr = gosrc.NotFoundError{Message: \"blocked.\"}\n\t} else if testdataPat.MatchString(importPath) {\n\t\tpdoc = nil\n\t\terr = gosrc.NotFoundError{Message: \"testdata.\"}\n\t} else {\n\t\tvar pdocNew *doc.Package\n\t\tpdocNew, err = doc.Get(ctx, s.httpClient, importPath, etag)\n\t\tmessage = append(message, \"fetch:\", int64(time.Since(start)/time.Millisecond))\n\t\tif err == nil && pdocNew.Name == \"\" && !hasSubdirs {\n\t\t\tfor _, e := range pdocNew.Errors {\n\t\t\t\tmessage = append(message, \"err:\", e)\n\t\t\t}\n\t\t\tpdoc = nil\n\t\t\terr = gosrc.NotFoundError{Message: \"no Go files or subdirs\"}\n\t\t} else if _, ok := err.(gosrc.NotModifiedError); !ok {\n\t\t\tpdoc = pdocNew\n\t\t}\n\t}\n\n\tmaxAge := s.v.GetDuration(ConfigMaxAge)\n\tnextCrawl = start.Add(maxAge)\n\tswitch {\n\tcase strings.HasPrefix(importPath, \"github.com/\") || (pdoc != nil && len(pdoc.Errors) > 0):\n\t\tnextCrawl = start.Add(maxAge * 7)\n\tcase strings.HasPrefix(importPath, \"gist.github.com/\"):\n\t\t// Don't spend time on gists. 
It's silly thing to do.\n\t\tnextCrawl = start.Add(maxAge * 30)\n\t}\n\n\tif err == nil {\n\t\tmessage = append(message, \"put:\", pdoc.Etag)\n\t\tif err := s.put(ctx, pdoc, nextCrawl); err != nil {\n\t\t\tlog.Println(err)\n\t\t}\n\t\ts.publishCrawl(ctx, importPath)\n\t\treturn pdoc, nil\n\t} else if e, ok := err.(gosrc.NotModifiedError); ok {\n\t\tif pdoc.Status == gosrc.Active && !s.isActivePkg(importPath, e.Status) {\n\t\t\tif e.Status == gosrc.NoRecentCommits {\n\t\t\t\te.Status = gosrc.Inactive\n\t\t\t}\n\t\t\tmessage = append(message, \"archive\", e)\n\t\t\tpdoc.Status = e.Status\n\t\t\tif err := s.db.Put(ctx, pdoc, nextCrawl, false); err != nil {\n\t\t\t\tlog.Printf(\"ERROR db.Put(%q): %v\", importPath, err)\n\t\t\t}\n\t\t} else {\n\t\t\t// Touch the package without updating and move on to next one.\n\t\t\tmessage = append(message, \"touch\")\n\t\t\tif err := s.db.SetNextCrawl(importPath, nextCrawl); err != nil {\n\t\t\t\tlog.Printf(\"ERROR db.SetNextCrawl(%q): %v\", importPath, err)\n\t\t\t}\n\t\t}\n\t\ts.publishCrawl(ctx, importPath)\n\t\treturn pdoc, nil\n\t} else if e, ok := err.(gosrc.NotFoundError); ok {\n\t\tmessage = append(message, \"notfound:\", e)\n\t\tif err := s.db.Delete(ctx, importPath); err != nil {\n\t\t\tlog.Printf(\"ERROR db.Delete(%q): %v\", importPath, err)\n\t\t}\n\t\treturn nil, e\n\t} else {\n\t\tmessage = append(message, \"ERROR:\", err)\n\t\treturn nil, err\n\t}\n}", "func PREvent(payload []byte) error {\n\tclient := github.NewClient(nil)\n\t// unmarshal the PR Hook\n\tevt, err := preProcessPRHook(payload)\n\tif err != nil {\n\t\treturn err\n\t}\n\n\t// get a list of changed files and download the new repo\n\tfiles, filename, err := getFilesAndRepo(client, &evt)\n\tif err != nil {\n\t\treturn err\n\t}\n\n\t// extract the repo to a tmp directory\n\ttmpdir, err := extract(filename)\n\tif err != nil {\n\t\treturn errors.Wrap(err, \"while extracting repo\")\n\t}\n\n\t// get the root directory\n\tcontents, err := ioutil.ReadDir(tmpdir)\n\tif len(contents) != 1 {\n\t\treturn fmt.Errorf(\"wrong number of items in the tmp directory: expected 1\")\n\t} else if err != nil {\n\t\treturn err\n\t}\n\troot := path.Join(tmpdir, contents[0].Name())\n\n\tclonePairs, err := clone.NewDetector().Detect(root)\n\tif err != nil {\n\t\treturn err\n\t}\n\n\tfmt.Printf(\"\\n***\\tClone Pairs\\t***\\n\")\n\tfor _, pair := range clonePairs {\n\t\tfmt.Printf(\"%v\\n\", pair)\n\t}\n\tfmt.Printf(\"\\n***\\tChanged files \\t***\\n\")\n\tfor _, cfile := range files {\n\t\tfmt.Printf(\"%v\\n\", cfile.GetFilename())\n\t}\n\n\t// Consider only the clones that are in the diff\n\trelPairs := make([]clone.ClonePair, 0) // absolute\n\trelFiles := make(map[string]bool) // absolute\n\tfor _, pair := range clonePairs {\n\t\tcontains := false\n\t\tfor _, cfile := range files {\n\t\t\tif path.Join(root, cfile.GetFilename()) == pair.First.Filename {\n\t\t\t\trelFiles[pair.First.Filename] = true\n\t\t\t\trelFiles[pair.Second.Filename] = true\n\t\t\t\tcontains = true\n\t\t\t\tbreak\n\t\t\t}\n\t\t}\n\t\tif contains {\n\t\t\trelPairs = append(relPairs, pair)\n\t\t}\n\t}\n\n\t// Create api.File for reach relevant clonepair\n\tfs := processFileset(root, relFiles) // relative\n\tprs := stripRoot(root, relPairs) // relative\n\n\terr = api.SavePrEvent(evt.PullRequest, prs, fs)\n\tif err != nil {\n\t\tlog.Printf(\"%s\", err)\n\t}\n\terr = cleanTmp(tmpdir)\n\n\treturn err\n}", "func Build_pdf(appl *App) error {\n\n var err error\n\tvar cmdArgs []string\n\tvar cmdArgs_cover []string\n\tvar cmdArgs_resume 
[]string\n var cmdArgs_CV []string\n\n if appl.Option <= 6 && appl.Option > 0 {\n contents, err := Replace_strings(TEX_COVER_TEMPL, appl.KvMap_tex)\n \tif err != nil {\n \t\tpanic(err)\n \t}\n \terr = write_file(\"cover.tex\", contents)\n }\n\n switch {\n case appl.Option == 1: // Everything w/ ref included as one file\n go appl.text_cover()\n cmdArgs = []string{\"-synctex=1\", \"-interaction=nonstopmode\", \"\\\"main_all_ref\\\".tex\"}\n case appl.Option == 2: // Everything w/ ref as seperate files\n go appl.text_cover()\n cmdArgs_cover = []string{\"-synctex=1\", \"-interaction=nonstopmode\", \"\\\"main_cover\\\".tex\"}\n cmdArgs_resume = []string{\"-synctex=1\", \"-interaction=nonstopmode\", \"\\\"main_resume\\\".tex\"}\n cmdArgs_CV = []string{\"-synctex=1\", \"-interaction=nonstopmode\", \"\\\"main_CV_ref\\\".tex\"}\n case appl.Option == 3: // Cover + CV w/ ref included as one file\n go appl.text_cover()\n cmdArgs = []string{\"-synctex=1\", \"-interaction=nonstopmode\", \"\\\"main_cover_CV_ref\\\".tex\"}\n case appl.Option == 4: // Cover + CV w/ ref as seperate files\n go appl.text_cover()\n cmdArgs_cover = []string{\"-synctex=1\", \"-interaction=nonstopmode\", \"\\\"main_cover\\\".tex\"}\n cmdArgs_CV = []string{\"-synctex=1\", \"-interaction=nonstopmode\", \"\\\"main_CV_ref\\\".tex\"}\n case appl.Option == 5: // Cover + Resume included as one file\n go appl.text_cover()\n cmdArgs = []string{\"-synctex=1\", \"-interaction=nonstopmode\", \"\\\"main_cover_resume\\\".tex\"}\n case appl.Option == 6: // Cover + Resume as seperate files\n go appl.text_cover()\n cmdArgs_cover = []string{\"-synctex=1\", \"-interaction=nonstopmode\", \"\\\"main_cover\\\".tex\"}\n cmdArgs_resume = []string{\"-synctex=1\", \"-interaction=nonstopmode\", \"\\\"main_resume\\\".tex\"}\n case appl.Option == 7: // CV w/ ref\n cmdArgs = []string{\"-synctex=1\", \"-interaction=nonstopmode\", \"\\\"main_CV_ref\\\".tex\"}\n case appl.Option == 8: // CV w/0 ref\n cmdArgs = []string{\"-synctex=1\", \"-interaction=nonstopmode\", \"\\\"main_CV\\\".tex\"}\n case appl.Option == 9: // just the resume\n cmdArgs = []string{\"-synctex=1\", \"-interaction=nonstopmode\", \"\\\"main_resume\\\".tex\"}\n default: // the follow up\n go appl.text_follow_up()\n cmdArgs = []string{\"-synctex=1\", \"-interaction=nonstopmode\", \"\\\"main_CV_ref\\\".tex\"}\n }\n\n if len(cmdArgs) > 0 {\n err = cmd.Run_cmd(TEX_CMD, cmdArgs)\n if err != nil {\n panic(err)\n }\n }\n\n if len(cmdArgs_cover) > 0 {\n err = cmd.Run_cmd(TEX_CMD, cmdArgs_cover)\n if err != nil {\n panic(err)\n }\n }\n\n if len(cmdArgs_resume) > 0 {\n err = cmd.Run_cmd(TEX_CMD, cmdArgs_resume)\n if err != nil {\n panic(err)\n }\n }\n\n if len(cmdArgs_CV) > 0 {\n err = cmd.Run_cmd(TEX_CMD, cmdArgs_CV)\n if err != nil {\n panic(err)\n }\n }\n\n err = rename_files(appl)\n if err != nil {\n panic(err)\n }\n\n\treturn nil\n}", "func uploadDocument(c echo.Context) error {\n\n\tclaim, err := securityCheck(c, \"upload\")\n\tif err != nil {\n\t\treturn c.String(http.StatusUnauthorized, \"bye\")\n\t}\n\n\treq := c.Request()\n\t// req.ParseMultipartForm(16 << 20) // Max memory 16 MiB\n\n\tdoc := &DBdoc{}\n\tdoc.ID = 0\n\tdoc.Name = c.FormValue(\"desc\")\n\tdoc.Type = c.FormValue(\"type\")\n\tRev, _ := strconv.Atoi(c.FormValue(\"rev\"))\n\tdoc.RefId, _ = strconv.Atoi(c.FormValue(\"ref_id\"))\n\tdoc.UserId, _ = getClaimedUser(claim)\n\tlog.Println(\"Passed bools\", c.FormValue(\"worker\"), c.FormValue(\"sitemgr\"), c.FormValue(\"contractor\"))\n\tdoc.Worker = (c.FormValue(\"worker\") == 
\"true\")\n\tdoc.Sitemgr = (c.FormValue(\"sitemgr\") == \"true\")\n\tdoc.Contractor = (c.FormValue(\"contractor\") == \"true\")\n\tdoc.Filesize = 0\n\n\t// make upload dir if not already there, ignore errors\n\tos.Mkdir(\"uploads\", 0666)\n\n\t// Read files\n\t// files := req.MultipartForm.File[\"file\"]\n\tfiles, _ := req.FormFile(\"file\")\n\tpath := \"\"\n\t//log.Println(\"files =\", files)\n\t// for _, f := range files {\n\tf := files\n\tdoc.Filename = f.Filename\n\n\t// Source file\n\tsrc, err := f.Open()\n\tif err != nil {\n\t\treturn err\n\t}\n\tdefer src.Close()\n\n\t// While filename exists, append a version number to it\n\tdoc.Path = \"uploads/\" + doc.Filename\n\tgotFile := false\n\trevID := 1\n\n\tfor !gotFile {\n\t\tlog.Println(\"Try with path=\", doc.Path)\n\t\tdst, err := os.OpenFile(doc.Path, os.O_EXCL|os.O_RDWR|os.O_CREATE, 0666)\n\t\tif err != nil {\n\t\t\tif os.IsExist(err) {\n\t\t\t\tlog.Println(doc.Path, \"already exists\")\n\t\t\t\tdoc.Path = fmt.Sprintf(\"uploads/%s.%d\", doc.Filename, revID)\n\t\t\t\trevID++\n\t\t\t\tif revID > 999 {\n\t\t\t\t\tlog.Println(\"RevID limit exceeded, terminating\")\n\t\t\t\t\treturn c.String(http.StatusBadRequest, doc.Path)\n\t\t\t\t}\n\t\t\t}\n\t\t} else {\n\t\t\tlog.Println(\"Created file\", doc.Path)\n\t\t\tgotFile = true\n\t\t\tdefer dst.Close()\n\n\t\t\tif doc.Filesize, err = io.Copy(dst, src); err != nil {\n\t\t\t\treturn err\n\t\t\t}\n\n\t\t\t// If we get here, then the file transfer is complete\n\n\t\t\t// If doc does not exist by this filename, create it\n\t\t\t// If doc does exist, create rev, and update header details of doc\n\n\t\t\tif Rev == 0 {\n\t\t\t\t// New doc\n\t\t\t\terr := DB.InsertInto(\"doc\").\n\t\t\t\t\tWhitelist(\"name\", \"filename\", \"path\", \"worker\", \"sitemgr\", \"contractor\", \"type\", \"ref_id\", \"filesize\", \"user_id\").\n\t\t\t\t\tRecord(doc).\n\t\t\t\t\tReturning(\"id\").\n\t\t\t\t\tQueryScalar(&doc.ID)\n\n\t\t\t\tif err != nil {\n\t\t\t\t\tlog.Println(\"Inserting Record:\", err.Error())\n\t\t\t\t} else {\n\t\t\t\t\tlog.Println(\"Inserted new doc with ID\", doc.ID)\n\t\t\t\t}\n\t\t\t} else {\n\t\t\t\t// Revision to existing doc\n\t\t\t\tdocRev := &DBdocRev{}\n\t\t\t\tdocRev.Path = doc.Path\n\t\t\t\tdocRev.Filename = doc.Filename\n\t\t\t\tdocRev.Filesize = doc.Filesize\n\t\t\t\tdocRev.DocId = doc.ID\n\t\t\t\tdocRev.ID = Rev\n\t\t\t\tdocRev.Descr = doc.Name\n\t\t\t\tdocRev.UserId = doc.UserId\n\n\t\t\t\t_, err := DB.InsertInto(\"doc_rev\").\n\t\t\t\t\tWhitelist(\"doc_id\", \"id\", \"descr\", \"filename\", \"path\", \"filesize\", \"user_id\").\n\t\t\t\t\tRecord(docRev).\n\t\t\t\t\tExec()\n\n\t\t\t\tif err != nil {\n\t\t\t\t\tlog.Println(\"Inserting revision:\", err.Error())\n\t\t\t\t} else {\n\t\t\t\t\tlog.Println(\"Inserted new revision with ID\", docRev.ID)\n\t\t\t\t}\n\n\t\t\t}\n\n\t\t} // managed to create the new file\n\t\t// } // loop until we have created a file\n\t} // foreach file being uploaded this batch\n\n\treturn c.String(http.StatusOK, path)\n}", "func (r *readability) prepareDocument(doc *goquery.Document) {\n\t// Remove tags\n\tdoc.Find(\"script\").Remove()\n\tdoc.Find(\"noscript\").Remove()\n\tdoc.Find(\"style\").Remove()\n\tdoc.Find(\"link\").Remove()\n\n\t// Replace font tags to span\n\tdoc.Find(\"font\").Each(func(_ int, font *goquery.Selection) {\n\t\thtml, _ := font.Html()\n\t\tfont.ReplaceWithHtml(\"<span>\" + html + \"</span>\")\n\t})\n}", "func (fs *fsMutable) preCreateCheck(parentInode fuseops.InodeID, lk []byte) error {\n\t// check parent exists\n\tkey := 
formKey(parentInode)\n\te, found := fs.iNodeStore.Get(key)\n\tif !found {\n\t\treturn jfuse.ENOENT\n\t}\n\n\t// parent is a directory\n\tn := e.(*nodeEntry)\n\tif !n.attr.Mode.IsDir() {\n\t\treturn jfuse.ENOTDIR\n\t}\n\n\t// check child name not taken\n\t_, found = fs.lookupTree.Get(lk)\n\tif found {\n\t\treturn jfuse.EEXIST\n\t}\n\treturn nil\n}", "func (p *Generator) Run() {\n\tp.Title = p.ProjectTitle\n\tfileName := markdownName(p.OutputFileName, path.Base(p.ProjectTitle))\n\tp.Module = p.RootModule\n\tif err := p.CreateMarkdown(p.Templates[p.StartTemplateIndex], path.Join(p.OutputDir, fileName), p); err != nil {\n\t\tp.Log.Error(\"Error creating project markdown:\", err)\n\t}\n\n\tvar progress *pb.ProgressBar\n\tdefer func() {\n\t\tif progress != nil && progress.IsStarted() {\n\t\t\tprogress.Finish()\n\t\t\tfmt.Printf(\"The generated files are output to folder `%s`\\n\", p.OutputDir)\n\t\t}\n\t}()\n\n\tvar wg sync.WaitGroup\n\tvar diagramCreator = func(inMap map[string]string, f func(fs afero.Fs, filename string, data string) error, progress *pb.ProgressBar) {\n\t\tfor fileName, contents := range inMap {\n\t\t\twg.Add(1)\n\t\t\tgo func(fileName, contents string) {\n\t\t\t\tmaxCreators <- struct{}{}\n\t\t\t\tdefer func() { <-maxCreators }()\n\n\t\t\t\tvar err = f(p.Fs, fileName, contents)\n\t\t\t\tif err != nil {\n\t\t\t\t\tp.Log.Error(\"Error generating file:\", err)\n\t\t\t\t\tos.Exit(1)\n\t\t\t\t}\n\t\t\t\tif progress != nil {\n\t\t\t\t\tprogress.Increment()\n\t\t\t\t}\n\t\t\t\twg.Done()\n\t\t\t}(fileName, contents)\n\t\t}\n\t}\n\n\tif p.Mermaid {\n\t\tprogress = pb.Full.Start(len(p.MermaidFilesToCreate))\n\t\tdiagramCreator(p.MermaidFilesToCreate, GenerateAndWriteMermaidDiagram, progress)\n\t} else {\n\t\tif strings.Contains(p.PlantumlService, \".jar\") {\n\t\t\tif !p.Server {\n\t\t\t\tdiagramCreator(p.FilesToCreate, p.PUMLFile, progress)\n\t\t\t\tstart := time.Now()\n\t\t\t\tif err := PlantUMLJava(p.PlantumlService, p.OutputDir); err != nil {\n\t\t\t\t\tp.Log.Error(err)\n\t\t\t\t}\n\t\t\t\telapsed := time.Since(start)\n\t\t\t\tfmt.Println(\"Generating took \", elapsed)\n\t\t\t}\n\t\t} else {\n\t\t\tprogress = pb.Full.Start(len(p.FilesToCreate))\n\t\t\tdiagramCreator(p.FilesToCreate, HttpToFile, progress)\n\t\t}\n\t}\n\n\tif p.Redoc {\n\t\tif progress.IsStarted() {\n\t\t\tprogress.SetTotal(progress.Total() + int64(len(p.RedocFilesToCreate)))\n\t\t} else {\n\t\t\tprogress = pb.Full.Start(len(p.RedocFilesToCreate))\n\t\t}\n\t\tdiagramCreator(p.RedocFilesToCreate, GenerateAndWriteRedoc, progress)\n\t}\n\n\tif (p.ImageTags || p.DisableImages) && !p.Redoc {\n\t\tlogrus.Info(\"Skipping Image creation\")\n\t\treturn\n\t}\n\n\twg.Wait()\n}", "func (pc *PageCreate) Exec(ctx context.Context) error {\n\t_, err := pc.Save(ctx)\n\treturn err\n}", "func (b *MonteCarloPolicy) Split() []couchdb.CouchDoc {\n\tvar out []couchdb.CouchDoc\n\treturn out\n}", "func (g *Gitlab) ForkRepository(ctx context.Context, repo scm.Repository, newOwner string) (scm.Repository, error) {\n\tr := repo.(repository)\n\n\t// Get the username of the fork (logged in user if none is set)\n\townerUsername := newOwner\n\tif newOwner == \"\" {\n\t\tcurrentUser, err := g.getCurrentUser(ctx)\n\t\tif err != nil {\n\t\t\treturn nil, err\n\t\t}\n\t\townerUsername = currentUser.Username\n\t}\n\n\t// Check if the project already exist\n\tproject, resp, err := g.glClient.Projects.GetProject(\n\t\tfmt.Sprintf(\"%s/%s\", ownerUsername, r.name),\n\t\tnil,\n\t\tgitlab.WithContext(ctx),\n\t)\n\tif err == nil { // Already forked, just 
return it\n\t\treturn g.convertProject(project)\n\t} else if resp.StatusCode != http.StatusNotFound { // If the error was that the project does not exist, continue to fork it\n\t\treturn nil, err\n\t}\n\n\tnewRepo, _, err := g.glClient.Projects.ForkProject(r.pid, &gitlab.ForkProjectOptions{\n\t\tNamespace: &newOwner,\n\t}, gitlab.WithContext(ctx))\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\n\tfor i := 0; i < 10; i++ {\n\t\trepo, _, err := g.glClient.Projects.GetProject(newRepo.ID, nil, gitlab.WithContext(ctx))\n\t\tif err != nil {\n\t\t\treturn nil, err\n\t\t}\n\n\t\tif repo.ImportStatus == \"finished\" {\n\t\t\treturn g.convertProject(newRepo)\n\t\t}\n\n\t\ttime.Sleep(time.Second * 3)\n\t}\n\n\treturn nil, errors.New(\"time waiting for fork to complete was exceeded\")\n}", "func (me *inode) setParent(newParent *inode) {\n\toldParent := me.Parent\n\tif oldParent == newParent {\n\t\treturn\n\t}\n\tif oldParent != nil {\n\t\tif paranoia {\n\t\t\tch := oldParent.Children[me.Name]\n\t\t\tif ch == nil {\n\t\t\t\tpanic(fmt.Sprintf(\"parent has no child named %v\", me.Name))\n\t\t\t}\n\t\t}\n\t\toldParent.Children[me.Name] = nil, false\n\n\t\tif oldParent.mountPoint != nil && oldParent.mountPoint.unmountPending &&\n\t\t\tlen(oldParent.Children) == 0 {\n\t\t\toldParent.mountPoint = nil\n\t\t\tif oldParent.Parent != nil {\n\t\t\t\toldParent.mount = oldParent.Parent.mount\n\t\t\t}\n\t\t}\n\n\t\tme.Parent = nil\n\t}\n\tif newParent != nil {\n\t\tme.Parent = newParent\n\n\t\tif paranoia {\n\t\t\tch := me.Parent.Children[me.Name]\n\t\t\tif ch != nil {\n\t\t\t\tpanic(fmt.Sprintf(\"Already have an inode with same name: %v: %v\", me.Name, ch))\n\t\t\t}\n\t\t}\n\n\t\tme.Parent.Children[me.Name] = me\n\t}\n}", "func GetLaunchpadDoc(client *http.Client, match map[string]string, installGOPATH string, node *Node, cmdFlags map[string]bool) ([]string, error) {\n\n\tif match[\"project\"] != \"\" && match[\"series\"] != \"\" {\n\t\trc, err := httpGet(client, expand(\"https://code.launchpad.net/{project}{series}/.bzr/branch-format\", match), nil)\n\t\tswitch {\n\t\tcase err == nil:\n\t\t\trc.Close()\n\t\t\t// The structure of the import path is launchpad.net/{root}/{dir}.\n\t\tcase isNotFound(err):\n\t\t\t// The structure of the import path is is launchpad.net/{project}/{dir}.\n\t\t\tmatch[\"repo\"] = match[\"project\"]\n\t\t\tmatch[\"dir\"] = expand(\"{series}{dir}\", match)\n\t\tdefault:\n\t\t\treturn nil, err\n\t\t}\n\t}\n\n\t// bundle and snapshot will have commit 'B' and 'S',\n\t// but does not need to download dependencies.\n\tisCheckImport := len(node.Value) == 0\n\n\tvar downloadPath string\n\t// Check if download with specific revision.\n\tif isCheckImport || len(node.Value) == 1 {\n\t\tdownloadPath = expand(\"https://bazaar.launchpad.net/+branch/{repo}/tarball\", match)\n\t\tnode.Type = \"commit\"\n\t} else {\n\t\tdownloadPath = expand(\"https://bazaar.launchpad.net/+branch/{repo}/tarball/\"+node.Value, match)\n\t}\n\n\t// Scrape the repo browser to find the project revision and individual Go files.\n\tp, err := HttpGetBytes(client, downloadPath, nil)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\n\tprojectPath := expand(\"launchpad.net/{repo}\", match)\n\tinstallPath := installGOPATH + \"/src/\" + projectPath\n\tnode.ImportPath = projectPath\n\n\t// Remove old files.\n\tos.RemoveAll(installPath + \"/\")\n\t// Create destination directory.\n\tos.MkdirAll(installPath+\"/\", os.ModePerm)\n\n\tgzr, err := gzip.NewReader(bytes.NewReader(p))\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\tdefer 
gzr.Close()\n\n\ttr := tar.NewReader(gzr)\n\n\tisCodeOnly := cmdFlags[\"-c\"]\n\tvar autoPath string // Auto path is the root path that generated by bitbucket.org.\n\t// Get source file data.\n\tdirs := make([]string, 0, 5)\n\tfor {\n\t\th, err := tr.Next()\n\t\tif err == io.EOF {\n\t\t\tbreak\n\t\t} else if err != nil {\n\t\t\treturn nil, err\n\t\t}\n\n\t\tfn := h.FileInfo().Name()\n\t\t// Check root path.\n\t\tif len(autoPath) == 0 {\n\t\t\tautoPath = fn[:strings.Index(fn, match[\"repo\"])+len(match[\"repo\"])]\n\t\t}\n\t\tabsPath := strings.Replace(fn, autoPath, installPath, 1)\n\n\t\tswitch {\n\t\tcase h.FileInfo().IsDir(): // Directory.\n\t\t\t// Check if current directory is example.\n\t\t\tif !(!cmdFlags[\"-e\"] && strings.Contains(absPath, \"example\")) {\n\t\t\t\tdirs = append(dirs, absPath)\n\t\t\t}\n\t\tcase isCodeOnly && !utils.IsDocFile(path.Base(absPath)):\n\t\t\tcontinue\n\t\tcase !strings.HasPrefix(fn, \".\"):\n\t\t\t// Create diretory before create file.\n\t\t\tos.MkdirAll(path.Dir(absPath)+\"/\", os.ModePerm)\n\n\t\t\t// Get data from archive.\n\t\t\tfbytes := make([]byte, h.Size)\n\t\t\tif _, err := io.ReadFull(tr, fbytes); err != nil {\n\t\t\t\treturn nil, err\n\t\t\t}\n\n\t\t\t// Write data to file\n\t\t\tfw, err := os.Create(absPath)\n\t\t\tif err != nil {\n\t\t\t\treturn nil, err\n\t\t\t}\n\n\t\t\t_, err = fw.Write(fbytes)\n\t\t\tfw.Close()\n\t\t\tif err != nil {\n\t\t\t\treturn nil, err\n\t\t\t}\n\t\t}\n\t}\n\n\tvar imports []string\n\n\t// Check if need to check imports.\n\tif isCheckImport {\n\t\tfor _, d := range dirs {\n\t\t\timportPkgs, err := CheckImports(d+\"/\", match[\"importPath\"])\n\t\t\tif err != nil {\n\t\t\t\treturn nil, err\n\t\t\t}\n\t\t\timports = append(imports, importPkgs...)\n\t\t}\n\t}\n\n\treturn imports, err\n}", "func Execute() {\n\tif len(os.Args[1:]) > 0 && os.Args[1] == \"gendocs\" {\n\t\tfmt.Println(\"Generating docs...\")\n\t\terr := doc.GenMarkdownTree(rootCmd, \"./docs\")\n\t\tif err != nil {\n\t\t\tlog.Fatal(err)\n\t\t}\n\t\treturn\n\t}\n\n\tif err := rootCmd.Execute(); err != nil {\n\t\tfmt.Println(err)\n\t\tos.Exit(1)\n\t}\n}", "func getCommitAndMakeFile(ctx context.Context, c *cdp.Client, info DocumentInfo, commitIndex int, numAuthorCreated map[string]int, commitsPath string, domLoadTimeout time.Duration) error {\n\n\tcommitMessage, details, err := helpers.CommitIterator(ctx, c, numAuthorCreated)\n\tif err != nil {\n\t\treturn err\n\t}\n\n\terr = helpers.MakeCommitFile(commitMessage, details.Hash, commitsPath, commitIndex)\n\tif err != nil {\n\t\treturn err\n\t}\n\n\terr = helpers.Navigate(ctx, c.Page, details.NextCommitHref, domLoadTimeout)\n\tif err != nil {\n\t\treturn err\n\t}\n\n\treturn nil\n}", "func (syncer *Syncer) syncFork(ctx context.Context, incoming *types.TipSet, known *types.TipSet) ([]*types.TipSet, error) {\n\n\tchkpt := syncer.GetCheckpoint()\n\tif known.Key() == chkpt {\n\t\treturn nil, ErrForkCheckpoint\n\t}\n\n\t// TODO: Does this mean we always ask for ForkLengthThreshold blocks from the network, even if we just need, like, 2?\n\t// Would it not be better to ask in smaller chunks, given that an ~ForkLengthThreshold is very rare?\n\ttips, err := syncer.Exchange.GetBlocks(ctx, incoming.Parents(), int(build.ForkLengthThreshold))\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\n\tnts, err := syncer.store.LoadTipSet(known.Parents())\n\tif err != nil {\n\t\treturn nil, xerrors.Errorf(\"failed to load next local tipset: %w\", err)\n\t}\n\n\tfor cur := 0; cur < len(tips); {\n\t\tif nts.Height() == 0 {\n\t\t\tif 
!syncer.Genesis.Equals(nts) {\n\t\t\t\treturn nil, xerrors.Errorf(\"somehow synced chain that linked back to a different genesis (bad genesis: %s)\", nts.Key())\n\t\t\t}\n\t\t\treturn nil, xerrors.Errorf(\"synced chain forked at genesis, refusing to sync; incoming: %s\", incoming.Cids())\n\t\t}\n\n\t\tif nts.Equals(tips[cur]) {\n\t\t\treturn tips[:cur+1], nil\n\t\t}\n\n\t\tif nts.Height() < tips[cur].Height() {\n\t\t\tcur++\n\t\t} else {\n\t\t\t// We will be forking away from nts, check that it isn't checkpointed\n\t\t\tif nts.Key() == chkpt {\n\t\t\t\treturn nil, ErrForkCheckpoint\n\t\t\t}\n\n\t\t\tnts, err = syncer.store.LoadTipSet(nts.Parents())\n\t\t\tif err != nil {\n\t\t\t\treturn nil, xerrors.Errorf(\"loading next local tipset: %w\", err)\n\t\t\t}\n\t\t}\n\t}\n\n\treturn nil, ErrForkTooLong\n}", "func (s *langsvr) newDocument(uri string, language string, version int, body Body) *Document {\n\tif d, exists := s.documents[uri]; exists {\n\t\tpanic(fmt.Errorf(\"Attempting to create a document that already exists. %+v\", d))\n\t}\n\td := &Document{}\n\td.uri = uri\n\td.path, _ = URItoPath(uri)\n\td.language = language\n\td.version = version\n\td.server = s\n\td.body = body\n\ts.documents[uri] = d\n\treturn d\n}", "func ProofOfServiceDoc(proofOfService models.ProofOfServiceDoc, storer storage.FileStorer) (*ghcmessages.ProofOfServiceDoc, error) {\n\n\tuploads := make([]*ghcmessages.Upload, len(proofOfService.PrimeUploads))\n\tif proofOfService.PrimeUploads != nil && len(proofOfService.PrimeUploads) > 0 {\n\t\tfor i, primeUpload := range proofOfService.PrimeUploads {\n\t\t\turl, err := storer.PresignedURL(primeUpload.Upload.StorageKey, primeUpload.Upload.ContentType)\n\t\t\tif err != nil {\n\t\t\t\treturn nil, err\n\t\t\t}\n\t\t\tuploads[i] = Upload(storer, primeUpload.Upload, url)\n\t\t}\n\t}\n\n\treturn &ghcmessages.ProofOfServiceDoc{\n\t\tUploads: uploads,\n\t}, nil\n}", "func getForkOldNewCommon(ctx context.Context, t *testing.T, chainStore *chain.Store, blockSource *th.TestFetcher, dstP *SyncerTestParams, a, b, c uint) (types.TipSet, types.TipSet, types.TipSet) {\n\t// Add a - 1 tipsets to the head of the chainStore.\n\trequireGrowChain(ctx, t, blockSource, chainStore, a, dstP)\n\tcommonAncestor := requireHeadTipset(t, chainStore)\n\n\tif c > 0 {\n\t\t// make the first fork tipset (need to do manually to set nonce)\n\t\tsigner, ki := types.NewMockSignersAndKeyInfo(1)\n\t\tminerWorker, err := ki[0].Address()\n\t\trequire.NoError(t, err)\n\t\tfakeChildParams := th.FakeChildParams{\n\t\t\tParent: commonAncestor,\n\t\t\tGenesisCid: dstP.genCid,\n\t\t\tSigner: signer,\n\t\t\tMinerWorker: minerWorker,\n\t\t\tStateRoot: dstP.genStateRoot,\n\t\t\tNonce: uint64(1),\n\t\t}\n\n\t\tfirstForkBlock := th.RequireMkFakeChild(t, fakeChildParams)\n\t\trequirePutBlocks(t, blockSource, firstForkBlock)\n\t\tfirstForkTs := th.RequireNewTipSet(t, firstForkBlock)\n\t\tfirstForkTsas := &chain.TipSetAndState{\n\t\t\tTipSet: firstForkTs,\n\t\t\tTipSetStateRoot: dstP.genStateRoot,\n\t\t}\n\t\trequire.NoError(t, chainStore.PutTipSetAndState(ctx, firstForkTsas))\n\t\terr = chainStore.SetHead(ctx, firstForkTs)\n\t\trequire.NoError(t, err)\n\n\t\t// grow the fork by (c - 1) blocks (c total)\n\t\trequireGrowChain(ctx, t, blockSource, chainStore, c-1, dstP)\n\t}\n\n\toldHead := requireHeadTipset(t, chainStore)\n\n\t// go back and complete the original chain\n\terr := chainStore.SetHead(ctx, commonAncestor)\n\trequire.NoError(t, err)\n\trequireGrowChain(ctx, t, blockSource, chainStore, b, dstP)\n\tnewHead := 
requireHeadTipset(t, chainStore)\n\n\treturn oldHead, newHead, commonAncestor\n}", "func (c *Contents) PostProcess() error {\n\treturn nil\n}", "func commitizerMain(timeout time.Duration, commitsPath string, numAuthorCreated map[string]int) error {\n\tctx, cancel := context.WithTimeout(context.Background(), timeout)\n\tdefer cancel()\n\n\t// Use the DevTools HTTP/JSON API to manage targets (e.g. pages, webworkers).\n\tdevt := devtool.New(\"http://127.0.0.1:9222\")\n\tpt, err := devt.Get(ctx, devtool.Page)\n\tif err != nil {\n\t\tpt, err = devt.Create(ctx)\n\t\tif err != nil {\n\t\t\treturn errors.Wrap(err, \"couldn't create cdp target\")\n\t\t}\n\t}\n\n\t// Initiate a new RPC connection to the Chrome DevTools Protocol target.\n\tconn, err := rpcc.DialContext(ctx, pt.WebSocketDebuggerURL)\n\tif err != nil {\n\t\treturn errors.Wrap(err, \"couldn't initiate new rpc connection with cdp target\")\n\t}\n\tdefer conn.Close() // Leaving connections open will leak memory.\n\n\tc := cdp.NewClient(conn)\n\n\tdomLoadTimeout := 5 * time.Second\n\terr = helpers.Navigate(ctx, c.Page, *repoURL, domLoadTimeout)\n\tif err != nil {\n\t\treturn err\n\t}\n\n\tvar info DocumentInfo\n\n\terr = navigateToBranch(ctx, c, info, *branchName, domLoadTimeout)\n\tif err != nil {\n\t\treturn err\n\t}\n\n\t// Loop for getting *numCommits number of commits by calling getCommitAndMakeFile and making commit file every time\n\tcommitIndex := 0\n\tfor commitIndex < *numCommits {\n\t\terr = getCommitAndMakeFile(ctx, c, info, commitIndex, numAuthorCreated, commitsPath, domLoadTimeout)\n\t\tif err != nil {\n\t\t\treturn err\n\t\t}\n\n\t\tcommitIndex += 1\n\t\tfmt.Printf(\"Commit %d obtained!\\n\", commitIndex)\n\t}\n\treturn nil\n}", "func forkAndMountat(dirfd uintptr, source, target, fstype, flags, data uintptr, pipefd int) (pid uintptr, errno syscall.Errno) {\n\n\t// block signal during clone\n\tbeforeFork()\n\n\t// the cloned thread shares the open file descriptor, but the thread\n\t// never be reused by runtime.\n\tpid, _, errno = syscall.RawSyscall6(syscall.SYS_CLONE, uintptr(syscall.SIGCHLD)|syscall.CLONE_FILES, 0, 0, 0, 0, 0)\n\tif errno != 0 || pid != 0 {\n\t\t// restore all signals\n\t\tafterFork()\n\t\treturn\n\t}\n\n\t// restore all signals\n\tafterForkInChild()\n\n\t// change working dir\n\t_, _, errno = syscall.RawSyscall(syscall.SYS_FCHDIR, dirfd, 0, 0)\n\tif errno != 0 {\n\t\tgoto childerr\n\t}\n\t_, _, errno = syscall.RawSyscall6(syscall.SYS_MOUNT, source, target, fstype, flags, data, 0)\n\nchilderr:\n\t_, _, errno = syscall.RawSyscall(syscall.SYS_WRITE, uintptr(pipefd), uintptr(unsafe.Pointer(&errno)), unsafe.Sizeof(errno))\n\tsyscall.RawSyscall(syscall.SYS_EXIT, uintptr(errno), 0, 0)\n\tpanic(\"unreachable\")\n}", "func stage2() {\n\tlog.Println(\"stage 2\")\n\n\tminimal, err := pdf.Open(\"h7-minimal.pdf\")\n\tif err != nil {\n\t\tlog.Fatalln(errgo.Details(err))\n\t}\n\n\tannotation := minimal.Get(pdf.ObjectReference{ObjectNumber: 10}).(pdf.Dictionary)\n\tannotation[pdf.Name(\"Contents\")] = pdf.String(\"Modified Text #3\")\n\tminimal.Add(pdf.IndirectObject{\n\t\tObjectReference: pdf.ObjectReference{ObjectNumber: 10},\n\t\tObject: annotation,\n\t})\n\n\terr = minimal.Save()\n\tif err != nil {\n\t\tlog.Fatalln(errgo.Details(err))\n\t}\n}", "func (c *GitCommit) Parent(n uint) *GitCommit {\n\treturn &GitCommit{\n\t\tRepo: c.Repo,\n\t\tCommit: c.Commit.Parent(n),\n\t}\n}", "func (fup *folderUpdatePrepper) prepTree(\n\tctx context.Context, lState *kbfssync.LockState, unmergedChains *crChains,\n\tnewMD *RootMetadata, 
chargedTo keybase1.UserOrTeamID, node *pathTreeNode,\n\tstopAt data.BlockPointer, dbm dirBlockMap, newFileBlocks fileBlockMap,\n\tdirtyBcache data.DirtyBlockCacheSimple, bps blockPutState,\n\tcopyBehavior prepFolderCopyBehavior) error {\n\t// If this has no children, then sync it, as far back as stopAt.\n\tif len(node.children) == 0 {\n\t\t// Look for the directory block or the new file block.\n\t\tentryType := data.Dir\n\t\thasDirBlock, err := dbm.hasBlock(ctx, node.ptr)\n\t\tif err != nil {\n\t\t\treturn err\n\t\t}\n\t\t// non-nil exactly when entryType != Dir.\n\t\tvar block data.Block\n\t\tvar fblock *data.FileBlock\n\t\tif hasDirBlock {\n\t\t\tdirBlock, err := dbm.getBlock(ctx, node.ptr)\n\t\t\tif err != nil {\n\t\t\t\treturn err\n\t\t\t}\n\t\t\tblock = dirBlock\n\t\t} else {\n\t\t\t// This must be a file, so look it up in the parent\n\t\t\tif node.parent == nil {\n\t\t\t\treturn fmt.Errorf(\"No parent found for node %v while \"+\n\t\t\t\t\t\"syncing path %v\", node.ptr, node.mergedPath.Path)\n\t\t\t}\n\n\t\t\tvar err error\n\t\t\tfblock, err = newFileBlocks.GetTopBlock(\n\t\t\t\tctx, node.parent.ptr, node.mergedPath.TailName())\n\t\t\tif err != nil {\n\t\t\t\treturn err\n\t\t\t}\n\t\t\tblock = fblock\n\t\t\tentryType = data.File // TODO: FIXME for Ex and Sym\n\t\t}\n\n\t\t// For an indirect file block, make sure a new\n\t\t// reference is made for every child block.\n\t\tif copyBehavior == prepFolderCopyIndirectFileBlocks &&\n\t\t\tentryType != data.Dir && fblock.IsInd {\n\t\t\tvar infos []data.BlockInfo\n\t\t\tvar err error\n\n\t\t\t// If journaling is enabled, new references aren't\n\t\t\t// supported. We have to fetch each block and ready\n\t\t\t// it. TODO: remove this when KBFS-1149 is fixed.\n\t\t\tif TLFJournalEnabled(fup.config, fup.id()) {\n\t\t\t\tinfos, err = fup.blocks.UndupChildrenInCopy(\n\t\t\t\t\tctx, lState, newMD.ReadOnly(), node.mergedPath, bps,\n\t\t\t\t\tdirtyBcache, fblock)\n\t\t\t\tif err != nil {\n\t\t\t\t\treturn err\n\t\t\t\t}\n\t\t\t} else {\n\t\t\t\t// Ready any mid-level internal children.\n\t\t\t\t_, err = fup.blocks.ReadyNonLeafBlocksInCopy(\n\t\t\t\t\tctx, lState, newMD.ReadOnly(), node.mergedPath, bps,\n\t\t\t\t\tdirtyBcache, fblock)\n\t\t\t\tif err != nil {\n\t\t\t\t\treturn err\n\t\t\t\t}\n\n\t\t\t\tinfos, err = fup.blocks.\n\t\t\t\t\tGetIndirectFileBlockInfosWithTopBlock(\n\t\t\t\t\t\tctx, lState, newMD.ReadOnly(), node.mergedPath, fblock)\n\t\t\t\tif err != nil {\n\t\t\t\t\treturn err\n\t\t\t\t}\n\n\t\t\t\tfor _, info := range infos {\n\t\t\t\t\t// The indirect blocks were already added to\n\t\t\t\t\t// childBps, so only add the dedup'd leaf blocks.\n\t\t\t\t\tif info.RefNonce != kbfsblock.ZeroRefNonce {\n\t\t\t\t\t\terr = bps.AddNewBlock(\n\t\t\t\t\t\t\tctx, info.BlockPointer, nil, data.ReadyBlockData{}, nil)\n\t\t\t\t\t\tif err != nil {\n\t\t\t\t\t\t\treturn err\n\t\t\t\t\t\t}\n\t\t\t\t\t}\n\t\t\t\t}\n\t\t\t}\n\t\t\tfor _, info := range infos {\n\t\t\t\tnewMD.AddRefBlock(info)\n\t\t\t}\n\t\t}\n\n\t\t// Assume the mtime/ctime are already fixed up in the blocks\n\t\t// in the dbm.\n\t\t_, _, err = fup.prepUpdateForPath(\n\t\t\tctx, lState, chargedTo, newMD, block, node.ptr,\n\t\t\t*node.mergedPath.ParentPath(), node.mergedPath.TailName(),\n\t\t\tentryType, false, false, stopAt, dbm, bps)\n\t\tif err != nil {\n\t\t\treturn err\n\t\t}\n\n\t\treturn nil\n\t}\n\n\t// If there is more than one child, use this node as the stopAt\n\t// since it is the branch point, except for the last child.\n\tcount := 0\n\tfor _, child := range node.children 
{\n\t\tlocalStopAt := node.ptr\n\t\tcount++\n\t\tif count == len(node.children) {\n\t\t\tlocalStopAt = stopAt\n\t\t}\n\t\terr := fup.prepTree(\n\t\t\tctx, lState, unmergedChains, newMD, chargedTo, child, localStopAt,\n\t\t\tdbm, newFileBlocks, dirtyBcache, bps, copyBehavior)\n\t\tif err != nil {\n\t\t\treturn err\n\t\t}\n\t}\n\treturn nil\n}", "func createMinimalFile() {\n\tlog.Printf(\"createMinimalFile\")\n\n\tminimal, err := pdf.Create(\"h7-minimal.pdf\")\n\tif err != nil {\n\t\tlog.Fatalln(errgo.Details(err))\n\t}\n\tdefer minimal.Close()\n\n\tminimal.Root = pdf.ObjectReference{ObjectNumber: 1}\n\n\t// catalog\n\tminimal.Add(pdf.IndirectObject{\n\t\tObjectReference: pdf.ObjectReference{ObjectNumber: 1},\n\t\tObject: pdf.Dictionary{\n\t\t\tpdf.Name(\"Type\"): pdf.Name(\"Catalog\"),\n\t\t\tpdf.Name(\"Outlines\"): pdf.ObjectReference{\n\t\t\t\tObjectNumber: 2,\n\t\t\t},\n\t\t\tpdf.Name(\"Pages\"): pdf.ObjectReference{\n\t\t\t\tObjectNumber: 3,\n\t\t\t},\n\t\t},\n\t})\n\n\t// outlines\n\tminimal.Add(pdf.IndirectObject{\n\t\tObjectReference: pdf.ObjectReference{ObjectNumber: 2},\n\t\tObject: pdf.Dictionary{\n\t\t\tpdf.Name(\"Type\"): pdf.Name(\"Outlines\"),\n\t\t\tpdf.Name(\"Count\"): pdf.Integer(0),\n\t\t},\n\t})\n\n\t// pages\n\tminimal.Add(pdf.IndirectObject{\n\t\tObjectReference: pdf.ObjectReference{ObjectNumber: 3},\n\t\tObject: pdf.Dictionary{\n\t\t\tpdf.Name(\"Type\"): pdf.Name(\"Pages\"),\n\t\t\tpdf.Name(\"Kids\"): pdf.Array{\n\t\t\t\tpdf.ObjectReference{\n\t\t\t\t\tObjectNumber: 4,\n\t\t\t\t},\n\t\t\t},\n\t\t\tpdf.Name(\"Count\"): pdf.Integer(1),\n\t\t},\n\t})\n\n\t// page\n\tminimal.Add(pdf.IndirectObject{\n\t\tObjectReference: pdf.ObjectReference{ObjectNumber: 4},\n\t\tObject: pdf.Dictionary{\n\t\t\tpdf.Name(\"Type\"): pdf.Name(\"Page\"),\n\t\t\tpdf.Name(\"Parent\"): pdf.ObjectReference{\n\t\t\t\tObjectNumber: 3,\n\t\t\t},\n\t\t\tpdf.Name(\"MediaBox\"): pdf.Array{\n\t\t\t\tpdf.Integer(0),\n\t\t\t\tpdf.Integer(0),\n\t\t\t\tpdf.Integer(612),\n\t\t\t\tpdf.Integer(792),\n\t\t\t},\n\t\t\tpdf.Name(\"Contents\"): pdf.ObjectReference{\n\t\t\t\tObjectNumber: 5,\n\t\t\t},\n\t\t\tpdf.Name(\"Resources\"): pdf.Dictionary{\n\t\t\t\tpdf.Name(\"ProcSet\"): pdf.ObjectReference{\n\t\t\t\t\tObjectNumber: 6,\n\t\t\t\t},\n\t\t\t},\n\t\t},\n\t})\n\n\t// content stream\n\tminimal.Add(pdf.IndirectObject{\n\t\tObjectReference: pdf.ObjectReference{ObjectNumber: 5},\n\t\tObject: pdf.Stream{\n\t\t\tDictionary: pdf.Dictionary{\n\t\t\t\tpdf.Name(\"Length\"): pdf.Integer(0),\n\t\t\t},\n\t\t},\n\t})\n\n\t// procset\n\tminimal.Add(pdf.IndirectObject{\n\t\tObjectReference: pdf.ObjectReference{ObjectNumber: 6},\n\t\tObject: pdf.Array{\n\t\t\tpdf.Name(\"PDF\"),\n\t\t},\n\t})\n\n\tminimal.Root = pdf.ObjectReference{ObjectNumber: 1}\n\n\terr = minimal.Save()\n\tif err != nil {\n\t\tlog.Fatalln(errgo.Details(err))\n\t}\n}", "func (c *Reader) addParents(filepath string) {\n\tdir, f := path.Split(filepath)\n\tif dir == \"\" {\n\t\tdir = \"/\"\n\t} else {\n\t\tdir = strings.TrimSuffix(dir, \"/\")\n\t}\n\tdebug(fmt.Sprintf(\"adding %q as a child of %q\", f, dir))\n\t// TODO(asjoyner): handle file + directory collisions\n\tif parent, ok := c.nodes[dir]; !ok {\n\t\t// if the parent node doesn't yet exist, initialize it\n\t\tc.nodes[dir] = Node{\n\t\t\tFilename: dir,\n\t\t\tChildren: map[string]bool{f: true},\n\t\t}\n\t} else {\n\t\tparent.Children[f] = true\n\t}\n\tif dir != \"/\" {\n\t\tc.addParents(dir)\n\t}\n}", "func execNew(_ int, p *gop.Context) {\n\targs := p.GetArgs(3)\n\tret := doc.New(args[0].(*ast.Package), 
args[1].(string), doc.Mode(args[2].(int)))\n\tp.Ret(3, ret)\n}", "func (inp ThingFrom) ThingFork() (out1, out2 ThingFrom) {\n\tcha1 := make(chan Thing)\n\tcha2 := make(chan Thing)\n\tgo inp.forkThing(cha1, cha2)\n\treturn cha1, cha2\n}", "func ProcessForkedContext(parent context.Context) context.Context {\n\treturn addFlagToContext[processForkedFlag](parent)\n}", "func (task *Task) Pave() {\n\tvar message bytes.Buffer\n\thasParentCmd := task.Config.CmdString != \"\"\n\thasHeader := len(task.Children) > 0\n\tnumTasks := len(task.Children)\n\tif hasParentCmd {\n\t\tnumTasks++\n\t}\n\tscr := newScreen()\n\tscr.ResetFrame(numTasks, hasHeader, Config.Options.ShowSummaryFooter)\n\n\t// make room for the title of a parallel proc group\n\tif hasHeader {\n\t\tmessage.Reset()\n\t\tlineObj := LineInfo{Status: statusRunning.Color(\"i\"), Title: task.Config.Name, Msg: \"\", Prefix: Config.Options.BulletChar}\n\t\ttask.Display.Template.Execute(&message, lineObj)\n\t\tscr.DisplayHeader(message.String())\n\t}\n\n\tif hasParentCmd {\n\t\ttask.Display.Values = LineInfo{Status: statusPending.Color(\"i\"), Title: task.Config.Name}\n\t\ttask.display()\n\t}\n\n\tfor line := 0; line < len(task.Children); line++ {\n\t\ttask.Children[line].Display.Values = LineInfo{Status: statusPending.Color(\"i\"), Title: task.Children[line].Config.Name}\n\t\ttask.Children[line].display()\n\t}\n}", "func spDocument(ptr unsafe.Pointer) unsafe.Pointer {\n\treturn C.sp_document(ptr)\n}", "func PrimaryDocumentation() {\n\tdocNum := 0\n\tfor {\n\t\t_ = ioutil.WriteFile(\"system/primary_doc\"+strconv.Itoa(ElevatorID)+\".txt\", []byte(strconv.FormatInt(int64(docNum), 10)), 0644)\n\t\ttime.Sleep(1*time.Second)\n\t\tdocNum += 1\n\t}\n}", "func main() {\n\tflag.Parse()\n\tif *version {\n\t\tfmt.Fprintf(os.Stderr, \"Version 0.1\\n\")\n\t\tos.Exit(0)\n\t}\n\n\tif *dbName == \"\" {\n\t\tflag.Usage()\n\t\tos.Exit(1)\n\t}\n\n\tif *user == \"\" {\n\t\t*user = *dbName\n\t}\n\n\tif *pass == \"\" {\n\t\tfmt.Printf(\"Password: \")\n\t\tos.Stdout.Sync()\n\t\t_, err := fmt.Scanln(pass)\n\t\tutil.CheckErrFatal(err)\n\t}\n\n\tif !util.FileExists(\"content\") {\n\t\tfmt.Fprintln(os.Stderr, \"There is no content directory here. 
Did you mean to try somewhere else?\")\n\t\tos.Exit(2)\n\t}\n\n\t// username:password@protocol(address)/dbname?param=value\n\tdb := model.Connect(*driver, *user+\":\"+*pass+\"@tcp(\"+*host+\")/\"+*dbName, *prefix, *verbose)\n\tcckFieldTypes, err := db.CCKFields()\n\tif err != nil && *emvideoField != \"\" {\n\t\tutil.Fatal(\"Unable to retrieve CCK Field metadata: %s\", err.Error())\n\t}\n\n\tallBookPagesAsMap := make(map[int32]*model.BookPage) //db.AllBookPagesAsMap()\n\n\t//\tfmt.Println(\"\\nnode types:\")\n\t//\tspew.Dump(db.AllNodeTypes())\n\t//\tfmt.Println(\"\\nbooks:\")\n\t//\tspew.Dump(db.AllBooksAsMap())\n\t//\tfmt.Println(\"\\nbook pages:\")\n\t//\tspew.Dump(allBookPagesAsMap)\n\t//\tfmt.Println(\"\\nmenus:\")\n\t//\tspew.Dump(db.AllMenus())\n\tprocessVocabs(db)\n\n\t//\tfor _, node := range model.AllNodes(db, *prefix) {\n\t//\t\tfmt.Printf(\"%v\\n\", node)\n\t//\t}\n\n\toffset := 0\n\tnodes := db.JoinedNodeFields(offset, 10)\n\tfor len(nodes) > 0 {\n\t\tfor _, node := range nodes {\n\t\t\talias := db.GetUrlAlias(node.Nid)\n\t\t\tterms := db.JoinedTaxonomyTerms(node.Nid)\n\t\t\tmenus := db.JoinedMenusForPath(fmt.Sprintf(\"node/%d\", node.Nid))\n\t\t\temvideos := make([]model.Emvideo, 0, 10)\n\t\t\tif *emvideoField != \"\" {\n\t\t\t\tcckData, err := db.CCKDataForNode(node, cckFieldTypes[node.Type])\n\t\t\t\tif err != nil {\n\t\t\t\t\tutil.Fatal(\"Unable to get CCK field data for node: %s\", err.Error())\n\t\t\t\t}\n\t\t\t\tfor _, cckFieldType := range cckFieldTypes[node.Type] {\n\t\t\t\t\tif cckFieldType.Name == *emvideoField {\n\t\t\t\t\t\tvideo, err := model.EmvideoForNodeField(cckFieldType, cckData)\n\t\t\t\t\t\tif err == nil {\n\t\t\t\t\t\t\temvideos = append(emvideos, *video)\n\t\t\t\t\t\t}\n\t\t\t\t\t}\n\t\t\t\t}\n\t\t\t}\n\t\t\t//\t\t\thasMenuOrBook := false\n\t\t\tfmt.Printf(\"node/%d %s %s\\n\", node.Nid, alias, node.Title)\n\t\t\tif bookPage, exists := allBookPagesAsMap[node.Nid]; exists {\n\t\t\t\t//\t\t\t\tspew.Printf(\" book %v\\n\", bookPage)\n\t\t\t\tif len(menus) == 0 {\n\t\t\t\t\tmenus = db.MenusForMlid(bookPage.Mlid)\n\t\t\t\t}\n\t\t\t\t//\t\t\t\thasMenuOrBook = true\n\t\t\t}\n\t\t\tif len(menus) > 0 {\n\t\t\t\t//\t\t\t\tspew.Printf(\" menu %v\\n\", menus)\n\t\t\t\t//\t\t\t\thasMenuOrBook = true\n\t\t\t}\n\t\t\t//\t\t\tif !hasMenuOrBook {\n\t\t\t//\t\t\t\tfmt.Printf(\" --\\n\")\n\t\t\t//\t\t\t}\n\t\t\tprocessNode(node, alias, terms, menus, emvideos)\n\t\t}\n\t\toffset += len(nodes)\n\t\tnodes = db.JoinedNodeFields(offset, 10)\n\t}\n\tfmt.Printf(\"Total %d nodes.\\n\", offset)\n}", "func (db *Database) insertFilePart(fp FilePart, owner Client, storer DbClient) {\n\tdbF := db.dbFileForClientFile(fp.parent, owner)\n\tif _, err := db.Exec(\"INSERT INTO FilePart (parentId, name, fileIndex) VALUES ($1, $2, $3)\", dbF.id, fp.name, fp.index); err != nil {\n\t\tlog.Println(\"save file part:\", err)\n\t}\n}", "func (b *Buffer) Parent() *Buffer { return b.parent }", "func CreatePage(\n\txRefTable *model.XRefTable,\n\tparentPageIndRef types.IndirectRef,\n\tp *model.Page,\n\tfonts model.FontMap) (*types.IndirectRef, types.Dict, error) {\n\n\tpageDict := types.Dict(\n\t\tmap[string]types.Object{\n\t\t\t\"Type\": types.Name(\"Page\"),\n\t\t\t\"Parent\": parentPageIndRef,\n\t\t\t\"MediaBox\": p.MediaBox.Array(),\n\t\t\t\"CropBox\": p.CropBox.Array(),\n\t\t},\n\t)\n\n\terr := addPageResources(xRefTable, pageDict, *p, fonts)\n\tif err != nil {\n\t\treturn nil, nil, err\n\t}\n\n\tir, err := xRefTable.StreamDictIndRef(p.Buf.Bytes())\n\tif err != nil {\n\t\treturn nil, pageDict, 
err\n\t}\n\tpageDict.Insert(\"Contents\", *ir)\n\n\tpageDictIndRef, err := xRefTable.IndRefForNewObject(pageDict)\n\tif err != nil {\n\t\treturn nil, nil, err\n\t}\n\n\tif len(p.AnnotTabs) == 0 && len(p.Annots) == 0 && len(p.LinkAnnots) == 0 {\n\t\treturn pageDictIndRef, pageDict, nil\n\t}\n\n\tif err := setAnnotationParentsAndFields(xRefTable, p, *pageDictIndRef); err != nil {\n\t\treturn nil, nil, err\n\t}\n\n\tarr, err := mergeAnnotations(nil, p.Annots, p.AnnotTabs)\n\tif err != nil {\n\t\treturn nil, nil, err\n\t}\n\n\tfor _, la := range p.LinkAnnots {\n\t\td, err := la.RenderDict(xRefTable, *pageDictIndRef)\n\t\tif err != nil {\n\t\t\treturn nil, nil, &json.UnsupportedTypeError{}\n\t\t}\n\t\tir, err := xRefTable.IndRefForNewObject(d)\n\t\tif err != nil {\n\t\t\treturn nil, nil, err\n\t\t}\n\t\tarr = append(arr, *ir)\n\t}\n\n\tpageDict[\"Annots\"] = arr\n\n\treturn pageDictIndRef, pageDict, err\n}", "func makeDiversion() (*os.File, chan string) {\n\treader, out, err := os.Pipe()\n\tif err != nil {\n\t\tpanic(\"Cannot create pipe\")\n\t}\n\toutCh := make(chan string)\n\n\tgo func() {\n\t\tvar buf bytes.Buffer\n\t\tio.Copy(&buf, reader)\n\t\toutCh <- buf.String()\n\t}()\n\n\treturn out, outCh\n}", "func (r *retriever) Process() {\n\tvar wg sync.WaitGroup\n\n\tfor url := range r.In {\n\t\twg.Add(1)\n\t\t// For now the goroutines will be unbounded\n\t\tgo func(url string) {\n\t\t\tdefer wg.Done()\n\t\t\t// TODO: Add an error process\n\t\t\tif html, err := r.getter(url); err == nil {\n\t\t\t\tr.Out <- NewPage(url, html)\n\t\t\t} else {\n\t\t\t\tr.Err <- err\n\t\t\t}\n\t\t}(url)\n\t}\n\n\twg.Wait()\n}", "func createHTMLNote(htmlFileName, mdFileName string) error {\n\tvar result error\n\tlog.Print(\"Generating HTML release note...\")\n\tcssFileName := \"/tmp/release_note_cssfile\"\n\tcssFile, err := os.Create(cssFileName)\n\tif err != nil {\n\t\treturn fmt.Errorf(\"failed to create css file %s: %v\", cssFileName, err)\n\t}\n\n\tcssFile.WriteString(\"<style type=text/css> \")\n\tcssFile.WriteString(\"table,th,tr,td {border: 1px solid gray; \")\n\tcssFile.WriteString(\"border-collapse: collapse;padding: 5px;} \")\n\tcssFile.WriteString(\"</style>\")\n\t// Here we manually close the css file instead of defer the close function,\n\t// because we need to use the css file for pandoc command below.\n\t// Writing to css file is a clear small logic so we don't separate it into\n\t// another function.\n\tif err = cssFile.Close(); err != nil {\n\t\treturn fmt.Errorf(\"failed to close file %s, %v\", cssFileName, err)\n\t}\n\n\thtmlStr, err := u.Shell(\"pandoc\", \"-H\", cssFileName, \"--from\", \"markdown_github\", \"--to\", \"html\", mdFileName)\n\tif err != nil {\n\t\treturn fmt.Errorf(\"failed to generate html content: %v\", err)\n\t}\n\n\thtmlFile, err := os.Create(htmlFileName)\n\tif err != nil {\n\t\treturn fmt.Errorf(\"failed to create html file: %v\", err)\n\t}\n\tdefer func() {\n\t\tif err = htmlFile.Close(); err != nil {\n\t\t\tresult = fmt.Errorf(\"failed to close file %s, %v\", htmlFileName, err)\n\t\t}\n\t}()\n\n\thtmlFile.WriteString(htmlStr)\n\treturn result\n}", "func (f *HTTPFrontend) Fork(opts HTTPFrontendOptions) (fn *HTTPFrontend, err error) {\n\tfn = &HTTPFrontend{}\n\tfn.opts.CopyFrom(&opts)\n\tfn.workerTkr = time.NewTicker(100 * time.Millisecond)\n\tfn.ctx, fn.ctxCancel = context.WithCancel(context.Background())\n\n\tpromLabels := prometheus.Labels{\n\t\t\"frontend\": fn.opts.Name,\n\t}\n\tfn.promReadBytes = promHTTPFrontendReadBytes.MustCurryWith(promLabels)\n\tfn.promWriteBytes = 
promHTTPFrontendWriteBytes.MustCurryWith(promLabels)\n\tfn.promRequestsTotal = promHTTPFrontendRequestsTotal.MustCurryWith(promLabels)\n\tfn.promRequestDurationSeconds = promHTTPFrontendRequestDurationSeconds.MustCurryWith(promLabels)\n\tfn.promConnectionsTotal = promHTTPFrontendConnectionsTotal.MustCurryWith(promLabels)\n\tfn.promActiveConnections = promHTTPFrontendActiveConnections.MustCurryWith(promLabels)\n\tfn.promIdleConnections = promHTTPFrontendIdleConnections.MustCurryWith(promLabels)\n\tfn.promWaitingConnections = promHTTPFrontendWaitingConnections.MustCurryWith(promLabels)\n\n\tdefer func() {\n\t\tif err == nil {\n\t\t\treturn\n\t\t}\n\t\tfn.Close()\n\t\tfn = nil\n\t}()\n\n\tfn.workerWg.Add(1)\n\tgo fn.worker()\n\n\treturn\n}", "func interopPreprocess(doc *diddoc.Doc) *diddoc.Doc {\n\treturn doc\n}", "func test() {\n\n\tif _, err := os.Stat(\"OutputFile.md\"); err == nil {\n\t\t// path/to/whatever exists\n\t\tdeleteFile(\"OutputFile.md\")\n\n\t} else if os.IsNotExist(err) {\n\t\t// path/to/whatever does *not* exist\n\t\tfmt.Print(\" No file names as OutputFile.md for the moment\")\n\n\t} else {\n\t\t// Schrodinger: file may or may not exist. See err for details.\n\t\t// Therefore, do *NOT* use !os.IsNotExist(err) to test for file existence\n\t}\n\n\tcreateFile(\"OutputFile.md\")\n\n\tnombreClients := 8 \n\tnombreCoiffeurs := 4 // Simulation à 4 coiffeurs, attention prendre le meme nombre que dans le fichier texte\n\tPresentationJolie()\n\n\tfileAttente := make(chan client.Client, nombreClients) //création de la file d'attente de clients\n\tfileCoiffeursLibres := make(chan coiffeur.Coiffeur, nombreCoiffeurs)\n\tfileCoiffeursOccupes := make(chan coiffeur.Coiffeur, nombreCoiffeurs)\n\n\tfmt.Println(\"Creation file d'attente \")\n\tcoiffeursLibres := CreationCoiffeurs() //création de la liste de coiffeurs d'après InputFile.txt\n\tfmt.Println(\"Creation liste coiffeurs \")\n\tlisteClients := CreationClients()\n\tfmt.Println(\"Creation liste clients \")\n\n\tfor i:=0; i < nombreClients; i++ {\n\t\tfileAttente <-listeClients[i]\n\t\twg.Add(1)\n\t}\n\tfmt.Println(\" Coiffeurs : \", coiffeursLibres)\n\tfmt.Println(\" Clients : \", len(fileAttente))\n\n\tfor i:=0; i < nombreCoiffeurs; i++ {\n\t\tfileCoiffeursLibres <- coiffeursLibres[i]\n\t}\n\n\t//boucle qui tourne pendant toute l'execution du programme\n\tfor len(fileAttente)!= 0 { \t\t\n\t\tclientOccupe := <-fileAttente // retire un client de la file d'attente\n\t\tnewHaird := haird_busy(fileCoiffeursLibres, fileCoiffeursOccupes) // choisit quel coiffeur s'en occupe\n\t\tgo operation(&clientOccupe, &newHaird, fileCoiffeursLibres, fileCoiffeursOccupes) // lancement de l'opération\n\t}\n\n\tfmt.Println(\"nombre coiffeurs libres :\", len(fileCoiffeursLibres))\n\n\twg.Wait() //empêche le programme de terminer avant les go-routines\n\n\tduration :=end_of_day()\n\tfmt.Println( \" The duration of today's process for the\", nombreClients, \"clients was \", duration)\n\tFinJolie()\n}" ]
[ "0.55878586", "0.53069735", "0.52989525", "0.52003443", "0.5189423", "0.51361525", "0.51097566", "0.5096345", "0.50834894", "0.50744003", "0.5070284", "0.5069732", "0.50068974", "0.4931216", "0.49018574", "0.48747537", "0.4805411", "0.47457948", "0.47219247", "0.4644027", "0.4630759", "0.45893553", "0.45518267", "0.4550035", "0.45004654", "0.44998214", "0.44710833", "0.4461322", "0.4429096", "0.44290525", "0.44263554", "0.44262335", "0.43942633", "0.4379227", "0.43630344", "0.43192613", "0.431742", "0.43146104", "0.43125176", "0.43115357", "0.43078196", "0.43054873", "0.4304829", "0.4304721", "0.43037498", "0.42978945", "0.42844", "0.42772886", "0.42734355", "0.42626742", "0.42597723", "0.42579168", "0.4254961", "0.42352456", "0.4235098", "0.4228446", "0.42259654", "0.42199785", "0.42196384", "0.42185584", "0.4217089", "0.42161667", "0.42145598", "0.42042443", "0.4196738", "0.41789377", "0.417828", "0.4176783", "0.417115", "0.41688505", "0.41622144", "0.41604444", "0.4158504", "0.41545725", "0.41535723", "0.41365", "0.4133525", "0.41292307", "0.4121915", "0.41112688", "0.41053975", "0.40994877", "0.40920293", "0.4089604", "0.4087347", "0.40865546", "0.40805507", "0.4072996", "0.40709567", "0.4070246", "0.40682128", "0.40672693", "0.4065173", "0.4059238", "0.40578178", "0.40481907", "0.40415663", "0.40397164", "0.40388107", "0.40381566" ]
0.51040345
7
Does the necessary housekeeping to save a revision of a document. Returns the ObjectId for the new document type id. It would be nice to refactor this somehow but not require each document type to implement it.
func (d *Doc) PreSave(cur bson.ObjectId) bson.ObjectId {
	d.History = append(d.History, cur)
	return bson.NewObjectId()
}
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "func (d *Document) ID() int { return d.Document.DocumentID }", "func newDocWithId(c *gin.Context) {\n\tkey := c.Params.ByName(\"id\")\n\tres := saveDocument(key, c)\n\tif res.Ok == false {\n\t\tlog.Printf(\"Error saving document: %s\", res.Error)\n\t\tc.JSON(statusErr, res)\n\t} else {\n\t\tc.JSON(statusOk, res)\n\t}\n}", "func (metas metadatas) getDocumentID(document interface{}) (id bson.ObjectId, err error) {\n\tValue := reflect.ValueOf(document)\n\tmeta, ok := metas[Value.Type()]\n\tif !ok {\n\t\treturn id, ErrDocumentNotRegistered\n\t}\n\tidFields, ok := meta.findIDField()\n\tif !ok {\n\t\treturn id, ErrIDFieldNotFound\n\t}\n\treturn Value.Elem().FieldByName(idFields.name).Interface().(bson.ObjectId), nil\n\n}", "func (sid SourceID) RevID() string {\n\treturn sid.Version.Meta\n}", "func (db *DB) Post(doc interface{}) (id, rev string, err error) {\n\tpath := revpath(\"\", db.name)\n\t// TODO: make it possible to stream encoder output somehow\n\tjson, err := json.Marshal(doc)\n\tif err != nil {\n\t\treturn \"\", \"\", err\n\t}\n\tb := bytes.NewReader(json)\n\tresp, err := db.request(db.ctx, \"POST\", path, b)\n\tif err != nil {\n\t\treturn \"\", \"\", err\n\t}\n\treturn responseIDRev(resp)\n}", "func (wt *WaterTower) PostDocument(uniqueKey string, document *Document) (uint32, error) {\n\tretryCount := 50\n\tvar lastError error\n\tvar docID uint32\n\tnewTags, newDocTokens, wordCount, titleWordCount, err := wt.analyzeDocument(\"new\", document)\n\tif err != nil {\n\t\treturn 0, err\n\t}\n\tfor i := 0; i < retryCount; i++ {\n\t\tdocID, lastError = wt.postDocumentKey(uniqueKey)\n\t\tif lastError == nil {\n\t\t\tbreak\n\t\t}\n\t}\n\tif lastError != nil {\n\t\treturn 0, fmt.Errorf(\"fail to register document's unique key: %w\", lastError)\n\t}\n\tfor i := 0; i < retryCount; i++ {\n\t\toldDoc, err := wt.postDocument(docID, uniqueKey, wordCount, titleWordCount, document)\n\t\tif err != nil {\n\t\t\tlastError = err\n\t\t\tcontinue\n\t\t}\n\t\toldTags, oldDocTokens, _, _, err := wt.analyzeDocument(\"old\", oldDoc)\n\t\tif err != nil {\n\t\t\treturn 0, err\n\t\t}\n\t\terr = wt.updateTagsAndTokens(docID, oldTags, newTags, oldDocTokens, newDocTokens)\n\t\tif err != nil {\n\t\t\tlastError = err\n\t\t\tcontinue\n\t\t}\n\t\treturn docID, nil\n\t}\n\treturn 0, fmt.Errorf(\"fail to register document: %w\", lastError)\n}", "func (d *DbBackendCouch) save(database string,\n\tdocumentID string,\n\tdocument interface{}) (string, interface{}, error) {\n\terr := d.ensureConnection()\n\tif err != nil {\n\t\treturn \"\", nil, err\n\t}\n\n\tdb, err := d.client.EnsureDB(database)\n\tif err != nil {\n\t\treturn \"\", nil, err\n\t}\n\n\trev, err := db.Rev(documentID)\n\tif err != nil {\n\t\treturn \"\", nil, err\n\t}\n\n\t_, err = db.Put(documentID, document, rev)\n\tif err != nil {\n\t\treturn \"\", nil, err\n\t}\n\n\treturn documentID, document, err\n}", "func applyID(doc document.Document, id string) document.Document {\n\t// apply id to document\n\tdoc[\"id\"] = id\n\n\treturn doc\n}", "func sthForRevision(revision int64) int64 {\n\treturn revision + 1\n}", "func GetNewObjectId () string {\n\treturn bson.NewObjectId().String()\n}", "func (s *InMemoryDocumentSessionOperations) GetDocumentID(instance interface{}) string {\n\tif instance == nil {\n\t\treturn \"\"\n\t}\n\tvalue := s.documentsByEntity[instance]\n\tif value == nil {\n\t\treturn \"\"\n\t}\n\treturn value.id\n}", "func (*PersonalDocument) TypeID() uint32 {\n\treturn PersonalDocumentTypeID\n}", "func getObjectRevision(obj client.Object) (int64, error) 
{\n\ta := obj.GetAnnotations()\n\tif a == nil {\n\t\treturn 0, nil\n\t}\n\n\tif len(a[revisionAnnotation]) == 0 {\n\t\treturn 0, nil\n\t}\n\n\treturn strconv.ParseInt(a[revisionAnnotation], 10, 64)\n}", "func (o WorkflowOutput) RevisionId() pulumi.StringOutput {\n\treturn o.ApplyT(func(v *Workflow) pulumi.StringOutput { return v.RevisionId }).(pulumi.StringOutput)\n}", "func (rt *RestTester) PutDocumentWithRevID(docID string, newRevID string, parentRevID string, body db.Body) (response *TestResponse, err error) {\n\n\trequestBody := body.ShallowCopy()\n\tnewRevGeneration, newRevDigest := db.ParseRevID(newRevID)\n\n\trevisions := make(map[string]interface{})\n\trevisions[\"start\"] = newRevGeneration\n\tids := []string{newRevDigest}\n\tif parentRevID != \"\" {\n\t\t_, parentDigest := db.ParseRevID(parentRevID)\n\t\tids = append(ids, parentDigest)\n\t}\n\trevisions[\"ids\"] = ids\n\n\trequestBody[db.BodyRevisions] = revisions\n\trequestBytes, err := json.Marshal(requestBody)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\tresp := rt.SendAdminRequest(http.MethodPut, \"/{{.keyspace}}/\"+docID+\"?new_edits=false\", string(requestBytes))\n\treturn resp, nil\n}", "func (reg *defaultRegistry) GetRevision(gen runtime.Generation) (*engine.Revision, error) {\n\t// todo thing about replacing hardcoded key with some flag in Info that will show that there is a single object of that kind\n\tvar revision *engine.Revision\n\terr := reg.store.Find(engine.TypeRevision.Kind, &revision, store.WithKey(engine.RevisionKey), store.WithGen(gen))\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\tif revision == nil {\n\t\treturn nil, nil\n\t}\n\n\treturn revision, nil\n}", "func UpdateDocument(id, rev string, doc interface{}) error {\n\t_, err := DB.UpdateDocument(id, rev, doc)\n\tif err != nil {\n\t\tlog.Println(err)\n\t\treturn err\n\t}\n\treturn nil\n}", "func (d *Document) FindOneWithRevision(\n\tcontext.Context, string, int64,\n) (*flare.Document, error) {\n\treturn nil, errors.New(\"not implemented\")\n}", "func (*InputIdentityDocument) TypeID() uint32 {\n\treturn InputIdentityDocumentTypeID\n}", "func requireRevID(t *testing.T, rt *RestTester, docID, revID string) {\n\tdoc, err := rt.GetDatabase().GetDocument(logger.TestCtx(t), docID, db.DocUnmarshalAll)\n\trequire.NoError(t, err, \"Error reading document from bucket\")\n\trequire.Equal(t, revID, doc.SyncData.CurrentRev)\n}", "func (o KubernetesClusterIngressApplicationGatewayIngressApplicationGatewayIdentityOutput) ObjectId() pulumi.StringPtrOutput {\n\treturn o.ApplyT(func(v KubernetesClusterIngressApplicationGatewayIngressApplicationGatewayIdentity) *string {\n\t\treturn v.ObjectId\n\t}).(pulumi.StringPtrOutput)\n}", "func (db *DB) Put(id string, doc interface{}, rev string) (newrev string, err error) {\n\tpath := revpath(rev, db.name, id)\n\t// TODO: make it possible to stream encoder output somehow\n\tjson, err := json.Marshal(doc)\n\tif err != nil {\n\t\treturn \"\", err\n\t}\n\tb := bytes.NewReader(json)\n\treturn responseRev(db.closedRequest(db.ctx, \"PUT\", path, b))\n}", "func (m *Mongo) Save(c string, i interface{}) {\n\tvar bs bson.ObjectId\n\tentity := true\n\tid, ok := getField(\"ID\", i)\n\n\tdb, s := m.Instance()\n\tdefer s.Close()\n\n\tif !ok {\n\t\tm.GM.Log.Warning(\"Couldn't find an id field, record is being inserted with no id.\")\n\t\tentity = false\n\t}\n\n\tif ok && bson.IsObjectIdHex(id.(string)) {\n\t\tbs = bson.ObjectIdHex(id.(string))\n\t}\n\n\tif entity && bs.Valid() {\n\t\tq := bson.M{\"entity._id\": bs.Hex()}\n\n\t\t// Fire the On 
Update event\n\t\tent := i.(EntityInterface)\n\t\tent.OnUpdate(m.GM)\n\n\t\t// Update the record based on its id\n\t\tif err := db.C(c).Update(q, i); err != nil {\n\t\t\tm.GM.Log.Error(err)\n\t\t\treturn\n\t\t}\n\n\t} else {\n\n\t\tif entity {\n\t\t\t// Update the entity fields\n\t\t\tent := i.(EntityInterface)\n\t\t\tent.OnCreate(bson.NewObjectId().Hex(), m.GM)\n\t\t}\n\n\t\tif err := db.C(c).Insert(i); err != nil {\n\t\t\tm.GM.Log.Error(err)\n\t\t\treturn\n\t\t}\n\t}\n\n\t// If this is an entity, we want to send this to the stream manager\n\tif entity {\n\t\tm.GM.StreamManager.Updates(i)\n\t}\n}", "func (o KubernetesClusterWebAppRoutingWebAppRoutingIdentityOutput) ObjectId() pulumi.StringPtrOutput {\n\treturn o.ApplyT(func(v KubernetesClusterWebAppRoutingWebAppRoutingIdentity) *string { return v.ObjectId }).(pulumi.StringPtrOutput)\n}", "func (o ServicePrincipalOutput) ObjectId() pulumi.StringOutput {\n\treturn o.ApplyT(func(v *ServicePrincipal) pulumi.StringOutput { return v.ObjectId }).(pulumi.StringOutput)\n}", "func (r *Revision) ID() string {\n\treturn fmt.Sprintf(\"%d\", r.Created.AsTime().UnixNano())\n}", "func (doc *Document) ID() string {\n\treturn stringEntry((*doc)[jsonldID])\n}", "func (c *Class) save() error {\n\n\tif c.ID.Valid() {\n\t\tres := ClassCollection.Find(db.Cond{\n\t\t\t\"_id\": c.ID,\n\t\t})\n\t\treturn res.Update(c)\n\t}\n\n\tc.CreatedAt = time.Now()\n\n\tid, err := ClassCollection.Append(c)\n\n\tif err != nil {\n\t\treturn err\n\t}\n\n\tc.ID = id.(bson.ObjectId)\n\n\treturn nil\n}", "func (o ManagedInstanceActiveDirectoryAdministratorOutput) ObjectId() pulumi.StringOutput {\n\treturn o.ApplyT(func(v *ManagedInstanceActiveDirectoryAdministrator) pulumi.StringOutput { return v.ObjectId }).(pulumi.StringOutput)\n}", "func (o PrincipalsOutput) ObjectId() pulumi.StringPtrOutput {\n\treturn o.ApplyT(func(v Principals) *string { return v.ObjectId }).(pulumi.StringPtrOutput)\n}", "func (c *Contractor) saveRevision(id types.FileContractID) func(types.FileContractRevision, []crypto.Hash) error {\n\treturn func(rev types.FileContractRevision, newRoots []crypto.Hash) error {\n\t\tc.mu.Lock()\n\t\tdefer c.mu.Unlock()\n\t\tc.cachedRevisions[id] = cachedRevision{rev, newRoots}\n\t\treturn c.saveSync()\n\t}\n}", "func (db *Database) putSpecial(doctype string, docid string, matchRev string, body Body) (string, error) {\n\tkey := db.realSpecialDocID(doctype, docid)\n\tif key == \"\" {\n\t\treturn \"\", base.HTTPErrorf(400, \"Invalid doc ID\")\n\t}\n\tvar revid string\n\n\texpiry, err := body.getExpiry()\n\tif err != nil {\n\t\treturn \"\", base.HTTPErrorf(http.StatusBadRequest, \"Invalid expiry: %v\", err)\n\t}\n\terr = db.Bucket.Update(key, int(expiry), func(value []byte) ([]byte, error) {\n\t\tif len(value) == 0 {\n\t\t\tif matchRev != \"\" || body == nil {\n\t\t\t\treturn nil, base.HTTPErrorf(http.StatusNotFound, \"No previous revision to replace\")\n\t\t\t}\n\t\t} else {\n\t\t\tprevBody := Body{}\n\t\t\tif err := json.Unmarshal(value, &prevBody); err != nil {\n\t\t\t\treturn nil, err\n\t\t\t}\n\t\t\tif matchRev != prevBody[\"_rev\"] {\n\t\t\t\treturn nil, base.HTTPErrorf(http.StatusConflict, \"Document update conflict\")\n\t\t\t}\n\t\t}\n\n\t\tif body != nil {\n\t\t\t// Updating:\n\t\t\tvar generation uint\n\t\t\tif matchRev != \"\" {\n\t\t\t\tfmt.Sscanf(matchRev, \"0-%d\", &generation)\n\t\t\t}\n\t\t\trevid = fmt.Sprintf(\"0-%d\", generation+1)\n\t\t\tbody[\"_rev\"] = revid\n\t\t\treturn json.Marshal(body)\n\t\t} else {\n\t\t\t// Deleting:\n\t\t\treturn nil, 
nil\n\t\t}\n\t})\n\n\treturn revid, err\n}", "func (r *Document) ID() pulumi.IDOutput {\n\treturn r.s.ID()\n}", "func (o KubernetesClusterKubeletIdentityOutput) ObjectId() pulumi.StringPtrOutput {\n\treturn o.ApplyT(func(v KubernetesClusterKubeletIdentity) *string { return v.ObjectId }).(pulumi.StringPtrOutput)\n}", "func (o MongoDBCollectionResourcePtrOutput) Id() pulumi.StringPtrOutput {\n\treturn o.ApplyT(func(v *MongoDBCollectionResource) *string {\n\t\tif v == nil {\n\t\t\treturn nil\n\t\t}\n\t\treturn &v.Id\n\t}).(pulumi.StringPtrOutput)\n}", "func R(r interface{}) Revision {\n\tswitch r := r.(type) {\n\tcase string:\n\t\trevision, err := ParseRevision(r)\n\t\tif err != nil {\n\t\t\tpanic(err)\n\t\t}\n\t\treturn revision\n\tcase int:\n\t\treturn Revision{r}\n\tdefault:\n\t\tpanic(fmt.Errorf(\"cannot use %v (%T) as a snap revision\", r, r))\n\t}\n}", "func (d *Document) ID() uint64 {\n\treturn d.id\n}", "func (ruo *RevisionUpdateOne) Save(ctx context.Context) (*Revision, error) {\n\tvar (\n\t\terr error\n\t\tnode *Revision\n\t)\n\tif len(ruo.hooks) == 0 {\n\t\tif err = ruo.check(); err != nil {\n\t\t\treturn nil, err\n\t\t}\n\t\tnode, err = ruo.sqlSave(ctx)\n\t} else {\n\t\tvar mut Mutator = MutateFunc(func(ctx context.Context, m Mutation) (Value, error) {\n\t\t\tmutation, ok := m.(*RevisionMutation)\n\t\t\tif !ok {\n\t\t\t\treturn nil, fmt.Errorf(\"unexpected mutation type %T\", m)\n\t\t\t}\n\t\t\tif err = ruo.check(); err != nil {\n\t\t\t\treturn nil, err\n\t\t\t}\n\t\t\truo.mutation = mutation\n\t\t\tnode, err = ruo.sqlSave(ctx)\n\t\t\tmutation.done = true\n\t\t\treturn node, err\n\t\t})\n\t\tfor i := len(ruo.hooks) - 1; i >= 0; i-- {\n\t\t\tif ruo.hooks[i] == nil {\n\t\t\t\treturn nil, fmt.Errorf(\"ent: uninitialized hook (forgotten import ent/runtime?)\")\n\t\t\t}\n\t\t\tmut = ruo.hooks[i](mut)\n\t\t}\n\t\tif _, err := mut.Mutate(ctx, ruo.mutation); err != nil {\n\t\t\treturn nil, err\n\t\t}\n\t}\n\treturn node, err\n}", "func (db *DB) Rev(ctx context.Context, docID string) (rev string, err error) {\n\tif r, ok := db.driverDB.(driver.Rever); ok {\n\t\treturn r.Rev(ctx, docID)\n\t}\n\t// These last two lines cannot be combined for GopherJS due to a bug.\n\t// See https://github.com/gopherjs/gopherjs/issues/608\n\trow, err := db.Get(ctx, docID, nil)\n\tif err != nil {\n\t\treturn \"\", err\n\t}\n\tvar doc struct {\n\t\tRev string `json:\"_rev\"`\n\t}\n\tif err = row.ScanDoc(&doc); err != nil {\n\t\treturn \"\", err\n\t}\n\treturn doc.Rev, nil\n}", "func (db *MongoDB) Save(project, kind, key string, data interface{}) error {\n\treturn nil\n}", "func (*InputPersonalDocument) TypeID() uint32 {\n\treturn InputPersonalDocumentTypeID\n}", "func SaveLODocObj(m map[string]interface{}) (string, error) {\n\t// Connect to mongodb server from remote.\n\tsession, err := mgo.Dial(\"adaptivelearner:[email protected]/learningobjects\")\n\tif err != nil {\n\t\treturn \"\", err\n\t}\n\tdefer session.Close()\n\t// Optional. 
Switch the session to a monotonic behavior.\n\tsession.SetMode(mgo.Monotonic, true)\n\t// Use database learningobecjts and select learningobjects collection.\n\tc := session.DB(\"learningobjects\").C(\"learningobjects\")\n\t// Insert one learning object.\n\tuuid, err := newUUID()\n\tif err != nil {\n\t\treturn \"\", err\n\t}\n\tm[\"_id\"] = uuid\n\terr = c.Insert(m)\n\tif err != nil {\n\t\treturn \"\", err\n\t\tlog.Fatal(err)\n\t} else {\n\t\treturn uuid, nil\n\t}\n\n\treturn \"\", err\n}", "func GetObjectIDGraphQLType() *graphql.Scalar {\n\treturn graphql.NewScalar(graphql.ScalarConfig{\n\t\tName: \"BSON\",\n\t\tDescription: \"The `bson` scalar type represents a BSON Object.\",\n\t\t// Serialize serializes `bson.ObjectId` to string.\n\t\tSerialize: func(value interface{}) interface{} {\n\t\t\tswitch value := value.(type) {\n\t\t\tcase primitive.ObjectID:\n\t\t\t\treturn value.Hex()\n\t\t\tcase *primitive.ObjectID:\n\t\t\t\tv := *value\n\t\t\t\treturn v.Hex()\n\t\t\tdefault:\n\t\t\t\treturn nil\n\t\t\t}\n\t\t},\n\t\t// ParseValue parses GraphQL variables from `string` to `bson.ObjectId`.\n\t\tParseValue: func(value interface{}) interface{} {\n\t\t\tswitch value := value.(type) {\n\t\t\tcase string:\n\t\t\t\tid, _ := primitive.ObjectIDFromHex(value)\n\t\t\t\treturn id\n\t\t\tcase *string:\n\t\t\t\tid, _ := primitive.ObjectIDFromHex(*value)\n\t\t\t\treturn id\n\t\t\tdefault:\n\t\t\t\treturn nil\n\t\t\t}\n\t\t},\n\t\t// ParseLiteral parses GraphQL AST to `bson.ObjectId`.\n\t\tParseLiteral: func(valueAST ast.Value) interface{} {\n\t\t\tswitch valueAST := valueAST.(type) {\n\t\t\tcase *ast.StringValue:\n\t\t\t\tid, _ := primitive.ObjectIDFromHex(valueAST.Value)\n\t\t\t\treturn id\n\t\t\t}\n\t\t\treturn nil\n\t\t},\n\t})\n}", "func (s service) UpdateModel(ctx context.Context, payload documents.UpdatePayload) (documents.Model, jobs.JobID, error) {\n\tdid, err := contextutil.AccountDID(ctx)\n\tif err != nil {\n\t\treturn nil, jobs.NilJobID(), documents.ErrDocumentConfigAccountID\n\t}\n\n\told, err := s.GetCurrentVersion(ctx, payload.DocumentID)\n\tif err != nil {\n\t\treturn nil, jobs.NilJobID(), err\n\t}\n\n\toldGeneric, ok := old.(*Generic)\n\tif !ok {\n\t\treturn nil, jobs.NilJobID(), errors.NewTypedError(documents.ErrDocumentInvalidType, errors.New(\"%v is not a Generic Document\", hexutil.Encode(payload.DocumentID)))\n\t}\n\n\tg := new(Generic)\n\terr = g.unpackFromUpdatePayloadOld(oldGeneric, payload)\n\tif err != nil {\n\t\treturn nil, jobs.NilJobID(), errors.NewTypedError(documents.ErrDocumentInvalid, err)\n\t}\n\n\t// validate the generic document\n\terr = UpdateValidator(s.anchorSrv).Validate(old, g)\n\tif err != nil {\n\t\treturn nil, jobs.NilJobID(), errors.NewTypedError(documents.ErrDocumentInvalid, err)\n\t}\n\n\terr = s.repo.Create(did[:], g.CurrentVersion(), g)\n\tif err != nil {\n\t\treturn nil, jobs.NilJobID(), errors.NewTypedError(documents.ErrDocumentPersistence, err)\n\t}\n\n\tjobID := contextutil.Job(ctx)\n\tjobID, _, err = documents.CreateAnchorJob(ctx, s.jobManager, s.queueSrv, did, jobID, g.CurrentVersion())\n\treturn g, jobID, err\n}", "func (d *db) Rev(ctx context.Context, docID string) (rev string, err error) {\n\tres, err := d.Client.DoError(ctx, http.MethodHead, d.path(chttp.EncodeDocID(docID), nil), nil)\n\tif err != nil {\n\t\treturn \"\", err\n\t}\n\treturn strings.Trim(res.Header.Get(\"Etag\"), `\"\"`), nil\n}", "func (o MongoDBDatabaseResourcePtrOutput) Id() pulumi.StringPtrOutput {\n\treturn o.ApplyT(func(v *MongoDBDatabaseResource) *string {\n\t\tif v == nil 
{\n\t\t\treturn nil\n\t\t}\n\t\treturn &v.Id\n\t}).(pulumi.StringPtrOutput)\n}", "func (l *levelDBRepo) save(tenantID, id []byte, model Model) error {\n\tdata, err := model.JSON()\n\tif err != nil {\n\t\treturn errors.NewTypedError(ErrDocumentRepositorySerialisation, errors.New(\"failed to marshall model: %v\", err))\n\t}\n\n\ttp := getTypeIndirect(model.Type())\n\tv := value{\n\t\tType: tp.String(),\n\t\tData: json.RawMessage(data),\n\t}\n\n\tdata, err = json.Marshal(v)\n\tif err != nil {\n\t\treturn errors.NewTypedError(ErrDocumentRepositorySerialisation, errors.New(\"failed to marshall value: %v\", err))\n\t}\n\n\tkey := getKey(tenantID, id)\n\terr = l.db.Put(key, data, nil)\n\tif err != nil {\n\t\treturn errors.NewTypedError(ErrDocumentRepositoryModelSave, errors.New(\"%v\", err))\n\t}\n\n\treturn nil\n}", "func (o ConnectionAzureOutput) ObjectId() pulumi.StringPtrOutput {\n\treturn o.ApplyT(func(v ConnectionAzure) *string { return v.ObjectId }).(pulumi.StringPtrOutput)\n}", "func (o KubernetesClusterKeyVaultSecretsProviderSecretIdentityOutput) ObjectId() pulumi.StringPtrOutput {\n\treturn o.ApplyT(func(v KubernetesClusterKeyVaultSecretsProviderSecretIdentity) *string { return v.ObjectId }).(pulumi.StringPtrOutput)\n}", "func (db *DB) CreateDoc(ctx context.Context, doc interface{}) (docID, rev string, err error) {\n\treturn db.driverDB.CreateDoc(ctx, doc)\n}", "func (o GetKubernetesClusterIngressApplicationGatewayIngressApplicationGatewayIdentityOutput) ObjectId() pulumi.StringOutput {\n\treturn o.ApplyT(func(v GetKubernetesClusterIngressApplicationGatewayIngressApplicationGatewayIdentity) string {\n\t\treturn v.ObjectId\n\t}).(pulumi.StringOutput)\n}", "func (o ApiOutput) LatestRevisionId() pulumi.StringOutput {\n\treturn o.ApplyT(func(v *Api) pulumi.StringOutput { return v.LatestRevisionId }).(pulumi.StringOutput)\n}", "func NewObjectId() ObjectId {\n\treturn newObjectId(time.Seconds(), nextOidCounter())\n}", "func (i *Info) DocRev() uint8 {\n\tif i.Entry64 != nil {\n\t\treturn i.Entry64.SMBIOSDocRev\n\t}\n\treturn 0\n}", "func (o KubernetesClusterKubeletIdentityPtrOutput) ObjectId() pulumi.StringPtrOutput {\n\treturn o.ApplyT(func(v *KubernetesClusterKubeletIdentity) *string {\n\t\tif v == nil {\n\t\t\treturn nil\n\t\t}\n\t\treturn v.ObjectId\n\t}).(pulumi.StringPtrOutput)\n}", "func setObjectRevision(obj client.Object, revision int64) {\n\ta := obj.GetAnnotations()\n\tif a == nil {\n\t\ta = map[string]string{}\n\t}\n\ta[revisionAnnotation] = fmt.Sprintf(\"%d\", revision)\n\tobj.SetAnnotations(a)\n}", "func (s Service) Commit(ctx context.Context, docID []byte) (documents.Model, jobs.JobID, error) {\n\treturn s.pendingDocSrv.Commit(ctx, docID)\n}", "func (o PrincipalsResponseOutput) ObjectId() pulumi.StringPtrOutput {\n\treturn o.ApplyT(func(v PrincipalsResponse) *string { return v.ObjectId }).(pulumi.StringPtrOutput)\n}", "func (o *CreditBankStatementUploadObject) GetDocumentId() string {\n\tif o == nil || o.DocumentId.Get() == nil {\n\t\tvar ret string\n\t\treturn ret\n\t}\n\n\treturn *o.DocumentId.Get()\n}", "func generateShortBsonId() string {\n\tid := []byte(bson.NewObjectId())\n\tbigInt := new(big.Int)\n\tbigInt.SetBytes(id)\n\treturn bigInt.Text(36)\n}", "func (p *MongodbProvider) Regenerate(oldsid, sid string) (_ session.RawStore, err error) {\n\tif p.Exist(sid) {\n\t\treturn nil, fmt.Errorf(\"new sid '%s' already exists\", sid)\n\t}\n\tif !p.Exist(oldsid) {\n\t\terr = p.c.Insert(&sessionDoc{Key: sid, Expiry: time.Now().Unix()})\n\t\tif err != nil {\n\t\t\treturn nil, 
err\n\t\t}\n\t} else {\n\t\terr = p.c.Update(bson.M{\"key\": oldsid},bson.M{\"$set\":bson.M{\"key\": sid,\"expiry\": time.Now().Unix()}})\n\t\tif err != nil {\n\t\t\treturn nil, err\n\t\t}\n\t}\n\n\treturn p.Read(sid)\n}", "func createOrUpdateDoc(t *testing.T, rt *RestTester, docID, revID, bodyValue string) string {\n\tbody := fmt.Sprintf(`{\"key\":%q,\"channels\":[\"alice\"]}`, bodyValue)\n\tdbURL := \"/db/\" + docID\n\tif revID != \"\" {\n\t\tdbURL = \"/db/\" + docID + \"?rev=\" + revID\n\t}\n\tresp := rt.SendAdminRequest(http.MethodPut, dbURL, body)\n\tassertStatus(t, resp, http.StatusCreated)\n\trequire.NoError(t, rt.WaitForPendingChanges())\n\treturn respRevID(t, resp)\n}", "func (o GetKubernetesClusterKubeletIdentityOutput) ObjectId() pulumi.StringOutput {\n\treturn o.ApplyT(func(v GetKubernetesClusterKubeletIdentity) string { return v.ObjectId }).(pulumi.StringOutput)\n}", "func (db *InMemDatabase) StoreDocument(t model.Document) (model.Document, error) {\n\tdb.currentId += 1\n\tt.Id = string(db.currentId)\n\tdb.documents = append(db.documents, t)\n\treturn t, nil\n}", "func (t *SignerHistory) DocID() *string {\n\ts := t.PagingToken.String()\n\treturn &s\n}", "func NewId(id string) mgobson.ObjectId { return mgobson.ObjectIdHex(id) }", "func (c *vertexCollection) Revision(ctx context.Context) (string, error) {\n\tresult, err := c.rawCollection().Revision(ctx)\n\tif err != nil {\n\t\treturn \"\", WithStack(err)\n\t}\n\treturn result, nil\n}", "func (g *Generator) documentType(currType string) string {\n\tif currType != \"\" {\n\t\treturn currType\n\t}\n\treturn g.types[rand.Intn(len(g.types))]\n}", "func TransformDocument(in interface{}) Document {\n\tvar document Document\n\tswitch v := in.(type) {\n\tcase bson.M:\n\t\tdocument.ID = v[\"_id\"].(bson.ObjectId)\n\t\tdocument.OwnerID = v[\"owner_id\"].(bson.ObjectId)\n\t\tdocument.URL = v[\"url\"].(string)\n\t\tdocument.DocType = v[\"doc_type\"].(string)\n\t\tdocument.OwnerType = v[\"owner_type\"].(string)\n\n\tcase Document:\n\t\tdocument = v\n\t}\n\n\treturn document\n}", "func (s *MongodbStore) ID() string {\n\treturn s.sid\n}", "func (s *BoltBackedService) PostDocument(ctx context.Context, d *Document) error {\n\tif err := d.SaveDoc(*s.db); err != nil {\n\t\treturn err\n\t}\n\treturn nil\n}", "func (col *Collection) Insert(doc map[string]interface{}) (id int, err error) {\n\tdocJS, err := json.Marshal(doc)\n\tif err != nil {\n\t\treturn\n\t}\n\tid = rand.Int()\n\tpartNum := id % col.db.numParts\n\tcol.db.access.RLock()\n\tpart := col.parts[partNum]\n\t// Put document data into collection\n\tpart.Lock.Lock()\n\tif _, err = part.Insert(id, []byte(docJS)); err != nil {\n\t\tpart.Lock.Unlock()\n\t\tcol.db.access.RUnlock()\n\t\treturn\n\t}\n\t// If another thread is updating the document in the meanwhile, let it take over index maintenance\n\tif err = part.LockUpdate(id); err != nil {\n\t\tpart.Lock.Unlock()\n\t\tcol.db.access.RUnlock()\n\t\treturn id, nil\n\t}\n\tpart.UnlockUpdate(id)\n\tpart.Lock.Unlock()\n\tcol.db.access.RUnlock()\n\n\tdoc[\"_id\"] = id\n\temitDoc(col.name, \"create\", doc)\n\treturn\n}", "func (o ClusterTemplateOutput) DefaultRevisionId() pulumi.StringOutput {\n\treturn o.ApplyT(func(v *ClusterTemplate) pulumi.StringOutput { return v.DefaultRevisionId }).(pulumi.StringOutput)\n}", "func (o KubernetesClusterOmsAgentOmsAgentIdentityOutput) ObjectId() pulumi.StringPtrOutput {\n\treturn o.ApplyT(func(v KubernetesClusterOmsAgentOmsAgentIdentity) *string { return v.ObjectId }).(pulumi.StringPtrOutput)\n}", "func (api 
DocumentAPI) Post(w http.ResponseWriter, r *http.Request) {\n\tcreateDocument(uuid.New().String(), w, r)\n}", "func (o LookupSimulationApplicationResultOutput) CurrentRevisionId() pulumi.StringPtrOutput {\n\treturn o.ApplyT(func(v LookupSimulationApplicationResult) *string { return v.CurrentRevisionId }).(pulumi.StringPtrOutput)\n}", "func (repo PostgresRepository) Save(document entity.Document) error {\n\t_, err := repo.db.Table(\"documents\").Insert(&document)\n\treturn err\n}", "func (o ConnectionAzurePtrOutput) ObjectId() pulumi.StringPtrOutput {\n\treturn o.ApplyT(func(v *ConnectionAzure) *string {\n\t\tif v == nil {\n\t\t\treturn nil\n\t\t}\n\t\treturn v.ObjectId\n\t}).(pulumi.StringPtrOutput)\n}", "func LegacyInstanceObjectID(obj *ResourceInstanceObjectSrc) string {\n\tif obj == nil {\n\t\treturn \"<not created>\"\n\t}\n\n\tif obj.AttrsJSON != nil {\n\t\ttype WithID struct {\n\t\t\tID string `json:\"id\"`\n\t\t}\n\t\tvar withID WithID\n\t\terr := json.Unmarshal(obj.AttrsJSON, &withID)\n\t\tif err == nil {\n\t\t\treturn withID.ID\n\t\t}\n\t} else if obj.AttrsFlat != nil {\n\t\tif flatID, exists := obj.AttrsFlat[\"id\"]; exists {\n\t\t\treturn flatID\n\t\t}\n\t}\n\n\t// For resource types created after we removed id as special there may\n\t// not actually be one at all. This is okay because older tests won't\n\t// encounter this, and new tests shouldn't be using ids.\n\treturn \"<none>\"\n}", "func CreateDocument(data interface{}) error {\n\tid, rev, err := DB.CreateDocument(data)\n\tif err != nil {\n\t\tlog.Println(err)\n\t\treturn err\n\t}\n\n\tUsersDoc.Rev = rev\n\tUsersDoc.ID = id\n\n\treturn nil\n}", "func (i *Instance) save() error {\n\n\tif i.ID.Valid() {\n\t\tres := InstanceCollection.Find(db.Cond{\n\t\t\t\"_id\": i.ID,\n\t\t})\n\t\treturn res.Update(i)\n\t}\n\n\ti.CreatedAt = time.Now()\n\n\tid, err := InstanceCollection.Append(i)\n\n\tif err != nil {\n\t\treturn err\n\t}\n\n\ti.ID = id.(bson.ObjectId)\n\n\treturn nil\n}", "func (o ApplicationOperationSyncOutput) Revision() pulumi.StringPtrOutput {\n\treturn o.ApplyT(func(v ApplicationOperationSync) *string { return v.Revision }).(pulumi.StringPtrOutput)\n}", "func getRevision(sd []byte) BYTE {\n\tif len(sd) < 1 {\n\t\treturn 0\n\t}\n\n\t// SECURITY_DESCRIPTOR_RELATIVE.Revision.\n\treturn BYTE(sd[0])\n}", "func (r *DocumentRepository) Store(d document.Document) error {\n\tbs, err := json.Marshal(d)\n\n\tif err != nil {\n\t\treturn err\n\t}\n\n\treturn ioutil.WriteFile(docsPath+d.ID, bs, 0644)\n}", "func (reg *defaultRegistry) NewRevision(policyGen runtime.Generation, resolution *resolve.PolicyResolution, recalculateAll bool) (*engine.Revision, error) {\n\tcurrRevision, err := reg.GetRevision(runtime.LastOrEmptyGen)\n\tif err != nil {\n\t\treturn nil, fmt.Errorf(\"error while getting last revision: %s\", err)\n\t}\n\n\tvar gen runtime.Generation\n\tif currRevision == nil {\n\t\tgen = runtime.FirstGen\n\t} else {\n\t\tgen = currRevision.GetGeneration().Next()\n\t}\n\n\t// create revision\n\trevision := engine.NewRevision(gen, policyGen, recalculateAll)\n\n\t// save revision\n\t_, err = reg.store.Save(revision)\n\tif err != nil {\n\t\treturn nil, fmt.Errorf(\"error while saving new revision: %s\", err)\n\t}\n\n\t// save desired state\n\tdesiredState := engine.NewDesiredState(revision, resolution)\n\t_, err = reg.store.Save(desiredState)\n\tif err != nil {\n\t\treturn nil, fmt.Errorf(\"error while saving desired state for new revision: %s\", err)\n\t}\n\n\treturn revision, nil\n}", "func (c *Crawler) Save() error {\n\tsession := 
mongoSession.Clone()\n\tdefer session.Close()\n\tcollection := session.DB(mongoDialInfo.Database).C(crawlersCollectionName)\n\tc.ID = bson.NewObjectId()\n\treturn collection.Insert(c)\n}", "func (db *DB) Rev(id string) (string, error) {\n\treturn responseRev(db.closedRequest(db.ctx, \"HEAD\", path(db.name, id), nil))\n}", "func (o ApplicationStatusOperationStateOperationSyncOutput) Revision() pulumi.StringPtrOutput {\n\treturn o.ApplyT(func(v ApplicationStatusOperationStateOperationSync) *string { return v.Revision }).(pulumi.StringPtrOutput)\n}", "func NewObjectId() string {\n\tvar b [12]byte\n\t// Timestamp, 4 bytes, big endian\n\tbinary.BigEndian.PutUint32(b[:], uint32(time.Now().Unix()))\n\t// Machine, first 3 bytes of md5(hostname)\n\tb[4] = machineId[0]\n\tb[5] = machineId[1]\n\tb[6] = machineId[2]\n\t// Pid, 2 bytes, specs don't specify endianness, but we use big endian.\n\tb[7] = byte(processId >> 8)\n\tb[8] = byte(processId)\n\t// Increment, 3 bytes, big endian\n\ti := atomic.AddUint32(&objectIdCounter, 1)\n\tb[9] = byte(i >> 16)\n\tb[10] = byte(i >> 8)\n\tb[11] = byte(i)\n\treturn hex.EncodeToString(b[:])\n}", "func (this *activitiesStruct) Revision() uint64 {\n\tthis.mutex.RLock()\n\trev := this.revision\n\tthis.mutex.RUnlock()\n\treturn rev\n}", "func RespRevID(t *testing.T, response *TestResponse) (revID string) {\n\tvar r struct {\n\t\tRevID *string `json:\"rev\"`\n\t}\n\trequire.NoError(t, json.Unmarshal(response.BodyBytes(), &r), \"couldn't decode JSON from response body\")\n\trequire.NotNil(t, r.RevID, \"expecting non-nil rev ID from response: %s\", string(response.BodyBytes()))\n\trequire.NotEqual(t, \"\", *r.RevID, \"expecting non-empty rev ID from response: %s\", string(response.BodyBytes()))\n\treturn *r.RevID\n}", "func (m *SQLIgnoreStore) Revision() int64 {\n\tm.mutex.Lock()\n\tdefer m.mutex.Unlock()\n\treturn m.revision\n}", "func parseObjectID(id string, r *repo.Repository) (repo.ObjectID, error) {\n\thead, tail := splitHeadTail(id)\n\tif len(head) == 0 {\n\t\treturn repo.NullObjectID, fmt.Errorf(\"invalid object ID: %v\", id)\n\t}\n\n\toid, err := repo.ParseObjectID(head)\n\tif err != nil {\n\t\treturn repo.NullObjectID, fmt.Errorf(\"can't parse object ID %v: %v\", head, err)\n\t}\n\n\tif tail == \"\" {\n\t\treturn oid, nil\n\t}\n\n\tdir := repofs.Directory(r, oid)\n\tif err != nil {\n\t\treturn repo.NullObjectID, err\n\t}\n\n\treturn parseNestedObjectID(dir, tail)\n}", "func TestUpdateDocumentsRevision(t *testing.T) {\n\tctx := context.Background()\n\tc := createClient(t, nil)\n\tdb := ensureDatabase(ctx, c, \"document_test\", nil, t)\n\tcol := ensureCollection(ctx, db, \"documents_test\", nil, t)\n\tdocs := []UserDoc{\n\t\t{\n\t\t\t\"Revision\",\n\t\t\t33,\n\t\t},\n\t\t{\n\t\t\t\"Revision2\",\n\t\t\t34,\n\t\t},\n\t}\n\tmetas, errs, err := col.CreateDocuments(ctx, docs)\n\tif err != nil {\n\t\tt.Fatalf(\"Failed to create new document: %s\", describe(err))\n\t} else if len(metas) != len(docs) {\n\t\tt.Fatalf(\"Expected %d metas, got %d\", len(docs), len(metas))\n\t} else if err := errs.FirstNonNil(); err != nil {\n\t\tt.Fatalf(\"Expected no errors, got first: %s\", describe(err))\n\t}\n\n\t// Update documents with correct revisions\n\tupdates := []map[string]interface{}{\n\t\t{\n\t\t\t\"age\": 34,\n\t\t},\n\t\t{\n\t\t\t\"age\": 77,\n\t\t},\n\t}\n\tinitialRevCtx := driver.WithRevisions(ctx, metas.Revs())\n\tvar updatedRevCtx context.Context\n\tif metas2, _, err := col.UpdateDocuments(initialRevCtx, metas.Keys(), updates); err != nil {\n\t\tt.Fatalf(\"Failed to 
update documents: %s\", describe(err))\n\t} else {\n\t\tupdatedRevCtx = driver.WithRevisions(ctx, metas2.Revs())\n\t\tif strings.Join(metas2.Revs(), \",\") == strings.Join(metas.Revs(), \",\") {\n\t\t\tt.Errorf(\"Expected revision to change, got initial revision '%s', updated revision '%s'\", strings.Join(metas.Revs(), \",\"), strings.Join(metas2.Revs(), \",\"))\n\t\t}\n\t}\n\n\t// Update documents with incorrect revisions\n\tupdates[0][\"age\"] = 35\n\tvar rawResponse []byte\n\tif _, errs, err := col.UpdateDocuments(driver.WithRawResponse(initialRevCtx, &rawResponse), metas.Keys(), updates); err != nil {\n\t\tt.Errorf(\"Expected success, got %s\", describe(err))\n\t} else {\n\t\tfor _, err := range errs {\n\t\t\tif !driver.IsPreconditionFailed(err) {\n\t\t\t\tt.Errorf(\"Expected PreconditionFailedError, got %s (resp: %s\", describe(err), string(rawResponse))\n\t\t\t}\n\t\t}\n\t}\n\n\t// Update documents once more with correct revisions\n\tupdates[0][\"age\"] = 36\n\tif _, _, err := col.UpdateDocuments(updatedRevCtx, metas.Keys(), updates); err != nil {\n\t\tt.Errorf(\"Expected success, got %s\", describe(err))\n\t}\n}", "func (o DocumentDbOutputDataSourceOutput) DocumentId() pulumi.StringPtrOutput {\n\treturn o.ApplyT(func(v DocumentDbOutputDataSource) *string { return v.DocumentId }).(pulumi.StringPtrOutput)\n}", "func (bt *BlipTester) GetDocAtRev(requestedDocID, requestedDocRev string) (resultDoc RestDocument, err error) {\n\n\tdocs := map[string]RestDocument{}\n\tchangesFinishedWg := sync.WaitGroup{}\n\trevsFinishedWg := sync.WaitGroup{}\n\n\tdefer func() {\n\t\t// Clean up all profile handlers that are registered as part of this test\n\t\tdelete(bt.blipContext.HandlerForProfile, \"changes\")\n\t\tdelete(bt.blipContext.HandlerForProfile, \"rev\")\n\t}()\n\n\t// -------- Changes handler callback --------\n\tbt.blipContext.HandlerForProfile[\"changes\"] = getChangesHandler(&changesFinishedWg, &revsFinishedWg)\n\n\t// -------- Rev handler callback --------\n\tbt.blipContext.HandlerForProfile[\"rev\"] = func(request *blip.Message) {\n\n\t\tdefer revsFinishedWg.Done()\n\t\tbody, err := request.Body()\n\t\tif err != nil {\n\t\t\tpanic(fmt.Sprintf(\"Unexpected err getting request body: %v\", err))\n\t\t}\n\t\tvar doc RestDocument\n\t\terr = base.JSONUnmarshal(body, &doc)\n\t\tif err != nil {\n\t\t\tpanic(fmt.Sprintf(\"Unexpected err: %v\", err))\n\t\t}\n\t\tdocId := request.Properties[\"id\"]\n\t\tdocRev := request.Properties[\"rev\"]\n\t\tdoc.SetID(docId)\n\t\tdoc.SetRevID(docRev)\n\t\tdocs[docId] = doc\n\n\t\tif docId == requestedDocID && docRev == requestedDocRev {\n\t\t\tresultDoc = doc\n\t\t}\n\n\t}\n\n\t// Send subChanges to subscribe to changes, which will cause the \"changes\" profile handler above to be called back\n\tchangesFinishedWg.Add(1)\n\tsubChangesRequest := blip.NewRequest()\n\tsubChangesRequest.SetProfile(\"subChanges\")\n\tsubChangesRequest.Properties[\"continuous\"] = \"false\"\n\tbt.addCollectionProperty(subChangesRequest)\n\n\tsent := bt.sender.Send(subChangesRequest)\n\tif !sent {\n\t\tpanic(\"Unable to subscribe to changes.\")\n\t}\n\n\tchangesFinishedWg.Wait()\n\trevsFinishedWg.Wait()\n\n\treturn resultDoc, nil\n\n}", "func (o MongoDBCollectionResourceOutput) Id() pulumi.StringOutput {\n\treturn o.ApplyT(func(v MongoDBCollectionResource) string { return v.Id }).(pulumi.StringOutput)\n}", "func (this *Device) GetRevision() (uint32, error) {\n\t// Return cached version\n\tif this.revision != GENCMD_REVISION_NONE {\n\t\treturn this.revision, nil\n\t}\n\n\t// Get 
embedded memory\n\totp, err := this.GetOTP()\n\tif err != nil {\n\t\treturn GENCMD_REVISION_NONE, err\n\t}\n\n\t// Cache and return revision number\n\tthis.revision = uint32(otp[GENCMD_OTP_DUMP_REVISION])\n\treturn this.revision, nil\n}", "func (s *SharedMemory) Revision() int {\n\treturn int(s.shmem.dwRevision)\n}", "func NewObjectID() ObjectID{\n\tres:=(ObjectID{0,nil})\n\tres.IDArray=make(map[string]interface{},constants.MAXOBJECTS)\n\treturn res\n}" ]
[ "0.563724", "0.5617376", "0.55635923", "0.5479824", "0.5410942", "0.5228088", "0.52259517", "0.5212826", "0.5212167", "0.5203062", "0.51842463", "0.5176076", "0.5168122", "0.5159967", "0.5131913", "0.50879925", "0.50724274", "0.5038259", "0.5036658", "0.5026574", "0.5024024", "0.50218034", "0.50090164", "0.5008448", "0.5005866", "0.49749717", "0.495827", "0.4946529", "0.4946156", "0.49400753", "0.49274427", "0.4925672", "0.4911286", "0.49058744", "0.48987907", "0.48904407", "0.4886361", "0.48672226", "0.48583624", "0.48176712", "0.48115027", "0.47955862", "0.4779462", "0.47730687", "0.47508797", "0.47495475", "0.47457355", "0.47413948", "0.4736561", "0.47361627", "0.47270843", "0.4723441", "0.47154817", "0.4711508", "0.47048882", "0.47046125", "0.47002852", "0.46974647", "0.46960455", "0.4695876", "0.46884063", "0.46846476", "0.467838", "0.4678168", "0.46723703", "0.4671938", "0.46705368", "0.46697333", "0.46688798", "0.4666236", "0.46650425", "0.46495974", "0.46464163", "0.46441647", "0.4639635", "0.46349907", "0.4632689", "0.46318305", "0.4630534", "0.46289662", "0.4624482", "0.46166164", "0.4616422", "0.46127057", "0.4603968", "0.46016198", "0.45845622", "0.45690998", "0.45689082", "0.45684764", "0.4562098", "0.4559283", "0.45517007", "0.45504683", "0.4550384", "0.45442095", "0.45401245", "0.45381275", "0.45295858", "0.45286277" ]
0.5280892
5
MarshalClusterRegistrationList writes a list of values of the 'cluster_registration' type to the given writer.
func MarshalClusterRegistrationList(list []*ClusterRegistration, writer io.Writer) error {
	stream := helpers.NewStream(writer)
	writeClusterRegistrationList(list, stream)
	stream.Flush()
	return stream.Error
}
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "func writeClusterRegistrationList(list []*ClusterRegistration, stream *jsoniter.Stream) {\n\tstream.WriteArrayStart()\n\tfor i, value := range list {\n\t\tif i > 0 {\n\t\t\tstream.WriteMore()\n\t\t}\n\t\twriteClusterRegistration(value, stream)\n\t}\n\tstream.WriteArrayEnd()\n}", "func readClusterRegistrationList(iterator *jsoniter.Iterator) []*ClusterRegistration {\n\tlist := []*ClusterRegistration{}\n\tfor iterator.ReadArray() {\n\t\titem := readClusterRegistration(iterator)\n\t\tlist = append(list, item)\n\t}\n\treturn list\n}", "func (o OpenShiftClusterList) MarshalJSON() ([]byte, error) {\n\tobjectMap := make(map[string]any)\n\tpopulate(objectMap, \"nextLink\", o.NextLink)\n\tpopulate(objectMap, \"value\", o.Value)\n\treturn json.Marshal(objectMap)\n}", "func UnmarshalClusterRegistrationList(source interface{}) (items []*ClusterRegistration, err error) {\n\titerator, err := helpers.NewIterator(source)\n\tif err != nil {\n\t\treturn\n\t}\n\titems = readClusterRegistrationList(iterator)\n\terr = iterator.Error\n\treturn\n}", "func (c Cluster) MarshalJSON() ([]byte, error) {\n\tobjectMap := make(map[string]interface{})\n\tif c.Identity != nil {\n\t\tobjectMap[\"identity\"] = c.Identity\n\t}\n\tif c.Sku != nil {\n\t\tobjectMap[\"sku\"] = c.Sku\n\t}\n\tif c.ClusterProperties != nil {\n\t\tobjectMap[\"properties\"] = c.ClusterProperties\n\t}\n\tif c.Tags != nil {\n\t\tobjectMap[\"tags\"] = c.Tags\n\t}\n\tif c.Location != nil {\n\t\tobjectMap[\"location\"] = c.Location\n\t}\n\treturn json.Marshal(objectMap)\n}", "func (o OpenShiftCluster) MarshalJSON() ([]byte, error) {\n\tobjectMap := make(map[string]any)\n\tpopulate(objectMap, \"id\", o.ID)\n\tpopulate(objectMap, \"location\", o.Location)\n\tpopulate(objectMap, \"name\", o.Name)\n\tpopulate(objectMap, \"properties\", o.Properties)\n\tpopulate(objectMap, \"systemData\", o.SystemData)\n\tpopulate(objectMap, \"tags\", o.Tags)\n\tpopulate(objectMap, \"type\", o.Type)\n\treturn json.Marshal(objectMap)\n}", "func (m *Manager) ListCluster() error {\n\tclusters, err := m.GetClusterList()\n\tif err != nil {\n\t\treturn err\n\t}\n\n\tswitch m.logger.GetDisplayMode() {\n\tcase logprinter.DisplayModeJSON:\n\t\tclusterObj := struct {\n\t\t\tClusters []Cluster `json:\"clusters\"`\n\t\t}{\n\t\t\tClusters: clusters,\n\t\t}\n\t\tdata, err := json.Marshal(clusterObj)\n\t\tif err != nil {\n\t\t\treturn err\n\t\t}\n\t\tfmt.Println(string(data))\n\tdefault:\n\t\tclusterTable := [][]string{\n\t\t\t// Header\n\t\t\t{\"Name\", \"User\", \"Version\", \"Path\", \"PrivateKey\"},\n\t\t}\n\t\tfor _, v := range clusters {\n\t\t\tclusterTable = append(clusterTable, []string{\n\t\t\t\tv.Name,\n\t\t\t\tv.User,\n\t\t\t\tv.Version,\n\t\t\t\tv.Path,\n\t\t\t\tv.PrivateKey,\n\t\t\t})\n\t\t}\n\t\ttui.PrintTable(clusterTable, true)\n\t}\n\treturn nil\n}", "func (c ClusterProfile) MarshalJSON() ([]byte, error) {\n\tobjectMap := make(map[string]any)\n\tpopulate(objectMap, \"domain\", c.Domain)\n\tpopulate(objectMap, \"fipsValidatedModules\", c.FipsValidatedModules)\n\tpopulate(objectMap, \"pullSecret\", c.PullSecret)\n\tpopulate(objectMap, \"resourceGroupId\", c.ResourceGroupID)\n\tpopulate(objectMap, \"version\", c.Version)\n\treturn json.Marshal(objectMap)\n}", "func (ssr SQLServerRegistration) MarshalJSON() ([]byte, error) {\n\tobjectMap := make(map[string]interface{})\n\tif ssr.SQLServerRegistrationProperties != nil {\n\t\tobjectMap[\"properties\"] = ssr.SQLServerRegistrationProperties\n\t}\n\tif ssr.Location != nil {\n\t\tobjectMap[\"location\"] = ssr.Location\n\t}\n\tif 
ssr.Tags != nil {\n\t\tobjectMap[\"tags\"] = ssr.Tags\n\t}\n\treturn json.Marshal(objectMap)\n}", "func (o *WatchRbacAuthorizationV1ClusterRoleListOK) WriteResponse(rw http.ResponseWriter, producer runtime.Producer) {\n\n\trw.WriteHeader(200)\n\tif o.Payload != nil {\n\t\tpayload := o.Payload\n\t\tif err := producer.Produce(rw, payload); err != nil {\n\t\t\tpanic(err) // let the recovery middleware deal with this\n\t\t}\n\t}\n}", "func provisionerList(w http.ResponseWriter, r *http.Request, t auth.Token) (err error) {\n\tallowed := permission.Check(t, permission.PermClusterRead)\n\tif !allowed {\n\t\treturn permission.ErrUnauthorized\n\t}\n\tprovs, err := provision.Registry()\n\tif err != nil {\n\t\treturn err\n\t}\n\tinfo := make([]provisionerInfo, len(provs))\n\tfor i, p := range provs {\n\t\tinfo[i].Name = p.GetName()\n\t\tif clusterProv, ok := p.(cluster.ClusteredProvisioner); ok {\n\t\t\tinfo[i].ClusterHelp = clusterProv.ClusterHelp()\n\t\t}\n\t}\n\tw.Header().Set(\"Content-Type\", \"application/json\")\n\treturn json.NewEncoder(w).Encode(info)\n}", "func MarshalClusterConfig(clusterConfig ClusterConfig, opts ...MarshalOption) ([]byte, error) {\n\tcfg, err := CollectOptions(opts)\n\tif err != nil {\n\t\treturn nil, trace.Wrap(err)\n\t}\n\n\tswitch clusterConfig := clusterConfig.(type) {\n\tcase *ClusterConfigV3:\n\t\tif version := clusterConfig.GetVersion(); version != V3 {\n\t\t\treturn nil, trace.BadParameter(\"mismatched cluster config version %v and type %T\", version, clusterConfig)\n\t\t}\n\t\tif !cfg.PreserveResourceID {\n\t\t\t// avoid modifying the original object\n\t\t\t// to prevent unexpected data races\n\t\t\tcopy := *clusterConfig\n\t\t\tcopy.SetResourceID(0)\n\t\t\tclusterConfig = &copy\n\t\t}\n\t\treturn utils.FastMarshal(clusterConfig)\n\tdefault:\n\t\treturn nil, trace.BadParameter(\"unrecognized cluster config version %T\", clusterConfig)\n\t}\n}", "func (cp ClusterProperties) MarshalJSON() ([]byte, error) {\n\tobjectMap := make(map[string]interface{})\n\tif cp.NextLink != nil {\n\t\tobjectMap[\"nextLink\"] = cp.NextLink\n\t}\n\tif cp.KeyVaultProperties != nil {\n\t\tobjectMap[\"keyVaultProperties\"] = cp.KeyVaultProperties\n\t}\n\treturn json.Marshal(objectMap)\n}", "func (s SubscriptionFeatureRegistrationList) MarshalJSON() ([]byte, error) {\n\tobjectMap := make(map[string]any)\n\tpopulate(objectMap, \"nextLink\", s.NextLink)\n\tpopulate(objectMap, \"value\", s.Value)\n\treturn json.Marshal(objectMap)\n}", "func (o OpenShiftClusterCredentials) MarshalJSON() ([]byte, error) {\n\tobjectMap := make(map[string]any)\n\tpopulate(objectMap, \"kubeadminPassword\", o.KubeadminPassword)\n\tpopulate(objectMap, \"kubeadminUsername\", o.KubeadminUsername)\n\treturn json.Marshal(objectMap)\n}", "func (o OpenShiftClusterProperties) MarshalJSON() ([]byte, error) {\n\tobjectMap := make(map[string]any)\n\tpopulate(objectMap, \"apiserverProfile\", o.ApiserverProfile)\n\tpopulate(objectMap, \"clusterProfile\", o.ClusterProfile)\n\tpopulate(objectMap, \"consoleProfile\", o.ConsoleProfile)\n\tpopulate(objectMap, \"ingressProfiles\", o.IngressProfiles)\n\tpopulate(objectMap, \"masterProfile\", o.MasterProfile)\n\tpopulate(objectMap, \"networkProfile\", o.NetworkProfile)\n\tpopulate(objectMap, \"provisioningState\", o.ProvisioningState)\n\tpopulate(objectMap, \"servicePrincipalProfile\", o.ServicePrincipalProfile)\n\tpopulate(objectMap, \"workerProfiles\", o.WorkerProfiles)\n\treturn json.Marshal(objectMap)\n}", "func (m *WindowsInformationProtectionDeviceRegistration) Serialize(writer 
i878a80d2330e89d26896388a3f487eef27b0a0e6c010c493bf80be1452208f91.SerializationWriter)(error) {\n err := m.Entity.Serialize(writer)\n if err != nil {\n return err\n }\n {\n err = writer.WriteStringValue(\"deviceMacAddress\", m.GetDeviceMacAddress())\n if err != nil {\n return err\n }\n }\n {\n err = writer.WriteStringValue(\"deviceName\", m.GetDeviceName())\n if err != nil {\n return err\n }\n }\n {\n err = writer.WriteStringValue(\"deviceRegistrationId\", m.GetDeviceRegistrationId())\n if err != nil {\n return err\n }\n }\n {\n err = writer.WriteStringValue(\"deviceType\", m.GetDeviceType())\n if err != nil {\n return err\n }\n }\n {\n err = writer.WriteTimeValue(\"lastCheckInDateTime\", m.GetLastCheckInDateTime())\n if err != nil {\n return err\n }\n }\n {\n err = writer.WriteStringValue(\"userId\", m.GetUserId())\n if err != nil {\n return err\n }\n }\n return nil\n}", "func (c *cache) ListApplicationRegistrations() (*storkv1alpha1.ApplicationRegistrationList, error) {\n\tif c == nil || c.controllerCache == nil {\n\t\treturn nil, fmt.Errorf(cacheNotInitializedErr)\n\t}\n\tappRegList := &storkv1alpha1.ApplicationRegistrationList{}\n\tif err := c.controllerCache.List(context.Background(), appRegList); err != nil {\n\t\treturn nil, err\n\t}\n\treturn appRegList, nil\n}", "func (ehcglr EventHubConsumerGroupsListResult) MarshalJSON() ([]byte, error) {\n\tobjectMap := make(map[string]interface{})\n\tif ehcglr.Value != nil {\n\t\tobjectMap[\"value\"] = ehcglr.Value\n\t}\n\treturn json.Marshal(objectMap)\n}", "func (cp ClusterPatch) MarshalJSON() ([]byte, error) {\n\tobjectMap := make(map[string]interface{})\n\tif cp.ClusterPatchProperties != nil {\n\t\tobjectMap[\"properties\"] = cp.ClusterPatchProperties\n\t}\n\tif cp.Sku != nil {\n\t\tobjectMap[\"sku\"] = cp.Sku\n\t}\n\tif cp.Tags != nil {\n\t\tobjectMap[\"tags\"] = cp.Tags\n\t}\n\treturn json.Marshal(objectMap)\n}", "func (providerRegistrationManager *ProviderRegistrationManager) PostProviderRegistrationListSettings(request *restful.Request, response *restful.Response) {\n\tfullyQualifiedResourceID := engines.GetFullyQualifiedProviderRegistrationID(request)\n\n\t// Get Document from collection\n\tproviderRegistrationPackage := entities.ProviderRegistrationPackage{}\n\terr := providerRegistrationManager.ProviderRegistrationDataProvider.FindPackage(fullyQualifiedResourceID, &providerRegistrationPackage)\n\tif err != nil {\n\t\tapierror.WriteErrorToResponse(\n\t\t\tresponse,\n\t\t\thttp.StatusNotFound,\n\t\t\tapierror.ClientError,\n\t\t\tapierror.NotFound,\n\t\t\terr.Error())\n\t\treturn\n\t}\n\n\tresponseContent, err := json.Marshal(providerRegistrationPackage.ToListSettingsDefinition())\n\tif err != nil {\n\t\tapierror.WriteErrorToResponse(\n\t\t\tresponse,\n\t\t\thttp.StatusInternalServerError,\n\t\t\tapierror.InternalError,\n\t\t\tapierror.InternalOperationError,\n\t\t\tfmt.Sprintf(\"Failed to serialize provider registration package: %s\", err.Error()))\n\t\treturn\n\t}\n\tresponse.Header().Set(restful.HEADER_ContentType, restful.MIME_JSON)\n\tresponse.Write(responseContent)\n}", "func (o OpenShiftClusterAdminKubeconfig) MarshalJSON() ([]byte, error) {\n\tobjectMap := make(map[string]any)\n\tpopulate(objectMap, \"kubeconfig\", o.Kubeconfig)\n\treturn json.Marshal(objectMap)\n}", "func (ce ClusterEvent) MarshalJSON() ([]byte, error) {\n\tce.Kind = KindClusterEvent\n\tobjectMap := make(map[string]interface{})\n\tif ce.EventInstanceID != nil {\n\t\tobjectMap[\"EventInstanceId\"] = ce.EventInstanceID\n\t}\n\tif ce.TimeStamp != nil 
{\n\t\tobjectMap[\"TimeStamp\"] = ce.TimeStamp\n\t}\n\tif ce.HasCorrelatedEvents != nil {\n\t\tobjectMap[\"HasCorrelatedEvents\"] = ce.HasCorrelatedEvents\n\t}\n\tif ce.Kind != \"\" {\n\t\tobjectMap[\"Kind\"] = ce.Kind\n\t}\n\treturn json.Marshal(objectMap)\n}", "func Convert_v1alpha1_ControllerRegistrationList_To_core_ControllerRegistrationList(in *ControllerRegistrationList, out *core.ControllerRegistrationList, s conversion.Scope) error {\n\treturn autoConvert_v1alpha1_ControllerRegistrationList_To_core_ControllerRegistrationList(in, out, s)\n}", "func (n NotificationChannelList) MarshalJSON() ([]byte, error) {\n\tobjectMap := make(map[string]interface{})\n\tpopulate(objectMap, \"nextLink\", n.NextLink)\n\tpopulate(objectMap, \"value\", n.Value)\n\treturn json.Marshal(objectMap)\n}", "func SerializeKubeCluster(cluster types.KubeCluster) ([]byte, error) {\n\ta := struct {\n\t\tName string `yaml:\"name\"`\n\t\tKind string `yaml:\"kind\"`\n\t\tSubKind string `yaml:\"subkind\"`\n\t\tLabels map[string]string `yaml:\"labels\"`\n\t}{\n\t\tName: cluster.GetName(),\n\t\tKind: types.KindKubernetesCluster,\n\t\tSubKind: cluster.GetSubKind(),\n\t\tLabels: cluster.GetAllLabels(),\n\t}\n\ttext, err := yaml.Marshal(&a)\n\treturn text, trace.Wrap(err)\n}", "func (l LineRegistration) MarshalJSON() ([]byte, error) {\n\tobjectMap := make(map[string]any)\n\tpopulate(objectMap, \"channelAccessToken\", l.ChannelAccessToken)\n\tpopulate(objectMap, \"channelSecret\", l.ChannelSecret)\n\tpopulate(objectMap, \"generatedId\", l.GeneratedID)\n\treturn json.Marshal(objectMap)\n}", "func WriteStringList(w io.Writer, l []string) error {\n\treturn writeStringList(NewCompression1Writer(w), l, nil, true, false)\n}", "func (e ExpressRoutePortAuthorizationListResult) MarshalJSON() ([]byte, error) {\n\tobjectMap := make(map[string]any)\n\tpopulate(objectMap, \"nextLink\", e.NextLink)\n\tpopulate(objectMap, \"value\", e.Value)\n\treturn json.Marshal(objectMap)\n}", "func (s SharedAccessSignatureAuthorizationRuleListResult) MarshalJSON() ([]byte, error) {\n\tobjectMap := make(map[string]any)\n\tpopulate(objectMap, \"nextLink\", s.NextLink)\n\tpopulate(objectMap, \"value\", s.Value)\n\treturn json.Marshal(objectMap)\n}", "func (sasarlr SharedAccessSignatureAuthorizationRuleListResult) MarshalJSON() ([]byte, error) {\n\tobjectMap := make(map[string]interface{})\n\tif sasarlr.Value != nil {\n\t\tobjectMap[\"value\"] = sasarlr.Value\n\t}\n\treturn json.Marshal(objectMap)\n}", "func (s ServiceFabricList) MarshalJSON() ([]byte, error) {\n\tobjectMap := make(map[string]interface{})\n\tpopulate(objectMap, \"nextLink\", s.NextLink)\n\tpopulate(objectMap, \"value\", s.Value)\n\treturn json.Marshal(objectMap)\n}", "func (client DeploymentsClient) ListForClusterSender(req *http.Request) (*http.Response, error) {\n\treturn client.Send(req, azure.DoRetryWithRegistration(client.Client))\n}", "func listClusters(w http.ResponseWriter, r *http.Request, t auth.Token) (err error) {\n\tctx := r.Context()\n\tallowed := permission.Check(t, permission.PermClusterRead)\n\tif !allowed {\n\t\treturn permission.ErrUnauthorized\n\t}\n\tclusters, err := servicemanager.Cluster.List(ctx)\n\tif err != nil {\n\t\tif err == provTypes.ErrNoCluster {\n\t\t\tw.WriteHeader(http.StatusNoContent)\n\t\t\treturn nil\n\t\t}\n\t\treturn err\n\t}\n\tadmin := permission.Check(t, permission.PermClusterAdmin)\n\tif !admin {\n\t\tfor i := range clusters {\n\t\t\tclusters[i].CleanUpSensitive()\n\t\t}\n\t}\n\tw.Header().Set(\"Content-Type\", \"application/json\")\n\treturn 
json.NewEncoder(w).Encode(clusters)\n}", "func (m *ManagedDeviceComplianceCollectionResponse) Serialize(writer i878a80d2330e89d26896388a3f487eef27b0a0e6c010c493bf80be1452208f91.SerializationWriter)(error) {\n err := m.BaseCollectionPaginationCountResponse.Serialize(writer)\n if err != nil {\n return err\n }\n if m.GetValue() != nil {\n cast := make([]i878a80d2330e89d26896388a3f487eef27b0a0e6c010c493bf80be1452208f91.Parsable, len(m.GetValue()))\n for i, v := range m.GetValue() {\n if v != nil {\n cast[i] = v.(i878a80d2330e89d26896388a3f487eef27b0a0e6c010c493bf80be1452208f91.Parsable)\n }\n }\n err = writer.WriteCollectionOfObjectValues(\"value\", cast)\n if err != nil {\n return err\n }\n }\n return nil\n}", "func (adm Admin) ListClusterInfo(cluster string) (string, error) {\n\t// make sure the cluster is already setup\n\tif ok, err := adm.isClusterSetup(cluster); !ok || err != nil {\n\t\treturn \"\", ErrClusterNotSetup\n\t}\n\n\tbuilder := KeyBuilder{cluster}\n\tisPath := builder.idealStates()\n\tinstancesPath := builder.instances()\n\n\tresources, err := adm.zkClient.Children(isPath)\n\tif err != nil {\n\t\treturn \"\", err\n\t}\n\n\tinstances, err := adm.zkClient.Children(instancesPath)\n\tif err != nil {\n\t\treturn \"\", err\n\t}\n\n\tvar buffer bytes.Buffer\n\tbuffer.WriteString(\"Existing resources in cluster \" + cluster + \":\\n\")\n\n\tfor _, r := range resources {\n\t\tbuffer.WriteString(\" \" + r + \"\\n\")\n\t}\n\n\tbuffer.WriteString(\"\\nInstances in cluster \" + cluster + \":\\n\")\n\tfor _, i := range instances {\n\t\tbuffer.WriteString(\" \" + i + \"\\n\")\n\t}\n\treturn buffer.String(), nil\n}", "func (c CommunicationsGatewayListResult) MarshalJSON() ([]byte, error) {\n\tobjectMap := make(map[string]any)\n\tpopulate(objectMap, \"nextLink\", c.NextLink)\n\tpopulate(objectMap, \"value\", c.Value)\n\treturn json.Marshal(objectMap)\n}", "func (s SharedAccessAuthorizationRuleListResult) MarshalJSON() ([]byte, error) {\n\tobjectMap := make(map[string]any)\n\tpopulate(objectMap, \"nextLink\", s.NextLink)\n\tpopulate(objectMap, \"value\", s.Value)\n\treturn json.Marshal(objectMap)\n}", "func (v NotificationList) MarshalJSON() ([]byte, error) {\n\tw := jwriter.Writer{}\n\teasyjson9806e1EncodeGithubComKonstantinProninEmailSendingServicePkgModel(&w, v)\n\treturn w.Buffer.BuildBytes(), w.Error\n}", "func (e EventHubConsumerGroupsListResult) MarshalJSON() ([]byte, error) {\n\tobjectMap := make(map[string]any)\n\tpopulate(objectMap, \"nextLink\", e.NextLink)\n\tpopulate(objectMap, \"value\", e.Value)\n\treturn json.Marshal(objectMap)\n}", "func (p PartnerRegistration) MarshalJSON() ([]byte, error) {\n\tobjectMap := make(map[string]interface{})\n\tpopulate(objectMap, \"id\", p.ID)\n\tpopulate(objectMap, \"location\", p.Location)\n\tpopulate(objectMap, \"name\", p.Name)\n\tpopulate(objectMap, \"properties\", p.Properties)\n\tpopulate(objectMap, \"systemData\", p.SystemData)\n\tpopulate(objectMap, \"tags\", p.Tags)\n\tpopulate(objectMap, \"type\", p.Type)\n\treturn json.Marshal(objectMap)\n}", "func (c ChannelResponseList) MarshalJSON() ([]byte, error) {\n\tobjectMap := make(map[string]any)\n\tpopulate(objectMap, \"nextLink\", c.NextLink)\n\tpopulate(objectMap, \"value\", c.Value)\n\treturn json.Marshal(objectMap)\n}", "func (s SecurityGroupListResult) MarshalJSON() ([]byte, error) {\n\tobjectMap := make(map[string]any)\n\tpopulate(objectMap, \"nextLink\", s.NextLink)\n\tpopulate(objectMap, \"value\", s.Value)\n\treturn json.Marshal(objectMap)\n}", "func (this *EnvoyFilter_ClusterMatch) 
MarshalJSON() ([]byte, error) {\n\tstr, err := EnvoyFilterMarshaler.MarshalToString(this)\n\treturn []byte(str), err\n}", "func (*OktetoClusterHelper) List() (map[string]string, error) {\n\treturn nil, ErrNotImplemented\n}", "func (c *Client) ListApplicationRegistrations() (*storkv1alpha1.ApplicationRegistrationList, error) {\n\tif err := c.initClient(); err != nil {\n\t\treturn nil, err\n\t}\n\treturn c.stork.StorkV1alpha1().ApplicationRegistrations().List(context.TODO(), metav1.ListOptions{})\n\n}", "func List(helper Helper, writer io.Writer) error {\n\taccts, err := helper.List()\n\tif err != nil {\n\t\treturn err\n\t}\n\treturn json.NewEncoder(writer).Encode(accts)\n}", "func (adm Admin) ListClusterInfo(cluster string) (string, error) {\n\tconn := newConnection(adm.ZkSvr)\n\terr := conn.Connect()\n\tif err != nil {\n\t\treturn \"\", err\n\t}\n\tdefer conn.Disconnect()\n\n\t// make sure the cluster is already setup\n\tif ok, err := conn.IsClusterSetup(cluster); !ok || err != nil {\n\t\treturn \"\", ErrClusterNotSetup\n\t}\n\n\tkeys := KeyBuilder{cluster}\n\tisPath := keys.idealStates()\n\tinstancesPath := keys.instances()\n\n\tresources, err := conn.Children(isPath)\n\tif err != nil {\n\t\treturn \"\", err\n\t}\n\n\tinstances, err := conn.Children(instancesPath)\n\tif err != nil {\n\t\treturn \"\", err\n\t}\n\n\tvar buffer bytes.Buffer\n\tbuffer.WriteString(\"Existing resources in cluster \" + cluster + \":\\n\")\n\n\tfor _, r := range resources {\n\t\tbuffer.WriteString(\" \" + r + \"\\n\")\n\t}\n\n\tbuffer.WriteString(\"\\nInstances in cluster \" + cluster + \":\\n\")\n\tfor _, i := range instances {\n\t\tbuffer.WriteString(\" \" + i + \"\\n\")\n\t}\n\treturn buffer.String(), nil\n}", "func (s SAPCentralInstanceList) MarshalJSON() ([]byte, error) {\n\tobjectMap := make(map[string]any)\n\tpopulate(objectMap, \"nextLink\", s.NextLink)\n\tpopulate(objectMap, \"value\", s.Value)\n\treturn json.Marshal(objectMap)\n}", "func (d DscpConfigurationListResult) MarshalJSON() ([]byte, error) {\n\tobjectMap := make(map[string]any)\n\tpopulate(objectMap, \"nextLink\", d.NextLink)\n\tpopulate(objectMap, \"value\", d.Value)\n\treturn json.Marshal(objectMap)\n}", "func (w *Writer) WriteList(lst *list.List) {\n\tptr := unsafe.Pointer(lst)\n\ttyp := reflect.TypeOf(lst)\n\tif writeRef(w, ptr, typ) {\n\t\treturn\n\t}\n\tsetWriterRef(w, ptr, typ)\n\tcount := lst.Len()\n\tif count == 0 {\n\t\twriteEmptyList(w)\n\t\treturn\n\t}\n\twriteListHeader(w, count)\n\tfor e := lst.Front(); e != nil; e = e.Next() {\n\t\tw.Serialize(e.Value)\n\t}\n\twriteListFooter(w)\n}", "func (p *PubKey) OutputList(to io.Writer) int {\n\tsb := new(strings.Builder)\n\tfor u, _ := range p.users {\n\t\tfor i, _ := range p.users[u].Keys {\n\t\t\tk := p.users[u].Keys[i]\n\t\t\tif k.Key != nil {\n\t\t\t\tsb.WriteString(fmt.Sprintf(\"%s %s\\n\", *k.Key, u))\n\t\t\t}\n\t\t}\n\t}\n\tfmt.Fprint(to, sb.String())\n\treturn 0\n}", "func EncodeClusterSpec(spec *VsphereClusterProviderConfig) (*runtime.RawExtension, error) {\n\tif spec == nil {\n\t\treturn &runtime.RawExtension{}, nil\n\t}\n\n\tvar rawBytes []byte\n\tvar err error\n\n\t// TODO: use apimachinery conversion https://godoc.org/k8s.io/apimachinery/pkg/runtime#Convert_runtime_Object_To_runtime_RawExtension\n\tif rawBytes, err = json.Marshal(spec); err != nil {\n\t\treturn nil, err\n\t}\n\n\treturn &runtime.RawExtension{\n\t\tRaw: rawBytes,\n\t\tObject: spec,\n\t}, nil\n}", "func (s *ClusterStatus) MarshalJSON() ([]byte, error) {\n\tbuffer := 
bytes.NewBufferString(`\"`)\n\tbuffer.WriteString(s.String())\n\tbuffer.WriteString(`\"`)\n\treturn buffer.Bytes(), nil\n}", "func (sl SketchSeriesList) Marshal() ([]byte, error) {\n\tpayload := &agentpayload.SketchPayload{\n\t\tSketches: []agentpayload.SketchPayload_Sketch{},\n\t\tMetadata: agentpayload.CommonMetadata{},\n\t}\n\tfor _, s := range sl {\n\t\tpayload.Sketches = append(payload.Sketches,\n\t\t\tagentpayload.SketchPayload_Sketch{\n\t\t\t\tMetric: s.Name,\n\t\t\t\tHost: s.Host,\n\t\t\t\tDistributions: marshalSketch(s.Sketches),\n\t\t\t\tTags: s.Tags,\n\t\t\t})\n\t}\n\treturn proto.Marshal(payload)\n}", "func RegisterCluster(\n\tctx context.Context,\n\tbinaryName string,\n\tflags *pflag.FlagSet,\n\tclientsFactory common.ClientsFactory,\n\tkubeClientsFactory common.KubeClientsFactory,\n\topts *options.Options,\n\tout io.Writer,\n\tkubeLoader common_config.KubeLoader,\n) error {\n\n\tif err := cluster_internal.VerifyRemoteContextFlags(opts); err != nil {\n\t\treturn err\n\t}\n\n\tif err := cluster_internal.VerifyMasterCluster(clientsFactory, opts); err != nil {\n\t\treturn err\n\t}\n\n\tregisterOpts := opts.Cluster.Register\n\n\t// set up kube clients for the master cluster\n\tmasterCfg, err := kubeLoader.GetRestConfigForContext(opts.Root.KubeConfig, opts.Root.KubeContext)\n\tif err != nil {\n\t\treturn common.FailedLoadingMasterConfig(err)\n\t}\n\tmasterKubeClients, err := kubeClientsFactory(masterCfg, opts.Root.WriteNamespace)\n\tif err != nil {\n\t\treturn err\n\t}\n\n\t// set up kube clients for the remote cluster\n\n\t// default the remote kube config/context to the root settings\n\tremoteConfigPath, remoteContext := opts.Root.KubeConfig, opts.Root.KubeContext\n\tif registerOpts.RemoteKubeConfig != \"\" {\n\t\t// if we specified a kube config for the remote cluster, use that instead\n\t\tremoteConfigPath = registerOpts.RemoteKubeConfig\n\t}\n\n\t// if we didn't have a context from the root, or if we had an override for the\n\t// remote context, use the remote context instead\n\tif remoteContext == \"\" || registerOpts.RemoteContext != \"\" {\n\t\tremoteContext = registerOpts.RemoteContext\n\t}\n\n\tremoteCfg, err := kubeLoader.GetRestConfigForContext(remoteConfigPath, remoteContext)\n\tif err != nil {\n\t\treturn FailedLoadingRemoteConfig(err)\n\t}\n\tremoteKubeClients, err := kubeClientsFactory(remoteCfg, registerOpts.RemoteWriteNamespace)\n\tif err != nil {\n\t\treturn err\n\t}\n\n\t// if overwrite returns ok than the program should continue, else return\n\t// The reason for the 2 return vars is that err may be nil and returned anyway\n\tif ok, err := shouldOverwrite(ctx, binaryName, flags, out, opts, masterKubeClients); !ok {\n\t\treturn err\n\t}\n\n\tif err = ensureRemoteNamespace(ctx, registerOpts.RemoteWriteNamespace, remoteKubeClients); err != nil {\n\t\treturn err\n\t}\n\n\tbearerTokenForServiceAccount, err := generateServiceAccountBearerToken(ctx, out, remoteKubeClients, remoteCfg, registerOpts)\n\tif err != nil {\n\t\treturn err\n\t}\n\tfmt.Fprintf(out, \"Successfully wrote service account to remote cluster...\\n\")\n\n\tclients, err := clientsFactory(opts)\n\tif err != nil {\n\t\treturn eris.Errorf(\"Unexpected error: Clients should have already been built, but failed to build again\")\n\t}\n\t// Install CRDs to remote cluster. 
This must happen BEFORE kubeconfig Secret is written to master cluster because\n\t// relevant CRDs must exist before SMH attempts any cross cluster functionality.\n\tcsrAgentInstaller := clients.ClusterRegistrationClients.CsrAgentInstallerFactory(remoteKubeClients.HelmInstaller, masterKubeClients.DeployedVersionFinder)\n\terr = csrAgentInstaller.Install(\n\t\tctx,\n\t\t&csr.CsrAgentInstallOptions{\n\t\t\tKubeConfig: remoteConfigPath,\n\t\t\tKubeContext: remoteContext,\n\t\t\tClusterName: registerOpts.RemoteClusterName,\n\t\t\tSmhInstallNamespace: opts.Root.WriteNamespace,\n\t\t\tUseDevCsrAgentChart: registerOpts.UseDevCsrAgentChart,\n\t\t\tReleaseName: cliconstants.CsrAgentReleaseName,\n\t\t\tRemoteWriteNamespace: registerOpts.RemoteWriteNamespace,\n\t\t},\n\t)\n\tif err != nil {\n\t\treturn err\n\t}\n\tfmt.Fprintf(out, \"Successfully set up CSR agent...\\n\")\n\n\t// Write kubeconfig Secret and KubeCluster CRD to master cluster\n\tsecret, err := writeKubeConfigToMaster(\n\t\tctx,\n\t\topts.Root.WriteNamespace,\n\t\tregisterOpts,\n\t\tremoteConfigPath,\n\t\tbearerTokenForServiceAccount,\n\t\tmasterKubeClients,\n\t\tkubeLoader,\n\t\tclients.KubeConverter,\n\t)\n\tif err != nil {\n\t\treturn err\n\t}\n\tfmt.Fprintf(out, \"Successfully wrote kube config secret to master cluster...\\n\")\n\terr = writeKubeClusterToMaster(ctx, masterKubeClients, opts.Root.WriteNamespace, registerOpts, secret)\n\tif err != nil {\n\t\treturn err\n\t}\n\n\tfmt.Fprintf(\n\t\tout,\n\t\t\"\\nCluster %s is now registered in your Service Mesh Hub installation\\n\",\n\t\tregisterOpts.RemoteClusterName,\n\t)\n\n\treturn nil\n}", "func (client DeploymentsClient) ListForClusterResponder(resp *http.Response) (result DeploymentResourceCollection, err error) {\n\terr = autorest.Respond(\n\t\tresp,\n\t\tazure.WithErrorUnlessStatusCode(http.StatusOK),\n\t\tautorest.ByUnmarshallingJSON(&result),\n\t\tautorest.ByClosing())\n\tresult.Response = autorest.Response{Response: resp}\n\treturn\n}", "func (o OpenShiftClusterUpdate) MarshalJSON() ([]byte, error) {\n\tobjectMap := make(map[string]any)\n\tpopulate(objectMap, \"properties\", o.Properties)\n\tpopulate(objectMap, \"systemData\", o.SystemData)\n\tpopulate(objectMap, \"tags\", o.Tags)\n\treturn json.Marshal(objectMap)\n}", "func (svc ServerlessClusterService) List(ctx context.Context) (*[]models.Cluster, *Response, error) {\n\tvar clusterList []models.Cluster\n\tgraphqlRequest := models.GraphqlRequest{\n\t\tName: \"clusters\",\n\t\tOperation: models.Query,\n\t\tInput: nil,\n\t\tArgs: models.ClusterListInput{\n\t\t\tProductType: models.Starter,\n\t\t},\n\t\tResponse: clusterList,\n\t}\n\treq, err := svc.client.NewRequest(&graphqlRequest)\n\tif err != nil {\n\t\treturn nil, nil, err\n\t}\n\n\tresp, err := svc.client.Do(ctx, req, &clusterList)\n\tif err != nil {\n\t\treturn nil, nil, err\n\t}\n\n\treturn &clusterList, resp, err\n}", "func (p PrivateDNSZoneGroupListResult) MarshalJSON() ([]byte, error) {\n\tobjectMap := make(map[string]any)\n\tpopulate(objectMap, \"nextLink\", p.NextLink)\n\tpopulate(objectMap, \"value\", p.Value)\n\treturn json.Marshal(objectMap)\n}", "func (s SecurityAdminConfigurationListResult) MarshalJSON() ([]byte, error) {\n\tobjectMap := make(map[string]any)\n\tpopulate(objectMap, \"nextLink\", s.NextLink)\n\tpopulate(objectMap, \"value\", s.Value)\n\treturn json.Marshal(objectMap)\n}", "func (r RoleAssignmentList) MarshalJSON() ([]byte, error) {\n\ttype Alias RoleAssignmentList\n\treturn json.Marshal(\n\t\tstruct {\n\t\t\tmeta.TypeMeta 
`json:\",inline\"`\n\t\t\tAlias `json:\",inline\"`\n\t\t}{\n\t\t\tTypeMeta: meta.TypeMeta{\n\t\t\t\tAPIVersion: meta.APIVersion,\n\t\t\t\tKind: RoleAssignmentListKind,\n\t\t\t},\n\t\t\tAlias: (Alias)(r),\n\t\t},\n\t)\n}", "func (client DeploymentsClient) ListForCluster(ctx context.Context, resourceGroupName string, serviceName string, version []string) (result DeploymentResourceCollectionPage, err error) {\n\tif tracing.IsEnabled() {\n\t\tctx = tracing.StartSpan(ctx, fqdn+\"/DeploymentsClient.ListForCluster\")\n\t\tdefer func() {\n\t\t\tsc := -1\n\t\t\tif result.drc.Response.Response != nil {\n\t\t\t\tsc = result.drc.Response.Response.StatusCode\n\t\t\t}\n\t\t\ttracing.EndSpan(ctx, sc, err)\n\t\t}()\n\t}\n\tresult.fn = client.listForClusterNextResults\n\treq, err := client.ListForClusterPreparer(ctx, resourceGroupName, serviceName, version)\n\tif err != nil {\n\t\terr = autorest.NewErrorWithError(err, \"appplatform.DeploymentsClient\", \"ListForCluster\", nil, \"Failure preparing request\")\n\t\treturn\n\t}\n\n\tresp, err := client.ListForClusterSender(req)\n\tif err != nil {\n\t\tresult.drc.Response = autorest.Response{Response: resp}\n\t\terr = autorest.NewErrorWithError(err, \"appplatform.DeploymentsClient\", \"ListForCluster\", resp, \"Failure sending request\")\n\t\treturn\n\t}\n\n\tresult.drc, err = client.ListForClusterResponder(resp)\n\tif err != nil {\n\t\terr = autorest.NewErrorWithError(err, \"appplatform.DeploymentsClient\", \"ListForCluster\", resp, \"Failure responding to request\")\n\t\treturn\n\t}\n\tif result.drc.hasNextLink() && result.drc.IsEmpty() {\n\t\terr = result.NextWithContext(ctx)\n\t\treturn\n\t}\n\n\treturn\n}", "func (e *EventLogger) WriteSyncedLists() {\n\tdata, _ := json.Marshal(e.SyncedLists)\n\tif _, err := os.Stat(e.syncedListsConfigDir()); os.IsNotExist(err) {\n\t\tos.MkdirAll(e.syncedListsConfigDir(), os.ModePerm)\n\t\tif _, cerr := os.Create(e.syncedListsConfigDir()); cerr != nil {\n\t\t\tpanic(cerr)\n\t\t}\n\t}\n\n\tif err := ioutil.WriteFile(e.syncedListsFile(), data, 0644); err != nil {\n\t\tpanic(err)\n\t}\n}", "func (f FirewallPolicyRuleCollectionGroupListResult) MarshalJSON() ([]byte, error) {\n\tobjectMap := make(map[string]any)\n\tpopulate(objectMap, \"nextLink\", f.NextLink)\n\tpopulate(objectMap, \"value\", f.Value)\n\treturn json.Marshal(objectMap)\n}", "func (t ListGroupsResponse) Encode(e *Encoder, version int16) {\n\tif version >= 1 {\n\t\te.PutInt32(t.ThrottleTimeMs) // ThrottleTimeMs\n\t}\n\te.PutInt16(t.ErrorCode) // ErrorCode\n\t// Groups\n\tlen2 := len(t.Groups)\n\te.PutArrayLength(len2)\n\tfor i := 0; i < len2; i++ {\n\t\tt.Groups[i].Encode(e, version)\n\t}\n}", "func (v watchlistSlice) MarshalJSON() ([]byte, error) {\n\tw := jwriter.Writer{}\n\teasyjson3e8ab7adEncodeGithubComAlpacahqAlpacaTradeApiGoV3Alpaca(&w, v)\n\treturn w.Buffer.BuildBytes(), w.Error\n}", "func (cr CommonWriter) WriteList(op thrift.TProtocol, lIst *idltypes.List, data []interface{}) error {\n\tif err := op.WriteListBegin(thrift.LIST, len(data)); err != nil {\n\t\treturn fmt.Errorf(\"error writing list begin: %s\", err)\n\t}\n\tfor _, v := range data {\n\t\tif err := cr.writeFieldValue(op, v, lIst.ValueType()); err != nil {\n\t\t\treturn fmt.Errorf(\"%s field write error: %s\", lIst.ValueType().Name(), err)\n\t\t}\n\t}\n\tif err := op.WriteListEnd(); err != nil {\n\t\treturn fmt.Errorf(\"error writing list end: %s\", err)\n\t}\n\treturn nil\n}", "func (a AuthorizationListResult) MarshalJSON() ([]byte, error) {\n\tobjectMap := 
make(map[string]any)\n\tpopulate(objectMap, \"nextLink\", a.NextLink)\n\tpopulate(objectMap, \"value\", a.Value)\n\treturn json.Marshal(objectMap)\n}", "func EncodeListResponse(_ context.Context, w http.ResponseWriter, response interface{}) (err error) {\n\tw.Header().Set(\"Content-Type\", \"application/json; charset=utf-8\")\n\te := json.NewEncoder(w)\n\te.SetIndent(\"\", \"\\t\")\n\terr = e.Encode(response)\n\treturn err\n}", "func (c *MultiClusterController) List(clusterName string, opts ...client.ListOption) (interface{}, error) {\n\tcluster := c.GetCluster(clusterName)\n\tif cluster == nil {\n\t\treturn nil, errors.NewClusterNotFound(clusterName)\n\t}\n\tinstanceList := utilscheme.Scheme.NewObjectList(c.objectType)\n\tdelegatingClient, err := cluster.GetDelegatingClient()\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\terr = delegatingClient.List(context.TODO(), instanceList, opts...)\n\treturn instanceList, err\n}", "func (a ApplicationSecurityGroupListResult) MarshalJSON() ([]byte, error) {\n\tobjectMap := make(map[string]any)\n\tpopulate(objectMap, \"nextLink\", a.NextLink)\n\tpopulate(objectMap, \"value\", a.Value)\n\treturn json.Marshal(objectMap)\n}", "func (r *AWSEMRCluster) MarshalJSON() ([]byte, error) {\n\ttype Properties AWSEMRCluster\n\treturn json.Marshal(&struct {\n\t\tType string\n\t\tProperties Properties\n\t}{\n\t\tType: r.AWSCloudFormationType(),\n\t\tProperties: (Properties)(*r),\n\t})\n}", "func RenderCluster(out io.Writer, cluster sind.ClusterStatus) {\n\twr := tabwriter.NewWriter(out, 4, 8, 2, '\\t', 0)\n\tdefer wr.Flush()\n\n\tfmt.Fprintf(\n\t\twr,\n\t\t\"Name: %s\\tStatus: %s\\tManagers: %s\\t Workers: %s\\t\\n\",\n\t\tstyle.Important(cluster.Name),\n\t\tstyle.Important(status(cluster)),\n\t\tstyle.Important(fmt.Sprintf(\"%d/%d\", cluster.ManagersRunning, cluster.Managers)),\n\t\tstyle.Important(fmt.Sprintf(\"%d/%d\", cluster.WorkersRunning, cluster.Workers)),\n\t)\n\n\tfmt.Fprintf(wr, \"ID\\tImage\\tRole\\tStatus\\tIPs\\t\\n\")\n\tfmt.Fprintf(wr, \"--\\t-----\\t----\\t------\\t---\\t\\n\")\n\n\tfor _, node := range cluster.Nodes {\n\t\tfmt.Fprintf(\n\t\t\twr,\n\t\t\t\"%s\\t%s\\t%s\\t%s\\t%s\\t\\n\",\n\t\t\tnode.ID[0:11],\n\t\t\tnode.Image,\n\t\t\tclusterRole(node),\n\t\t\tnode.Status,\n\t\t\tnodeIP(node),\n\t\t)\n\t}\n}", "func (c ConnectionMonitorListResult) MarshalJSON() ([]byte, error) {\n\tobjectMap := make(map[string]any)\n\tpopulate(objectMap, \"value\", c.Value)\n\treturn json.Marshal(objectMap)\n}", "func (c CertificateListDescription) MarshalJSON() ([]byte, error) {\n\tobjectMap := make(map[string]any)\n\tpopulate(objectMap, \"value\", c.Value)\n\treturn json.Marshal(objectMap)\n}", "func (r RegisteredPrefixListResult) MarshalJSON() ([]byte, error) {\n\tobjectMap := make(map[string]any)\n\tpopulate(objectMap, \"nextLink\", r.NextLink)\n\tpopulate(objectMap, \"value\", r.Value)\n\treturn json.Marshal(objectMap)\n}", "func ListAllRegistrations(w http.ResponseWriter, r *http.Request) {\n\n\tcontentType := \"application/json\"\n\tcharset := \"utf-8\"\n\tw.Header().Add(\"Content-Type\", fmt.Sprintf(\"%s; charset=%s\", contentType, charset))\n\n\t// Grab context references\n\trefStr := gorillaContext.Get(r, \"str\").(stores.Store)\n\n\tname := r.URL.Query().Get(\"name\")\n\tstatus := r.URL.Query().Get(\"status\")\n\temail := r.URL.Query().Get(\"email\")\n\torg := r.URL.Query().Get(\"organization\")\n\tactivationToken := r.URL.Query().Get(\"activation_token\")\n\n\tur, err := auth.FindUserRegistrations(status, activationToken, name, email, org, refStr)\n\tif 
err != nil {\n\n\t\terr := APIErrGenericInternal(err.Error())\n\t\trespondErr(w, err)\n\t\treturn\n\t}\n\n\turb, err := json.MarshalIndent(ur, \"\", \" \")\n\tif err != nil {\n\t\terr := APIErrGenericInternal(err.Error())\n\t\trespondErr(w, err)\n\t\treturn\n\t}\n\n\trespondOK(w, urb)\n}", "func (e EffectiveNetworkSecurityGroupListResult) MarshalJSON() ([]byte, error) {\n\tobjectMap := make(map[string]any)\n\tpopulate(objectMap, \"nextLink\", e.NextLink)\n\tpopulate(objectMap, \"value\", e.Value)\n\treturn json.Marshal(objectMap)\n}", "func (s ScheduleList) MarshalJSON() ([]byte, error) {\n\tobjectMap := make(map[string]interface{})\n\tpopulate(objectMap, \"nextLink\", s.NextLink)\n\tpopulate(objectMap, \"value\", s.Value)\n\treturn json.Marshal(objectMap)\n}", "func (p PartnerRegistrationProperties) MarshalJSON() ([]byte, error) {\n\tobjectMap := make(map[string]interface{})\n\tpopulate(objectMap, \"authorizedAzureSubscriptionIds\", p.AuthorizedAzureSubscriptionIDs)\n\tpopulate(objectMap, \"customerServiceUri\", p.CustomerServiceURI)\n\tpopulate(objectMap, \"logoUri\", p.LogoURI)\n\tpopulate(objectMap, \"longDescription\", p.LongDescription)\n\tpopulate(objectMap, \"partnerCustomerServiceExtension\", p.PartnerCustomerServiceExtension)\n\tpopulate(objectMap, \"partnerCustomerServiceNumber\", p.PartnerCustomerServiceNumber)\n\tpopulate(objectMap, \"partnerName\", p.PartnerName)\n\tpopulate(objectMap, \"partnerRegistrationImmutableId\", p.PartnerRegistrationImmutableID)\n\tpopulate(objectMap, \"partnerResourceTypeDescription\", p.PartnerResourceTypeDescription)\n\tpopulate(objectMap, \"partnerResourceTypeDisplayName\", p.PartnerResourceTypeDisplayName)\n\tpopulate(objectMap, \"partnerResourceTypeName\", p.PartnerResourceTypeName)\n\tpopulate(objectMap, \"provisioningState\", p.ProvisioningState)\n\tpopulate(objectMap, \"setupUri\", p.SetupURI)\n\tpopulate(objectMap, \"visibilityState\", p.VisibilityState)\n\treturn json.Marshal(objectMap)\n}", "func (m *SvcConfigDiscoveryResponse) MarshalJSON() ([]byte, error) {\n\tif m == nil {\n\t\treturn json.Marshal(nil)\n\t}\n\tbuf := &bytes.Buffer{}\n\tif err := SvcConfigDiscoveryResponseJSONMarshaler.Marshal(buf, m); err != nil {\n\t return nil, err\n\t}\n\treturn buf.Bytes(), nil\n}", "func writeValueList(w io.Writer, byteOrder binary.ByteOrder, valueList []interface{}) (err error) {\n\tfor _, value := range valueList {\n\n\t\tswitch value.(type) {\n\t\tcase uint8, int8, bool, uint32, int32, uint64, int64, float32, float64:\n\t\t\t// fixed size\n\t\t\terr = binary.Write(w, byteOrder, value)\n\t\t\tif err != nil {\n\t\t\t\treturn err\n\t\t\t}\n\t\tcase string:\n\t\t\t// length prefix\n\t\t\tvar l uint32\n\t\t\tstr := value.(string)\n\t\t\tl = uint32(len(str))\n\t\t\terr = binary.Write(w, byteOrder, l)\n\t\t\tif err != nil {\n\t\t\t\treturn err\n\t\t\t}\n\n\t\t\t// string data\n\t\t\t_, err = w.Write([]byte(str))\n\t\t\tif err != nil {\n\t\t\t\treturn err\n\t\t\t}\n\t\t}\n\t}\n\n\treturn nil\n}", "func (v CFListType) MarshalBinary() ([]byte, error) {\n\treturn marshalBinaryEnum(int32(v)), nil\n}", "func (s *StorageClusterAPI) List(w http.ResponseWriter, r *http.Request) {\n\tclusters, err := s.storageClusterService.List()\n\tif err != nil {\n\t\tapi.Error(w, err)\n\t\treturn\n\t}\n\tapi.OK(w, clusters)\n}", "func (n NamespaceListResult) MarshalJSON() ([]byte, error) {\n\tobjectMap := make(map[string]any)\n\tpopulate(objectMap, \"nextLink\", n.NextLink)\n\tpopulate(objectMap, \"value\", n.Value)\n\treturn json.Marshal(objectMap)\n}", "func (v Userlist) 
MarshalJSON() ([]byte, error) {\n\tw := jwriter.Writer{}\n\teasyjson42239ddeEncodeGithubComKhliengDispatchServer4(&w, v)\n\treturn w.Buffer.BuildBytes(), w.Error\n}", "func (r *SubscriptionsListServerResponse) marshal(writer io.Writer) error {\n\tvar err error\n\tencoder := json.NewEncoder(writer)\n\tdata := new(subscriptionsListServerResponseData)\n\tdata.Items, err = r.items.wrap()\n\tif err != nil {\n\t\treturn err\n\t}\n\tdata.Page = r.page\n\tdata.Size = r.size\n\tdata.Total = r.total\n\terr = encoder.Encode(data)\n\treturn err\n}", "func (i IPAllocationListResult) MarshalJSON() ([]byte, error) {\n\tobjectMap := make(map[string]any)\n\tpopulate(objectMap, \"nextLink\", i.NextLink)\n\tpopulate(objectMap, \"value\", i.Value)\n\treturn json.Marshal(objectMap)\n}", "func (s SecretList) MarshalJSON() ([]byte, error) {\n\tobjectMap := make(map[string]interface{})\n\tpopulate(objectMap, \"nextLink\", s.NextLink)\n\tpopulate(objectMap, \"value\", s.Value)\n\treturn json.Marshal(objectMap)\n}", "func (m ManagerEffectiveSecurityAdminRulesListResult) MarshalJSON() ([]byte, error) {\n\tobjectMap := make(map[string]any)\n\tpopulate(objectMap, \"skipToken\", m.SkipToken)\n\tpopulate(objectMap, \"value\", m.Value)\n\treturn json.Marshal(objectMap)\n}", "func (v GetCharactersCharacterIdNotificationsContacts200OkList) MarshalJSON() ([]byte, error) {\n\tw := jwriter.Writer{}\n\teasyjsonB63f3efcEncodeGithubComAntihaxGoesiEsi(&w, v)\n\treturn w.Buffer.BuildBytes(), w.Error\n}", "func (c *Cluster) Export(ctx context.Context) ([]byte, error) {\n\tvar config bytes.Buffer\n\n\tnamespaces, err := c.getAllowedAndExistingNamespaces(ctx)\n\tif err != nil {\n\t\treturn nil, errors.Wrap(err, \"getting namespaces\")\n\t}\n\n\tencoder := yaml.NewEncoder(&config)\n\tdefer encoder.Close()\n\n\tfor _, ns := range namespaces {\n\t\tnamespace, err := c.client.CoreV1().Namespaces().Get(ctx, ns, meta_v1.GetOptions{})\n\t\tif err != nil {\n\t\t\treturn nil, err\n\t\t}\n\n\t\t// kind & apiVersion must be set, since TypeMeta is not populated\n\t\tnamespace.Kind = \"Namespace\"\n\t\tnamespace.APIVersion = \"v1\"\n\n\t\terr = encoder.Encode(yamlThroughJSON{namespace})\n\t\tif err != nil {\n\t\t\treturn nil, errors.Wrap(err, \"marshalling namespace to YAML\")\n\t\t}\n\n\t\tfor _, resourceKind := range resourceKinds {\n\t\t\tworkloads, err := resourceKind.getWorkloads(ctx, c, ns)\n\t\t\tif err != nil {\n\t\t\t\tswitch {\n\t\t\t\tcase apierrors.IsNotFound(err):\n\t\t\t\t\t// Kind not supported by API server, skip\n\t\t\t\t\tcontinue\n\t\t\t\tcase apierrors.IsForbidden(err):\n\t\t\t\t\t// K8s can return forbidden instead of not found for non super admins\n\t\t\t\t\tc.logger.Log(\"warning\", \"not allowed to list resources\", \"err\", err)\n\t\t\t\t\tcontinue\n\t\t\t\tdefault:\n\t\t\t\t\treturn nil, err\n\t\t\t\t}\n\t\t\t}\n\n\t\t\tfor _, pc := range workloads {\n\t\t\t\tif !isAddon(pc) {\n\t\t\t\t\tif err := encoder.Encode(yamlThroughJSON{pc.k8sObject}); err != nil {\n\t\t\t\t\t\treturn nil, err\n\t\t\t\t\t}\n\t\t\t\t}\n\t\t\t}\n\t\t}\n\t}\n\treturn config.Bytes(), nil\n}", "func (d DiscoveryConfiguration) MarshalJSON() ([]byte, error) {\n\tobjectMap := make(map[string]any)\n\tpopulate(objectMap, \"appLocation\", d.AppLocation)\n\tpopulate(objectMap, \"centralServerVmId\", d.CentralServerVMID)\n\tobjectMap[\"configurationType\"] = SAPConfigurationTypeDiscovery\n\tpopulate(objectMap, \"managedRgStorageAccountName\", d.ManagedRgStorageAccountName)\n\treturn json.Marshal(objectMap)\n}", "func (rd RegistrationDefinition) MarshalJSON() 
([]byte, error) {\n\tobjectMap := make(map[string]interface{})\n\tif rd.Properties != nil {\n\t\tobjectMap[\"properties\"] = rd.Properties\n\t}\n\tif rd.Plan != nil {\n\t\tobjectMap[\"plan\"] = rd.Plan\n\t}\n\treturn json.Marshal(objectMap)\n}", "func (m *DeviceCompliancePolicyCollectionResponse) Serialize(writer i878a80d2330e89d26896388a3f487eef27b0a0e6c010c493bf80be1452208f91.SerializationWriter)(error) {\n err := m.BaseCollectionPaginationCountResponse.Serialize(writer)\n if err != nil {\n return err\n }\n if m.GetValue() != nil {\n cast := make([]i878a80d2330e89d26896388a3f487eef27b0a0e6c010c493bf80be1452208f91.Parsable, len(m.GetValue()))\n for i, v := range m.GetValue() {\n if v != nil {\n cast[i] = v.(i878a80d2330e89d26896388a3f487eef27b0a0e6c010c493bf80be1452208f91.Parsable)\n }\n }\n err = writer.WriteCollectionOfObjectValues(\"value\", cast)\n if err != nil {\n return err\n }\n }\n return nil\n}", "func (s SyncSetList) MarshalJSON() ([]byte, error) {\n\tobjectMap := make(map[string]any)\n\tpopulate(objectMap, \"nextLink\", s.NextLink)\n\tpopulate(objectMap, \"value\", s.Value)\n\treturn json.Marshal(objectMap)\n}", "func (s SyncIdentityProviderList) MarshalJSON() ([]byte, error) {\n\tobjectMap := make(map[string]any)\n\tpopulate(objectMap, \"nextLink\", s.NextLink)\n\tpopulate(objectMap, \"value\", s.Value)\n\treturn json.Marshal(objectMap)\n}", "func (sl SketchSeriesList) MarshalJSON() ([]byte, error) {\n\tdata := map[string][]*SketchSeries{\n\t\t\"sketch_series\": sl,\n\t}\n\treqBody := &bytes.Buffer{}\n\terr := json.NewEncoder(reqBody).Encode(data)\n\treturn reqBody.Bytes(), err\n}", "func (s SecretList) MarshalJSON() ([]byte, error) {\n\tobjectMap := make(map[string]any)\n\tpopulate(objectMap, \"nextLink\", s.NextLink)\n\tpopulate(objectMap, \"value\", s.Value)\n\treturn json.Marshal(objectMap)\n}" ]
[ "0.7999679", "0.62534887", "0.6130919", "0.58522373", "0.58267486", "0.55734366", "0.5223865", "0.52106845", "0.5146114", "0.51270634", "0.51243156", "0.5075795", "0.5042486", "0.50378895", "0.5033215", "0.5026041", "0.5025059", "0.50228757", "0.500711", "0.5006869", "0.49958384", "0.4992332", "0.4974203", "0.49299598", "0.49248603", "0.4910527", "0.49023625", "0.4895527", "0.48765358", "0.48726484", "0.48641878", "0.48601487", "0.48574626", "0.4855766", "0.48149416", "0.48055556", "0.47983012", "0.47953263", "0.47921482", "0.4785872", "0.47724876", "0.47548756", "0.47533414", "0.4752412", "0.4749242", "0.4748574", "0.47464025", "0.47457328", "0.47398722", "0.47324792", "0.4730634", "0.4728345", "0.47283205", "0.47279787", "0.47162262", "0.46924418", "0.4689903", "0.46862984", "0.4685566", "0.46816793", "0.4681507", "0.46732056", "0.46731573", "0.4668696", "0.46636653", "0.46563694", "0.46544948", "0.4649347", "0.46448067", "0.46371734", "0.46326354", "0.46302488", "0.46226606", "0.46046472", "0.46030015", "0.45908147", "0.4589558", "0.4589444", "0.45816278", "0.45796514", "0.45736334", "0.4572825", "0.45719707", "0.45643675", "0.45643383", "0.45616016", "0.4550825", "0.45497927", "0.4547909", "0.45436224", "0.45414785", "0.4537352", "0.4536004", "0.45330575", "0.45311916", "0.4530166", "0.45295876", "0.45160216", "0.45085946", "0.45079634" ]
0.82666796
0
writeClusterRegistrationList writes a list of values of the 'cluster_registration' type to the given stream.
func writeClusterRegistrationList(list []*ClusterRegistration, stream *jsoniter.Stream) { stream.WriteArrayStart() for i, value := range list { if i > 0 { stream.WriteMore() } writeClusterRegistration(value, stream) } stream.WriteArrayEnd() }
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "func MarshalClusterRegistrationList(list []*ClusterRegistration, writer io.Writer) error {\n\tstream := helpers.NewStream(writer)\n\twriteClusterRegistrationList(list, stream)\n\tstream.Flush()\n\treturn stream.Error\n}", "func readClusterRegistrationList(iterator *jsoniter.Iterator) []*ClusterRegistration {\n\tlist := []*ClusterRegistration{}\n\tfor iterator.ReadArray() {\n\t\titem := readClusterRegistration(iterator)\n\t\tlist = append(list, item)\n\t}\n\treturn list\n}", "func UnmarshalClusterRegistrationList(source interface{}) (items []*ClusterRegistration, err error) {\n\titerator, err := helpers.NewIterator(source)\n\tif err != nil {\n\t\treturn\n\t}\n\titems = readClusterRegistrationList(iterator)\n\terr = iterator.Error\n\treturn\n}", "func (o OpenShiftClusterList) MarshalJSON() ([]byte, error) {\n\tobjectMap := make(map[string]any)\n\tpopulate(objectMap, \"nextLink\", o.NextLink)\n\tpopulate(objectMap, \"value\", o.Value)\n\treturn json.Marshal(objectMap)\n}", "func writeValueList(w io.Writer, byteOrder binary.ByteOrder, valueList []interface{}) (err error) {\n\tfor _, value := range valueList {\n\n\t\tswitch value.(type) {\n\t\tcase uint8, int8, bool, uint32, int32, uint64, int64, float32, float64:\n\t\t\t// fixed size\n\t\t\terr = binary.Write(w, byteOrder, value)\n\t\t\tif err != nil {\n\t\t\t\treturn err\n\t\t\t}\n\t\tcase string:\n\t\t\t// length prefix\n\t\t\tvar l uint32\n\t\t\tstr := value.(string)\n\t\t\tl = uint32(len(str))\n\t\t\terr = binary.Write(w, byteOrder, l)\n\t\t\tif err != nil {\n\t\t\t\treturn err\n\t\t\t}\n\n\t\t\t// string data\n\t\t\t_, err = w.Write([]byte(str))\n\t\t\tif err != nil {\n\t\t\t\treturn err\n\t\t\t}\n\t\t}\n\t}\n\n\treturn nil\n}", "func WriteStringList(w io.Writer, l []string) error {\n\treturn writeStringList(NewCompression1Writer(w), l, nil, true, false)\n}", "func (p *PubKey) OutputList(to io.Writer) int {\n\tsb := new(strings.Builder)\n\tfor u, _ := range p.users {\n\t\tfor i, _ := range p.users[u].Keys {\n\t\t\tk := p.users[u].Keys[i]\n\t\t\tif k.Key != nil {\n\t\t\t\tsb.WriteString(fmt.Sprintf(\"%s %s\\n\", *k.Key, u))\n\t\t\t}\n\t\t}\n\t}\n\tfmt.Fprint(to, sb.String())\n\treturn 0\n}", "func (providerRegistrationManager *ProviderRegistrationManager) PostProviderRegistrationListSettings(request *restful.Request, response *restful.Response) {\n\tfullyQualifiedResourceID := engines.GetFullyQualifiedProviderRegistrationID(request)\n\n\t// Get Document from collection\n\tproviderRegistrationPackage := entities.ProviderRegistrationPackage{}\n\terr := providerRegistrationManager.ProviderRegistrationDataProvider.FindPackage(fullyQualifiedResourceID, &providerRegistrationPackage)\n\tif err != nil {\n\t\tapierror.WriteErrorToResponse(\n\t\t\tresponse,\n\t\t\thttp.StatusNotFound,\n\t\t\tapierror.ClientError,\n\t\t\tapierror.NotFound,\n\t\t\terr.Error())\n\t\treturn\n\t}\n\n\tresponseContent, err := json.Marshal(providerRegistrationPackage.ToListSettingsDefinition())\n\tif err != nil {\n\t\tapierror.WriteErrorToResponse(\n\t\t\tresponse,\n\t\t\thttp.StatusInternalServerError,\n\t\t\tapierror.InternalError,\n\t\t\tapierror.InternalOperationError,\n\t\t\tfmt.Sprintf(\"Failed to serialize provider registration package: %s\", err.Error()))\n\t\treturn\n\t}\n\tresponse.Header().Set(restful.HEADER_ContentType, restful.MIME_JSON)\n\tresponse.Write(responseContent)\n}", "func WriteSensuClusterConfig(newCluster []byte) error {\n\tfileloc, err := homedir.Expand(sensuctlClusterFile)\n\tif err != nil {\n\t\treturn err\n\t}\n\tioutil.WriteFile(fileloc, 
newCluster, 0644)\n\treturn nil\n}", "func WriteInstanceTemplateList(w io.Writer, instts ...templates.InstanceTemplateInterface) {\n\tt := output.NewListTable(w)\n\tt.SetHeader([]string{\n\t\t\"Name\",\n\t\t\"Scope\",\n\t\t\"Service Type\",\n\t\t\"Class\",\n\t\t\"Plan\",\n\t})\n\n\tfor _, instt := range instts {\n\n\t\tplan := instt.GetPlanReference()\n\t\tt.Append([]string{\n\t\t\tinstt.GetName(),\n\t\t\tgetScopeText(instt.GetScope(), instt.GetScopeName()),\n\t\t\tinstt.GetServiceType(),\n\t\t\tplan.ClusterServiceClassExternalName,\n\t\t\tplan.ClusterServicePlanExternalName,\n\t\t\t\"\",\n\t\t})\n\t}\n\n\tt.Render()\n}", "func writeGeneList(ctx context.Context, geneListOutputPath string, geneDB *fusion.GeneDB) {\n\tout, err := file.Create(ctx, geneListOutputPath)\n\tif err != nil {\n\t\tlog.Panic(err)\n\t}\n\tw := bufio.NewWriter(out.Writer(ctx))\n\tmin, limit := geneDB.GeneIDRange()\n\ter := errors.Once{}\n\tn := 0\n\tfor id := min; id < limit; id++ {\n\t\tgene := geneDB.GeneInfo(id)\n\t\t_, err := w.WriteString(gene.Gene)\n\t\ter.Set(err)\n\t\ter.Set(w.WriteByte('\\n'))\n\t\tn++\n\t}\n\ter.Set(w.Flush())\n\ter.Set(out.Close(ctx))\n\tif er.Err() != nil {\n\t\tlog.Panic(err)\n\t}\n\tlog.Printf(\"Wrote %d genes to %s\", n, geneListOutputPath)\n}", "func writeList(prefix string, patterns []string) (string, error) {\n\tvar w *os.File\n\tvar err error\n\n\tif w, err = ioutil.TempFile(\"/tmp\", prefix); err != nil {\n\t\treturn \"\", fmt.Errorf(\"Error creating pattern file for %s list: %v\", prefix, err)\n\t}\n\tdefer w.Close()\n\tfor _, v := range patterns {\n\t\tfmt.Fprintln(w, v)\n\t}\n\treturn w.Name(), nil\n}", "func listClusters(w http.ResponseWriter, r *http.Request, t auth.Token) (err error) {\n\tctx := r.Context()\n\tallowed := permission.Check(t, permission.PermClusterRead)\n\tif !allowed {\n\t\treturn permission.ErrUnauthorized\n\t}\n\tclusters, err := servicemanager.Cluster.List(ctx)\n\tif err != nil {\n\t\tif err == provTypes.ErrNoCluster {\n\t\t\tw.WriteHeader(http.StatusNoContent)\n\t\t\treturn nil\n\t\t}\n\t\treturn err\n\t}\n\tadmin := permission.Check(t, permission.PermClusterAdmin)\n\tif !admin {\n\t\tfor i := range clusters {\n\t\t\tclusters[i].CleanUpSensitive()\n\t\t}\n\t}\n\tw.Header().Set(\"Content-Type\", \"application/json\")\n\treturn json.NewEncoder(w).Encode(clusters)\n}", "func (c *Client) ListApplicationRegistrations() (*storkv1alpha1.ApplicationRegistrationList, error) {\n\tif err := c.initClient(); err != nil {\n\t\treturn nil, err\n\t}\n\treturn c.stork.StorkV1alpha1().ApplicationRegistrations().List(context.TODO(), metav1.ListOptions{})\n\n}", "func (c *cache) ListApplicationRegistrations() (*storkv1alpha1.ApplicationRegistrationList, error) {\n\tif c == nil || c.controllerCache == nil {\n\t\treturn nil, fmt.Errorf(cacheNotInitializedErr)\n\t}\n\tappRegList := &storkv1alpha1.ApplicationRegistrationList{}\n\tif err := c.controllerCache.List(context.Background(), appRegList); err != nil {\n\t\treturn nil, err\n\t}\n\treturn appRegList, nil\n}", "func (mb *client) WriteMultipleRegisters(address, quantity uint16, value []byte) (results []byte, err error) {\n\tif quantity < 1 || quantity > 123 {\n\t\terr = fmt.Errorf(\"modbus: quantity '%v' must be between '%v' and '%v',\", quantity, 1, 123)\n\t\treturn\n\t}\n\trequest := ProtocolDataUnit{\n\t\tFunctionCode: FuncCodeWriteMultipleRegisters,\n\t\tData: dataBlockSuffix(value, address, quantity),\n\t}\n\tresponse, err := mb.send(&request)\n\tif err != nil {\n\t\treturn\n\t}\n\t// Fixed response length\n\tif 
len(response.Data) != 4 {\n\t\terr = fmt.Errorf(\"modbus: response data size '%v' does not match expected '%v'\", len(response.Data), 4)\n\t\treturn\n\t}\n\trespValue := binary.BigEndian.Uint16(response.Data)\n\tif address != respValue {\n\t\terr = fmt.Errorf(\"modbus: response address '%v' does not match request '%v'\", respValue, address)\n\t\treturn\n\t}\n\tresults = response.Data[2:]\n\trespValue = binary.BigEndian.Uint16(results)\n\tif quantity != respValue {\n\t\terr = fmt.Errorf(\"modbus: response quantity '%v' does not match request '%v'\", respValue, quantity)\n\t\treturn\n\t}\n\treturn\n}", "func (m *Manager) ListCluster() error {\n\tclusters, err := m.GetClusterList()\n\tif err != nil {\n\t\treturn err\n\t}\n\n\tswitch m.logger.GetDisplayMode() {\n\tcase logprinter.DisplayModeJSON:\n\t\tclusterObj := struct {\n\t\t\tClusters []Cluster `json:\"clusters\"`\n\t\t}{\n\t\t\tClusters: clusters,\n\t\t}\n\t\tdata, err := json.Marshal(clusterObj)\n\t\tif err != nil {\n\t\t\treturn err\n\t\t}\n\t\tfmt.Println(string(data))\n\tdefault:\n\t\tclusterTable := [][]string{\n\t\t\t// Header\n\t\t\t{\"Name\", \"User\", \"Version\", \"Path\", \"PrivateKey\"},\n\t\t}\n\t\tfor _, v := range clusters {\n\t\t\tclusterTable = append(clusterTable, []string{\n\t\t\t\tv.Name,\n\t\t\t\tv.User,\n\t\t\t\tv.Version,\n\t\t\t\tv.Path,\n\t\t\t\tv.PrivateKey,\n\t\t\t})\n\t\t}\n\t\ttui.PrintTable(clusterTable, true)\n\t}\n\treturn nil\n}", "func (o *WatchRbacAuthorizationV1ClusterRoleListOK) WriteResponse(rw http.ResponseWriter, producer runtime.Producer) {\n\n\trw.WriteHeader(200)\n\tif o.Payload != nil {\n\t\tpayload := o.Payload\n\t\tif err := producer.Produce(rw, payload); err != nil {\n\t\t\tpanic(err) // let the recovery middleware deal with this\n\t\t}\n\t}\n}", "func (e *EventLogger) WriteSyncedLists() {\n\tdata, _ := json.Marshal(e.SyncedLists)\n\tif _, err := os.Stat(e.syncedListsConfigDir()); os.IsNotExist(err) {\n\t\tos.MkdirAll(e.syncedListsConfigDir(), os.ModePerm)\n\t\tif _, cerr := os.Create(e.syncedListsConfigDir()); cerr != nil {\n\t\t\tpanic(cerr)\n\t\t}\n\t}\n\n\tif err := ioutil.WriteFile(e.syncedListsFile(), data, 0644); err != nil {\n\t\tpanic(err)\n\t}\n}", "func (o OpenShiftCluster) MarshalJSON() ([]byte, error) {\n\tobjectMap := make(map[string]any)\n\tpopulate(objectMap, \"id\", o.ID)\n\tpopulate(objectMap, \"location\", o.Location)\n\tpopulate(objectMap, \"name\", o.Name)\n\tpopulate(objectMap, \"properties\", o.Properties)\n\tpopulate(objectMap, \"systemData\", o.SystemData)\n\tpopulate(objectMap, \"tags\", o.Tags)\n\tpopulate(objectMap, \"type\", o.Type)\n\treturn json.Marshal(objectMap)\n}", "func (svc ServerlessClusterService) List(ctx context.Context) (*[]models.Cluster, *Response, error) {\n\tvar clusterList []models.Cluster\n\tgraphqlRequest := models.GraphqlRequest{\n\t\tName: \"clusters\",\n\t\tOperation: models.Query,\n\t\tInput: nil,\n\t\tArgs: models.ClusterListInput{\n\t\t\tProductType: models.Starter,\n\t\t},\n\t\tResponse: clusterList,\n\t}\n\treq, err := svc.client.NewRequest(&graphqlRequest)\n\tif err != nil {\n\t\treturn nil, nil, err\n\t}\n\n\tresp, err := svc.client.Do(ctx, req, &clusterList)\n\tif err != nil {\n\t\treturn nil, nil, err\n\t}\n\n\treturn &clusterList, resp, err\n}", "func (c Cluster) MarshalJSON() ([]byte, error) {\n\tobjectMap := make(map[string]interface{})\n\tif c.Identity != nil {\n\t\tobjectMap[\"identity\"] = c.Identity\n\t}\n\tif c.Sku != nil {\n\t\tobjectMap[\"sku\"] = c.Sku\n\t}\n\tif c.ClusterProperties != nil 
{\n\t\tobjectMap[\"properties\"] = c.ClusterProperties\n\t}\n\tif c.Tags != nil {\n\t\tobjectMap[\"tags\"] = c.Tags\n\t}\n\tif c.Location != nil {\n\t\tobjectMap[\"location\"] = c.Location\n\t}\n\treturn json.Marshal(objectMap)\n}", "func (w *Writer) WriteList(lst *list.List) {\n\tptr := unsafe.Pointer(lst)\n\ttyp := reflect.TypeOf(lst)\n\tif writeRef(w, ptr, typ) {\n\t\treturn\n\t}\n\tsetWriterRef(w, ptr, typ)\n\tcount := lst.Len()\n\tif count == 0 {\n\t\twriteEmptyList(w)\n\t\treturn\n\t}\n\twriteListHeader(w, count)\n\tfor e := lst.Front(); e != nil; e = e.Next() {\n\t\tw.Serialize(e.Value)\n\t}\n\twriteListFooter(w)\n}", "func WriteList(user string, items []string) {\n\tlist := ReadList()\n\tlist[user] = append(list[user], items...)\n\tjsonAsBytes, err := json.Marshal(list)\n\tif err != nil {\n\t\t//TODO: Fix this error handling\n\t\tpanic(err)\n\t}\n\terr = ioutil.WriteFile(listPath, jsonAsBytes, 0644)\n\tif err != nil {\n\t\t//TODO: Fix this error handling\n\t\tpanic(err)\n\t}\n}", "func ListAllRegistrations(w http.ResponseWriter, r *http.Request) {\n\n\tcontentType := \"application/json\"\n\tcharset := \"utf-8\"\n\tw.Header().Add(\"Content-Type\", fmt.Sprintf(\"%s; charset=%s\", contentType, charset))\n\n\t// Grab context references\n\trefStr := gorillaContext.Get(r, \"str\").(stores.Store)\n\n\tname := r.URL.Query().Get(\"name\")\n\tstatus := r.URL.Query().Get(\"status\")\n\temail := r.URL.Query().Get(\"email\")\n\torg := r.URL.Query().Get(\"organization\")\n\tactivationToken := r.URL.Query().Get(\"activation_token\")\n\n\tur, err := auth.FindUserRegistrations(status, activationToken, name, email, org, refStr)\n\tif err != nil {\n\n\t\terr := APIErrGenericInternal(err.Error())\n\t\trespondErr(w, err)\n\t\treturn\n\t}\n\n\turb, err := json.MarshalIndent(ur, \"\", \" \")\n\tif err != nil {\n\t\terr := APIErrGenericInternal(err.Error())\n\t\trespondErr(w, err)\n\t\treturn\n\t}\n\n\trespondOK(w, urb)\n}", "func (cr CommonWriter) WriteList(op thrift.TProtocol, lIst *idltypes.List, data []interface{}) error {\n\tif err := op.WriteListBegin(thrift.LIST, len(data)); err != nil {\n\t\treturn fmt.Errorf(\"error writing list begin: %s\", err)\n\t}\n\tfor _, v := range data {\n\t\tif err := cr.writeFieldValue(op, v, lIst.ValueType()); err != nil {\n\t\t\treturn fmt.Errorf(\"%s field write error: %s\", lIst.ValueType().Name(), err)\n\t\t}\n\t}\n\tif err := op.WriteListEnd(); err != nil {\n\t\treturn fmt.Errorf(\"error writing list end: %s\", err)\n\t}\n\treturn nil\n}", "func (c cluster) WriteOrder(hash string) []node {\n\treturn hashOrder(hash, len(c.GetNeighbors())+1, c.WriteRing())\n}", "func (client DeploymentsClient) ListForClusterSender(req *http.Request) (*http.Response, error) {\n\treturn client.Send(req, azure.DoRetryWithRegistration(client.Client))\n}", "func Convert_v1alpha1_ControllerRegistrationList_To_core_ControllerRegistrationList(in *ControllerRegistrationList, out *core.ControllerRegistrationList, s conversion.Scope) error {\n\treturn autoConvert_v1alpha1_ControllerRegistrationList_To_core_ControllerRegistrationList(in, out, s)\n}", "func WriteStringListIndex(w io.Writer, l []string) error {\n\treturn writeStringList(NewCompression1Writer(w), l, func(i int, b byte) byte {\n\t\treturn ^b - byte(i%5)\n\t}, true, false)\n}", "func provisionerList(w http.ResponseWriter, r *http.Request, t auth.Token) (err error) {\n\tallowed := permission.Check(t, permission.PermClusterRead)\n\tif !allowed {\n\t\treturn permission.ErrUnauthorized\n\t}\n\tprovs, err := provision.Registry()\n\tif 
err != nil {\n\t\treturn err\n\t}\n\tinfo := make([]provisionerInfo, len(provs))\n\tfor i, p := range provs {\n\t\tinfo[i].Name = p.GetName()\n\t\tif clusterProv, ok := p.(cluster.ClusteredProvisioner); ok {\n\t\t\tinfo[i].ClusterHelp = clusterProv.ClusterHelp()\n\t\t}\n\t}\n\tw.Header().Set(\"Content-Type\", \"application/json\")\n\treturn json.NewEncoder(w).Encode(info)\n}", "func SerializeKubeCluster(cluster types.KubeCluster) ([]byte, error) {\n\ta := struct {\n\t\tName string `yaml:\"name\"`\n\t\tKind string `yaml:\"kind\"`\n\t\tSubKind string `yaml:\"subkind\"`\n\t\tLabels map[string]string `yaml:\"labels\"`\n\t}{\n\t\tName: cluster.GetName(),\n\t\tKind: types.KindKubernetesCluster,\n\t\tSubKind: cluster.GetSubKind(),\n\t\tLabels: cluster.GetAllLabels(),\n\t}\n\ttext, err := yaml.Marshal(&a)\n\treturn text, trace.Wrap(err)\n}", "func ListToWriter(w io.Writer, values []Dec64) (err error) {\n\t// writing buffer\n\t// size as a Dec64 aka int64\n\tbuff := make([]byte, 8)\n\tfor _, v := range values {\n\t\tbinary.LittleEndian.PutUint64(buff, uint64(v))\n\t\t_, err = w.Write(buff)\n\t\tif err != nil {\n\t\t\treturn\n\t\t}\n\t}\n\treturn\n}", "func (adm Admin) ListInstances(cluster string) (string, error) {\n\tconn := newConnection(adm.ZkSvr)\n\terr := conn.Connect()\n\tif err != nil {\n\t\treturn \"\", err\n\t}\n\tdefer conn.Disconnect()\n\n\t// make sure the cluster is already setup\n\tif ok, err := conn.IsClusterSetup(cluster); !ok || err != nil {\n\t\treturn \"\", ErrClusterNotSetup\n\t}\n\n\tkeys := KeyBuilder{cluster}\n\tisPath := keys.instances()\n\tinstances, err := conn.Children(isPath)\n\tif err != nil {\n\t\treturn \"\", err\n\t}\n\n\tvar buffer bytes.Buffer\n\tbuffer.WriteString(fmt.Sprintf(\"Existing instances in cluster %s:\\n\", cluster))\n\n\tfor _, r := range instances {\n\t\tbuffer.WriteString(\" \" + r + \"\\n\")\n\t}\n\n\treturn buffer.String(), nil\n}", "func (s *StorageClusterAPI) List(w http.ResponseWriter, r *http.Request) {\n\tclusters, err := s.storageClusterService.List()\n\tif err != nil {\n\t\tapi.Error(w, err)\n\t\treturn\n\t}\n\tapi.OK(w, clusters)\n}", "func (adm Admin) ListInstances(cluster string) (string, error) {\n\t// make sure the cluster is already setup\n\tif ok, err := adm.isClusterSetup(cluster); !ok || err != nil {\n\t\treturn \"\", ErrClusterNotSetup\n\t}\n\n\tbuilder := KeyBuilder{cluster}\n\tisPath := builder.instances()\n\tinstances, err := adm.zkClient.Children(isPath)\n\tif err != nil {\n\t\treturn \"\", err\n\t}\n\n\tvar buffer bytes.Buffer\n\tbuffer.WriteString(fmt.Sprintf(\"Existing instances in cluster %s:\\n\", cluster))\n\n\tfor _, r := range instances {\n\t\tbuffer.WriteString(\" \" + r + \"\\n\")\n\t}\n\n\treturn buffer.String(), nil\n}", "func (c *Client) WriteRegisters(threadID int, regs Registers) error {\n\tdata, err := c.readRegisters(threadID)\n\tif err != nil {\n\t\treturn err\n\t}\n\n\t// The 'P' command is not used due to the bug explained here: https://github.com/llvm-mirror/lldb/commit/d8d7a40ca5377aa777e3840f3e9b6a63c6b09445\n\n\tfor _, metadata := range c.registerMetadataList {\n\t\tprefix := data[0 : metadata.offset*2]\n\t\tsuffix := data[(metadata.offset+metadata.size)*2:]\n\n\t\tvar err error\n\t\tswitch metadata.name {\n\t\tcase \"rip\":\n\t\t\tdata = fmt.Sprintf(\"%s%s%s\", prefix, uint64ToHex(regs.Rip, true), suffix)\n\t\tcase \"rsp\":\n\t\t\tdata = fmt.Sprintf(\"%s%s%s\", prefix, uint64ToHex(regs.Rsp, true), suffix)\n\t\tcase \"rcx\":\n\t\t\tdata = fmt.Sprintf(\"%s%s%s\", prefix, uint64ToHex(regs.Rcx, true), 
suffix)\n\t\t}\n\t\tif err != nil {\n\t\t\treturn err\n\t\t}\n\t}\n\n\tcommand := fmt.Sprintf(\"G%s;thread:%x;\", data, threadID)\n\tif err := c.send(command); err != nil {\n\t\treturn err\n\t}\n\n\treturn c.receiveAndCheck()\n}", "func (adm Admin) ListClusterInfo(cluster string) (string, error) {\n\t// make sure the cluster is already setup\n\tif ok, err := adm.isClusterSetup(cluster); !ok || err != nil {\n\t\treturn \"\", ErrClusterNotSetup\n\t}\n\n\tbuilder := KeyBuilder{cluster}\n\tisPath := builder.idealStates()\n\tinstancesPath := builder.instances()\n\n\tresources, err := adm.zkClient.Children(isPath)\n\tif err != nil {\n\t\treturn \"\", err\n\t}\n\n\tinstances, err := adm.zkClient.Children(instancesPath)\n\tif err != nil {\n\t\treturn \"\", err\n\t}\n\n\tvar buffer bytes.Buffer\n\tbuffer.WriteString(\"Existing resources in cluster \" + cluster + \":\\n\")\n\n\tfor _, r := range resources {\n\t\tbuffer.WriteString(\" \" + r + \"\\n\")\n\t}\n\n\tbuffer.WriteString(\"\\nInstances in cluster \" + cluster + \":\\n\")\n\tfor _, i := range instances {\n\t\tbuffer.WriteString(\" \" + i + \"\\n\")\n\t}\n\treturn buffer.String(), nil\n}", "func NewSQLServerRegistrationListResultIterator(page SQLServerRegistrationListResultPage) SQLServerRegistrationListResultIterator {\n\treturn SQLServerRegistrationListResultIterator{page: page}\n}", "func (adm Admin) ListClusterInfo(cluster string) (string, error) {\n\tconn := newConnection(adm.ZkSvr)\n\terr := conn.Connect()\n\tif err != nil {\n\t\treturn \"\", err\n\t}\n\tdefer conn.Disconnect()\n\n\t// make sure the cluster is already setup\n\tif ok, err := conn.IsClusterSetup(cluster); !ok || err != nil {\n\t\treturn \"\", ErrClusterNotSetup\n\t}\n\n\tkeys := KeyBuilder{cluster}\n\tisPath := keys.idealStates()\n\tinstancesPath := keys.instances()\n\n\tresources, err := conn.Children(isPath)\n\tif err != nil {\n\t\treturn \"\", err\n\t}\n\n\tinstances, err := conn.Children(instancesPath)\n\tif err != nil {\n\t\treturn \"\", err\n\t}\n\n\tvar buffer bytes.Buffer\n\tbuffer.WriteString(\"Existing resources in cluster \" + cluster + \":\\n\")\n\n\tfor _, r := range resources {\n\t\tbuffer.WriteString(\" \" + r + \"\\n\")\n\t}\n\n\tbuffer.WriteString(\"\\nInstances in cluster \" + cluster + \":\\n\")\n\tfor _, i := range instances {\n\t\tbuffer.WriteString(\" \" + i + \"\\n\")\n\t}\n\treturn buffer.String(), nil\n}", "func (c starterClusterServiceOp) List(ctx context.Context) (*[]models.Cluster, *Response, error) {\n\tvar clusterList []models.Cluster\n\tgraphqlRequest := models.GraphqlRequest{\n\t\tName: \"clusters\",\n\t\tOperation: models.Query,\n\t\tInput: clusterList,\n\t\tArgs: models.ClusterListInput{\n\t\t\tProductType: models.Starter,\n\t\t},\n\t\tResponse: clusterList,\n\t}\n\treq, err := c.client.NewRequest(&graphqlRequest)\n\tif err != nil {\n\t\treturn nil, nil, err\n\t}\n\n\tresp, err := c.client.Do(ctx, req, &clusterList)\n\tif err != nil {\n\t\treturn nil, resp, err\n\t}\n\n\treturn &clusterList, resp, err\n}", "func ListOneRegistration(w http.ResponseWriter, r *http.Request) {\n\n\tcontentType := \"application/json\"\n\tcharset := \"utf-8\"\n\tw.Header().Add(\"Content-Type\", fmt.Sprintf(\"%s; charset=%s\", contentType, charset))\n\n\t// Grab url path variables\n\turlVars := mux.Vars(r)\n\tregUUID := urlVars[\"uuid\"]\n\n\t// Grab context references\n\trefStr := gorillaContext.Get(r, \"str\").(stores.Store)\n\n\tur, err := auth.FindUserRegistration(regUUID, \"\", refStr)\n\tif err != nil {\n\n\t\tif err.Error() == \"not found\" {\n\t\t\terr 
:= APIErrorNotFound(\"User registration\")\n\t\t\trespondErr(w, err)\n\t\t\treturn\n\t\t}\n\n\t\terr := APIErrGenericInternal(err.Error())\n\t\trespondErr(w, err)\n\t\treturn\n\t}\n\n\turb, err := json.MarshalIndent(ur, \"\", \" \")\n\tif err != nil {\n\t\terr := APIErrGenericInternal(err.Error())\n\t\trespondErr(w, err)\n\t\treturn\n\t}\n\n\trespondOK(w, urb)\n}", "func (*OktetoClusterHelper) List() (map[string]string, error) {\n\treturn nil, ErrNotImplemented\n}", "func createCluster(w http.ResponseWriter, r *http.Request, t auth.Token) (err error) {\n\tctx := r.Context()\n\tallowed := permission.Check(t, permission.PermClusterCreate)\n\tif !allowed {\n\t\treturn permission.ErrUnauthorized\n\t}\n\n\terr = deprecateFormContentType(r)\n\tif err != nil {\n\t\treturn err\n\t}\n\n\tvar provCluster provTypes.Cluster\n\terr = ParseJSON(r, &provCluster)\n\tif err != nil {\n\t\treturn err\n\t}\n\tevt, err := event.New(&event.Opts{\n\t\tTarget: event.Target{Type: event.TargetTypeCluster, Value: provCluster.Name},\n\t\tKind: permission.PermClusterCreate,\n\t\tOwner: t,\n\t\tRemoteAddr: r.RemoteAddr,\n\t\tCustomData: event.FormToCustomData(InputFields(r)),\n\t\tAllowed: event.Allowed(permission.PermClusterReadEvents),\n\t})\n\tif err != nil {\n\t\treturn err\n\t}\n\tdefer func() { evt.Done(err) }()\n\t_, err = servicemanager.Cluster.FindByName(ctx, provCluster.Name)\n\tif err == nil {\n\t\treturn &tsuruErrors.HTTP{\n\t\t\tCode: http.StatusConflict,\n\t\t\tMessage: \"cluster already exists\",\n\t\t}\n\t}\n\tfor _, poolName := range provCluster.Pools {\n\t\t_, err = pool.GetPoolByName(ctx, poolName)\n\t\tif err != nil {\n\t\t\tif err == pool.ErrPoolNotFound {\n\t\t\t\treturn &tsuruErrors.HTTP{\n\t\t\t\t\tCode: http.StatusNotFound,\n\t\t\t\t\tMessage: err.Error(),\n\t\t\t\t}\n\t\t\t}\n\t\t\treturn err\n\t\t}\n\t}\n\tstreamResponse := strings.HasPrefix(r.Header.Get(\"Accept\"), \"application/x-json-stream\")\n\tif streamResponse {\n\t\tw.Header().Set(\"Content-Type\", \"application/x-json-stream\")\n\t\tkeepAliveWriter := tsuruIo.NewKeepAliveWriter(w, 30*time.Second, \"\")\n\t\tdefer keepAliveWriter.Stop()\n\t\twriter := &tsuruIo.SimpleJsonMessageEncoderWriter{Encoder: json.NewEncoder(keepAliveWriter)}\n\t\tevt.SetLogWriter(writer)\n\t}\n\tprovCluster.Writer = evt\n\terr = servicemanager.Cluster.Create(ctx, provCluster)\n\tif err != nil {\n\t\treturn errors.WithStack(err)\n\t}\n\treturn nil\n}", "func NewSQLServerRegistrationListResultPage(getNextPage func(context.Context, SQLServerRegistrationListResult) (SQLServerRegistrationListResult, error)) SQLServerRegistrationListResultPage {\n\treturn SQLServerRegistrationListResultPage{fn: getNextPage}\n}", "func (d *GossipSystem) ListNodes() ([]byte, error) {\n\tnodes := []*Node{}\n\tfor _, m := range d.list.Members() {\n\t\tmeta := &MetaData{}\n\t\tif err := json.Unmarshal(m.Meta, meta); err != nil {\n\t\t\tglog.Warningf(\"unmarshal meta failed: %v\", err)\n\t\t\tcontinue\n\t\t}\n\t\tnodes = append(nodes, &Node{Name: m.Name, IP: m.Addr.String(), GossipPort: uint16(m.Port), Meta: meta})\n\t}\n\n\tdata, err := json.Marshal(nodes)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\treturn data, nil\n}", "func (s *Services) List(q *QueryOptions) ([]*ServiceRegistrationListStub, *QueryMeta, error) {\n\tvar resp []*ServiceRegistrationListStub\n\tqm, err := s.client.query(\"/v1/services\", &resp, q)\n\tif err != nil {\n\t\treturn nil, qm, err\n\t}\n\treturn resp, qm, nil\n}", "func (w *Writer) WriteSlice(slice []reflect.Value) {\n\tsetWriterRef(w, nil, nil)\n\tcount := 
len(slice)\n\tif count == 0 {\n\t\twriteEmptyList(w)\n\t\treturn\n\t}\n\twriteListHeader(w, count)\n\tfor i := range slice {\n\t\tw.WriteValue(slice[i])\n\t}\n\twriteListFooter(w)\n}", "func (hub *Hub) BroadcastUserList() {\n\tvar connectedClients []string\n\tfor id, client := range hub.clientMap {\n\t\tif client.state == 0 {\n\t\t\tdelete(hub.clientMap, id)\n\t\t} else {\n\t\t\tconnectedClients = append(connectedClients, fmt.Sprintf(\"%s-%s\", id, client.name))\n\t\t}\n\t}\n\n\tfor _, client := range hub.clientMap {\n\t\tclient.outboundMessage <- UserListMessage(connectedClients).ToString()\n\t}\n}", "func (d *StaticSystem) ListNodes() ([]byte, error) {\n\tif d.role != NodeRoleMaster {\n\t\treturn nil, ErrNotMaster\n\t}\n\n\td.Lock()\n\tdefer d.Unlock()\n\n\tnodes := []*Node{}\n\tfor _, n := range d.nodes {\n\t\tnodes = append(nodes, n)\n\t}\n\n\tdata, err := json.Marshal(nodes)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\treturn data, nil\n}", "func (m *WindowsInformationProtectionDeviceRegistration) Serialize(writer i878a80d2330e89d26896388a3f487eef27b0a0e6c010c493bf80be1452208f91.SerializationWriter)(error) {\n err := m.Entity.Serialize(writer)\n if err != nil {\n return err\n }\n {\n err = writer.WriteStringValue(\"deviceMacAddress\", m.GetDeviceMacAddress())\n if err != nil {\n return err\n }\n }\n {\n err = writer.WriteStringValue(\"deviceName\", m.GetDeviceName())\n if err != nil {\n return err\n }\n }\n {\n err = writer.WriteStringValue(\"deviceRegistrationId\", m.GetDeviceRegistrationId())\n if err != nil {\n return err\n }\n }\n {\n err = writer.WriteStringValue(\"deviceType\", m.GetDeviceType())\n if err != nil {\n return err\n }\n }\n {\n err = writer.WriteTimeValue(\"lastCheckInDateTime\", m.GetLastCheckInDateTime())\n if err != nil {\n return err\n }\n }\n {\n err = writer.WriteStringValue(\"userId\", m.GetUserId())\n if err != nil {\n return err\n }\n }\n return nil\n}", "func (ds *DiscoveryService) ListClusters(request *restful.Request, response *restful.Response) {\n\tkey := request.Request.URL.String()\n\tout, cached := ds.cdsCache.cachedDiscoveryResponse(key)\n\tif !cached {\n\t\tif sc := request.PathParameter(ServiceCluster); sc != ds.mesh.IstioServiceCluster {\n\t\t\terrorResponse(response, http.StatusNotFound,\n\t\t\t\tfmt.Sprintf(\"Unexpected %s %q\", ServiceCluster, sc))\n\t\t\treturn\n\t\t}\n\n\t\t// service-node holds the IP address\n\t\tip := request.PathParameter(ServiceNode)\n\t\t// CDS computes clusters that are referenced by RDS routes for a particular proxy node\n\t\t// TODO: this implementation is inefficient as it is recomputing all the routes for all proxies\n\t\t// There is a lot of potential to cache and reuse cluster definitions across proxies and also\n\t\t// skip computing the actual HTTP routes\n\t\tinstances := ds.services.HostInstances(map[string]bool{ip: true})\n\t\tservices := ds.services.Services()\n\t\thttpRouteConfigs := buildOutboundHTTPRoutes(instances, services, &ProxyContext{\n\t\t\tDiscovery: ds.services,\n\t\t\tConfig: ds.config,\n\t\t\tMeshConfig: ds.mesh,\n\t\t\tIPAddress: ip,\n\t\t})\n\n\t\t// de-duplicate and canonicalize clusters\n\t\tclusters := httpRouteConfigs.clusters().normalize()\n\n\t\t// apply custom policies for HTTP clusters\n\t\tfor _, cluster := range clusters {\n\t\t\tinsertDestinationPolicy(ds.config, cluster)\n\t\t}\n\n\t\tvar err error\n\t\tif out, err = json.MarshalIndent(ClusterManager{Clusters: clusters}, \" \", \" \"); err != nil {\n\t\t\terrorResponse(response, http.StatusInternalServerError, 
err.Error())\n\t\t\treturn\n\t\t}\n\t\tds.cdsCache.updateCachedDiscoveryResponse(key, out)\n\t}\n\twriteResponse(response, out)\n}", "func EncodeClusterSpec(spec *VsphereClusterProviderConfig) (*runtime.RawExtension, error) {\n\tif spec == nil {\n\t\treturn &runtime.RawExtension{}, nil\n\t}\n\n\tvar rawBytes []byte\n\tvar err error\n\n\t// TODO: use apimachinery conversion https://godoc.org/k8s.io/apimachinery/pkg/runtime#Convert_runtime_Object_To_runtime_RawExtension\n\tif rawBytes, err = json.Marshal(spec); err != nil {\n\t\treturn nil, err\n\t}\n\n\treturn &runtime.RawExtension{\n\t\tRaw: rawBytes,\n\t\tObject: spec,\n\t}, nil\n}", "func (s *Stackdriver) Write(metrics []telegraf.Metric) error {\n\tmetricBatch := make(map[int64][]telegraf.Metric)\n\ttimestamps := []int64{}\n\tfor _, metric := range sorted(metrics) {\n\t\ttimestamp := metric.Time().UnixNano()\n\t\tif existingSlice, ok := metricBatch[timestamp]; ok {\n\t\t\tmetricBatch[timestamp] = append(existingSlice, metric)\n\t\t} else {\n\t\t\tmetricBatch[timestamp] = []telegraf.Metric{metric}\n\t\t\ttimestamps = append(timestamps, timestamp)\n\t\t}\n\t}\n\n\t// sort the timestamps we collected\n\tsort.Slice(timestamps, func(i, j int) bool { return timestamps[i] < timestamps[j] })\n\n\ts.Log.Debugf(\"received %d metrics\\n\", len(metrics))\n\ts.Log.Debugf(\"split into %d groups by timestamp\\n\", len(metricBatch))\n\tfor _, timestamp := range timestamps {\n\t\tif err := s.sendBatch(metricBatch[timestamp]); err != nil {\n\t\t\treturn err\n\t\t}\n\t}\n\n\treturn nil\n}", "func (a *Client) ListRegistry(params *ListRegistryParams) (*ListRegistryOK, error) {\n\t// TODO: Validate the params before sending\n\tif params == nil {\n\t\tparams = NewListRegistryParams()\n\t}\n\n\tresult, err := a.transport.Submit(&runtime.ClientOperation{\n\t\tID: \"listRegistry\",\n\t\tMethod: \"GET\",\n\t\tPathPattern: \"/Registries\",\n\t\tProducesMediaTypes: []string{\"application/json\"},\n\t\tConsumesMediaTypes: []string{\"application/json\"},\n\t\tSchemes: []string{\"http\", \"https\"},\n\t\tParams: params,\n\t\tReader: &ListRegistryReader{formats: a.formats},\n\t\tContext: params.Context,\n\t\tClient: params.HTTPClient,\n\t})\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\treturn result.(*ListRegistryOK), nil\n\n}", "func (c *MultiClusterController) List(clusterName string, opts ...client.ListOption) (interface{}, error) {\n\tcluster := c.GetCluster(clusterName)\n\tif cluster == nil {\n\t\treturn nil, errors.NewClusterNotFound(clusterName)\n\t}\n\tinstanceList := utilscheme.Scheme.NewObjectList(c.objectType)\n\tdelegatingClient, err := cluster.GetDelegatingClient()\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\terr = delegatingClient.List(context.TODO(), instanceList, opts...)\n\treturn instanceList, err\n}", "func (c *Cluster) Export(ctx context.Context) ([]byte, error) {\n\tvar config bytes.Buffer\n\n\tnamespaces, err := c.getAllowedAndExistingNamespaces(ctx)\n\tif err != nil {\n\t\treturn nil, errors.Wrap(err, \"getting namespaces\")\n\t}\n\n\tencoder := yaml.NewEncoder(&config)\n\tdefer encoder.Close()\n\n\tfor _, ns := range namespaces {\n\t\tnamespace, err := c.client.CoreV1().Namespaces().Get(ctx, ns, meta_v1.GetOptions{})\n\t\tif err != nil {\n\t\t\treturn nil, err\n\t\t}\n\n\t\t// kind & apiVersion must be set, since TypeMeta is not populated\n\t\tnamespace.Kind = \"Namespace\"\n\t\tnamespace.APIVersion = \"v1\"\n\n\t\terr = encoder.Encode(yamlThroughJSON{namespace})\n\t\tif err != nil {\n\t\t\treturn nil, errors.Wrap(err, \"marshalling 
namespace to YAML\")\n\t\t}\n\n\t\tfor _, resourceKind := range resourceKinds {\n\t\t\tworkloads, err := resourceKind.getWorkloads(ctx, c, ns)\n\t\t\tif err != nil {\n\t\t\t\tswitch {\n\t\t\t\tcase apierrors.IsNotFound(err):\n\t\t\t\t\t// Kind not supported by API server, skip\n\t\t\t\t\tcontinue\n\t\t\t\tcase apierrors.IsForbidden(err):\n\t\t\t\t\t// K8s can return forbidden instead of not found for non super admins\n\t\t\t\t\tc.logger.Log(\"warning\", \"not allowed to list resources\", \"err\", err)\n\t\t\t\t\tcontinue\n\t\t\t\tdefault:\n\t\t\t\t\treturn nil, err\n\t\t\t\t}\n\t\t\t}\n\n\t\t\tfor _, pc := range workloads {\n\t\t\t\tif !isAddon(pc) {\n\t\t\t\t\tif err := encoder.Encode(yamlThroughJSON{pc.k8sObject}); err != nil {\n\t\t\t\t\t\treturn nil, err\n\t\t\t\t\t}\n\t\t\t\t}\n\t\t\t}\n\t\t}\n\t}\n\treturn config.Bytes(), nil\n}", "func (n *Node) WriteClusterSettings(settings *ClusterSettings) error {\n\ts, err := ksigsyaml.Marshal(*settings)\n\tif err != nil {\n\t\treturn errors.Wrapf(err, \"failed to encode %s\", clusterSettingsPath)\n\t}\n\terr = n.Command(\n\t\t\"mkdir\", \"-p\", filepath.Dir(clusterSettingsPath),\n\t).Silent().Run()\n\tif err != nil {\n\t\treturn errors.Wrapf(err, \"failed to write %s\", clusterSettingsPath)\n\t}\n\tif err := n.WriteFile(clusterSettingsPath, s); err != nil {\n\t\treturn err\n\t}\n\treturn nil\n}", "func (ssr SQLServerRegistration) MarshalJSON() ([]byte, error) {\n\tobjectMap := make(map[string]interface{})\n\tif ssr.SQLServerRegistrationProperties != nil {\n\t\tobjectMap[\"properties\"] = ssr.SQLServerRegistrationProperties\n\t}\n\tif ssr.Location != nil {\n\t\tobjectMap[\"location\"] = ssr.Location\n\t}\n\tif ssr.Tags != nil {\n\t\tobjectMap[\"tags\"] = ssr.Tags\n\t}\n\treturn json.Marshal(objectMap)\n}", "func writeListOfEndpoints(w http.ResponseWriter, r *http.Request, funcMap map[string]*RPCFunc) {\n\thasArgs := make(map[string]string)\n\tnoArgs := make(map[string]string)\n\tfor name, rf := range funcMap {\n\t\tbase := fmt.Sprintf(\"//%s/%s\", r.Host, name)\n\t\tif len(rf.args) == 0 {\n\t\t\tnoArgs[name] = base\n\t\t\tcontinue\n\t\t}\n\t\tvar query []string\n\t\tfor _, arg := range rf.args {\n\t\t\tquery = append(query, arg.name+\"=_\")\n\t\t}\n\t\thasArgs[name] = base + \"?\" + strings.Join(query, \"&\")\n\t}\n\tw.Header().Set(\"Content-Type\", \"text/html\")\n\t_ = listOfEndpoints.Execute(w, map[string]map[string]string{\n\t\t\"NoArgs\": noArgs,\n\t\t\"HasArgs\": hasArgs,\n\t})\n}", "func (client *Client) ListNamespacedConfigMapsWithChan(request *ListNamespacedConfigMapsRequest) (<-chan *ListNamespacedConfigMapsResponse, <-chan error) {\n\tresponseChan := make(chan *ListNamespacedConfigMapsResponse, 1)\n\terrChan := make(chan error, 1)\n\terr := client.AddAsyncTask(func() {\n\t\tdefer close(responseChan)\n\t\tdefer close(errChan)\n\t\tresponse, err := client.ListNamespacedConfigMaps(request)\n\t\tif err != nil {\n\t\t\terrChan <- err\n\t\t} else {\n\t\t\tresponseChan <- response\n\t\t}\n\t})\n\tif err != nil {\n\t\terrChan <- err\n\t\tclose(responseChan)\n\t\tclose(errChan)\n\t}\n\treturn responseChan, errChan\n}", "func WriteTransList(chain uint64, transList []Hash) error {\n\thk := GetHashOfTransList(transList)\n\tdata := TransListToBytes(transList)\n\treturn runtime.AdminDbSet(dbTransList{}, chain, hk[:], data, 2<<50)\n}", "func RegisterCluster(\n\tctx context.Context,\n\tbinaryName string,\n\tflags *pflag.FlagSet,\n\tclientsFactory common.ClientsFactory,\n\tkubeClientsFactory common.KubeClientsFactory,\n\topts *options.Options,\n\tout 
io.Writer,\n\tkubeLoader common_config.KubeLoader,\n) error {\n\n\tif err := cluster_internal.VerifyRemoteContextFlags(opts); err != nil {\n\t\treturn err\n\t}\n\n\tif err := cluster_internal.VerifyMasterCluster(clientsFactory, opts); err != nil {\n\t\treturn err\n\t}\n\n\tregisterOpts := opts.Cluster.Register\n\n\t// set up kube clients for the master cluster\n\tmasterCfg, err := kubeLoader.GetRestConfigForContext(opts.Root.KubeConfig, opts.Root.KubeContext)\n\tif err != nil {\n\t\treturn common.FailedLoadingMasterConfig(err)\n\t}\n\tmasterKubeClients, err := kubeClientsFactory(masterCfg, opts.Root.WriteNamespace)\n\tif err != nil {\n\t\treturn err\n\t}\n\n\t// set up kube clients for the remote cluster\n\n\t// default the remote kube config/context to the root settings\n\tremoteConfigPath, remoteContext := opts.Root.KubeConfig, opts.Root.KubeContext\n\tif registerOpts.RemoteKubeConfig != \"\" {\n\t\t// if we specified a kube config for the remote cluster, use that instead\n\t\tremoteConfigPath = registerOpts.RemoteKubeConfig\n\t}\n\n\t// if we didn't have a context from the root, or if we had an override for the\n\t// remote context, use the remote context instead\n\tif remoteContext == \"\" || registerOpts.RemoteContext != \"\" {\n\t\tremoteContext = registerOpts.RemoteContext\n\t}\n\n\tremoteCfg, err := kubeLoader.GetRestConfigForContext(remoteConfigPath, remoteContext)\n\tif err != nil {\n\t\treturn FailedLoadingRemoteConfig(err)\n\t}\n\tremoteKubeClients, err := kubeClientsFactory(remoteCfg, registerOpts.RemoteWriteNamespace)\n\tif err != nil {\n\t\treturn err\n\t}\n\n\t// if overwrite returns ok than the program should continue, else return\n\t// The reason for the 2 return vars is that err may be nil and returned anyway\n\tif ok, err := shouldOverwrite(ctx, binaryName, flags, out, opts, masterKubeClients); !ok {\n\t\treturn err\n\t}\n\n\tif err = ensureRemoteNamespace(ctx, registerOpts.RemoteWriteNamespace, remoteKubeClients); err != nil {\n\t\treturn err\n\t}\n\n\tbearerTokenForServiceAccount, err := generateServiceAccountBearerToken(ctx, out, remoteKubeClients, remoteCfg, registerOpts)\n\tif err != nil {\n\t\treturn err\n\t}\n\tfmt.Fprintf(out, \"Successfully wrote service account to remote cluster...\\n\")\n\n\tclients, err := clientsFactory(opts)\n\tif err != nil {\n\t\treturn eris.Errorf(\"Unexpected error: Clients should have already been built, but failed to build again\")\n\t}\n\t// Install CRDs to remote cluster. 
This must happen BEFORE kubeconfig Secret is written to master cluster because\n\t// relevant CRDs must exist before SMH attempts any cross cluster functionality.\n\tcsrAgentInstaller := clients.ClusterRegistrationClients.CsrAgentInstallerFactory(remoteKubeClients.HelmInstaller, masterKubeClients.DeployedVersionFinder)\n\terr = csrAgentInstaller.Install(\n\t\tctx,\n\t\t&csr.CsrAgentInstallOptions{\n\t\t\tKubeConfig: remoteConfigPath,\n\t\t\tKubeContext: remoteContext,\n\t\t\tClusterName: registerOpts.RemoteClusterName,\n\t\t\tSmhInstallNamespace: opts.Root.WriteNamespace,\n\t\t\tUseDevCsrAgentChart: registerOpts.UseDevCsrAgentChart,\n\t\t\tReleaseName: cliconstants.CsrAgentReleaseName,\n\t\t\tRemoteWriteNamespace: registerOpts.RemoteWriteNamespace,\n\t\t},\n\t)\n\tif err != nil {\n\t\treturn err\n\t}\n\tfmt.Fprintf(out, \"Successfully set up CSR agent...\\n\")\n\n\t// Write kubeconfig Secret and KubeCluster CRD to master cluster\n\tsecret, err := writeKubeConfigToMaster(\n\t\tctx,\n\t\topts.Root.WriteNamespace,\n\t\tregisterOpts,\n\t\tremoteConfigPath,\n\t\tbearerTokenForServiceAccount,\n\t\tmasterKubeClients,\n\t\tkubeLoader,\n\t\tclients.KubeConverter,\n\t)\n\tif err != nil {\n\t\treturn err\n\t}\n\tfmt.Fprintf(out, \"Successfully wrote kube config secret to master cluster...\\n\")\n\terr = writeKubeClusterToMaster(ctx, masterKubeClients, opts.Root.WriteNamespace, registerOpts, secret)\n\tif err != nil {\n\t\treturn err\n\t}\n\n\tfmt.Fprintf(\n\t\tout,\n\t\t\"\\nCluster %s is now registered in your Service Mesh Hub installation\\n\",\n\t\tregisterOpts.RemoteClusterName,\n\t)\n\n\treturn nil\n}", "func (r *NodeRegistry) List() []ipfs.NodeInfo {\n\tvar (\n\t\tnodes = make([]ipfs.NodeInfo, len(r.nodes))\n\t\ti = 0\n\t)\n\n\tr.nm.RLock()\n\tfor _, n := range r.nodes {\n\t\tnodes[i] = *n\n\t\ti++\n\t}\n\tr.nm.RUnlock()\n\n\treturn nodes\n}", "func IPListWriteFromCIDR(cidrStr string) error {\n\t_, ipnet, err := net.ParseCIDR(cidrStr)\n\tif err != nil {\n\t\treturn err\n\t}\n\n\tmask := binary.BigEndian.Uint32(ipnet.Mask)\n\tstart := binary.BigEndian.Uint32(ipnet.IP)\n\tend := (start & mask) | (mask ^ 0xffffffff)\n\n\tfor i := start; i <= end; i++ {\n\t\tip := make(net.IP, 4)\n\t\tbinary.BigEndian.PutUint32(ip, i)\n\t\tfmt.Println(ip)\n\t}\n\n\treturn nil\n}", "func (o *ListInstancesParams) WriteToRequest(r runtime.ClientRequest, reg strfmt.Registry) error {\n\n\tif err := r.SetTimeout(o.timeout); err != nil {\n\t\treturn err\n\t}\n\tvar res []error\n\n\tif o.Cluster != nil {\n\n\t\t// query param cluster\n\t\tvar qrCluster string\n\t\tif o.Cluster != nil {\n\t\t\tqrCluster = *o.Cluster\n\t\t}\n\t\tqCluster := qrCluster\n\t\tif qCluster != \"\" {\n\t\t\tif err := r.SetQueryParam(\"cluster\", qCluster); err != nil {\n\t\t\t\treturn err\n\t\t\t}\n\t\t}\n\n\t}\n\n\tif o.Status != nil {\n\n\t\t// query param status\n\t\tvar qrStatus string\n\t\tif o.Status != nil {\n\t\t\tqrStatus = *o.Status\n\t\t}\n\t\tqStatus := qrStatus\n\t\tif qStatus != \"\" {\n\t\t\tif err := r.SetQueryParam(\"status\", qStatus); err != nil {\n\t\t\t\treturn err\n\t\t\t}\n\t\t}\n\n\t}\n\n\tif len(res) > 0 {\n\t\treturn errors.CompositeValidationError(res...)\n\t}\n\treturn nil\n}", "func (s SubscriptionFeatureRegistrationList) MarshalJSON() ([]byte, error) {\n\tobjectMap := make(map[string]any)\n\tpopulate(objectMap, \"nextLink\", s.NextLink)\n\tpopulate(objectMap, \"value\", s.Value)\n\treturn json.Marshal(objectMap)\n}", "func replaceClusterMembersConfigMap(ctx context.Context, centralClusterClient kubernetes.Interface, 
flags flags) error {\n\tmembers := corev1.ConfigMap{\n\t\tObjectMeta: metav1.ObjectMeta{\n\t\t\tName: defaultOperatorConfigMapName,\n\t\t\tNamespace: flags.centralClusterNamespace,\n\t\t\tLabels: multiClusterLabels(),\n\t\t},\n\t\tData: map[string]string{},\n\t}\n\n\taddToSet(flags.memberClusters, &members)\n\n\tfmt.Printf(\"Creating Member list Configmap %s/%s in cluster %s\\n\", flags.centralClusterNamespace, defaultOperatorConfigMapName, flags.centralCluster)\n\t_, err := centralClusterClient.CoreV1().ConfigMaps(flags.centralClusterNamespace).Create(ctx, &members, metav1.CreateOptions{})\n\n\tif err != nil && !errors.IsAlreadyExists(err) {\n\t\treturn xerrors.Errorf(\"failed creating secret: %w\", err)\n\t}\n\n\tif errors.IsAlreadyExists(err) {\n\t\tif _, err := centralClusterClient.CoreV1().ConfigMaps(flags.centralClusterNamespace).Update(ctx, &members, metav1.UpdateOptions{}); err != nil {\n\t\t\treturn xerrors.Errorf(\"error creating configmap: %w\", err)\n\t\t}\n\t}\n\n\treturn nil\n}", "func (h *NotificationHub) Registrations(ctx context.Context) (raw []byte, registrations *Registrations, err error) {\n\traw, _, err = h.exec(ctx, getMethod, h.generateAPIURL(\"registrations\"), Headers{}, nil)\n\tif err != nil {\n\t\treturn\n\t}\n\tif err = xml.Unmarshal(raw, &registrations); err != nil {\n\t\treturn\n\t}\n\tregistrations.normalize()\n\treturn\n}", "func List(ctx context.Context, client *v1.ServiceClient, clusterID string) ([]*View, *v1.ResponseResult, error) {\n\turl := strings.Join([]string{client.Endpoint, v1.ResourceURLCluster, clusterID, v1.ResourceURLNodegroup}, \"/\")\n\tresponseResult, err := client.DoRequest(ctx, http.MethodGet, url, nil)\n\tif err != nil {\n\t\treturn nil, nil, err\n\t}\n\tif responseResult.Err != nil {\n\t\treturn nil, responseResult, responseResult.Err\n\t}\n\n\t// Extract nodegroups from the response body.\n\tvar result struct {\n\t\tNodegroups []*View `json:\"nodegroups\"`\n\t}\n\terr = responseResult.ExtractResult(&result)\n\tif err != nil {\n\t\treturn nil, responseResult, err\n\t}\n\n\treturn result.Nodegroups, responseResult, err\n}", "func (re *stubRegistrationService) ListByParentID(ctx context.Context, request pb.ParentID) (reply common.RegistrationEntries, err error) {\n\tentries, err := re.registration.ListEntryByParentSpiffeID(request.Id)\n\treply = common.RegistrationEntries{Entries: entries}\n\treturn reply, err\n}", "func (n NotificationChannelList) MarshalJSON() ([]byte, error) {\n\tobjectMap := make(map[string]interface{})\n\tpopulate(objectMap, \"nextLink\", n.NextLink)\n\tpopulate(objectMap, \"value\", n.Value)\n\treturn json.Marshal(objectMap)\n}", "func (a *Client) VirtualizationClusterTypesList(params *VirtualizationClusterTypesListParams) (*VirtualizationClusterTypesListOK, error) {\n\t// TODO: Validate the params before sending\n\tif params == nil {\n\t\tparams = NewVirtualizationClusterTypesListParams()\n\t}\n\n\tresult, err := a.transport.Submit(&runtime.ClientOperation{\n\t\tID: \"virtualization_cluster-types_list\",\n\t\tMethod: \"GET\",\n\t\tPathPattern: \"/api/virtualization/cluster-types/\",\n\t\tProducesMediaTypes: []string{\"application/json\"},\n\t\tConsumesMediaTypes: []string{\"application/json\"},\n\t\tSchemes: []string{\"http\"},\n\t\tParams: params,\n\t\tReader: &VirtualizationClusterTypesListReader{formats: a.formats},\n\t\tContext: params.Context,\n\t\tClient: params.HTTPClient,\n\t})\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\treturn result.(*VirtualizationClusterTypesListOK), nil\n\n}", "func (s *Serializer) 
SerializeBatch(metrics []telegraf.Metric) ([]byte, error) {\n\ts.buf.Reset()\n\tfor _, m := range metrics {\n\t\terr := s.Write(&s.buf, m)\n\t\tif err != nil {\n\t\t\tvar mErr *MetricError\n\t\t\tif errors.As(err, &mErr) {\n\t\t\t\tcontinue\n\t\t\t}\n\t\t\treturn nil, err\n\t\t}\n\t}\n\tout := make([]byte, 0, s.buf.Len())\n\treturn append(out, s.buf.Bytes()...), nil\n}", "func (o OpenShiftClusterProperties) MarshalJSON() ([]byte, error) {\n\tobjectMap := make(map[string]any)\n\tpopulate(objectMap, \"apiserverProfile\", o.ApiserverProfile)\n\tpopulate(objectMap, \"clusterProfile\", o.ClusterProfile)\n\tpopulate(objectMap, \"consoleProfile\", o.ConsoleProfile)\n\tpopulate(objectMap, \"ingressProfiles\", o.IngressProfiles)\n\tpopulate(objectMap, \"masterProfile\", o.MasterProfile)\n\tpopulate(objectMap, \"networkProfile\", o.NetworkProfile)\n\tpopulate(objectMap, \"provisioningState\", o.ProvisioningState)\n\tpopulate(objectMap, \"servicePrincipalProfile\", o.ServicePrincipalProfile)\n\tpopulate(objectMap, \"workerProfiles\", o.WorkerProfiles)\n\treturn json.Marshal(objectMap)\n}", "func (r *Registration) maintainRegistrationAndStreamWork(ctx context.Context) {\n\tregistrationMsg := &scpb.RegisterAndStreamWorkRequest{\n\t\tRegisterExecutorRequest: &scpb.RegisterExecutorRequest{Node: r.node},\n\t}\n\n\tdefer r.setConnected(false)\n\n\tfor {\n\t\tstream, err := r.schedulerClient.RegisterAndStreamWork(ctx)\n\t\tif err != nil {\n\t\t\tif done := sleepWithContext(ctx, registrationFailureRetryInterval); done {\n\t\t\t\tlog.Debugf(\"Context cancelled, cancelling node registration.\")\n\t\t\t\treturn\n\t\t\t}\n\t\t\tcontinue\n\t\t}\n\t\tif err := stream.Send(registrationMsg); err != nil {\n\t\t\tlog.Errorf(\"error registering node with scheduler: %s, will retry...\", err)\n\t\t\tcontinue\n\t\t}\n\n\t\tr.setConnected(true)\n\n\t\tschedulerMsgs := make(chan *scpb.RegisterAndStreamWorkResponse)\n\t\tgo func() {\n\t\t\tfor {\n\t\t\t\tmsg, err := stream.Recv()\n\t\t\t\tif err != nil {\n\t\t\t\t\tlog.Warningf(\"Could not read from stream: %s\", err)\n\t\t\t\t\tclose(schedulerMsgs)\n\t\t\t\t\tbreak\n\t\t\t\t}\n\t\t\t\tselect {\n\t\t\t\tcase schedulerMsgs <- msg:\n\t\t\t\tcase <-stream.Context().Done():\n\t\t\t\t\treturn\n\t\t\t\t}\n\t\t\t}\n\t\t}()\n\n\t\tfor {\n\t\t\tdone, err := r.processWorkStream(ctx, stream, schedulerMsgs)\n\t\t\tif err != nil {\n\t\t\t\t_ = stream.CloseSend()\n\t\t\t\tlog.Warningf(\"Error maintaining registration with scheduler, will retry: %s\", err)\n\t\t\t\tbreak\n\t\t\t}\n\t\t\tif done {\n\t\t\t\t_ = stream.CloseSend()\n\t\t\t\treturn\n\t\t\t}\n\t\t}\n\t\tr.setConnected(false)\n\t\tif done := sleepWithContext(ctx, registrationFailureRetryInterval); done {\n\t\t\tlog.Debugf(\"Context cancelled, cancelling node registration.\")\n\t\t\treturn\n\t\t}\n\t}\n}", "func (t ListGroupsResponse) Encode(e *Encoder, version int16) {\n\tif version >= 1 {\n\t\te.PutInt32(t.ThrottleTimeMs) // ThrottleTimeMs\n\t}\n\te.PutInt16(t.ErrorCode) // ErrorCode\n\t// Groups\n\tlen2 := len(t.Groups)\n\te.PutArrayLength(len2)\n\tfor i := 0; i < len2; i++ {\n\t\tt.Groups[i].Encode(e, version)\n\t}\n}", "func streamList(w http.ResponseWriter, r *http.Request) {\n\tclaims := r.Context().Value(ctxClaims).(*account.Claims)\n\ts, err := stream.GetStreamsOfUser(uuid.FromStringOrNil(claims.Issuer))\n\tif err != nil {\n\t\tErrGetResource.Send(w)\n\t\treturn\n\t}\n\tvar res Success\n\tres.Message = \"Stream list for user\"\n\tres.Payload = s\n\tres.Send(w)\n\treturn\n}", "func (this *channelStruct) WriteFloats(samples 
[]float64) {\n\tthis.samples = append(this.samples, samples...)\n}", "func List() ([]clusterapi.Cluster, error) {\n\tvar clusterList []clusterapi.Cluster\n\terr := utils.BrowseMetadataContent(clusterapi.ClusterMetadataPrefix, func(buf *bytes.Buffer) error {\n\t\tvar c clusterapi.Cluster\n\t\terr := gob.NewDecoder(buf).Decode(&c)\n\t\tif err != nil {\n\t\t\treturn err\n\t\t}\n\t\tclusterList = append(clusterList, c)\n\t\treturn nil\n\t})\n\treturn clusterList, err\n}", "func (client DeploymentsClient) ListForCluster(ctx context.Context, resourceGroupName string, serviceName string, version []string) (result DeploymentResourceCollectionPage, err error) {\n\tif tracing.IsEnabled() {\n\t\tctx = tracing.StartSpan(ctx, fqdn+\"/DeploymentsClient.ListForCluster\")\n\t\tdefer func() {\n\t\t\tsc := -1\n\t\t\tif result.drc.Response.Response != nil {\n\t\t\t\tsc = result.drc.Response.Response.StatusCode\n\t\t\t}\n\t\t\ttracing.EndSpan(ctx, sc, err)\n\t\t}()\n\t}\n\tresult.fn = client.listForClusterNextResults\n\treq, err := client.ListForClusterPreparer(ctx, resourceGroupName, serviceName, version)\n\tif err != nil {\n\t\terr = autorest.NewErrorWithError(err, \"appplatform.DeploymentsClient\", \"ListForCluster\", nil, \"Failure preparing request\")\n\t\treturn\n\t}\n\n\tresp, err := client.ListForClusterSender(req)\n\tif err != nil {\n\t\tresult.drc.Response = autorest.Response{Response: resp}\n\t\terr = autorest.NewErrorWithError(err, \"appplatform.DeploymentsClient\", \"ListForCluster\", resp, \"Failure sending request\")\n\t\treturn\n\t}\n\n\tresult.drc, err = client.ListForClusterResponder(resp)\n\tif err != nil {\n\t\terr = autorest.NewErrorWithError(err, \"appplatform.DeploymentsClient\", \"ListForCluster\", resp, \"Failure responding to request\")\n\t\treturn\n\t}\n\tif result.drc.hasNextLink() && result.drc.IsEmpty() {\n\t\terr = result.NextWithContext(ctx)\n\t\treturn\n\t}\n\n\treturn\n}", "func (r renderer) List(out *bytes.Buffer, text func() bool, flags int) {\n\t// TODO: This is not desired (we'd rather not write lists as part of summary),\n\t// but see this issue: https://github.com/russross/blackfriday/issues/189\n\tmarker := out.Len()\n\tif !text() {\n\t\tout.Truncate(marker)\n\t}\n\tout.Write([]byte{' '})\n}", "func (sqlStore *SQLStore) UnlockClusterInstallations(clusterInstallationIDs []string, lockerID string, force bool) (bool, error) {\n\treturn sqlStore.unlockRows(\"ClusterInstallation\", clusterInstallationIDs, lockerID, force)\n}", "func (ds *DiskSyncer) WriteBatch(a []*Account) error {\n\terr := make(chan error)\n\tds.writeBatch <- &writeBatchRequest{\n\t\ta: a,\n\t\terr: err,\n\t}\n\treturn <-err\n}", "func (handler Handler) WriteJSONListResult(w http.ResponseWriter, total int, v interface{}, statusCode int) error {\n\tresult := Result{}\n\tresult.Total = total\n\tresult.Result = v\n\treturn handler.WriteJSON(w, result, statusCode)\n}", "func (d *Dumper) DumpDeployerRegistrations(ctx context.Context) error {\n\tdrList := &lsv1alpha1.DeployerRegistrationList{}\n\tif err := d.kubeClient.List(ctx, drList); err != nil {\n\t\treturn fmt.Errorf(\"unable to list environments: %w\", err)\n\t}\n\tfor _, dr := range drList.Items {\n\t\tif err := DumpDeployerRegistration(d.logger, &dr); err != nil {\n\t\t\treturn err\n\t\t}\n\t}\n\treturn nil\n}", "func RenderCluster(out io.Writer, cluster sind.ClusterStatus) {\n\twr := tabwriter.NewWriter(out, 4, 8, 2, '\\t', 0)\n\tdefer wr.Flush()\n\n\tfmt.Fprintf(\n\t\twr,\n\t\t\"Name: %s\\tStatus: %s\\tManagers: %s\\t Workers: 
%s\\t\\n\",\n\t\tstyle.Important(cluster.Name),\n\t\tstyle.Important(status(cluster)),\n\t\tstyle.Important(fmt.Sprintf(\"%d/%d\", cluster.ManagersRunning, cluster.Managers)),\n\t\tstyle.Important(fmt.Sprintf(\"%d/%d\", cluster.WorkersRunning, cluster.Workers)),\n\t)\n\n\tfmt.Fprintf(wr, \"ID\\tImage\\tRole\\tStatus\\tIPs\\t\\n\")\n\tfmt.Fprintf(wr, \"--\\t-----\\t----\\t------\\t---\\t\\n\")\n\n\tfor _, node := range cluster.Nodes {\n\t\tfmt.Fprintf(\n\t\t\twr,\n\t\t\t\"%s\\t%s\\t%s\\t%s\\t%s\\t\\n\",\n\t\t\tnode.ID[0:11],\n\t\t\tnode.Image,\n\t\t\tclusterRole(node),\n\t\t\tnode.Status,\n\t\t\tnodeIP(node),\n\t\t)\n\t}\n}", "func List(ctx context.Context, filters container.FilterBuilder) ([]*types.Node, error) {\n\tres := []*types.Node{}\n\tvisit := func(ctx context.Context, cluster string, node *types.Node) {\n\t\tres = append(res, node)\n\t}\n\treturn res, list(ctx, visit, filters)\n}", "func IPListWriteFromCIDRs(cidrStrs []string) error {\n\tfor _, cidr := range cidrStrs {\n\t\tif err := IPListWriteFromCIDR(cidr); err != nil {\n\t\t\treturn err\n\t\t}\n\t}\n\treturn nil\n}", "func (c ClusterProfile) MarshalJSON() ([]byte, error) {\n\tobjectMap := make(map[string]any)\n\tpopulate(objectMap, \"domain\", c.Domain)\n\tpopulate(objectMap, \"fipsValidatedModules\", c.FipsValidatedModules)\n\tpopulate(objectMap, \"pullSecret\", c.PullSecret)\n\tpopulate(objectMap, \"resourceGroupId\", c.ResourceGroupID)\n\tpopulate(objectMap, \"version\", c.Version)\n\treturn json.Marshal(objectMap)\n}", "func (e *EndpointSessions) listSessions(writer http.ResponseWriter, request *http.Request) {\n\tsess, usr := GetSessionAndUser(e.sessions, e.users, writer, request)\n\tif usr == nil {\n\t\treturn\n\t}\n\n\t//only admin users are allowed to view all sessions\n\tif sess.User != user.ADMIN {\n\t\thttp.Error(writer, \"user must be admin\", http.StatusForbidden)\n\t\treturn\n\t}\n\n\tsessions, err := e.sessions.List()\n\tif err != nil {\n\t\thttp.Error(writer, err.Error(), http.StatusInternalServerError)\n\t\treturn\n\t}\n\n\tWriteJSONBody(writer, sessions)\n}", "func (ehcglr EventHubConsumerGroupsListResult) MarshalJSON() ([]byte, error) {\n\tobjectMap := make(map[string]interface{})\n\tif ehcglr.Value != nil {\n\t\tobjectMap[\"value\"] = ehcglr.Value\n\t}\n\treturn json.Marshal(objectMap)\n}", "func clusterList() []string {\n\tif c := envy.String(\"DQLITED_CLUSTER\"); c != \"\" {\n\t\treturn strings.Split(c, \",\")\n\t}\n\treturn defaultCluster\n}", "func RotateCluster(cluster *model.Cluster, logger *logrus.Entry, rotatorMetadata *RotatorMetadata) (*RotatorMetadata, error) {\n\tclientset, err := getk8sClientset(cluster)\n\tif err != nil {\n\t\treturn rotatorMetadata, err\n\t}\n\n\tif rotatorMetadata.MasterGroups == nil && rotatorMetadata.WorkerGroups == nil {\n\t\terr = rotatorMetadata.GetSetAutoscalingGroups(cluster)\n\t\tif err != nil {\n\t\t\treturn rotatorMetadata, err\n\t\t}\n\t}\n\n\tfor index, masterASG := range rotatorMetadata.MasterGroups {\n\t\tlogger.Infof(\"The autoscaling group %s has %d instance(s)\", masterASG.Name, masterASG.DesiredCapacity)\n\n\t\terr = MasterNodeRotation(cluster, &masterASG, clientset, logger)\n\t\tif err != nil {\n\t\t\trotatorMetadata.MasterGroups[index] = masterASG\n\t\t\treturn rotatorMetadata, err\n\t\t}\n\n\t\trotatorMetadata.MasterGroups[index] = masterASG\n\n\t\tlogger.Infof(\"Checking that all %d nodes are running...\", masterASG.DesiredCapacity)\n\t\terr = FinalCheck(&masterASG, clientset, logger)\n\t\tif err != nil {\n\t\t\treturn rotatorMetadata, 
err\n\t\t}\n\n\t\tlogger.Infof(\"ASG %s rotated successfully.\", masterASG.Name)\n\t}\n\n\tfor index, workerASG := range rotatorMetadata.WorkerGroups {\n\t\tlogger.Infof(\"The autoscaling group %s has %d instance(s)\", workerASG.Name, workerASG.DesiredCapacity)\n\n\t\terr = WorkerNodeRotation(cluster, &workerASG, clientset, logger)\n\t\tif err != nil {\n\t\t\trotatorMetadata.WorkerGroups[index] = workerASG\n\t\t\treturn rotatorMetadata, err\n\t\t}\n\n\t\trotatorMetadata.WorkerGroups[index] = workerASG\n\n\t\tlogger.Infof(\"Checking that all %d nodes are running...\", workerASG.DesiredCapacity)\n\t\terr = FinalCheck(&workerASG, clientset, logger)\n\t\tif err != nil {\n\t\t\treturn rotatorMetadata, err\n\t\t}\n\n\t\tlogger.Infof(\"ASG %s rotated successfully.\", workerASG.Name)\n\t}\n\n\tlogger.Info(\"All ASGs rotated successfully\")\n\treturn rotatorMetadata, nil\n}", "func NewNotificationSection_List(s *capnp.Segment, sz int32) (NotificationSection_List, error) {\n\tl, err := capnp.NewCompositeList(s, capnp.ObjectSize{DataSize: 8, PointerCount: 2}, sz)\n\treturn NotificationSection_List{l}, err\n}", "func (client DeploymentsClient) ListForClusterResponder(resp *http.Response) (result DeploymentResourceCollection, err error) {\n\terr = autorest.Respond(\n\t\tresp,\n\t\tazure.WithErrorUnlessStatusCode(http.StatusOK),\n\t\tautorest.ByUnmarshallingJSON(&result),\n\t\tautorest.ByClosing())\n\tresult.Response = autorest.Response{Response: resp}\n\treturn\n}", "func (c *Cluster) List() dcs.List {\n\treturn dcs.List{\n\t\tOptions: c.cfg.DCOptions,\n\t\tDomains: c.domains,\n\t}\n}", "func (s *uploadMetaListSerializer) Serialize(uploadMetas []commitgraph.UploadMeta) []byte {\n\ts.write(uploadMetas)\n\treturn s.take()\n}", "func (o *PostDockerRegistriesSearchListParams) WriteToRequest(r runtime.ClientRequest, reg strfmt.Registry) error {\n\n\tr.SetTimeout(o.timeout)\n\tvar res []error\n\n\tif o.Metadata == nil {\n\t\to.Metadata = new(models.DockerRegistryImageSearch)\n\t}\n\n\tif err := r.SetBodyParam(o.Metadata); err != nil {\n\t\treturn err\n\t}\n\n\tif len(res) > 0 {\n\t\treturn errors.CompositeValidationError(res...)\n\t}\n\treturn nil\n}", "func (npMgr *NetworkPolicyManager) SendClusterMetrics() {\n\tvar (\n\t\theartbeat = time.NewTicker(time.Minute * heartbeatIntervalInMinutes).C\n\t\tcustomDimensions = map[string]string{\"ClusterID\": util.GetClusterID(npMgr.NodeName),\n\t\t\t\"APIServer\": npMgr.serverVersion.String()}\n\t\tpodCount = aitelemetry.Metric{\n\t\t\tName: \"PodCount\",\n\t\t\tCustomDimensions: customDimensions,\n\t\t}\n\t\tnsCount = aitelemetry.Metric{\n\t\t\tName: \"NsCount\",\n\t\t\tCustomDimensions: customDimensions,\n\t\t}\n\t\tnwPolicyCount = aitelemetry.Metric{\n\t\t\tName: \"NwPolicyCount\",\n\t\t\tCustomDimensions: customDimensions,\n\t\t}\n\t)\n\n\tfor {\n\t\t<-heartbeat\n\t\tnpMgr.Lock()\n\t\tpodCount.Value = 0\n\t\t//Reducing one to remove all-namespaces ns obj\n\t\tnsCount.Value = float64(len(npMgr.NsMap) - 1)\n\t\tnwPolicyCount.Value += float64(len(npMgr.RawNpMap))\n\t\tpodCount.Value += float64(len(npMgr.PodMap))\n\t\tnpMgr.Unlock()\n\n\t\tmetrics.SendMetric(podCount)\n\t\tmetrics.SendMetric(nsCount)\n\t\tmetrics.SendMetric(nwPolicyCount)\n\t}\n}" ]
[ "0.714264", "0.62940454", "0.54479", "0.5051815", "0.4918116", "0.4914111", "0.48883167", "0.48088843", "0.4786973", "0.47607687", "0.47010523", "0.46967945", "0.46888116", "0.46668252", "0.46635702", "0.46625742", "0.46205908", "0.46101937", "0.45741528", "0.45734975", "0.4538628", "0.4531599", "0.45123142", "0.45122474", "0.44907552", "0.44886124", "0.44499144", "0.44353327", "0.4429958", "0.4429947", "0.44201007", "0.44101033", "0.44007736", "0.43888366", "0.4384569", "0.4372486", "0.43619832", "0.4358042", "0.43569455", "0.4342676", "0.433554", "0.43291998", "0.43160364", "0.4284268", "0.42825252", "0.4249162", "0.42457741", "0.4245468", "0.42446536", "0.42386916", "0.42296985", "0.42254916", "0.4206135", "0.4201094", "0.41921014", "0.41903627", "0.41902617", "0.4179794", "0.41792434", "0.4169431", "0.41614658", "0.41358218", "0.4135674", "0.41343006", "0.41041735", "0.41006178", "0.4083735", "0.4083043", "0.4081263", "0.40722218", "0.40695897", "0.4066272", "0.4062055", "0.4057533", "0.40543976", "0.40474546", "0.40453798", "0.4039213", "0.40375775", "0.40282965", "0.40222636", "0.40219164", "0.4021259", "0.4014332", "0.4011721", "0.40100026", "0.40048552", "0.39978066", "0.39934844", "0.39921695", "0.3988846", "0.39884022", "0.3984199", "0.39824298", "0.39809027", "0.3978583", "0.3977949", "0.39771283", "0.39758593", "0.39613998" ]
0.85305786
0
UnmarshalClusterRegistrationList reads a list of values of the 'cluster_registration' type from the given source, which can be a slice of bytes, a string or a reader.
func UnmarshalClusterRegistrationList(source interface{}) (items []*ClusterRegistration, err error) { iterator, err := helpers.NewIterator(source) if err != nil { return } items = readClusterRegistrationList(iterator) err = iterator.Error return }
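A minimal usage sketch for the UnmarshalClusterRegistrationList function shown in the document field above. The import path, package alias, and empty JSON payload are illustrative assumptions, not taken from the source; the only thing carried over is the function's signature, which accepts a slice of bytes, a string, or a reader as its source.

package main

import (
	"fmt"

	v1 "example.com/sdk/clustersmgmt/v1" // hypothetical import path for the package defining ClusterRegistration
)

func main() {
	// The source may be a []byte, string, or io.Reader; an empty JSON array is used here
	// so no field names of ClusterRegistration need to be assumed.
	items, err := v1.UnmarshalClusterRegistrationList([]byte("[]"))
	if err != nil {
		fmt.Println("unmarshal failed:", err)
		return
	}
	fmt.Println("decoded", len(items), "cluster registrations")
}

Under these assumptions the sketch compiles and runs, decoding zero items; a real payload would be a JSON array of cluster_registration objects or an io.Reader over the same bytes.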
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "func readClusterRegistrationList(iterator *jsoniter.Iterator) []*ClusterRegistration {\n\tlist := []*ClusterRegistration{}\n\tfor iterator.ReadArray() {\n\t\titem := readClusterRegistration(iterator)\n\t\tlist = append(list, item)\n\t}\n\treturn list\n}", "func MarshalClusterRegistrationList(list []*ClusterRegistration, writer io.Writer) error {\n\tstream := helpers.NewStream(writer)\n\twriteClusterRegistrationList(list, stream)\n\tstream.Flush()\n\treturn stream.Error\n}", "func UnmarshalNotificationsRegistration(m map[string]json.RawMessage, result interface{}) (err error) {\n\tobj := new(NotificationsRegistration)\n\terr = core.UnmarshalPrimitive(m, \"event_notifications_instance_crn\", &obj.EventNotificationsInstanceCrn)\n\tif err != nil {\n\t\treturn\n\t}\n\treflect.ValueOf(result).Elem().Set(reflect.ValueOf(obj))\n\treturn\n}", "func (o *OpenShiftClusterList) UnmarshalJSON(data []byte) error {\n\tvar rawMsg map[string]json.RawMessage\n\tif err := json.Unmarshal(data, &rawMsg); err != nil {\n\t\treturn fmt.Errorf(\"unmarshalling type %T: %v\", o, err)\n\t}\n\tfor key, val := range rawMsg {\n\t\tvar err error\n\t\tswitch key {\n\t\tcase \"nextLink\":\n\t\t\terr = unpopulate(val, \"NextLink\", &o.NextLink)\n\t\t\tdelete(rawMsg, key)\n\t\tcase \"value\":\n\t\t\terr = unpopulate(val, \"Value\", &o.Value)\n\t\t\tdelete(rawMsg, key)\n\t\t}\n\t\tif err != nil {\n\t\t\treturn fmt.Errorf(\"unmarshalling type %T: %v\", o, err)\n\t\t}\n\t}\n\treturn nil\n}", "func writeClusterRegistrationList(list []*ClusterRegistration, stream *jsoniter.Stream) {\n\tstream.WriteArrayStart()\n\tfor i, value := range list {\n\t\tif i > 0 {\n\t\t\tstream.WriteMore()\n\t\t}\n\t\twriteClusterRegistration(value, stream)\n\t}\n\tstream.WriteArrayEnd()\n}", "func (s *SubscriptionFeatureRegistrationList) UnmarshalJSON(data []byte) error {\n\tvar rawMsg map[string]json.RawMessage\n\tif err := json.Unmarshal(data, &rawMsg); err != nil {\n\t\treturn fmt.Errorf(\"unmarshalling type %T: %v\", s, err)\n\t}\n\tfor key, val := range rawMsg {\n\t\tvar err error\n\t\tswitch key {\n\t\tcase \"nextLink\":\n\t\t\terr = unpopulate(val, \"NextLink\", &s.NextLink)\n\t\t\tdelete(rawMsg, key)\n\t\tcase \"value\":\n\t\t\terr = unpopulate(val, \"Value\", &s.Value)\n\t\t\tdelete(rawMsg, key)\n\t\t}\n\t\tif err != nil {\n\t\t\treturn fmt.Errorf(\"unmarshalling type %T: %v\", s, err)\n\t\t}\n\t}\n\treturn nil\n}", "func (x *RegistrationSource) UnmarshalJSON(b []byte) error {\n\tnum, err := protoimpl.X.UnmarshalJSONEnum(x.Descriptor(), b)\n\tif err != nil {\n\t\treturn err\n\t}\n\t*x = RegistrationSource(num)\n\treturn nil\n}", "func UnmarshalAnalyticsEngineClusterNodeSlice(s []interface{}) (slice []AnalyticsEngineClusterNode, err error) {\n\tfor _, v := range s {\n\t\tobjMap, ok := v.(map[string]interface{})\n\t\tif !ok {\n\t\t\terr = fmt.Errorf(\"slice element should be a map containing an instance of 'AnalyticsEngineClusterNode'\")\n\t\t\treturn\n\t\t}\n\t\tobj, e := UnmarshalAnalyticsEngineClusterNode(objMap)\n\t\tif e != nil {\n\t\t\terr = e\n\t\t\treturn\n\t\t}\n\t\tslice = append(slice, *obj)\n\t}\n\treturn\n}", "func DeserializeStringListFromBytes(rawResponse []byte) (vals []string, err error) {\n\tvar metaResponse GList\n\tif len(rawResponse) == 0 {\n\t\terr = errors.New(\"DeserializeStringListFromBytes: nothing to decode\")\n\t\treturn\n\t}\n\tdec := json.NewDecoder(bytes.NewReader(rawResponse))\n\tdec.DisallowUnknownFields()\n\tif err = dec.Decode(&metaResponse); err != nil {\n\t\treturn\n\t}\n\n\tif metaResponse.Type != 
\"g:List\" {\n\t\terr = errors.New(\"DeserializeStringListFromBytes: Expected `g:List` type\")\n\t\treturn\n\t}\n\n\tif err = json.Unmarshal(metaResponse.Value, &vals); err != nil {\n\t\treturn\n\t}\n\treturn\n}", "func (lce *ListClusterEvent) UnmarshalJSON(body []byte) error {\n\tce, err := unmarshalBasicClusterEventArray(body)\n\tif err != nil {\n\t\treturn err\n\t}\n\tlce.Value = &ce\n\n\treturn nil\n}", "func Convert_v1alpha1_ControllerRegistrationList_To_core_ControllerRegistrationList(in *ControllerRegistrationList, out *core.ControllerRegistrationList, s conversion.Scope) error {\n\treturn autoConvert_v1alpha1_ControllerRegistrationList_To_core_ControllerRegistrationList(in, out, s)\n}", "func (c *cache) ListApplicationRegistrations() (*storkv1alpha1.ApplicationRegistrationList, error) {\n\tif c == nil || c.controllerCache == nil {\n\t\treturn nil, fmt.Errorf(cacheNotInitializedErr)\n\t}\n\tappRegList := &storkv1alpha1.ApplicationRegistrationList{}\n\tif err := c.controllerCache.List(context.Background(), appRegList); err != nil {\n\t\treturn nil, err\n\t}\n\treturn appRegList, nil\n}", "func (a *Client) VirtualizationClusterTypesList(params *VirtualizationClusterTypesListParams) (*VirtualizationClusterTypesListOK, error) {\n\t// TODO: Validate the params before sending\n\tif params == nil {\n\t\tparams = NewVirtualizationClusterTypesListParams()\n\t}\n\n\tresult, err := a.transport.Submit(&runtime.ClientOperation{\n\t\tID: \"virtualization_cluster-types_list\",\n\t\tMethod: \"GET\",\n\t\tPathPattern: \"/api/virtualization/cluster-types/\",\n\t\tProducesMediaTypes: []string{\"application/json\"},\n\t\tConsumesMediaTypes: []string{\"application/json\"},\n\t\tSchemes: []string{\"http\"},\n\t\tParams: params,\n\t\tReader: &VirtualizationClusterTypesListReader{formats: a.formats},\n\t\tContext: params.Context,\n\t\tClient: params.HTTPClient,\n\t})\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\treturn result.(*VirtualizationClusterTypesListOK), nil\n\n}", "func UnmarshalClusterConfiguration(yaml string) (*bootstrapv1.ClusterConfiguration, error) {\n\tobj := &bootstrapv1.ClusterConfiguration{}\n\tif err := unmarshalFromVersions(yaml, clusterConfigurationVersionTypeMap, obj); err != nil {\n\t\treturn nil, err\n\t}\n\treturn obj, nil\n}", "func (ssr *SQLServerRegistration) UnmarshalJSON(body []byte) error {\n\tvar m map[string]*json.RawMessage\n\terr := json.Unmarshal(body, &m)\n\tif err != nil {\n\t\treturn err\n\t}\n\tfor k, v := range m {\n\t\tswitch k {\n\t\tcase \"properties\":\n\t\t\tif v != nil {\n\t\t\t\tvar SQLServerRegistrationProperties SQLServerRegistrationProperties\n\t\t\t\terr = json.Unmarshal(*v, &SQLServerRegistrationProperties)\n\t\t\t\tif err != nil {\n\t\t\t\t\treturn err\n\t\t\t\t}\n\t\t\t\tssr.SQLServerRegistrationProperties = &SQLServerRegistrationProperties\n\t\t\t}\n\t\tcase \"location\":\n\t\t\tif v != nil {\n\t\t\t\tvar location string\n\t\t\t\terr = json.Unmarshal(*v, &location)\n\t\t\t\tif err != nil {\n\t\t\t\t\treturn err\n\t\t\t\t}\n\t\t\t\tssr.Location = &location\n\t\t\t}\n\t\tcase \"tags\":\n\t\t\tif v != nil {\n\t\t\t\tvar tags map[string]*string\n\t\t\t\terr = json.Unmarshal(*v, &tags)\n\t\t\t\tif err != nil {\n\t\t\t\t\treturn err\n\t\t\t\t}\n\t\t\t\tssr.Tags = tags\n\t\t\t}\n\t\tcase \"id\":\n\t\t\tif v != nil {\n\t\t\t\tvar ID string\n\t\t\t\terr = json.Unmarshal(*v, &ID)\n\t\t\t\tif err != nil {\n\t\t\t\t\treturn err\n\t\t\t\t}\n\t\t\t\tssr.ID = &ID\n\t\t\t}\n\t\tcase \"name\":\n\t\t\tif v != nil {\n\t\t\t\tvar name string\n\t\t\t\terr = 
json.Unmarshal(*v, &name)\n\t\t\t\tif err != nil {\n\t\t\t\t\treturn err\n\t\t\t\t}\n\t\t\t\tssr.Name = &name\n\t\t\t}\n\t\tcase \"type\":\n\t\t\tif v != nil {\n\t\t\t\tvar typeVar string\n\t\t\t\terr = json.Unmarshal(*v, &typeVar)\n\t\t\t\tif err != nil {\n\t\t\t\t\treturn err\n\t\t\t\t}\n\t\t\t\tssr.Type = &typeVar\n\t\t\t}\n\t\t}\n\t}\n\n\treturn nil\n}", "func (o *OpenShiftCluster) UnmarshalJSON(data []byte) error {\n\tvar rawMsg map[string]json.RawMessage\n\tif err := json.Unmarshal(data, &rawMsg); err != nil {\n\t\treturn fmt.Errorf(\"unmarshalling type %T: %v\", o, err)\n\t}\n\tfor key, val := range rawMsg {\n\t\tvar err error\n\t\tswitch key {\n\t\tcase \"id\":\n\t\t\terr = unpopulate(val, \"ID\", &o.ID)\n\t\t\tdelete(rawMsg, key)\n\t\tcase \"location\":\n\t\t\terr = unpopulate(val, \"Location\", &o.Location)\n\t\t\tdelete(rawMsg, key)\n\t\tcase \"name\":\n\t\t\terr = unpopulate(val, \"Name\", &o.Name)\n\t\t\tdelete(rawMsg, key)\n\t\tcase \"properties\":\n\t\t\terr = unpopulate(val, \"Properties\", &o.Properties)\n\t\t\tdelete(rawMsg, key)\n\t\tcase \"systemData\":\n\t\t\terr = unpopulate(val, \"SystemData\", &o.SystemData)\n\t\t\tdelete(rawMsg, key)\n\t\tcase \"tags\":\n\t\t\terr = unpopulate(val, \"Tags\", &o.Tags)\n\t\t\tdelete(rawMsg, key)\n\t\tcase \"type\":\n\t\t\terr = unpopulate(val, \"Type\", &o.Type)\n\t\t\tdelete(rawMsg, key)\n\t\t}\n\t\tif err != nil {\n\t\t\treturn fmt.Errorf(\"unmarshalling type %T: %v\", o, err)\n\t\t}\n\t}\n\treturn nil\n}", "func (l *LineRegistration) UnmarshalJSON(data []byte) error {\n\tvar rawMsg map[string]json.RawMessage\n\tif err := json.Unmarshal(data, &rawMsg); err != nil {\n\t\treturn fmt.Errorf(\"unmarshalling type %T: %v\", l, err)\n\t}\n\tfor key, val := range rawMsg {\n\t\tvar err error\n\t\tswitch key {\n\t\tcase \"channelAccessToken\":\n\t\t\terr = unpopulate(val, \"ChannelAccessToken\", &l.ChannelAccessToken)\n\t\t\tdelete(rawMsg, key)\n\t\tcase \"channelSecret\":\n\t\t\terr = unpopulate(val, \"ChannelSecret\", &l.ChannelSecret)\n\t\t\tdelete(rawMsg, key)\n\t\tcase \"generatedId\":\n\t\t\terr = unpopulate(val, \"GeneratedID\", &l.GeneratedID)\n\t\t\tdelete(rawMsg, key)\n\t\t}\n\t\tif err != nil {\n\t\t\treturn fmt.Errorf(\"unmarshalling type %T: %v\", l, err)\n\t\t}\n\t}\n\treturn nil\n}", "func UnregisterCluster(c echo.Context) error {\n\tcblog.Info(\"call UnregisterCluster()\")\n\n\tvar req struct {\n\t\tConnectionName string\n\t}\n\n\tif err := c.Bind(&req); err != nil {\n\t\treturn echo.NewHTTPError(http.StatusInternalServerError, err.Error())\n\t}\n\n\t// Call common-runtime API\n\tresult, err := cmrt.UnregisterResource(req.ConnectionName, rsCluster, c.Param(\"Name\"))\n\tif err != nil {\n\t\treturn echo.NewHTTPError(http.StatusInternalServerError, err.Error())\n\t}\n\n\tresultInfo := BooleanInfo{\n\t\tResult: strconv.FormatBool(result),\n\t}\n\n\treturn c.JSON(http.StatusOK, &resultInfo)\n}", "func UnmarshalAnalyticsEngineResizeClusterResponseSlice(s []interface{}) (slice []AnalyticsEngineResizeClusterResponse, err error) {\n\tfor _, v := range s {\n\t\tobjMap, ok := v.(map[string]interface{})\n\t\tif !ok {\n\t\t\terr = fmt.Errorf(\"slice element should be a map containing an instance of 'AnalyticsEngineResizeClusterResponse'\")\n\t\t\treturn\n\t\t}\n\t\tobj, e := UnmarshalAnalyticsEngineResizeClusterResponse(objMap)\n\t\tif e != nil {\n\t\t\terr = e\n\t\t\treturn\n\t\t}\n\t\tslice = append(slice, *obj)\n\t}\n\treturn\n}", "func ValidateControllerRegistration(controllerRegistration *core.ControllerRegistration) field.ErrorList 
{\n\tallErrs := field.ErrorList{}\n\n\tallErrs = append(allErrs, apivalidation.ValidateObjectMeta(&controllerRegistration.ObjectMeta, false, apivalidation.NameIsDNSLabel, field.NewPath(\"metadata\"))...)\n\tallErrs = append(allErrs, ValidateControllerRegistrationSpec(&controllerRegistration.Spec, field.NewPath(\"spec\"))...)\n\n\treturn allErrs\n}", "func (o OpenShiftClusterList) MarshalJSON() ([]byte, error) {\n\tobjectMap := make(map[string]any)\n\tpopulate(objectMap, \"nextLink\", o.NextLink)\n\tpopulate(objectMap, \"value\", o.Value)\n\treturn json.Marshal(objectMap)\n}", "func (c *Client) ListApplicationRegistrations() (*storkv1alpha1.ApplicationRegistrationList, error) {\n\tif err := c.initClient(); err != nil {\n\t\treturn nil, err\n\t}\n\treturn c.stork.StorkV1alpha1().ApplicationRegistrations().List(context.TODO(), metav1.ListOptions{})\n\n}", "func (d *RegistrationResultPayload) UnmarshalBinary(b []byte) error {\n\tps, err := ParseMultiParams(b)\n\tif err != nil {\n\t\treturn err\n\t}\n\tif len(ps) != 3 {\n\t\treturn ErrInvalidLength\n\t}\n\n\td.LocalRoutingKeyIdentifier = ps[0]\n\td.RegistrationStatus = ps[1]\n\td.RoutingContext = ps[2]\n\n\treturn nil\n}", "func ClusterInstallationsFromReader(reader io.Reader) ([]*ClusterInstallation, error) {\n\tclusterInstallations := []*ClusterInstallation{}\n\tdecoder := json.NewDecoder(reader)\n\n\terr := decoder.Decode(&clusterInstallations)\n\tif err != nil && err != io.EOF {\n\t\treturn nil, err\n\t}\n\n\treturn clusterInstallations, nil\n}", "func Convert_core_ControllerRegistrationList_To_v1alpha1_ControllerRegistrationList(in *core.ControllerRegistrationList, out *ControllerRegistrationList, s conversion.Scope) error {\n\treturn autoConvert_core_ControllerRegistrationList_To_v1alpha1_ControllerRegistrationList(in, out, s)\n}", "func (r *AWSManagedClusterList) ConvertTo(dstRaw conversion.Hub) error {\n\tdst := dstRaw.(*v1alpha4.AWSManagedClusterList)\n\n\treturn Convert_v1alpha3_AWSManagedClusterList_To_v1alpha4_AWSManagedClusterList(r, dst, nil)\n}", "func (s *SAPCentralInstanceList) UnmarshalJSON(data []byte) error {\n\tvar rawMsg map[string]json.RawMessage\n\tif err := json.Unmarshal(data, &rawMsg); err != nil {\n\t\treturn fmt.Errorf(\"unmarshalling type %T: %v\", s, err)\n\t}\n\tfor key, val := range rawMsg {\n\t\tvar err error\n\t\tswitch key {\n\t\tcase \"nextLink\":\n\t\t\terr = unpopulate(val, \"NextLink\", &s.NextLink)\n\t\t\tdelete(rawMsg, key)\n\t\tcase \"value\":\n\t\t\terr = unpopulate(val, \"Value\", &s.Value)\n\t\t\tdelete(rawMsg, key)\n\t\t}\n\t\tif err != nil {\n\t\t\treturn fmt.Errorf(\"unmarshalling type %T: %v\", s, err)\n\t\t}\n\t}\n\treturn nil\n}", "func ParseRegistryConfiguration(yamlSource string) (RegistryList, error) {\n\tvar regList RegistryList\n\tvar defaultPrefixFound = \"\"\n\terr := yaml.UnmarshalStrict([]byte(yamlSource), &regList)\n\tif err != nil {\n\t\treturn RegistryList{}, err\n\t}\n\n\t// validate the parsed list\n\tfor _, registry := range regList.Items {\n\t\tif registry.Name == \"\" {\n\t\t\terr = fmt.Errorf(\"registry name is missing for entry %v\", registry)\n\t\t} else if registry.ApiURL == \"\" {\n\t\t\terr = fmt.Errorf(\"API URL must be specified for registry %s\", registry.Name)\n\t\t} else if registry.Prefix == \"\" {\n\t\t\tif defaultPrefixFound != \"\" {\n\t\t\t\terr = fmt.Errorf(\"there must be only one default registry (already is %s), %s needs a prefix\", defaultPrefixFound, registry.Name)\n\t\t\t} else {\n\t\t\t\tdefaultPrefixFound = registry.Name\n\t\t\t}\n\t\t}\n\n\t\tif err 
== nil {\n\t\t\tswitch registry.TagSortMode {\n\t\t\tcase \"latest-first\", \"latest-last\", \"none\", \"\":\n\t\t\tdefault:\n\t\t\t\terr = fmt.Errorf(\"unknown tag sort mode for registry %s: %s\", registry.Name, registry.TagSortMode)\n\t\t\t}\n\t\t}\n\t}\n\n\tif err != nil {\n\t\treturn RegistryList{}, err\n\t}\n\n\treturn regList, nil\n}", "func (r *AWSManagedClusterList) ConvertFrom(srcRaw conversion.Hub) error {\n\tsrc := srcRaw.(*v1alpha4.AWSManagedClusterList)\n\n\treturn Convert_v1alpha4_AWSManagedClusterList_To_v1alpha3_AWSManagedClusterList(src, r, nil)\n}", "func (d *DscpConfigurationListResult) UnmarshalJSON(data []byte) error {\n\tvar rawMsg map[string]json.RawMessage\n\tif err := json.Unmarshal(data, &rawMsg); err != nil {\n\t\treturn fmt.Errorf(\"unmarshalling type %T: %v\", d, err)\n\t}\n\tfor key, val := range rawMsg {\n\t\tvar err error\n\t\tswitch key {\n\t\tcase \"nextLink\":\n\t\t\terr = unpopulate(val, \"NextLink\", &d.NextLink)\n\t\t\tdelete(rawMsg, key)\n\t\tcase \"value\":\n\t\t\terr = unpopulate(val, \"Value\", &d.Value)\n\t\t\tdelete(rawMsg, key)\n\t\t}\n\t\tif err != nil {\n\t\t\treturn fmt.Errorf(\"unmarshalling type %T: %v\", d, err)\n\t\t}\n\t}\n\treturn nil\n}", "func (s *SubscriptionFeatureRegistration) UnmarshalJSON(data []byte) error {\n\tvar rawMsg map[string]json.RawMessage\n\tif err := json.Unmarshal(data, &rawMsg); err != nil {\n\t\treturn fmt.Errorf(\"unmarshalling type %T: %v\", s, err)\n\t}\n\tfor key, val := range rawMsg {\n\t\tvar err error\n\t\tswitch key {\n\t\tcase \"id\":\n\t\t\terr = unpopulate(val, \"ID\", &s.ID)\n\t\t\tdelete(rawMsg, key)\n\t\tcase \"name\":\n\t\t\terr = unpopulate(val, \"Name\", &s.Name)\n\t\t\tdelete(rawMsg, key)\n\t\tcase \"properties\":\n\t\t\terr = unpopulate(val, \"Properties\", &s.Properties)\n\t\t\tdelete(rawMsg, key)\n\t\tcase \"type\":\n\t\t\terr = unpopulate(val, \"Type\", &s.Type)\n\t\t\tdelete(rawMsg, key)\n\t\t}\n\t\tif err != nil {\n\t\t\treturn fmt.Errorf(\"unmarshalling type %T: %v\", s, err)\n\t\t}\n\t}\n\treturn nil\n}", "func (s *ClusterListener) ListMasters(inctx context.Context, in *protocol.Reference) (_ *protocol.ClusterNodeListResponse, err error) {\n\tdefer fail.OnExitConvertToGRPCStatus(inctx, &err)\n\tdefer fail.OnExitWrapError(inctx, &err, \"cannot list masters\")\n\n\tif s == nil {\n\t\treturn nil, fail.InvalidInstanceError()\n\t}\n\tif in == nil {\n\t\treturn nil, fail.InvalidParameterCannotBeNilError(\"in\")\n\t}\n\tif inctx == nil {\n\t\treturn nil, fail.InvalidParameterCannotBeNilError(\"inctx\")\n\t}\n\n\tclusterName, _ := srvutils.GetReference(in)\n\tif clusterName == \"\" {\n\t\treturn nil, fail.InvalidRequestError(\"cluster name is missing\")\n\t}\n\n\tjob, err := PrepareJob(inctx, in.GetTenantId(), fmt.Sprintf(\"/cluster/%s/masters/list\", clusterName))\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\tdefer job.Close()\n\n\thandler := handlers.NewClusterHandler(job)\n\tlist, xerr := handler.ListMasters(clusterName)\n\tif xerr != nil {\n\t\treturn nil, xerr\n\t}\n\n\tout, xerr := converters.IndexedListOfClusterNodesFromResourceToProtocol(list)\n\tif xerr != nil {\n\t\treturn nil, xerr\n\t}\n\n\treturn out, nil\n}", "func (in *ClusterEventBusList) DeepCopy() *ClusterEventBusList {\n\tif in == nil {\n\t\treturn nil\n\t}\n\tout := new(ClusterEventBusList)\n\tin.DeepCopyInto(out)\n\treturn out\n}", "func UnmarshalInstanceSourceDetailsSlice(reader io.Reader, consumer runtime.Consumer) ([]InstanceSourceDetails, error) {\n\tvar elements []json.RawMessage\n\tif err := consumer.Consume(reader, 
&elements); err != nil {\n\t\treturn nil, err\n\t}\n\n\tvar result []InstanceSourceDetails\n\tfor _, element := range elements {\n\t\tobj, err := unmarshalInstanceSourceDetails(element, consumer)\n\t\tif err != nil {\n\t\t\treturn nil, err\n\t\t}\n\t\tresult = append(result, obj)\n\t}\n\treturn result, nil\n}", "func (i *IPAllocationListResult) UnmarshalJSON(data []byte) error {\n\tvar rawMsg map[string]json.RawMessage\n\tif err := json.Unmarshal(data, &rawMsg); err != nil {\n\t\treturn fmt.Errorf(\"unmarshalling type %T: %v\", i, err)\n\t}\n\tfor key, val := range rawMsg {\n\t\tvar err error\n\t\tswitch key {\n\t\tcase \"nextLink\":\n\t\t\terr = unpopulate(val, \"NextLink\", &i.NextLink)\n\t\t\tdelete(rawMsg, key)\n\t\tcase \"value\":\n\t\t\terr = unpopulate(val, \"Value\", &i.Value)\n\t\t\tdelete(rawMsg, key)\n\t\t}\n\t\tif err != nil {\n\t\t\treturn fmt.Errorf(\"unmarshalling type %T: %v\", i, err)\n\t\t}\n\t}\n\treturn nil\n}", "func clustersFromSnapshot(cfgSnap *proxycfg.ConfigSnapshot, token string) ([]proto.Message, error) {\n\tif cfgSnap == nil {\n\t\treturn nil, errors.New(\"nil config given\")\n\t}\n\t// Include the \"app\" cluster for the public listener\n\tclusters := make([]proto.Message, len(cfgSnap.Proxy.Upstreams)+1)\n\n\tvar err error\n\tclusters[0], err = makeAppCluster(cfgSnap)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\n\tfor idx, upstream := range cfgSnap.Proxy.Upstreams {\n\t\tclusters[idx+1], err = makeUpstreamCluster(upstream, cfgSnap)\n\t\tif err != nil {\n\t\t\treturn nil, err\n\t\t}\n\t}\n\n\treturn clusters, nil\n}", "func (c *ConnectivityConfigurationListResult) UnmarshalJSON(data []byte) error {\n\tvar rawMsg map[string]json.RawMessage\n\tif err := json.Unmarshal(data, &rawMsg); err != nil {\n\t\treturn fmt.Errorf(\"unmarshalling type %T: %v\", c, err)\n\t}\n\tfor key, val := range rawMsg {\n\t\tvar err error\n\t\tswitch key {\n\t\tcase \"nextLink\":\n\t\t\terr = unpopulate(val, \"NextLink\", &c.NextLink)\n\t\t\tdelete(rawMsg, key)\n\t\tcase \"value\":\n\t\t\terr = unpopulate(val, \"Value\", &c.Value)\n\t\t\tdelete(rawMsg, key)\n\t\t}\n\t\tif err != nil {\n\t\t\treturn fmt.Errorf(\"unmarshalling type %T: %v\", c, err)\n\t\t}\n\t}\n\treturn nil\n}", "func (r *RegisteredPrefixListResult) UnmarshalJSON(data []byte) error {\n\tvar rawMsg map[string]json.RawMessage\n\tif err := json.Unmarshal(data, &rawMsg); err != nil {\n\t\treturn fmt.Errorf(\"unmarshalling type %T: %v\", r, err)\n\t}\n\tfor key, val := range rawMsg {\n\t\tvar err error\n\t\tswitch key {\n\t\tcase \"nextLink\":\n\t\t\terr = unpopulate(val, \"NextLink\", &r.NextLink)\n\t\t\tdelete(rawMsg, key)\n\t\tcase \"value\":\n\t\t\terr = unpopulate(val, \"Value\", &r.Value)\n\t\t\tdelete(rawMsg, key)\n\t\t}\n\t\tif err != nil {\n\t\t\treturn fmt.Errorf(\"unmarshalling type %T: %v\", r, err)\n\t\t}\n\t}\n\treturn nil\n}", "func (r *Registration) UnmarshalConfig() ([]byte, error) {\n\treturn base64.StdEncoding.DecodeString(r.Config)\n}", "func (dst *AzureClusterList) ConvertFrom(srcRaw conversion.Hub) error { // nolint\n\tsrc := srcRaw.(*infrav1alpha4.AzureClusterList)\n\treturn Convert_v1alpha4_AzureClusterList_To_v1alpha3_AzureClusterList(src, dst, nil)\n}", "func (s *SecurityGroupListResult) UnmarshalJSON(data []byte) error {\n\tvar rawMsg map[string]json.RawMessage\n\tif err := json.Unmarshal(data, &rawMsg); err != nil {\n\t\treturn fmt.Errorf(\"unmarshalling type %T: %v\", s, err)\n\t}\n\tfor key, val := range rawMsg {\n\t\tvar err error\n\t\tswitch key {\n\t\tcase \"nextLink\":\n\t\t\terr = 
unpopulate(val, \"NextLink\", &s.NextLink)\n\t\t\tdelete(rawMsg, key)\n\t\tcase \"value\":\n\t\t\terr = unpopulate(val, \"Value\", &s.Value)\n\t\t\tdelete(rawMsg, key)\n\t\t}\n\t\tif err != nil {\n\t\t\treturn fmt.Errorf(\"unmarshalling type %T: %v\", s, err)\n\t\t}\n\t}\n\treturn nil\n}", "func (vl *ValueList) UnmarshalJSON(data []byte) error {\n\tvar jvl jsonValueList\n\n\tif err := json.Unmarshal(data, &jvl); err != nil {\n\t\treturn err\n\t}\n\n\tvl.Host = jvl.Host\n\tvl.Plugin = jvl.Plugin\n\tvl.PluginInstance = jvl.PluginInstance\n\tvl.Type = jvl.Type\n\tvl.TypeInstance = jvl.TypeInstance\n\n\tvl.Time = jvl.Time.Time()\n\tvl.Interval = jvl.Interval.Duration()\n\tvl.Values = make([]Value, len(jvl.Values))\n\n\tif len(jvl.Values) != len(jvl.DSTypes) {\n\t\treturn fmt.Errorf(\"invalid data: %d value(s), %d data source type(s)\",\n\t\t\tlen(jvl.Values), len(jvl.DSTypes))\n\t}\n\n\tfor i, n := range jvl.Values {\n\t\tswitch jvl.DSTypes[i] {\n\t\tcase \"gauge\":\n\t\t\tv, err := n.Float64()\n\t\t\tif err != nil {\n\t\t\t\treturn err\n\t\t\t}\n\t\t\tvl.Values[i] = Gauge(v)\n\t\tcase \"derive\":\n\t\t\tv, err := n.Int64()\n\t\t\tif err != nil {\n\t\t\t\treturn err\n\t\t\t}\n\t\t\tvl.Values[i] = Derive(v)\n\t\tcase \"counter\":\n\t\t\tv, err := n.Int64()\n\t\t\tif err != nil {\n\t\t\t\treturn err\n\t\t\t}\n\t\t\tvl.Values[i] = Counter(v)\n\t\tdefault:\n\t\t\treturn fmt.Errorf(\"unexpected data source type: %q\", jvl.DSTypes[i])\n\t\t}\n\t}\n\n\tif len(jvl.DSNames) >= len(vl.Values) {\n\t\tvl.DSNames = make([]string, len(vl.Values))\n\t\tcopy(vl.DSNames, jvl.DSNames)\n\t}\n\n\tvl.Meta = jvl.Meta\n\n\treturn nil\n}", "func (c *ClusterProfile) UnmarshalJSON(data []byte) error {\n\tvar rawMsg map[string]json.RawMessage\n\tif err := json.Unmarshal(data, &rawMsg); err != nil {\n\t\treturn fmt.Errorf(\"unmarshalling type %T: %v\", c, err)\n\t}\n\tfor key, val := range rawMsg {\n\t\tvar err error\n\t\tswitch key {\n\t\tcase \"domain\":\n\t\t\terr = unpopulate(val, \"Domain\", &c.Domain)\n\t\t\tdelete(rawMsg, key)\n\t\tcase \"fipsValidatedModules\":\n\t\t\terr = unpopulate(val, \"FipsValidatedModules\", &c.FipsValidatedModules)\n\t\t\tdelete(rawMsg, key)\n\t\tcase \"pullSecret\":\n\t\t\terr = unpopulate(val, \"PullSecret\", &c.PullSecret)\n\t\t\tdelete(rawMsg, key)\n\t\tcase \"resourceGroupId\":\n\t\t\terr = unpopulate(val, \"ResourceGroupID\", &c.ResourceGroupID)\n\t\t\tdelete(rawMsg, key)\n\t\tcase \"version\":\n\t\t\terr = unpopulate(val, \"Version\", &c.Version)\n\t\t\tdelete(rawMsg, key)\n\t\t}\n\t\tif err != nil {\n\t\t\treturn fmt.Errorf(\"unmarshalling type %T: %v\", c, err)\n\t\t}\n\t}\n\treturn nil\n}", "func (src *AzureClusterList) ConvertTo(dstRaw conversion.Hub) error { // nolint\n\tdst := dstRaw.(*infrav1alpha4.AzureClusterList)\n\treturn Convert_v1alpha3_AzureClusterList_To_v1alpha4_AzureClusterList(src, dst, nil)\n}", "func UnmarshalSourceList(m map[string]json.RawMessage, result interface{}) (err error) {\n\tobj := new(SourceList)\n\terr = core.UnmarshalPrimitive(m, \"total_count\", &obj.TotalCount)\n\tif err != nil {\n\t\treturn\n\t}\n\terr = core.UnmarshalPrimitive(m, \"offset\", &obj.Offset)\n\tif err != nil {\n\t\treturn\n\t}\n\terr = core.UnmarshalPrimitive(m, \"limit\", &obj.Limit)\n\tif err != nil {\n\t\treturn\n\t}\n\terr = core.UnmarshalModel(m, \"sources\", &obj.Sources, UnmarshalSourceListItem)\n\tif err != nil {\n\t\treturn\n\t}\n\terr = core.UnmarshalModel(m, \"first\", &obj.First, UnmarshalPageHrefResponse)\n\tif err != nil {\n\t\treturn\n\t}\n\terr = 
core.UnmarshalModel(m, \"previous\", &obj.Previous, UnmarshalPageHrefResponse)\n\tif err != nil {\n\t\treturn\n\t}\n\terr = core.UnmarshalModel(m, \"next\", &obj.Next, UnmarshalPageHrefResponse)\n\tif err != nil {\n\t\treturn\n\t}\n\treflect.ValueOf(result).Elem().Set(reflect.ValueOf(obj))\n\treturn\n}", "func UnmarshalClusterStatus(yaml string) (*bootstrapv1.ClusterStatus, error) {\n\tobj := &bootstrapv1.ClusterStatus{}\n\tif err := unmarshalFromVersions(yaml, clusterStatusVersionTypeMap, obj); err != nil {\n\t\treturn nil, err\n\t}\n\treturn obj, nil\n}", "func (_UpkeepRegistrationRequests *UpkeepRegistrationRequestsFilterer) ParseRegistrationRequested(log types.Log) (*UpkeepRegistrationRequestsRegistrationRequested, error) {\n\tevent := new(UpkeepRegistrationRequestsRegistrationRequested)\n\tif err := _UpkeepRegistrationRequests.contract.UnpackLog(event, \"RegistrationRequested\", log); err != nil {\n\t\treturn nil, err\n\t}\n\tevent.Raw = log\n\treturn event, nil\n}", "func (m *SvcConfigDiscoveryRequest) UnmarshalJSON(b []byte) error {\n\treturn SvcConfigDiscoveryRequestJSONUnmarshaler.Unmarshal(bytes.NewReader(b), m)\n}", "func (h *NotificationHub) Registrations(ctx context.Context) (raw []byte, registrations *Registrations, err error) {\n\traw, _, err = h.exec(ctx, getMethod, h.generateAPIURL(\"registrations\"), Headers{}, nil)\n\tif err != nil {\n\t\treturn\n\t}\n\tif err = xml.Unmarshal(raw, &registrations); err != nil {\n\t\treturn\n\t}\n\tregistrations.normalize()\n\treturn\n}", "func UnmarshalAnalyticsEngineWhitelistResponseSlice(s []interface{}) (slice []AnalyticsEngineWhitelistResponse, err error) {\n\tfor _, v := range s {\n\t\tobjMap, ok := v.(map[string]interface{})\n\t\tif !ok {\n\t\t\terr = fmt.Errorf(\"slice element should be a map containing an instance of 'AnalyticsEngineWhitelistResponse'\")\n\t\t\treturn\n\t\t}\n\t\tobj, e := UnmarshalAnalyticsEngineWhitelistResponse(objMap)\n\t\tif e != nil {\n\t\t\terr = e\n\t\t\treturn\n\t\t}\n\t\tslice = append(slice, *obj)\n\t}\n\treturn\n}", "func Unmarshal(data []byte, val interface{}) error {\n\ts := nvlistReader{\n\t\tnvlist: data,\n\t}\n\tif err := s.readNvHeader(); err != nil {\n\t\treturn err\n\t}\n\treturn s.readPairs(reflect.ValueOf(val))\n}", "func (c *CdnPeeringPrefixListResult) UnmarshalJSON(data []byte) error {\n\tvar rawMsg map[string]json.RawMessage\n\tif err := json.Unmarshal(data, &rawMsg); err != nil {\n\t\treturn fmt.Errorf(\"unmarshalling type %T: %v\", c, err)\n\t}\n\tfor key, val := range rawMsg {\n\t\tvar err error\n\t\tswitch key {\n\t\tcase \"nextLink\":\n\t\t\terr = unpopulate(val, \"NextLink\", &c.NextLink)\n\t\t\tdelete(rawMsg, key)\n\t\tcase \"value\":\n\t\t\terr = unpopulate(val, \"Value\", &c.Value)\n\t\t\tdelete(rawMsg, key)\n\t\t}\n\t\tif err != nil {\n\t\t\treturn fmt.Errorf(\"unmarshalling type %T: %v\", c, err)\n\t\t}\n\t}\n\treturn nil\n}", "func (c *Cluster) UnmarshalJSON(body []byte) error {\n\tvar m map[string]*json.RawMessage\n\terr := json.Unmarshal(body, &m)\n\tif err != nil {\n\t\treturn err\n\t}\n\tfor k, v := range m {\n\t\tswitch k {\n\t\tcase \"identity\":\n\t\t\tif v != nil {\n\t\t\t\tvar identity Identity\n\t\t\t\terr = json.Unmarshal(*v, &identity)\n\t\t\t\tif err != nil {\n\t\t\t\t\treturn err\n\t\t\t\t}\n\t\t\t\tc.Identity = &identity\n\t\t\t}\n\t\tcase \"sku\":\n\t\t\tif v != nil {\n\t\t\t\tvar sku ClusterSku\n\t\t\t\terr = json.Unmarshal(*v, &sku)\n\t\t\t\tif err != nil {\n\t\t\t\t\treturn err\n\t\t\t\t}\n\t\t\t\tc.Sku = &sku\n\t\t\t}\n\t\tcase \"properties\":\n\t\t\tif v != 
nil {\n\t\t\t\tvar clusterProperties ClusterProperties\n\t\t\t\terr = json.Unmarshal(*v, &clusterProperties)\n\t\t\t\tif err != nil {\n\t\t\t\t\treturn err\n\t\t\t\t}\n\t\t\t\tc.ClusterProperties = &clusterProperties\n\t\t\t}\n\t\tcase \"tags\":\n\t\t\tif v != nil {\n\t\t\t\tvar tags map[string]*string\n\t\t\t\terr = json.Unmarshal(*v, &tags)\n\t\t\t\tif err != nil {\n\t\t\t\t\treturn err\n\t\t\t\t}\n\t\t\t\tc.Tags = tags\n\t\t\t}\n\t\tcase \"location\":\n\t\t\tif v != nil {\n\t\t\t\tvar location string\n\t\t\t\terr = json.Unmarshal(*v, &location)\n\t\t\t\tif err != nil {\n\t\t\t\t\treturn err\n\t\t\t\t}\n\t\t\t\tc.Location = &location\n\t\t\t}\n\t\tcase \"id\":\n\t\t\tif v != nil {\n\t\t\t\tvar ID string\n\t\t\t\terr = json.Unmarshal(*v, &ID)\n\t\t\t\tif err != nil {\n\t\t\t\t\treturn err\n\t\t\t\t}\n\t\t\t\tc.ID = &ID\n\t\t\t}\n\t\tcase \"name\":\n\t\t\tif v != nil {\n\t\t\t\tvar name string\n\t\t\t\terr = json.Unmarshal(*v, &name)\n\t\t\t\tif err != nil {\n\t\t\t\t\treturn err\n\t\t\t\t}\n\t\t\t\tc.Name = &name\n\t\t\t}\n\t\tcase \"type\":\n\t\t\tif v != nil {\n\t\t\t\tvar typeVar string\n\t\t\t\terr = json.Unmarshal(*v, &typeVar)\n\t\t\t\tif err != nil {\n\t\t\t\t\treturn err\n\t\t\t\t}\n\t\t\t\tc.Type = &typeVar\n\t\t\t}\n\t\t}\n\t}\n\n\treturn nil\n}", "func (v *VirtualNetworkListUsageResult) UnmarshalJSON(data []byte) error {\n\tvar rawMsg map[string]json.RawMessage\n\tif err := json.Unmarshal(data, &rawMsg); err != nil {\n\t\treturn fmt.Errorf(\"unmarshalling type %T: %v\", v, err)\n\t}\n\tfor key, val := range rawMsg {\n\t\tvar err error\n\t\tswitch key {\n\t\tcase \"nextLink\":\n\t\t\terr = unpopulate(val, \"NextLink\", &v.NextLink)\n\t\t\tdelete(rawMsg, key)\n\t\tcase \"value\":\n\t\t\terr = unpopulate(val, \"Value\", &v.Value)\n\t\t\tdelete(rawMsg, key)\n\t\t}\n\t\tif err != nil {\n\t\t\treturn fmt.Errorf(\"unmarshalling type %T: %v\", v, err)\n\t\t}\n\t}\n\treturn nil\n}", "func (_UpkeepRegistrationRequests *UpkeepRegistrationRequestsFilterer) FilterRegistrationRequested(opts *bind.FilterOpts, hash [][32]byte, upkeepContract []common.Address, source []uint8) (*UpkeepRegistrationRequestsRegistrationRequestedIterator, error) {\n\n\tvar hashRule []interface{}\n\tfor _, hashItem := range hash {\n\t\thashRule = append(hashRule, hashItem)\n\t}\n\n\tvar upkeepContractRule []interface{}\n\tfor _, upkeepContractItem := range upkeepContract {\n\t\tupkeepContractRule = append(upkeepContractRule, upkeepContractItem)\n\t}\n\n\tvar sourceRule []interface{}\n\tfor _, sourceItem := range source {\n\t\tsourceRule = append(sourceRule, sourceItem)\n\t}\n\n\tlogs, sub, err := _UpkeepRegistrationRequests.contract.FilterLogs(opts, \"RegistrationRequested\", hashRule, upkeepContractRule, sourceRule)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\treturn &UpkeepRegistrationRequestsRegistrationRequestedIterator{contract: _UpkeepRegistrationRequests.contract, event: \"RegistrationRequested\", logs: logs, sub: sub}, nil\n}", "func (rdl RegistrationDefinitionList) registrationDefinitionListPreparer(ctx context.Context) (*http.Request, error) {\n\tif !rdl.hasNextLink() {\n\t\treturn nil, nil\n\t}\n\treturn autorest.Prepare((&http.Request{}).WithContext(ctx),\n\t\tautorest.AsJSON(),\n\t\tautorest.AsGet(),\n\t\tautorest.WithBaseURL(to.String(rdl.NextLink)))\n}", "func UnmarshalAnalyticsEngineResetClusterPasswordResponseSlice(s []interface{}) (slice []AnalyticsEngineResetClusterPasswordResponse, err error) {\n\tfor _, v := range s {\n\t\tobjMap, ok := v.(map[string]interface{})\n\t\tif !ok {\n\t\t\terr = 
fmt.Errorf(\"slice element should be a map containing an instance of 'AnalyticsEngineResetClusterPasswordResponse'\")\n\t\t\treturn\n\t\t}\n\t\tobj, e := UnmarshalAnalyticsEngineResetClusterPasswordResponse(objMap)\n\t\tif e != nil {\n\t\t\terr = e\n\t\t\treturn\n\t\t}\n\t\tslice = append(slice, *obj)\n\t}\n\treturn\n}", "func (e *EventHubConsumerGroupsListResult) UnmarshalJSON(data []byte) error {\n\tvar rawMsg map[string]json.RawMessage\n\tif err := json.Unmarshal(data, &rawMsg); err != nil {\n\t\treturn fmt.Errorf(\"unmarshalling type %T: %v\", e, err)\n\t}\n\tfor key, val := range rawMsg {\n\t\tvar err error\n\t\tswitch key {\n\t\tcase \"nextLink\":\n\t\t\terr = unpopulate(val, \"NextLink\", &e.NextLink)\n\t\t\tdelete(rawMsg, key)\n\t\tcase \"value\":\n\t\t\terr = unpopulate(val, \"Value\", &e.Value)\n\t\t\tdelete(rawMsg, key)\n\t\t}\n\t\tif err != nil {\n\t\t\treturn fmt.Errorf(\"unmarshalling type %T: %v\", e, err)\n\t\t}\n\t}\n\treturn nil\n}", "func (s *Services) List(q *QueryOptions) ([]*ServiceRegistrationListStub, *QueryMeta, error) {\n\tvar resp []*ServiceRegistrationListStub\n\tqm, err := s.client.query(\"/v1/services\", &resp, q)\n\tif err != nil {\n\t\treturn nil, qm, err\n\t}\n\treturn resp, qm, nil\n}", "func (ssrlr SQLServerRegistrationListResult) sQLServerRegistrationListResultPreparer(ctx context.Context) (*http.Request, error) {\n\tif ssrlr.NextLink == nil || len(to.String(ssrlr.NextLink)) < 1 {\n\t\treturn nil, nil\n\t}\n\treturn autorest.Prepare((&http.Request{}).WithContext(ctx),\n\t\tautorest.AsJSON(),\n\t\tautorest.AsGet(),\n\t\tautorest.WithBaseURL(to.String(ssrlr.NextLink)))\n}", "func (s *SecurityAdminConfigurationListResult) UnmarshalJSON(data []byte) error {\n\tvar rawMsg map[string]json.RawMessage\n\tif err := json.Unmarshal(data, &rawMsg); err != nil {\n\t\treturn fmt.Errorf(\"unmarshalling type %T: %v\", s, err)\n\t}\n\tfor key, val := range rawMsg {\n\t\tvar err error\n\t\tswitch key {\n\t\tcase \"nextLink\":\n\t\t\terr = unpopulate(val, \"NextLink\", &s.NextLink)\n\t\t\tdelete(rawMsg, key)\n\t\tcase \"value\":\n\t\t\terr = unpopulate(val, \"Value\", &s.Value)\n\t\t\tdelete(rawMsg, key)\n\t\t}\n\t\tif err != nil {\n\t\t\treturn fmt.Errorf(\"unmarshalling type %T: %v\", s, err)\n\t\t}\n\t}\n\treturn nil\n}", "func (c *ConnectionMonitorListResult) UnmarshalJSON(data []byte) error {\n\tvar rawMsg map[string]json.RawMessage\n\tif err := json.Unmarshal(data, &rawMsg); err != nil {\n\t\treturn fmt.Errorf(\"unmarshalling type %T: %v\", c, err)\n\t}\n\tfor key, val := range rawMsg {\n\t\tvar err error\n\t\tswitch key {\n\t\tcase \"value\":\n\t\t\terr = unpopulate(val, \"Value\", &c.Value)\n\t\t\tdelete(rawMsg, key)\n\t\t}\n\t\tif err != nil {\n\t\t\treturn fmt.Errorf(\"unmarshalling type %T: %v\", c, err)\n\t\t}\n\t}\n\treturn nil\n}", "func NewSQLServerRegistrationListResultIterator(page SQLServerRegistrationListResultPage) SQLServerRegistrationListResultIterator {\n\treturn SQLServerRegistrationListResultIterator{page: page}\n}", "func (d *DiscoveryConfiguration) UnmarshalJSON(data []byte) error {\n\tvar rawMsg map[string]json.RawMessage\n\tif err := json.Unmarshal(data, &rawMsg); err != nil {\n\t\treturn fmt.Errorf(\"unmarshalling type %T: %v\", d, err)\n\t}\n\tfor key, val := range rawMsg {\n\t\tvar err error\n\t\tswitch key {\n\t\tcase \"appLocation\":\n\t\t\terr = unpopulate(val, \"AppLocation\", &d.AppLocation)\n\t\t\tdelete(rawMsg, key)\n\t\tcase \"centralServerVmId\":\n\t\t\terr = unpopulate(val, \"CentralServerVMID\", 
&d.CentralServerVMID)\n\t\t\tdelete(rawMsg, key)\n\t\tcase \"configurationType\":\n\t\t\terr = unpopulate(val, \"ConfigurationType\", &d.ConfigurationType)\n\t\t\tdelete(rawMsg, key)\n\t\tcase \"managedRgStorageAccountName\":\n\t\t\terr = unpopulate(val, \"ManagedRgStorageAccountName\", &d.ManagedRgStorageAccountName)\n\t\t\tdelete(rawMsg, key)\n\t\t}\n\t\tif err != nil {\n\t\t\treturn fmt.Errorf(\"unmarshalling type %T: %v\", d, err)\n\t\t}\n\t}\n\treturn nil\n}", "func UnmarshalClusterConfig(bytes []byte, opts ...MarshalOption) (ClusterConfig, error) {\n\tvar clusterConfig ClusterConfigV3\n\n\tif len(bytes) == 0 {\n\t\treturn nil, trace.BadParameter(\"missing resource data\")\n\t}\n\n\tcfg, err := CollectOptions(opts)\n\tif err != nil {\n\t\treturn nil, trace.Wrap(err)\n\t}\n\n\tif cfg.SkipValidation {\n\t\tif err := utils.FastUnmarshal(bytes, &clusterConfig); err != nil {\n\t\t\treturn nil, trace.BadParameter(err.Error())\n\t\t}\n\t} else {\n\t\terr = utils.UnmarshalWithSchema(GetClusterConfigSchema(\"\"), &clusterConfig, bytes)\n\t\tif err != nil {\n\t\t\treturn nil, trace.BadParameter(err.Error())\n\t\t}\n\t}\n\n\terr = clusterConfig.CheckAndSetDefaults()\n\tif err != nil {\n\t\treturn nil, trace.Wrap(err)\n\t}\n\n\tif cfg.ID != 0 {\n\t\tclusterConfig.SetResourceID(cfg.ID)\n\t}\n\tif !cfg.Expires.IsZero() {\n\t\tclusterConfig.SetExpiry(cfg.Expires)\n\t}\n\treturn &clusterConfig, nil\n}", "func (u *UsagesListResult) UnmarshalJSON(data []byte) error {\n\tvar rawMsg map[string]json.RawMessage\n\tif err := json.Unmarshal(data, &rawMsg); err != nil {\n\t\treturn fmt.Errorf(\"unmarshalling type %T: %v\", u, err)\n\t}\n\tfor key, val := range rawMsg {\n\t\tvar err error\n\t\tswitch key {\n\t\tcase \"nextLink\":\n\t\t\terr = unpopulate(val, \"NextLink\", &u.NextLink)\n\t\t\tdelete(rawMsg, key)\n\t\tcase \"value\":\n\t\t\terr = unpopulate(val, \"Value\", &u.Value)\n\t\t\tdelete(rawMsg, key)\n\t\t}\n\t\tif err != nil {\n\t\t\treturn fmt.Errorf(\"unmarshalling type %T: %v\", u, err)\n\t\t}\n\t}\n\treturn nil\n}", "func (client *Client) ListClusterMembers(request *ListClusterMembersRequest) (response *ListClusterMembersResponse, err error) {\n\tresponse = CreateListClusterMembersResponse()\n\terr = client.DoAction(request, response)\n\treturn\n}", "func (m *ManagerEffectiveConnectivityConfigurationListResult) UnmarshalJSON(data []byte) error {\n\tvar rawMsg map[string]json.RawMessage\n\tif err := json.Unmarshal(data, &rawMsg); err != nil {\n\t\treturn fmt.Errorf(\"unmarshalling type %T: %v\", m, err)\n\t}\n\tfor key, val := range rawMsg {\n\t\tvar err error\n\t\tswitch key {\n\t\tcase \"skipToken\":\n\t\t\terr = unpopulate(val, \"SkipToken\", &m.SkipToken)\n\t\t\tdelete(rawMsg, key)\n\t\tcase \"value\":\n\t\t\terr = unpopulate(val, \"Value\", &m.Value)\n\t\t\tdelete(rawMsg, key)\n\t\t}\n\t\tif err != nil {\n\t\t\treturn fmt.Errorf(\"unmarshalling type %T: %v\", m, err)\n\t\t}\n\t}\n\treturn nil\n}", "func parseIPv6AddressInPxctlClusterList(output string, nodeCount int) []string {\n\toption := newIPv6ParserOption(\"ID\\t\", 2, nodeCount)\n\tp := newIPv6Parser([]parserOption{option})\n\treturn p.parse(output)\n}", "func (j *Regulations) UnmarshalJSON(input []byte) error {\n\tfs := fflib.NewFFLexer(input)\n\treturn j.UnmarshalJSONFFLexer(fs, fflib.FFParse_map_start)\n}", "func (a *ApplicationSecurityGroupListResult) UnmarshalJSON(data []byte) error {\n\tvar rawMsg map[string]json.RawMessage\n\tif err := json.Unmarshal(data, &rawMsg); err != nil {\n\t\treturn fmt.Errorf(\"unmarshalling type %T: %v\", 
a, err)\n\t}\n\tfor key, val := range rawMsg {\n\t\tvar err error\n\t\tswitch key {\n\t\tcase \"nextLink\":\n\t\t\terr = unpopulate(val, \"NextLink\", &a.NextLink)\n\t\t\tdelete(rawMsg, key)\n\t\tcase \"value\":\n\t\t\terr = unpopulate(val, \"Value\", &a.Value)\n\t\t\tdelete(rawMsg, key)\n\t\t}\n\t\tif err != nil {\n\t\t\treturn fmt.Errorf(\"unmarshalling type %T: %v\", a, err)\n\t\t}\n\t}\n\treturn nil\n}", "func (u *Unmarshal) ScansList(skipOnHit bool, writeOnReturn bool) ([]byte, error) {\n\ts := u.service(skipOnHit, writeOnReturn)\n\traw, err := s.ScansList()\n\treturn raw, err\n}", "func UnmarshalList(toUnmarshal []Unmarshaler, data []byte) (int, error) {\n\ttotalBytes := 0\n\n\tfor _, tu := range toUnmarshal {\n\t\tbytesRead, err := tu.Unmarshal(data[totalBytes:])\n\t\tif err != nil {\n\t\t\treturn 0, err\n\t\t}\n\n\t\ttotalBytes += bytesRead\n\t}\n\n\treturn totalBytes, nil\n}", "func (p *ProviderInstanceListResult) UnmarshalJSON(data []byte) error {\n\tvar rawMsg map[string]json.RawMessage\n\tif err := json.Unmarshal(data, &rawMsg); err != nil {\n\t\treturn fmt.Errorf(\"unmarshalling type %T: %v\", p, err)\n\t}\n\tfor key, val := range rawMsg {\n\t\tvar err error\n\t\tswitch key {\n\t\tcase \"nextLink\":\n\t\t\terr = unpopulate(val, \"NextLink\", &p.NextLink)\n\t\t\tdelete(rawMsg, key)\n\t\tcase \"value\":\n\t\t\terr = unpopulate(val, \"Value\", &p.Value)\n\t\t\tdelete(rawMsg, key)\n\t\t}\n\t\tif err != nil {\n\t\t\treturn fmt.Errorf(\"unmarshalling type %T: %v\", p, err)\n\t\t}\n\t}\n\treturn nil\n}", "func (c *ConfigurationDiagnosticResult) UnmarshalJSON(data []byte) error {\n\tvar rawMsg map[string]json.RawMessage\n\tif err := json.Unmarshal(data, &rawMsg); err != nil {\n\t\treturn fmt.Errorf(\"unmarshalling type %T: %v\", c, err)\n\t}\n\tfor key, val := range rawMsg {\n\t\tvar err error\n\t\tswitch key {\n\t\tcase \"networkSecurityGroupResult\":\n\t\t\terr = unpopulate(val, \"NetworkSecurityGroupResult\", &c.NetworkSecurityGroupResult)\n\t\t\tdelete(rawMsg, key)\n\t\tcase \"profile\":\n\t\t\terr = unpopulate(val, \"Profile\", &c.Profile)\n\t\t\tdelete(rawMsg, key)\n\t\t}\n\t\tif err != nil {\n\t\t\treturn fmt.Errorf(\"unmarshalling type %T: %v\", c, err)\n\t\t}\n\t}\n\treturn nil\n}", "func (this *EnvoyFilter_ClusterMatch) UnmarshalJSON(b []byte) error {\n\treturn EnvoyFilterUnmarshaler.Unmarshal(bytes.NewReader(b), this)\n}", "func (s *SharedAccessSignatureAuthorizationRuleListResult) UnmarshalJSON(data []byte) error {\n\tvar rawMsg map[string]json.RawMessage\n\tif err := json.Unmarshal(data, &rawMsg); err != nil {\n\t\treturn fmt.Errorf(\"unmarshalling type %T: %v\", s, err)\n\t}\n\tfor key, val := range rawMsg {\n\t\tvar err error\n\t\tswitch key {\n\t\tcase \"nextLink\":\n\t\t\terr = unpopulate(val, \"NextLink\", &s.NextLink)\n\t\t\tdelete(rawMsg, key)\n\t\tcase \"value\":\n\t\t\terr = unpopulate(val, \"Value\", &s.Value)\n\t\t\tdelete(rawMsg, key)\n\t\t}\n\t\tif err != nil {\n\t\t\treturn fmt.Errorf(\"unmarshalling type %T: %v\", s, err)\n\t\t}\n\t}\n\treturn nil\n}", "func (c *CustomIPPrefixListResult) UnmarshalJSON(data []byte) error {\n\tvar rawMsg map[string]json.RawMessage\n\tif err := json.Unmarshal(data, &rawMsg); err != nil {\n\t\treturn fmt.Errorf(\"unmarshalling type %T: %v\", c, err)\n\t}\n\tfor key, val := range rawMsg {\n\t\tvar err error\n\t\tswitch key {\n\t\tcase \"nextLink\":\n\t\t\terr = unpopulate(val, \"NextLink\", &c.NextLink)\n\t\t\tdelete(rawMsg, key)\n\t\tcase \"value\":\n\t\t\terr = unpopulate(val, \"Value\", &c.Value)\n\t\t\tdelete(rawMsg, 
key)\n\t\t}\n\t\tif err != nil {\n\t\t\treturn fmt.Errorf(\"unmarshalling type %T: %v\", c, err)\n\t\t}\n\t}\n\treturn nil\n}", "func ListOneRegistration(w http.ResponseWriter, r *http.Request) {\n\n\tcontentType := \"application/json\"\n\tcharset := \"utf-8\"\n\tw.Header().Add(\"Content-Type\", fmt.Sprintf(\"%s; charset=%s\", contentType, charset))\n\n\t// Grab url path variables\n\turlVars := mux.Vars(r)\n\tregUUID := urlVars[\"uuid\"]\n\n\t// Grab context references\n\trefStr := gorillaContext.Get(r, \"str\").(stores.Store)\n\n\tur, err := auth.FindUserRegistration(regUUID, \"\", refStr)\n\tif err != nil {\n\n\t\tif err.Error() == \"not found\" {\n\t\t\terr := APIErrorNotFound(\"User registration\")\n\t\t\trespondErr(w, err)\n\t\t\treturn\n\t\t}\n\n\t\terr := APIErrGenericInternal(err.Error())\n\t\trespondErr(w, err)\n\t\treturn\n\t}\n\n\turb, err := json.MarshalIndent(ur, \"\", \" \")\n\tif err != nil {\n\t\terr := APIErrGenericInternal(err.Error())\n\t\trespondErr(w, err)\n\t\treturn\n\t}\n\n\trespondOK(w, urb)\n}", "func (u *UsageListResult) UnmarshalJSON(data []byte) error {\n\tvar rawMsg map[string]json.RawMessage\n\tif err := json.Unmarshal(data, &rawMsg); err != nil {\n\t\treturn fmt.Errorf(\"unmarshalling type %T: %v\", u, err)\n\t}\n\tfor key, val := range rawMsg {\n\t\tvar err error\n\t\tswitch key {\n\t\tcase \"value\":\n\t\t\terr = unpopulate(val, \"Value\", &u.Value)\n\t\t\tdelete(rawMsg, key)\n\t\t}\n\t\tif err != nil {\n\t\t\treturn fmt.Errorf(\"unmarshalling type %T: %v\", u, err)\n\t\t}\n\t}\n\treturn nil\n}", "func (s *spiff) UnmarshalMultiSource(source Source) ([]Node, error) {\n\tdata, err := source.Data()\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\treturn yaml.UnmarshalMulti(source.Name(), data)\n}", "func (c *FakeGoogleCloudPubSubSources) List(ctx context.Context, opts v1.ListOptions) (result *v1alpha1.GoogleCloudPubSubSourceList, err error) {\n\tobj, err := c.Fake.\n\t\tInvokes(testing.NewListAction(googlecloudpubsubsourcesResource, googlecloudpubsubsourcesKind, c.ns, opts), &v1alpha1.GoogleCloudPubSubSourceList{})\n\n\tif obj == nil {\n\t\treturn nil, err\n\t}\n\n\tlabel, _, _ := testing.ExtractFromListOptions(opts)\n\tif label == nil {\n\t\tlabel = labels.Everything()\n\t}\n\tlist := &v1alpha1.GoogleCloudPubSubSourceList{ListMeta: obj.(*v1alpha1.GoogleCloudPubSubSourceList).ListMeta}\n\tfor _, item := range obj.(*v1alpha1.GoogleCloudPubSubSourceList).Items {\n\t\tif label.Matches(labels.Set(item.Labels)) {\n\t\t\tlist.Items = append(list.Items, item)\n\t\t}\n\t}\n\treturn list, err\n}", "func (client *RegistrationDefinitionsClient) listHandleResponse(resp *http.Response) (RegistrationDefinitionsClientListResponse, error) {\n\tresult := RegistrationDefinitionsClientListResponse{}\n\tif err := runtime.UnmarshalAsJSON(resp, &result.RegistrationDefinitionList); err != nil {\n\t\treturn RegistrationDefinitionsClientListResponse{}, err\n\t}\n\treturn result, nil\n}", "func List() ([]clusterapi.Cluster, error) {\n\tvar clusterList []clusterapi.Cluster\n\terr := utils.BrowseMetadataContent(clusterapi.ClusterMetadataPrefix, func(buf *bytes.Buffer) error {\n\t\tvar c clusterapi.Cluster\n\t\terr := gob.NewDecoder(buf).Decode(&c)\n\t\tif err != nil {\n\t\t\treturn err\n\t\t}\n\t\tclusterList = append(clusterList, c)\n\t\treturn nil\n\t})\n\treturn clusterList, err\n}", "func (s *SharedAccessAuthorizationRuleListResult) UnmarshalJSON(data []byte) error {\n\tvar rawMsg map[string]json.RawMessage\n\tif err := json.Unmarshal(data, &rawMsg); err != nil {\n\t\treturn 
fmt.Errorf(\"unmarshalling type %T: %v\", s, err)\n\t}\n\tfor key, val := range rawMsg {\n\t\tvar err error\n\t\tswitch key {\n\t\tcase \"nextLink\":\n\t\t\terr = unpopulate(val, \"NextLink\", &s.NextLink)\n\t\t\tdelete(rawMsg, key)\n\t\tcase \"value\":\n\t\t\terr = unpopulate(val, \"Value\", &s.Value)\n\t\t\tdelete(rawMsg, key)\n\t\t}\n\t\tif err != nil {\n\t\t\treturn fmt.Errorf(\"unmarshalling type %T: %v\", s, err)\n\t\t}\n\t}\n\treturn nil\n}", "func (su *StateUpdate) RemoveVehicleRegistrations(v ...*VehicleRegistration) *StateUpdate {\n\tids := make([]int, len(v))\n\tfor i := range v {\n\t\tids[i] = v[i].ID\n\t}\n\treturn su.RemoveVehicleRegistrationIDs(ids...)\n}", "func (instance *ClassicCluster) unsafeListMasters(inctx context.Context) (_ resources.IndexedListOfClusterNodes, _ fail.Error) {\n\tdefer elapsed(inctx, \"unsafeListMasters\")()\n\tctx, cancel := context.WithCancel(inctx)\n\tdefer cancel()\n\n\ttype result struct {\n\t\trTr resources.IndexedListOfClusterNodes\n\t\trErr fail.Error\n\t}\n\tchRes := make(chan result)\n\tgo func() {\n\t\tdefer close(chRes)\n\n\t\tgres, _ := func() (_ result, ferr fail.Error) {\n\t\t\tdefer fail.OnPanic(&ferr)\n\t\t\tlicn := make(resources.IndexedListOfClusterNodes)\n\n\t\t\tlinodes, xerr := instance.trueListMasters(ctx)\n\t\t\txerr = debug.InjectPlannedFail(xerr)\n\t\t\tif xerr != nil {\n\t\t\t\treturn result{licn, xerr}, xerr\n\t\t\t}\n\n\t\t\tfor ind, v := range linodes {\n\t\t\t\tlicn[uint(ind)] = &propertiesv3.ClusterNode{\n\t\t\t\t\tID: v.Core.ID,\n\t\t\t\t\tNumericalID: uint(ind),\n\t\t\t\t\tName: v.Core.Name,\n\t\t\t\t\tPublicIP: v.Networking.PublicIPv4,\n\t\t\t\t}\n\t\t\t}\n\n\t\t\treturn result{licn, nil}, nil\n\t\t}()\n\t\tchRes <- gres\n\t}()\n\tselect {\n\tcase res := <-chRes:\n\t\treturn res.rTr, res.rErr\n\tcase <-ctx.Done():\n\t\treturn nil, fail.ConvertError(ctx.Err())\n\tcase <-inctx.Done():\n\t\treturn nil, fail.ConvertError(inctx.Err())\n\t}\n\n}", "func (v *MetricTagConfigurationMetricTypes) UnmarshalJSON(src []byte) error {\n\tvar value string\n\terr := json.Unmarshal(src, &value)\n\tif err != nil {\n\t\treturn err\n\t}\n\t*v = MetricTagConfigurationMetricTypes(value)\n\treturn nil\n}", "func (l *ListVPNServerConfigurationPolicyGroupsResult) UnmarshalJSON(data []byte) error {\n\tvar rawMsg map[string]json.RawMessage\n\tif err := json.Unmarshal(data, &rawMsg); err != nil {\n\t\treturn fmt.Errorf(\"unmarshalling type %T: %v\", l, err)\n\t}\n\tfor key, val := range rawMsg {\n\t\tvar err error\n\t\tswitch key {\n\t\tcase \"nextLink\":\n\t\t\terr = unpopulate(val, \"NextLink\", &l.NextLink)\n\t\t\tdelete(rawMsg, key)\n\t\tcase \"value\":\n\t\t\terr = unpopulate(val, \"Value\", &l.Value)\n\t\t\tdelete(rawMsg, key)\n\t\t}\n\t\tif err != nil {\n\t\t\treturn fmt.Errorf(\"unmarshalling type %T: %v\", l, err)\n\t\t}\n\t}\n\treturn nil\n}", "func makeClusterFromUserConfig(configJSON string) (*envoy.Cluster, error) {\n\tvar jsonFields map[string]*json.RawMessage\n\tif err := json.Unmarshal([]byte(configJSON), &jsonFields); err != nil {\n\t\tfmt.Println(\"Custom error\", err, configJSON)\n\t\treturn nil, err\n\t}\n\n\tvar c envoy.Cluster\n\n\tif _, ok := jsonFields[\"@type\"]; ok {\n\t\t// Type field is present so decode it as a types.Any\n\t\tvar any types.Any\n\t\terr := jsonpb.UnmarshalString(configJSON, &any)\n\t\tif err != nil {\n\t\t\treturn nil, err\n\t\t}\n\t\t// And then unmarshal the listener again...\n\t\terr = proto.Unmarshal(any.Value, &c)\n\t\tif err != nil {\n\t\t\tpanic(err)\n\t\t\t//return nil, err\n\t\t}\n\t\treturn 
&c, err\n\t}\n\n\t// No @type so try decoding as a straight listener.\n\terr := jsonpb.UnmarshalString(configJSON, &c)\n\treturn &c, err\n}", "func (cl ClusterList) clusterListPreparer(ctx context.Context) (*http.Request, error) {\n\tif cl.NextLink == nil || len(to.String(cl.NextLink)) < 1 {\n\t\treturn nil, nil\n\t}\n\treturn autorest.Prepare((&http.Request{}).WithContext(ctx),\n\t\tautorest.AsJSON(),\n\t\tautorest.AsGet(),\n\t\tautorest.WithBaseURL(to.String(cl.NextLink)))\n}", "func (suo *StateUpdateOne) RemoveVehicleRegistrations(v ...*VehicleRegistration) *StateUpdateOne {\n\tids := make([]int, len(v))\n\tfor i := range v {\n\t\tids[i] = v[i].ID\n\t}\n\treturn suo.RemoveVehicleRegistrationIDs(ids...)\n}", "func (c *gcLifecycle) Remove(cluster *v3.Cluster) (runtime.Object, error) {\n\tif err := c.waitForNodeRemoval(cluster); err != nil {\n\t\treturn cluster, err // ErrSkip if we still need to wait\n\t}\n\n\tRESTconfig := c.mgmt.RESTConfig\n\t// due to the large number of api calls, temporary raise the burst limit in order to reduce client throttling\n\tRESTconfig.Burst = 25\n\tdynamicClient, err := dynamic.NewForConfig(&RESTconfig)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\tdecodedMap := resource.GetClusterScopedTypes()\n\t//if map is empty, fall back to checking all Rancher types\n\tif len(decodedMap) == 0 {\n\t\tdecodedMap = resource.Get()\n\t}\n\tvar g errgroup.Group\n\n\tfor key := range decodedMap {\n\t\tactualKey := key // https://golang.org/doc/faq#closures_and_goroutines\n\t\tg.Go(func() error {\n\t\t\tobjList, err := dynamicClient.Resource(actualKey).List(context.TODO(), metav1.ListOptions{})\n\t\t\tif err != nil {\n\t\t\t\tif errors.IsNotFound(err) {\n\t\t\t\t\treturn nil\n\t\t\t\t}\n\t\t\t\treturn err\n\t\t\t}\n\t\t\tfor _, obj := range objList.Items {\n\t\t\t\t_, err = cleanFinalizers(cluster.Name, &obj, dynamicClient.Resource(actualKey).Namespace(obj.GetNamespace()))\n\t\t\t\tif err != nil {\n\t\t\t\t\treturn err\n\t\t\t\t}\n\t\t\t}\n\t\t\treturn nil\n\t\t})\n\t}\n\tif err = g.Wait(); err != nil {\n\t\treturn nil, err\n\t}\n\treturn nil, nil\n}", "func (client IdentityClient) ListRegionSubscriptions(ctx context.Context, request ListRegionSubscriptionsRequest) (response ListRegionSubscriptionsResponse, err error) {\n\tvar ociResponse common.OCIResponse\n\tpolicy := common.NoRetryPolicy()\n\tif client.RetryPolicy() != nil {\n\t\tpolicy = *client.RetryPolicy()\n\t}\n\tif request.RetryPolicy() != nil {\n\t\tpolicy = *request.RetryPolicy()\n\t}\n\tociResponse, err = common.Retry(ctx, request, client.listRegionSubscriptions, policy)\n\tif err != nil {\n\t\tif ociResponse != nil {\n\t\t\tif httpResponse := ociResponse.HTTPResponse(); httpResponse != nil {\n\t\t\t\topcRequestId := httpResponse.Header.Get(\"opc-request-id\")\n\t\t\t\tresponse = ListRegionSubscriptionsResponse{RawResponse: httpResponse, OpcRequestId: &opcRequestId}\n\t\t\t} else {\n\t\t\t\tresponse = ListRegionSubscriptionsResponse{}\n\t\t\t}\n\t\t}\n\t\treturn\n\t}\n\tif convertedResponse, ok := ociResponse.(ListRegionSubscriptionsResponse); ok {\n\t\tresponse = convertedResponse\n\t} else {\n\t\terr = fmt.Errorf(\"failed to convert OCIResponse into ListRegionSubscriptionsResponse\")\n\t}\n\treturn\n}", "func (client *DevicesClient) listRegistrationKeyHandleResponse(resp *http.Response) (DevicesClientListRegistrationKeyResponse, error) {\n\tresult := DevicesClientListRegistrationKeyResponse{RawResponse: resp}\n\tif err := runtime.UnmarshalAsJSON(resp, &result.DeviceRegistrationKey); err != nil {\n\t\treturn 
DevicesClientListRegistrationKeyResponse{}, err\n\t}\n\treturn result, nil\n}", "func (s *ClusterListener) ListNodes(inctx context.Context, in *protocol.Reference) (_ *protocol.ClusterNodeListResponse, err error) {\n\tdefer fail.OnExitConvertToGRPCStatus(inctx, &err)\n\tdefer fail.OnExitWrapError(inctx, &err, \"cannot list cluster nodes\")\n\n\tif s == nil {\n\t\treturn nil, fail.InvalidInstanceError()\n\t}\n\tif inctx == nil {\n\t\treturn nil, fail.InvalidParameterCannotBeNilError(\"inctx\")\n\t}\n\tif in == nil {\n\t\treturn nil, fail.InvalidParameterCannotBeNilError(\"in\")\n\t}\n\n\tref, _ := srvutils.GetReference(in)\n\tif ref == \"\" {\n\t\treturn nil, fail.InvalidRequestError(\"cluster name is missing\")\n\t}\n\n\tjob, err := PrepareJob(inctx, in.GetTenantId(), fmt.Sprintf(\"/cluster/%s/nodes/list\", ref))\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\tdefer job.Close()\n\n\thandler := handlers.NewClusterHandler(job)\n\n\tlist, xerr := handler.ListNodes(ref)\n\tif xerr != nil {\n\t\treturn nil, xerr\n\t}\n\n\tout := &protocol.ClusterNodeListResponse{}\n\tout.Nodes = make([]*protocol.Host, 0, len(list))\n\tfor _, v := range list {\n\t\tout.Nodes = append(out.Nodes, &protocol.Host{\n\t\t\tId: v.ID,\n\t\t\tName: v.Name,\n\t\t})\n\t}\n\treturn out, nil\n}", "func (v *NotificationList) UnmarshalJSON(data []byte) error {\n\tr := jlexer.Lexer{Data: data}\n\teasyjson9806e1DecodeGithubComKonstantinProninEmailSendingServicePkgModel(&r, v)\n\treturn r.Error()\n}", "func (e *EffectiveNetworkSecurityGroupListResult) UnmarshalJSON(data []byte) error {\n\tvar rawMsg map[string]json.RawMessage\n\tif err := json.Unmarshal(data, &rawMsg); err != nil {\n\t\treturn fmt.Errorf(\"unmarshalling type %T: %v\", e, err)\n\t}\n\tfor key, val := range rawMsg {\n\t\tvar err error\n\t\tswitch key {\n\t\tcase \"nextLink\":\n\t\t\terr = unpopulate(val, \"NextLink\", &e.NextLink)\n\t\t\tdelete(rawMsg, key)\n\t\tcase \"value\":\n\t\t\terr = unpopulate(val, \"Value\", &e.Value)\n\t\t\tdelete(rawMsg, key)\n\t\t}\n\t\tif err != nil {\n\t\t\treturn fmt.Errorf(\"unmarshalling type %T: %v\", e, err)\n\t\t}\n\t}\n\treturn nil\n}", "func (c *CentralServerFullResourceNames) UnmarshalJSON(data []byte) error {\n\tvar rawMsg map[string]json.RawMessage\n\tif err := json.Unmarshal(data, &rawMsg); err != nil {\n\t\treturn fmt.Errorf(\"unmarshalling type %T: %v\", c, err)\n\t}\n\tfor key, val := range rawMsg {\n\t\tvar err error\n\t\tswitch key {\n\t\tcase \"availabilitySetName\":\n\t\t\terr = unpopulate(val, \"AvailabilitySetName\", &c.AvailabilitySetName)\n\t\t\tdelete(rawMsg, key)\n\t\tcase \"loadBalancer\":\n\t\t\terr = unpopulate(val, \"LoadBalancer\", &c.LoadBalancer)\n\t\t\tdelete(rawMsg, key)\n\t\tcase \"virtualMachines\":\n\t\t\terr = unpopulate(val, \"VirtualMachines\", &c.VirtualMachines)\n\t\t\tdelete(rawMsg, key)\n\t\t}\n\t\tif err != nil {\n\t\t\treturn fmt.Errorf(\"unmarshalling type %T: %v\", c, err)\n\t\t}\n\t}\n\treturn nil\n}", "func (a *Client) V2DeregisterCluster(ctx context.Context, params *V2DeregisterClusterParams) (*V2DeregisterClusterNoContent, error) {\n\n\tresult, err := a.transport.Submit(&runtime.ClientOperation{\n\t\tID: \"v2DeregisterCluster\",\n\t\tMethod: \"DELETE\",\n\t\tPathPattern: \"/v2/clusters/{cluster_id}\",\n\t\tProducesMediaTypes: []string{\"application/json\"},\n\t\tConsumesMediaTypes: []string{\"application/json\"},\n\t\tSchemes: []string{\"http\", \"https\"},\n\t\tParams: params,\n\t\tReader: &V2DeregisterClusterReader{formats: a.formats},\n\t\tAuthInfo: a.authInfo,\n\t\tContext: 
ctx,\n\t\tClient: params.HTTPClient,\n\t})\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\treturn result.(*V2DeregisterClusterNoContent), nil\n\n}" ]
[ "0.66339713", "0.604364", "0.5504917", "0.54388016", "0.5357876", "0.51679134", "0.50562996", "0.4886906", "0.4851879", "0.47381872", "0.46461502", "0.4645939", "0.4577855", "0.44573668", "0.44456002", "0.44417658", "0.44191048", "0.44122767", "0.44003344", "0.43986234", "0.43641883", "0.43611822", "0.4344848", "0.4330224", "0.43296033", "0.43288022", "0.4319533", "0.4307227", "0.42925552", "0.427904", "0.42732996", "0.4250939", "0.42417195", "0.4223877", "0.42223474", "0.4221174", "0.42059734", "0.41881955", "0.41866863", "0.41787675", "0.41747266", "0.4171114", "0.4158869", "0.4149742", "0.41493064", "0.41479358", "0.4143124", "0.4135559", "0.41324693", "0.41292897", "0.41290325", "0.41278517", "0.41267613", "0.4126487", "0.41251895", "0.41191745", "0.41169274", "0.41058746", "0.4097119", "0.40922913", "0.40878105", "0.40853444", "0.40783527", "0.40680778", "0.40650734", "0.40608814", "0.4057621", "0.40531278", "0.4050043", "0.40481097", "0.4044016", "0.40425092", "0.40418583", "0.40376857", "0.40352738", "0.40350008", "0.4031369", "0.40292016", "0.40210938", "0.40207043", "0.40179577", "0.40172154", "0.4012871", "0.40122268", "0.4008232", "0.40062627", "0.40050706", "0.39988708", "0.39988035", "0.39980036", "0.39969397", "0.39923656", "0.39889646", "0.39841747", "0.39825702", "0.39738277", "0.39723265", "0.3969046", "0.39689273", "0.39573282" ]
0.8514573
0
readClusterRegistrationList reads a list of values of the 'cluster_registration' type from the given iterator.
func readClusterRegistrationList(iterator *jsoniter.Iterator) []*ClusterRegistration {
    list := []*ClusterRegistration{}
    for iterator.ReadArray() {
        item := readClusterRegistration(iterator)
        list = append(list, item)
    }
    return list
}
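Usage note (an illustrative sketch, not part of the generated SDK): the reader consumes a JSON array element by element through iterator.ReadArray, so the jsoniter iterator must be positioned at the opening bracket of the array. In the SDK it is normally reached through an unmarshal wrapper that builds the iterator and surfaces iterator.Error (see UnmarshalClusterRegistrationList among the negatives below). Driving it directly with the github.com/json-iterator/go API would look roughly like the sketch that follows; ParseString, ConfigDefault and Iterator.Error are standard json-iterator identifiers, while parseClusterRegistrations and its error handling are hypothetical and assume the code sits in the same package as the generated reader.

// Sketch only: assumes "io" and jsoniter "github.com/json-iterator/go" are
// imported, and that this lives alongside readClusterRegistrationList,
// readClusterRegistration and the ClusterRegistration type.
func parseClusterRegistrations(raw string) ([]*ClusterRegistration, error) {
    // Position the iterator at the start of the JSON array and let the
    // generated reader consume it element by element.
    iterator := jsoniter.ParseString(jsoniter.ConfigDefault, raw)
    list := readClusterRegistrationList(iterator)
    // io.EOF just means the whole input was consumed; anything else is a
    // real decoding error.
    if iterator.Error != nil && iterator.Error != io.EOF {
        return nil, iterator.Error
    }
    return list, nil
}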
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "func UnmarshalClusterRegistrationList(source interface{}) (items []*ClusterRegistration, err error) {\n\titerator, err := helpers.NewIterator(source)\n\tif err != nil {\n\t\treturn\n\t}\n\titems = readClusterRegistrationList(iterator)\n\terr = iterator.Error\n\treturn\n}", "func writeClusterRegistrationList(list []*ClusterRegistration, stream *jsoniter.Stream) {\n\tstream.WriteArrayStart()\n\tfor i, value := range list {\n\t\tif i > 0 {\n\t\t\tstream.WriteMore()\n\t\t}\n\t\twriteClusterRegistration(value, stream)\n\t}\n\tstream.WriteArrayEnd()\n}", "func MarshalClusterRegistrationList(list []*ClusterRegistration, writer io.Writer) error {\n\tstream := helpers.NewStream(writer)\n\twriteClusterRegistrationList(list, stream)\n\tstream.Flush()\n\treturn stream.Error\n}", "func (c *cache) ListApplicationRegistrations() (*storkv1alpha1.ApplicationRegistrationList, error) {\n\tif c == nil || c.controllerCache == nil {\n\t\treturn nil, fmt.Errorf(cacheNotInitializedErr)\n\t}\n\tappRegList := &storkv1alpha1.ApplicationRegistrationList{}\n\tif err := c.controllerCache.List(context.Background(), appRegList); err != nil {\n\t\treturn nil, err\n\t}\n\treturn appRegList, nil\n}", "func GatherClusterImageRegistry(g *Gatherer, c chan<- gatherResult) {\n\tdefer close(c)\n\tregistryClient, err := imageregistryv1client.NewForConfig(g.gatherKubeConfig)\n\tif err != nil {\n\t\tc <- gatherResult{nil, []error{err}}\n\t\treturn\n\t}\n\tgatherKubeClient, err := kubernetes.NewForConfig(g.gatherProtoKubeConfig)\n\tif err != nil {\n\t\tc <- gatherResult{nil, []error{err}}\n\t\treturn\n\t}\n\trecords, errors := gatherClusterImageRegistry(g.ctx, registryClient.ImageregistryV1(), gatherKubeClient.CoreV1())\n\tc <- gatherResult{records, errors}\n}", "func NewSQLServerRegistrationListResultIterator(page SQLServerRegistrationListResultPage) SQLServerRegistrationListResultIterator {\n\treturn SQLServerRegistrationListResultIterator{page: page}\n}", "func List() ([]clusterapi.Cluster, error) {\n\tvar clusterList []clusterapi.Cluster\n\terr := utils.BrowseMetadataContent(clusterapi.ClusterMetadataPrefix, func(buf *bytes.Buffer) error {\n\t\tvar c clusterapi.Cluster\n\t\terr := gob.NewDecoder(buf).Decode(&c)\n\t\tif err != nil {\n\t\t\treturn err\n\t\t}\n\t\tclusterList = append(clusterList, c)\n\t\treturn nil\n\t})\n\treturn clusterList, err\n}", "func clusterList() []string {\n\tif c := envy.String(\"DQLITED_CLUSTER\"); c != \"\" {\n\t\treturn strings.Split(c, \",\")\n\t}\n\treturn defaultCluster\n}", "func ClusterInstallationsFromReader(reader io.Reader) ([]*ClusterInstallation, error) {\n\tclusterInstallations := []*ClusterInstallation{}\n\tdecoder := json.NewDecoder(reader)\n\n\terr := decoder.Decode(&clusterInstallations)\n\tif err != nil && err != io.EOF {\n\t\treturn nil, err\n\t}\n\n\treturn clusterInstallations, nil\n}", "func (c *ClusterResourceClient) List(ctx context.Context, opts metav1.ListOptions) (*metav1.PartialObjectMetadataList, error) {\n\treturn c.clientCache.ClusterOrDie(logicalcluster.Wildcard).Resource(c.resource).List(ctx, opts)\n}", "func (iter *SQLServerRegistrationListResultIterator) Next() error {\n\treturn iter.NextWithContext(context.Background())\n}", "func (c *MultiClusterController) List(clusterName string, opts ...client.ListOption) (interface{}, error) {\n\tcluster := c.GetCluster(clusterName)\n\tif cluster == nil {\n\t\treturn nil, errors.NewClusterNotFound(clusterName)\n\t}\n\tinstanceList := utilscheme.Scheme.NewObjectList(c.objectType)\n\tdelegatingClient, err := 
cluster.GetDelegatingClient()\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\terr = delegatingClient.List(context.TODO(), instanceList, opts...)\n\treturn instanceList, err\n}", "func (adm Admin) ListClusterInfo(cluster string) (string, error) {\n\t// make sure the cluster is already setup\n\tif ok, err := adm.isClusterSetup(cluster); !ok || err != nil {\n\t\treturn \"\", ErrClusterNotSetup\n\t}\n\n\tbuilder := KeyBuilder{cluster}\n\tisPath := builder.idealStates()\n\tinstancesPath := builder.instances()\n\n\tresources, err := adm.zkClient.Children(isPath)\n\tif err != nil {\n\t\treturn \"\", err\n\t}\n\n\tinstances, err := adm.zkClient.Children(instancesPath)\n\tif err != nil {\n\t\treturn \"\", err\n\t}\n\n\tvar buffer bytes.Buffer\n\tbuffer.WriteString(\"Existing resources in cluster \" + cluster + \":\\n\")\n\n\tfor _, r := range resources {\n\t\tbuffer.WriteString(\" \" + r + \"\\n\")\n\t}\n\n\tbuffer.WriteString(\"\\nInstances in cluster \" + cluster + \":\\n\")\n\tfor _, i := range instances {\n\t\tbuffer.WriteString(\" \" + i + \"\\n\")\n\t}\n\treturn buffer.String(), nil\n}", "func (c *Client) ListApplicationRegistrations() (*storkv1alpha1.ApplicationRegistrationList, error) {\n\tif err := c.initClient(); err != nil {\n\t\treturn nil, err\n\t}\n\treturn c.stork.StorkV1alpha1().ApplicationRegistrations().List(context.TODO(), metav1.ListOptions{})\n\n}", "func ListClusterUserCredentials(client autorest.Client, urlParameters map[string]interface{}, apiVersion string) containerservice.CredentialResults {\r\n\tqueryParameters := map[string]interface{}{\r\n\t\t\"api-version\": apiVersion,\r\n\t}\r\n\tpreparerDecorators := []autorest.PrepareDecorator{\r\n\t\tautorest.AsContentType(\"application/json; charset=utf-8\"),\r\n\t\tautorest.WithMethod(\"POST\"),\r\n\t\tautorest.WithBaseURL(azure.PublicCloud.ResourceManagerEndpoint),\r\n\t\tautorest.WithPathParameters(\r\n\t\t\t\"/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.ContainerService/managedClusters/{resourceName}/listClusterUserCredential\",\r\n\t\t\turlParameters,\r\n\t\t),\r\n\t\tautorest.WithQueryParameters(queryParameters),\r\n\t}\r\n\r\n\tpreparer := autorest.CreatePreparer(preparerDecorators...)\r\n\treq, err := preparer.Prepare((&http.Request{}).WithContext(context.Background()))\r\n\r\n\tif err != nil {\r\n\t\tpanic(err)\r\n\t}\r\n\r\n\tfmt.Println(req.URL)\r\n\r\n\tresp, err := client.Do(req)\r\n\tif err != nil {\r\n\t\tpanic(err)\r\n\t}\r\n\terr = autorest.Respond(\r\n\t\tresp,\r\n\t\tclient.ByInspecting(),\r\n\t)\r\n\r\n\tcontent, err := ioutil.ReadAll(resp.Body)\r\n\r\n\tvar kubeconfigs containerservice.CredentialResults\r\n\tjson.Unmarshal(content, &kubeconfigs)\r\n\r\n\treturn kubeconfigs\r\n}", "func (svc ServerlessClusterService) List(ctx context.Context) (*[]models.Cluster, *Response, error) {\n\tvar clusterList []models.Cluster\n\tgraphqlRequest := models.GraphqlRequest{\n\t\tName: \"clusters\",\n\t\tOperation: models.Query,\n\t\tInput: nil,\n\t\tArgs: models.ClusterListInput{\n\t\t\tProductType: models.Starter,\n\t\t},\n\t\tResponse: clusterList,\n\t}\n\treq, err := svc.client.NewRequest(&graphqlRequest)\n\tif err != nil {\n\t\treturn nil, nil, err\n\t}\n\n\tresp, err := svc.client.Do(ctx, req, &clusterList)\n\tif err != nil {\n\t\treturn nil, nil, err\n\t}\n\n\treturn &clusterList, resp, err\n}", "func (c starterClusterServiceOp) List(ctx context.Context) (*[]models.Cluster, *Response, error) {\n\tvar clusterList []models.Cluster\n\tgraphqlRequest := 
models.GraphqlRequest{\n\t\tName: \"clusters\",\n\t\tOperation: models.Query,\n\t\tInput: clusterList,\n\t\tArgs: models.ClusterListInput{\n\t\t\tProductType: models.Starter,\n\t\t},\n\t\tResponse: clusterList,\n\t}\n\treq, err := c.client.NewRequest(&graphqlRequest)\n\tif err != nil {\n\t\treturn nil, nil, err\n\t}\n\n\tresp, err := c.client.Do(ctx, req, &clusterList)\n\tif err != nil {\n\t\treturn nil, resp, err\n\t}\n\n\treturn &clusterList, resp, err\n}", "func (instance *ClassicCluster) unsafeListNodes(inctx context.Context) (_ resources.IndexedListOfClusterNodes, _ fail.Error) {\n\tctx, cancel := context.WithCancel(inctx)\n\tdefer cancel()\n\n\ttype result struct {\n\t\trTr resources.IndexedListOfClusterNodes\n\t\trErr fail.Error\n\t}\n\tchRes := make(chan result)\n\tgo func() {\n\t\tdefer close(chRes)\n\t\tgres, _ := func() (_ result, ferr fail.Error) {\n\t\t\tdefer fail.OnPanic(&ferr)\n\n\t\t\tlicn := make(resources.IndexedListOfClusterNodes)\n\n\t\t\tlinodes, xerr := instance.trueListNodes(ctx)\n\t\t\txerr = debug.InjectPlannedFail(xerr)\n\t\t\tif xerr != nil {\n\t\t\t\treturn result{licn, xerr}, xerr\n\t\t\t}\n\n\t\t\tfor ind, v := range linodes {\n\t\t\t\tlicn[uint(ind)] = &propertiesv3.ClusterNode{\n\t\t\t\t\tID: v.Core.ID,\n\t\t\t\t\tNumericalID: uint(ind),\n\t\t\t\t\tName: v.Core.Name,\n\t\t\t\t\tPublicIP: v.Networking.PublicIPv4,\n\t\t\t\t}\n\t\t\t}\n\n\t\t\treturn result{licn, nil}, nil\n\t\t}()\n\t\tchRes <- gres\n\t}()\n\tselect {\n\tcase res := <-chRes:\n\t\treturn res.rTr, res.rErr\n\tcase <-ctx.Done():\n\t\treturn nil, fail.ConvertError(ctx.Err())\n\tcase <-inctx.Done():\n\t\treturn nil, fail.ConvertError(inctx.Err())\n\t}\n}", "func (m *Manager) GetClusterList() ([]Cluster, error) {\n\tnames, err := m.specManager.List()\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\n\tvar clusters = []Cluster{}\n\n\tfor _, name := range names {\n\t\tmetadata, err := m.meta(name)\n\t\tif err != nil && !errors.Is(perrs.Cause(err), meta.ErrValidate) &&\n\t\t\t!errors.Is(perrs.Cause(err), spec.ErrNoTiSparkMaster) {\n\t\t\treturn nil, perrs.Trace(err)\n\t\t}\n\n\t\tbase := metadata.GetBaseMeta()\n\n\t\tclusters = append(clusters, Cluster{\n\t\t\tName: name,\n\t\t\tUser: base.User,\n\t\t\tVersion: base.Version,\n\t\t\tPath: m.specManager.Path(name),\n\t\t\tPrivateKey: m.specManager.Path(name, \"ssh\", \"id_rsa\"),\n\t\t})\n\t}\n\n\treturn clusters, nil\n}", "func (iter *ClusterListResultIterator) Next() error {\n\treturn iter.NextWithContext(context.Background())\n}", "func (adm Admin) ListClusterInfo(cluster string) (string, error) {\n\tconn := newConnection(adm.ZkSvr)\n\terr := conn.Connect()\n\tif err != nil {\n\t\treturn \"\", err\n\t}\n\tdefer conn.Disconnect()\n\n\t// make sure the cluster is already setup\n\tif ok, err := conn.IsClusterSetup(cluster); !ok || err != nil {\n\t\treturn \"\", ErrClusterNotSetup\n\t}\n\n\tkeys := KeyBuilder{cluster}\n\tisPath := keys.idealStates()\n\tinstancesPath := keys.instances()\n\n\tresources, err := conn.Children(isPath)\n\tif err != nil {\n\t\treturn \"\", err\n\t}\n\n\tinstances, err := conn.Children(instancesPath)\n\tif err != nil {\n\t\treturn \"\", err\n\t}\n\n\tvar buffer bytes.Buffer\n\tbuffer.WriteString(\"Existing resources in cluster \" + cluster + \":\\n\")\n\n\tfor _, r := range resources {\n\t\tbuffer.WriteString(\" \" + r + \"\\n\")\n\t}\n\n\tbuffer.WriteString(\"\\nInstances in cluster \" + cluster + \":\\n\")\n\tfor _, i := range instances {\n\t\tbuffer.WriteString(\" \" + i + \"\\n\")\n\t}\n\treturn buffer.String(), nil\n}", "func 
(client DeploymentsClient) ListForClusterSender(req *http.Request) (*http.Response, error) {\n\treturn client.Send(req, azure.DoRetryWithRegistration(client.Client))\n}", "func NewRegistrationDefinitionListIterator(page RegistrationDefinitionListPage) RegistrationDefinitionListIterator {\n\treturn RegistrationDefinitionListIterator{page: page}\n}", "func (s *federatedClusterLister) List(selector labels.Selector) (ret []*federation.FederatedCluster, err error) {\n\terr = cache.ListAll(s.indexer, selector, func(m interface{}) {\n\t\tret = append(ret, m.(*federation.FederatedCluster))\n\t})\n\treturn ret, err\n}", "func (*OktetoClusterHelper) List() (map[string]string, error) {\n\treturn nil, ErrNotImplemented\n}", "func (c *clusterCache) listResources(ctx context.Context, resClient dynamic.ResourceInterface, callback func(*pager.ListPager) error) (string, error) {\n\tif err := c.listSemaphore.Acquire(ctx, 1); err != nil {\n\t\treturn \"\", err\n\t}\n\tdefer c.listSemaphore.Release(1)\n\tvar retryCount int64 = 0\n\tresourceVersion := \"\"\n\tlistPager := pager.New(func(ctx context.Context, opts metav1.ListOptions) (runtime.Object, error) {\n\t\tvar res *unstructured.UnstructuredList\n\t\tvar listRetry wait.Backoff\n\n\t\tif c.listRetryUseBackoff {\n\t\t\tlistRetry = retry.DefaultBackoff\n\t\t} else {\n\t\t\tlistRetry = retry.DefaultRetry\n\t\t}\n\n\t\tlistRetry.Steps = int(c.listRetryLimit)\n\t\terr := retry.OnError(listRetry, c.listRetryFunc, func() error {\n\t\t\tvar ierr error\n\t\t\tres, ierr = resClient.List(ctx, opts)\n\t\t\tif ierr != nil {\n\t\t\t\t// Log out a retry\n\t\t\t\tif c.listRetryLimit > 1 && c.listRetryFunc(ierr) {\n\t\t\t\t\tretryCount += 1\n\t\t\t\t\tc.log.Info(fmt.Sprintf(\"Error while listing resources: %v (try %d/%d)\", ierr, retryCount, c.listRetryLimit))\n\t\t\t\t}\n\t\t\t\treturn ierr\n\t\t\t}\n\t\t\tresourceVersion = res.GetResourceVersion()\n\t\t\treturn nil\n\t\t})\n\t\treturn res, err\n\t})\n\tlistPager.PageBufferSize = c.listPageBufferSize\n\tlistPager.PageSize = c.listPageSize\n\n\treturn resourceVersion, callback(listPager)\n}", "func provisionerList(w http.ResponseWriter, r *http.Request, t auth.Token) (err error) {\n\tallowed := permission.Check(t, permission.PermClusterRead)\n\tif !allowed {\n\t\treturn permission.ErrUnauthorized\n\t}\n\tprovs, err := provision.Registry()\n\tif err != nil {\n\t\treturn err\n\t}\n\tinfo := make([]provisionerInfo, len(provs))\n\tfor i, p := range provs {\n\t\tinfo[i].Name = p.GetName()\n\t\tif clusterProv, ok := p.(cluster.ClusteredProvisioner); ok {\n\t\t\tinfo[i].ClusterHelp = clusterProv.ClusterHelp()\n\t\t}\n\t}\n\tw.Header().Set(\"Content-Type\", \"application/json\")\n\treturn json.NewEncoder(w).Encode(info)\n}", "func (instance *ClassicCluster) unsafeListMasters(inctx context.Context) (_ resources.IndexedListOfClusterNodes, _ fail.Error) {\n\tdefer elapsed(inctx, \"unsafeListMasters\")()\n\tctx, cancel := context.WithCancel(inctx)\n\tdefer cancel()\n\n\ttype result struct {\n\t\trTr resources.IndexedListOfClusterNodes\n\t\trErr fail.Error\n\t}\n\tchRes := make(chan result)\n\tgo func() {\n\t\tdefer close(chRes)\n\n\t\tgres, _ := func() (_ result, ferr fail.Error) {\n\t\t\tdefer fail.OnPanic(&ferr)\n\t\t\tlicn := make(resources.IndexedListOfClusterNodes)\n\n\t\t\tlinodes, xerr := instance.trueListMasters(ctx)\n\t\t\txerr = debug.InjectPlannedFail(xerr)\n\t\t\tif xerr != nil {\n\t\t\t\treturn result{licn, xerr}, xerr\n\t\t\t}\n\n\t\t\tfor ind, v := range linodes {\n\t\t\t\tlicn[uint(ind)] = 
&propertiesv3.ClusterNode{\n\t\t\t\t\tID: v.Core.ID,\n\t\t\t\t\tNumericalID: uint(ind),\n\t\t\t\t\tName: v.Core.Name,\n\t\t\t\t\tPublicIP: v.Networking.PublicIPv4,\n\t\t\t\t}\n\t\t\t}\n\n\t\t\treturn result{licn, nil}, nil\n\t\t}()\n\t\tchRes <- gres\n\t}()\n\tselect {\n\tcase res := <-chRes:\n\t\treturn res.rTr, res.rErr\n\tcase <-ctx.Done():\n\t\treturn nil, fail.ConvertError(ctx.Err())\n\tcase <-inctx.Done():\n\t\treturn nil, fail.ConvertError(inctx.Err())\n\t}\n\n}", "func GetClusterCIDRs(lister configlistersv1.NetworkLister, recorder events.Recorder) ([]string, error) {\n\tnetwork, err := lister.Get(\"cluster\")\n\tif errors.IsNotFound(err) {\n\t\trecorder.Warningf(\"ObserveRestrictedCIDRFailed\", \"Required networks.%s/cluster not found\", configv1.GroupName)\n\t\treturn nil, nil\n\t}\n\tif err != nil {\n\t\trecorder.Warningf(\"ObserveRestrictedCIDRFailed\", \"error getting networks.%s/cluster: %v\", configv1.GroupName, err)\n\t\treturn nil, err\n\t}\n\n\tif len(network.Status.ClusterNetwork) == 0 {\n\t\trecorder.Warningf(\"ObserveClusterCIDRFailed\", \"Required status.clusterNetwork field is not set in networks.%s/cluster\", configv1.GroupName)\n\t\treturn nil, fmt.Errorf(\"networks.%s/cluster: status.clusterNetwork not found\", configv1.GroupName)\n\t}\n\n\tvar clusterCIDRs []string\n\tfor i, clusterNetwork := range network.Status.ClusterNetwork {\n\t\tif len(clusterNetwork.CIDR) == 0 {\n\t\t\trecorder.Warningf(\"ObserveRestrictedCIDRFailed\", \"Required status.clusterNetwork[%d].cidr field is not set in networks.%s/cluster\", i, configv1.GroupName)\n\t\t\treturn nil, fmt.Errorf(\"networks.%s/cluster: status.clusterNetwork[%d].cidr not found\", configv1.GroupName, i)\n\t\t}\n\t\tclusterCIDRs = append(clusterCIDRs, clusterNetwork.CIDR)\n\t}\n\t// TODO fallback to podCIDR? is that still a thing?\n\treturn clusterCIDRs, nil\n}", "func (s *Services) List(q *QueryOptions) ([]*ServiceRegistrationListStub, *QueryMeta, error) {\n\tvar resp []*ServiceRegistrationListStub\n\tqm, err := s.client.query(\"/v1/services\", &resp, q)\n\tif err != nil {\n\t\treturn nil, qm, err\n\t}\n\treturn resp, qm, nil\n}", "func (f *fileCredentialCache) List() []*AuthEntry {\n\tregistryCache := f.init()\n\n\t// optimize allocation for copy\n\tentries := make([]*AuthEntry, 0, len(registryCache.Registries))\n\n\tfor _, entry := range registryCache.Registries {\n\t\tentries = append(entries, entry)\n\t}\n\n\treturn entries\n}", "func ExampleReplicationStorageClassificationsClient_NewListByReplicationFabricsPager() {\n\tcred, err := azidentity.NewDefaultAzureCredential(nil)\n\tif err != nil {\n\t\tlog.Fatalf(\"failed to obtain a credential: %v\", err)\n\t}\n\tctx := context.Background()\n\tclientFactory, err := armrecoveryservicessiterecovery.NewClientFactory(\"<subscription-id>\", cred, nil)\n\tif err != nil {\n\t\tlog.Fatalf(\"failed to create client: %v\", err)\n\t}\n\tpager := clientFactory.NewReplicationStorageClassificationsClient().NewListByReplicationFabricsPager(\"vault1\", \"resourceGroupPS1\", \"2a48e3770ac08aa2be8bfbd94fcfb1cbf2dcc487b78fb9d3bd778304441b06a0\", nil)\n\tfor pager.More() {\n\t\tpage, err := pager.NextPage(ctx)\n\t\tif err != nil {\n\t\t\tlog.Fatalf(\"failed to advance page: %v\", err)\n\t\t}\n\t\tfor _, v := range page.Value {\n\t\t\t// You could use page here. We use blank identifier for just demo purposes.\n\t\t\t_ = v\n\t\t}\n\t\t// If the HTTP response code is 200 as defined in example definition, your page structure would look as follows. 
Please pay attention that all the values in the output are fake values for just demo purposes.\n\t\t// page.StorageClassificationCollection = armrecoveryservicessiterecovery.StorageClassificationCollection{\n\t\t// \tValue: []*armrecoveryservicessiterecovery.StorageClassification{\n\t\t// \t\t{\n\t\t// \t\t\tName: to.Ptr(\"8891569e-aaef-4a46-a4a0-78c14f2d7b09\"),\n\t\t// \t\t\tType: to.Ptr(\"Microsoft.RecoveryServices/vaults/replicationFabrics/replicationStorageClassifications\"),\n\t\t// \t\t\tID: to.Ptr(\"/Subscriptions/9112a37f-0f3e-46ec-9c00-060c6edca071/resourceGroups/resourceGroupPS1/providers/Microsoft.RecoveryServices/vaults/vault1/replicationFabrics/2a48e3770ac08aa2be8bfbd94fcfb1cbf2dcc487b78fb9d3bd778304441b06a0/replicationStorageClassifications/8891569e-aaef-4a46-a4a0-78c14f2d7b09\"),\n\t\t// \t\t\tProperties: &armrecoveryservicessiterecovery.StorageClassificationProperties{\n\t\t// \t\t\t\tFriendlyName: to.Ptr(\"testStorageClassification\"),\n\t\t// \t\t\t},\n\t\t// \t}},\n\t\t// }\n\t}\n}", "func (w *ClusterDynamicClient) List(opts metav1.ListOptions) (*unstructured.UnstructuredList, error) {\n\treturn w.dClient.Resource(w.resource).Namespace(w.namespace).List(w.ctx, opts)\n}", "func RotateCluster(cluster *model.Cluster, logger *logrus.Entry, rotatorMetadata *RotatorMetadata) (*RotatorMetadata, error) {\n\tclientset, err := getk8sClientset(cluster)\n\tif err != nil {\n\t\treturn rotatorMetadata, err\n\t}\n\n\tif rotatorMetadata.MasterGroups == nil && rotatorMetadata.WorkerGroups == nil {\n\t\terr = rotatorMetadata.GetSetAutoscalingGroups(cluster)\n\t\tif err != nil {\n\t\t\treturn rotatorMetadata, err\n\t\t}\n\t}\n\n\tfor index, masterASG := range rotatorMetadata.MasterGroups {\n\t\tlogger.Infof(\"The autoscaling group %s has %d instance(s)\", masterASG.Name, masterASG.DesiredCapacity)\n\n\t\terr = MasterNodeRotation(cluster, &masterASG, clientset, logger)\n\t\tif err != nil {\n\t\t\trotatorMetadata.MasterGroups[index] = masterASG\n\t\t\treturn rotatorMetadata, err\n\t\t}\n\n\t\trotatorMetadata.MasterGroups[index] = masterASG\n\n\t\tlogger.Infof(\"Checking that all %d nodes are running...\", masterASG.DesiredCapacity)\n\t\terr = FinalCheck(&masterASG, clientset, logger)\n\t\tif err != nil {\n\t\t\treturn rotatorMetadata, err\n\t\t}\n\n\t\tlogger.Infof(\"ASG %s rotated successfully.\", masterASG.Name)\n\t}\n\n\tfor index, workerASG := range rotatorMetadata.WorkerGroups {\n\t\tlogger.Infof(\"The autoscaling group %s has %d instance(s)\", workerASG.Name, workerASG.DesiredCapacity)\n\n\t\terr = WorkerNodeRotation(cluster, &workerASG, clientset, logger)\n\t\tif err != nil {\n\t\t\trotatorMetadata.WorkerGroups[index] = workerASG\n\t\t\treturn rotatorMetadata, err\n\t\t}\n\n\t\trotatorMetadata.WorkerGroups[index] = workerASG\n\n\t\tlogger.Infof(\"Checking that all %d nodes are running...\", workerASG.DesiredCapacity)\n\t\terr = FinalCheck(&workerASG, clientset, logger)\n\t\tif err != nil {\n\t\t\treturn rotatorMetadata, err\n\t\t}\n\n\t\tlogger.Infof(\"ASG %s rotated successfully.\", workerASG.Name)\n\t}\n\n\tlogger.Info(\"All ASGs rotated successfully\")\n\treturn rotatorMetadata, nil\n}", "func getClusterNodeIPs(clientset client.Interface) ([]string, error) {\n\tpreferredAddressTypes := []v1.NodeAddressType{\n\t\tv1.NodeExternalIP,\n\t\tv1.NodeInternalIP,\n\t}\n\tnodeList, err := clientset.CoreV1().Nodes().List(metav1.ListOptions{})\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\tnodeAddresses := []string{}\n\tfor _, node := range nodeList.Items {\n\tOuterLoop:\n\t\tfor _, 
addressType := range preferredAddressTypes {\n\t\t\tfor _, address := range node.Status.Addresses {\n\t\t\t\tif address.Type == addressType {\n\t\t\t\t\tnodeAddresses = append(nodeAddresses, address.Address)\n\t\t\t\t\tbreak OuterLoop\n\t\t\t\t}\n\t\t\t}\n\t\t}\n\t}\n\n\treturn nodeAddresses, nil\n}", "func (c *FakeDaskClusters) List(ctx context.Context, opts v1.ListOptions) (result *kubernetesdaskorgv1.DaskClusterList, err error) {\n\tobj, err := c.Fake.\n\t\tInvokes(testing.NewListAction(daskclustersResource, daskclustersKind, c.ns, opts), &kubernetesdaskorgv1.DaskClusterList{})\n\n\tif obj == nil {\n\t\treturn nil, err\n\t}\n\n\tlabel, _, _ := testing.ExtractFromListOptions(opts)\n\tif label == nil {\n\t\tlabel = labels.Everything()\n\t}\n\tlist := &kubernetesdaskorgv1.DaskClusterList{ListMeta: obj.(*kubernetesdaskorgv1.DaskClusterList).ListMeta}\n\tfor _, item := range obj.(*kubernetesdaskorgv1.DaskClusterList).Items {\n\t\tif label.Matches(labels.Set(item.Labels)) {\n\t\t\tlist.Items = append(list.Items, item)\n\t\t}\n\t}\n\treturn list, err\n}", "func (client DeploymentsClient) ListForCluster(ctx context.Context, resourceGroupName string, serviceName string, version []string) (result DeploymentResourceCollectionPage, err error) {\n\tif tracing.IsEnabled() {\n\t\tctx = tracing.StartSpan(ctx, fqdn+\"/DeploymentsClient.ListForCluster\")\n\t\tdefer func() {\n\t\t\tsc := -1\n\t\t\tif result.drc.Response.Response != nil {\n\t\t\t\tsc = result.drc.Response.Response.StatusCode\n\t\t\t}\n\t\t\ttracing.EndSpan(ctx, sc, err)\n\t\t}()\n\t}\n\tresult.fn = client.listForClusterNextResults\n\treq, err := client.ListForClusterPreparer(ctx, resourceGroupName, serviceName, version)\n\tif err != nil {\n\t\terr = autorest.NewErrorWithError(err, \"appplatform.DeploymentsClient\", \"ListForCluster\", nil, \"Failure preparing request\")\n\t\treturn\n\t}\n\n\tresp, err := client.ListForClusterSender(req)\n\tif err != nil {\n\t\tresult.drc.Response = autorest.Response{Response: resp}\n\t\terr = autorest.NewErrorWithError(err, \"appplatform.DeploymentsClient\", \"ListForCluster\", resp, \"Failure sending request\")\n\t\treturn\n\t}\n\n\tresult.drc, err = client.ListForClusterResponder(resp)\n\tif err != nil {\n\t\terr = autorest.NewErrorWithError(err, \"appplatform.DeploymentsClient\", \"ListForCluster\", resp, \"Failure responding to request\")\n\t\treturn\n\t}\n\tif result.drc.hasNextLink() && result.drc.IsEmpty() {\n\t\terr = result.NextWithContext(ctx)\n\t\treturn\n\t}\n\n\treturn\n}", "func (client DeploymentsClient) listForClusterNextResults(ctx context.Context, lastResults DeploymentResourceCollection) (result DeploymentResourceCollection, err error) {\n\treq, err := lastResults.deploymentResourceCollectionPreparer(ctx)\n\tif err != nil {\n\t\treturn result, autorest.NewErrorWithError(err, \"appplatform.DeploymentsClient\", \"listForClusterNextResults\", nil, \"Failure preparing next results request\")\n\t}\n\tif req == nil {\n\t\treturn\n\t}\n\tresp, err := client.ListForClusterSender(req)\n\tif err != nil {\n\t\tresult.Response = autorest.Response{Response: resp}\n\t\treturn result, autorest.NewErrorWithError(err, \"appplatform.DeploymentsClient\", \"listForClusterNextResults\", resp, \"Failure sending next results request\")\n\t}\n\tresult, err = client.ListForClusterResponder(resp)\n\tif err != nil {\n\t\terr = autorest.NewErrorWithError(err, \"appplatform.DeploymentsClient\", \"listForClusterNextResults\", resp, \"Failure responding to next results request\")\n\t}\n\treturn\n}", "func (m *Manager) 
ListCluster() error {\n\tclusters, err := m.GetClusterList()\n\tif err != nil {\n\t\treturn err\n\t}\n\n\tswitch m.logger.GetDisplayMode() {\n\tcase logprinter.DisplayModeJSON:\n\t\tclusterObj := struct {\n\t\t\tClusters []Cluster `json:\"clusters\"`\n\t\t}{\n\t\t\tClusters: clusters,\n\t\t}\n\t\tdata, err := json.Marshal(clusterObj)\n\t\tif err != nil {\n\t\t\treturn err\n\t\t}\n\t\tfmt.Println(string(data))\n\tdefault:\n\t\tclusterTable := [][]string{\n\t\t\t// Header\n\t\t\t{\"Name\", \"User\", \"Version\", \"Path\", \"PrivateKey\"},\n\t\t}\n\t\tfor _, v := range clusters {\n\t\t\tclusterTable = append(clusterTable, []string{\n\t\t\t\tv.Name,\n\t\t\t\tv.User,\n\t\t\t\tv.Version,\n\t\t\t\tv.Path,\n\t\t\t\tv.PrivateKey,\n\t\t\t})\n\t\t}\n\t\ttui.PrintTable(clusterTable, true)\n\t}\n\treturn nil\n}", "func (api *clusterAPI) List(ctx context.Context, opts *api.ListWatchOptions) ([]*Cluster, error) {\n\tvar objlist []*Cluster\n\tobjs, err := api.ct.List(\"Cluster\", ctx, opts)\n\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\n\tfor _, obj := range objs {\n\t\tswitch tp := obj.(type) {\n\t\tcase *Cluster:\n\t\t\teobj := obj.(*Cluster)\n\t\t\tobjlist = append(objlist, eobj)\n\t\tdefault:\n\t\t\tlog.Fatalf(\"Got invalid object type %v while looking for Cluster\", tp)\n\t\t}\n\t}\n\n\treturn objlist, nil\n}", "func ExampleClustersClient_ListByResourceGroup() {\n\tcred, err := azidentity.NewDefaultAzureCredential(nil)\n\tif err != nil {\n\t\tlog.Fatalf(\"failed to obtain a credential: %v\", err)\n\t}\n\tctx := context.Background()\n\tclientFactory, err := armservicefabric.NewClientFactory(\"<subscription-id>\", cred, nil)\n\tif err != nil {\n\t\tlog.Fatalf(\"failed to create client: %v\", err)\n\t}\n\tres, err := clientFactory.NewClustersClient().ListByResourceGroup(ctx, \"resRg\", nil)\n\tif err != nil {\n\t\tlog.Fatalf(\"failed to finish the request: %v\", err)\n\t}\n\t// You could use response here. We use blank identifier for just demo purposes.\n\t_ = res\n\t// If the HTTP response code is 200 as defined in example definition, your response structure would look as follows. 
Please pay attention that all the values in the output are fake values for just demo purposes.\n\t// res.ClusterListResult = armservicefabric.ClusterListResult{\n\t// \tValue: []*armservicefabric.Cluster{\n\t// \t\t{\n\t// \t\t\tName: to.Ptr(\"myCluster\"),\n\t// \t\t\tType: to.Ptr(\"Microsoft.ServiceFabric/clusters\"),\n\t// \t\t\tEtag: to.Ptr(\"W/\\\"636462502169240745\\\"\"),\n\t// \t\t\tID: to.Ptr(\"/subscriptions/00000000-0000-0000-0000-000000000000/resourcegroups/resRg/providers/Microsoft.ServiceFabric/clusters/myCluster\"),\n\t// \t\t\tLocation: to.Ptr(\"eastus\"),\n\t// \t\t\tTags: map[string]*string{\n\t// \t\t\t},\n\t// \t\t\tProperties: &armservicefabric.ClusterProperties{\n\t// \t\t\t\tAddOnFeatures: []*armservicefabric.AddOnFeatures{\n\t// \t\t\t\t\tto.Ptr(armservicefabric.AddOnFeaturesRepairManager),\n\t// \t\t\t\t\tto.Ptr(armservicefabric.AddOnFeaturesDNSService),\n\t// \t\t\t\t\tto.Ptr(armservicefabric.AddOnFeaturesBackupRestoreService),\n\t// \t\t\t\t\tto.Ptr(armservicefabric.AddOnFeaturesResourceMonitorService)},\n\t// \t\t\t\t\tAvailableClusterVersions: []*armservicefabric.ClusterVersionDetails{\n\t// \t\t\t\t\t\t{\n\t// \t\t\t\t\t\t\tCodeVersion: to.Ptr(\"6.1.480.9494\"),\n\t// \t\t\t\t\t\t\tEnvironment: to.Ptr(armservicefabric.ClusterEnvironmentWindows),\n\t// \t\t\t\t\t\t\tSupportExpiryUTC: to.Ptr(\"2018-06-15T23:59:59.9999999\"),\n\t// \t\t\t\t\t}},\n\t// \t\t\t\t\tAzureActiveDirectory: &armservicefabric.AzureActiveDirectory{\n\t// \t\t\t\t\t\tClientApplication: to.Ptr(\"d151ad89-4bce-4ae8-b3d1-1dc79679fa75\"),\n\t// \t\t\t\t\t\tClusterApplication: to.Ptr(\"5886372e-7bf4-4878-a497-8098aba608ae\"),\n\t// \t\t\t\t\t\tTenantID: to.Ptr(\"6abcc6a0-8666-43f1-87b8-172cf86a9f9c\"),\n\t// \t\t\t\t\t},\n\t// \t\t\t\t\tCertificateCommonNames: &armservicefabric.ServerCertificateCommonNames{\n\t// \t\t\t\t\t\tCommonNames: []*armservicefabric.ServerCertificateCommonName{\n\t// \t\t\t\t\t\t\t{\n\t// \t\t\t\t\t\t\t\tCertificateCommonName: to.Ptr(\"abc.com\"),\n\t// \t\t\t\t\t\t\t\tCertificateIssuerThumbprint: to.Ptr(\"12599211F8F14C90AFA9532AD79A6F2CA1C00622\"),\n\t// \t\t\t\t\t\t}},\n\t// \t\t\t\t\t\tX509StoreName: to.Ptr(armservicefabric.StoreNameMy),\n\t// \t\t\t\t\t},\n\t// \t\t\t\t\tClientCertificateCommonNames: []*armservicefabric.ClientCertificateCommonName{\n\t// \t\t\t\t\t\t{\n\t// \t\t\t\t\t\t\tCertificateCommonName: to.Ptr(\"abc.com\"),\n\t// \t\t\t\t\t\t\tCertificateIssuerThumbprint: to.Ptr(\"5F3660C715EBBDA31DB1FFDCF508302348DE8E7A\"),\n\t// \t\t\t\t\t\t\tIsAdmin: to.Ptr(true),\n\t// \t\t\t\t\t}},\n\t// \t\t\t\t\tClientCertificateThumbprints: []*armservicefabric.ClientCertificateThumbprint{\n\t// \t\t\t\t\t\t{\n\t// \t\t\t\t\t\t\tCertificateThumbprint: to.Ptr(\"5F3660C715EBBDA31DB1FFDCF508302348DE8E7A\"),\n\t// \t\t\t\t\t\t\tIsAdmin: to.Ptr(false),\n\t// \t\t\t\t\t}},\n\t// \t\t\t\t\tClusterCodeVersion: to.Ptr(\"6.1.480.9494\"),\n\t// \t\t\t\t\tClusterEndpoint: to.Ptr(\"https://eastus.servicefabric.azure.com\"),\n\t// \t\t\t\t\tClusterID: to.Ptr(\"92584666-9889-4ae8-8d02-91902923d37f\"),\n\t// \t\t\t\t\tClusterState: to.Ptr(armservicefabric.ClusterStateWaitingForNodes),\n\t// \t\t\t\t\tDiagnosticsStorageAccountConfig: &armservicefabric.DiagnosticsStorageAccountConfig{\n\t// \t\t\t\t\t\tBlobEndpoint: to.Ptr(\"https://diag.blob.core.windows.net/\"),\n\t// \t\t\t\t\t\tProtectedAccountKeyName: to.Ptr(\"StorageAccountKey1\"),\n\t// \t\t\t\t\t\tQueueEndpoint: to.Ptr(\"https://diag.queue.core.windows.net/\"),\n\t// \t\t\t\t\t\tStorageAccountName: to.Ptr(\"diag\"),\n\t// 
\t\t\t\t\t\tTableEndpoint: to.Ptr(\"https://diag.table.core.windows.net/\"),\n\t// \t\t\t\t\t},\n\t// \t\t\t\t\tFabricSettings: []*armservicefabric.SettingsSectionDescription{\n\t// \t\t\t\t\t\t{\n\t// \t\t\t\t\t\t\tName: to.Ptr(\"UpgradeService\"),\n\t// \t\t\t\t\t\t\tParameters: []*armservicefabric.SettingsParameterDescription{\n\t// \t\t\t\t\t\t\t\t{\n\t// \t\t\t\t\t\t\t\t\tName: to.Ptr(\"AppPollIntervalInSeconds\"),\n\t// \t\t\t\t\t\t\t\t\tValue: to.Ptr(\"60\"),\n\t// \t\t\t\t\t\t\t}},\n\t// \t\t\t\t\t}},\n\t// \t\t\t\t\tManagementEndpoint: to.Ptr(\"https://myCluster.eastus.cloudapp.azure.com:19080\"),\n\t// \t\t\t\t\tNodeTypes: []*armservicefabric.NodeTypeDescription{\n\t// \t\t\t\t\t\t{\n\t// \t\t\t\t\t\t\tName: to.Ptr(\"nt1vm\"),\n\t// \t\t\t\t\t\t\tApplicationPorts: &armservicefabric.EndpointRangeDescription{\n\t// \t\t\t\t\t\t\t\tEndPort: to.Ptr[int32](30000),\n\t// \t\t\t\t\t\t\t\tStartPort: to.Ptr[int32](20000),\n\t// \t\t\t\t\t\t\t},\n\t// \t\t\t\t\t\t\tClientConnectionEndpointPort: to.Ptr[int32](19000),\n\t// \t\t\t\t\t\t\tDurabilityLevel: to.Ptr(armservicefabric.DurabilityLevelBronze),\n\t// \t\t\t\t\t\t\tEphemeralPorts: &armservicefabric.EndpointRangeDescription{\n\t// \t\t\t\t\t\t\t\tEndPort: to.Ptr[int32](64000),\n\t// \t\t\t\t\t\t\t\tStartPort: to.Ptr[int32](49000),\n\t// \t\t\t\t\t\t\t},\n\t// \t\t\t\t\t\t\tHTTPGatewayEndpointPort: to.Ptr[int32](19007),\n\t// \t\t\t\t\t\t\tIsPrimary: to.Ptr(true),\n\t// \t\t\t\t\t\t\tVMInstanceCount: to.Ptr[int32](5),\n\t// \t\t\t\t\t}},\n\t// \t\t\t\t\tProvisioningState: to.Ptr(armservicefabric.ProvisioningStateSucceeded),\n\t// \t\t\t\t\tReliabilityLevel: to.Ptr(armservicefabric.ReliabilityLevelSilver),\n\t// \t\t\t\t\tReverseProxyCertificateCommonNames: &armservicefabric.ServerCertificateCommonNames{\n\t// \t\t\t\t\t\tCommonNames: []*armservicefabric.ServerCertificateCommonName{\n\t// \t\t\t\t\t\t\t{\n\t// \t\t\t\t\t\t\t\tCertificateCommonName: to.Ptr(\"abc.com\"),\n\t// \t\t\t\t\t\t\t\tCertificateIssuerThumbprint: to.Ptr(\"12599211F8F14C90AFA9532AD79A6F2CA1C00622\"),\n\t// \t\t\t\t\t\t}},\n\t// \t\t\t\t\t\tX509StoreName: to.Ptr(armservicefabric.StoreNameMy),\n\t// \t\t\t\t\t},\n\t// \t\t\t\t\tUpgradeDescription: &armservicefabric.ClusterUpgradePolicy{\n\t// \t\t\t\t\t\tDeltaHealthPolicy: &armservicefabric.ClusterUpgradeDeltaHealthPolicy{\n\t// \t\t\t\t\t\t\tApplicationDeltaHealthPolicies: map[string]*armservicefabric.ApplicationDeltaHealthPolicy{\n\t// \t\t\t\t\t\t\t\t\"fabric:/myApp1\": &armservicefabric.ApplicationDeltaHealthPolicy{\n\t// \t\t\t\t\t\t\t\t\tDefaultServiceTypeDeltaHealthPolicy: &armservicefabric.ServiceTypeDeltaHealthPolicy{\n\t// \t\t\t\t\t\t\t\t\t\tMaxPercentDeltaUnhealthyServices: to.Ptr[int32](0),\n\t// \t\t\t\t\t\t\t\t\t},\n\t// \t\t\t\t\t\t\t\t\tServiceTypeDeltaHealthPolicies: map[string]*armservicefabric.ServiceTypeDeltaHealthPolicy{\n\t// \t\t\t\t\t\t\t\t\t\t\"myServiceType1\": &armservicefabric.ServiceTypeDeltaHealthPolicy{\n\t// \t\t\t\t\t\t\t\t\t\t\tMaxPercentDeltaUnhealthyServices: to.Ptr[int32](0),\n\t// \t\t\t\t\t\t\t\t\t\t},\n\t// \t\t\t\t\t\t\t\t\t},\n\t// \t\t\t\t\t\t\t\t},\n\t// \t\t\t\t\t\t\t},\n\t// \t\t\t\t\t\t\tMaxPercentDeltaUnhealthyApplications: to.Ptr[int32](0),\n\t// \t\t\t\t\t\t\tMaxPercentDeltaUnhealthyNodes: to.Ptr[int32](0),\n\t// \t\t\t\t\t\t\tMaxPercentUpgradeDomainDeltaUnhealthyNodes: to.Ptr[int32](0),\n\t// \t\t\t\t\t\t},\n\t// \t\t\t\t\t\tForceRestart: to.Ptr(false),\n\t// \t\t\t\t\t\tHealthCheckRetryTimeout: to.Ptr(\"00:05:00\"),\n\t// \t\t\t\t\t\tHealthCheckStableDuration: 
to.Ptr(\"00:00:30\"),\n\t// \t\t\t\t\t\tHealthCheckWaitDuration: to.Ptr(\"00:00:30\"),\n\t// \t\t\t\t\t\tHealthPolicy: &armservicefabric.ClusterHealthPolicy{\n\t// \t\t\t\t\t\t\tApplicationHealthPolicies: map[string]*armservicefabric.ApplicationHealthPolicy{\n\t// \t\t\t\t\t\t\t\t\"fabric:/myApp1\": &armservicefabric.ApplicationHealthPolicy{\n\t// \t\t\t\t\t\t\t\t\tDefaultServiceTypeHealthPolicy: &armservicefabric.ServiceTypeHealthPolicy{\n\t// \t\t\t\t\t\t\t\t\t\tMaxPercentUnhealthyServices: to.Ptr[int32](0),\n\t// \t\t\t\t\t\t\t\t\t},\n\t// \t\t\t\t\t\t\t\t\tServiceTypeHealthPolicies: map[string]*armservicefabric.ServiceTypeHealthPolicy{\n\t// \t\t\t\t\t\t\t\t\t\t\"myServiceType1\": &armservicefabric.ServiceTypeHealthPolicy{\n\t// \t\t\t\t\t\t\t\t\t\t\tMaxPercentUnhealthyServices: to.Ptr[int32](100),\n\t// \t\t\t\t\t\t\t\t\t\t},\n\t// \t\t\t\t\t\t\t\t\t},\n\t// \t\t\t\t\t\t\t\t},\n\t// \t\t\t\t\t\t\t},\n\t// \t\t\t\t\t\t\tMaxPercentUnhealthyApplications: to.Ptr[int32](0),\n\t// \t\t\t\t\t\t\tMaxPercentUnhealthyNodes: to.Ptr[int32](0),\n\t// \t\t\t\t\t\t},\n\t// \t\t\t\t\t\tUpgradeDomainTimeout: to.Ptr(\"00:15:00\"),\n\t// \t\t\t\t\t\tUpgradeReplicaSetCheckTimeout: to.Ptr(\"00:10:00\"),\n\t// \t\t\t\t\t\tUpgradeTimeout: to.Ptr(\"01:00:00\"),\n\t// \t\t\t\t\t},\n\t// \t\t\t\t\tUpgradeMode: to.Ptr(armservicefabric.UpgradeModeManual),\n\t// \t\t\t\t\tVMImage: to.Ptr(\"Windows\"),\n\t// \t\t\t\t},\n\t// \t\t\t},\n\t// \t\t\t{\n\t// \t\t\t\tName: to.Ptr(\"myCluster2\"),\n\t// \t\t\t\tType: to.Ptr(\"Microsoft.ServiceFabric/clusters\"),\n\t// \t\t\t\tEtag: to.Ptr(\"W/\\\"636462502164040075\\\"\"),\n\t// \t\t\t\tID: to.Ptr(\"/subscriptions/00000000-0000-0000-0000-000000000000/resourcegroups/resRg/providers/Microsoft.ServiceFabric/clusters/myCluster2\"),\n\t// \t\t\t\tLocation: to.Ptr(\"eastus\"),\n\t// \t\t\t\tTags: map[string]*string{\n\t// \t\t\t\t},\n\t// \t\t\t\tProperties: &armservicefabric.ClusterProperties{\n\t// \t\t\t\t\tAddOnFeatures: []*armservicefabric.AddOnFeatures{\n\t// \t\t\t\t\t\tto.Ptr(armservicefabric.AddOnFeaturesRepairManager)},\n\t// \t\t\t\t\t\tAvailableClusterVersions: []*armservicefabric.ClusterVersionDetails{\n\t// \t\t\t\t\t\t\t{\n\t// \t\t\t\t\t\t\t\tCodeVersion: to.Ptr(\"6.1.187.1\"),\n\t// \t\t\t\t\t\t\t\tEnvironment: to.Ptr(armservicefabric.ClusterEnvironmentLinux),\n\t// \t\t\t\t\t\t\t\tSupportExpiryUTC: to.Ptr(\"2018-06-15T23:59:59.9999999\"),\n\t// \t\t\t\t\t\t}},\n\t// \t\t\t\t\t\tClientCertificateCommonNames: []*armservicefabric.ClientCertificateCommonName{\n\t// \t\t\t\t\t\t},\n\t// \t\t\t\t\t\tClientCertificateThumbprints: []*armservicefabric.ClientCertificateThumbprint{\n\t// \t\t\t\t\t\t},\n\t// \t\t\t\t\t\tClusterCodeVersion: to.Ptr(\"6.1.187.1\"),\n\t// \t\t\t\t\t\tClusterEndpoint: to.Ptr(\"https://eastus.servicefabric.azure.com\"),\n\t// \t\t\t\t\t\tClusterID: to.Ptr(\"2747e469-b24e-4039-8a0a-46151419523f\"),\n\t// \t\t\t\t\t\tClusterState: to.Ptr(armservicefabric.ClusterStateWaitingForNodes),\n\t// \t\t\t\t\t\tDiagnosticsStorageAccountConfig: &armservicefabric.DiagnosticsStorageAccountConfig{\n\t// \t\t\t\t\t\t\tBlobEndpoint: to.Ptr(\"https://diag.blob.core.windows.net/\"),\n\t// \t\t\t\t\t\t\tProtectedAccountKeyName: to.Ptr(\"StorageAccountKey1\"),\n\t// \t\t\t\t\t\t\tQueueEndpoint: to.Ptr(\"https://diag.queue.core.windows.net/\"),\n\t// \t\t\t\t\t\t\tStorageAccountName: to.Ptr(\"diag\"),\n\t// \t\t\t\t\t\t\tTableEndpoint: to.Ptr(\"https://diag.table.core.windows.net/\"),\n\t// \t\t\t\t\t\t},\n\t// \t\t\t\t\t\tFabricSettings: 
[]*armservicefabric.SettingsSectionDescription{\n\t// \t\t\t\t\t\t\t{\n\t// \t\t\t\t\t\t\t\tName: to.Ptr(\"UpgradeService\"),\n\t// \t\t\t\t\t\t\t\tParameters: []*armservicefabric.SettingsParameterDescription{\n\t// \t\t\t\t\t\t\t\t\t{\n\t// \t\t\t\t\t\t\t\t\t\tName: to.Ptr(\"AppPollIntervalInSeconds\"),\n\t// \t\t\t\t\t\t\t\t\t\tValue: to.Ptr(\"60\"),\n\t// \t\t\t\t\t\t\t\t}},\n\t// \t\t\t\t\t\t}},\n\t// \t\t\t\t\t\tManagementEndpoint: to.Ptr(\"http://myCluster2.eastus.cloudapp.azure.com:19080\"),\n\t// \t\t\t\t\t\tNodeTypes: []*armservicefabric.NodeTypeDescription{\n\t// \t\t\t\t\t\t\t{\n\t// \t\t\t\t\t\t\t\tName: to.Ptr(\"nt1vm\"),\n\t// \t\t\t\t\t\t\t\tApplicationPorts: &armservicefabric.EndpointRangeDescription{\n\t// \t\t\t\t\t\t\t\t\tEndPort: to.Ptr[int32](30000),\n\t// \t\t\t\t\t\t\t\t\tStartPort: to.Ptr[int32](20000),\n\t// \t\t\t\t\t\t\t\t},\n\t// \t\t\t\t\t\t\t\tClientConnectionEndpointPort: to.Ptr[int32](19000),\n\t// \t\t\t\t\t\t\t\tDurabilityLevel: to.Ptr(armservicefabric.DurabilityLevelBronze),\n\t// \t\t\t\t\t\t\t\tEphemeralPorts: &armservicefabric.EndpointRangeDescription{\n\t// \t\t\t\t\t\t\t\t\tEndPort: to.Ptr[int32](64000),\n\t// \t\t\t\t\t\t\t\t\tStartPort: to.Ptr[int32](49000),\n\t// \t\t\t\t\t\t\t\t},\n\t// \t\t\t\t\t\t\t\tHTTPGatewayEndpointPort: to.Ptr[int32](19007),\n\t// \t\t\t\t\t\t\t\tIsPrimary: to.Ptr(true),\n\t// \t\t\t\t\t\t\t\tVMInstanceCount: to.Ptr[int32](5),\n\t// \t\t\t\t\t\t}},\n\t// \t\t\t\t\t\tProvisioningState: to.Ptr(armservicefabric.ProvisioningStateSucceeded),\n\t// \t\t\t\t\t\tReliabilityLevel: to.Ptr(armservicefabric.ReliabilityLevelSilver),\n\t// \t\t\t\t\t\tUpgradeDescription: &armservicefabric.ClusterUpgradePolicy{\n\t// \t\t\t\t\t\t\tDeltaHealthPolicy: &armservicefabric.ClusterUpgradeDeltaHealthPolicy{\n\t// \t\t\t\t\t\t\t\tMaxPercentDeltaUnhealthyApplications: to.Ptr[int32](0),\n\t// \t\t\t\t\t\t\t\tMaxPercentDeltaUnhealthyNodes: to.Ptr[int32](0),\n\t// \t\t\t\t\t\t\t\tMaxPercentUpgradeDomainDeltaUnhealthyNodes: to.Ptr[int32](0),\n\t// \t\t\t\t\t\t\t},\n\t// \t\t\t\t\t\t\tForceRestart: to.Ptr(false),\n\t// \t\t\t\t\t\t\tHealthCheckRetryTimeout: to.Ptr(\"00:05:00\"),\n\t// \t\t\t\t\t\t\tHealthCheckStableDuration: to.Ptr(\"00:00:30\"),\n\t// \t\t\t\t\t\t\tHealthCheckWaitDuration: to.Ptr(\"00:00:30\"),\n\t// \t\t\t\t\t\t\tHealthPolicy: &armservicefabric.ClusterHealthPolicy{\n\t// \t\t\t\t\t\t\t\tMaxPercentUnhealthyApplications: to.Ptr[int32](0),\n\t// \t\t\t\t\t\t\t\tMaxPercentUnhealthyNodes: to.Ptr[int32](0),\n\t// \t\t\t\t\t\t\t},\n\t// \t\t\t\t\t\t\tUpgradeDomainTimeout: to.Ptr(\"00:15:00\"),\n\t// \t\t\t\t\t\t\tUpgradeReplicaSetCheckTimeout: to.Ptr(\"00:10:00\"),\n\t// \t\t\t\t\t\t\tUpgradeTimeout: to.Ptr(\"01:00:00\"),\n\t// \t\t\t\t\t\t},\n\t// \t\t\t\t\t\tUpgradeMode: to.Ptr(armservicefabric.UpgradeModeManual),\n\t// \t\t\t\t\t\tVMImage: to.Ptr(\"Ubuntu\"),\n\t// \t\t\t\t\t},\n\t// \t\t\t}},\n\t// \t\t}\n}", "func ListRegistered() []string {\n\tvar out []string\n\n\tfor name := range registeredCalendars {\n\t\tout = append(out, name)\n\t}\n\tsort.Strings(out)\n\n\treturn out\n}", "func (page *ClusterListPage) Next() error {\n\treturn page.NextWithContext(context.Background())\n}", "func GetAllIngressesFromCluster(clientset *kubernetes.Clientset) (*networkingv1.IngressList, error) {\n\tingressList, err := clientset.NetworkingV1().Ingresses(\"\").List(context.TODO(), metav1.ListOptions{})\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\treturn ingressList, nil\n}", "func (a *Client) VirtualizationClusterTypesList(params 
*VirtualizationClusterTypesListParams) (*VirtualizationClusterTypesListOK, error) {\n\t// TODO: Validate the params before sending\n\tif params == nil {\n\t\tparams = NewVirtualizationClusterTypesListParams()\n\t}\n\n\tresult, err := a.transport.Submit(&runtime.ClientOperation{\n\t\tID: \"virtualization_cluster-types_list\",\n\t\tMethod: \"GET\",\n\t\tPathPattern: \"/api/virtualization/cluster-types/\",\n\t\tProducesMediaTypes: []string{\"application/json\"},\n\t\tConsumesMediaTypes: []string{\"application/json\"},\n\t\tSchemes: []string{\"http\"},\n\t\tParams: params,\n\t\tReader: &VirtualizationClusterTypesListReader{formats: a.formats},\n\t\tContext: params.Context,\n\t\tClient: params.HTTPClient,\n\t})\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\treturn result.(*VirtualizationClusterTypesListOK), nil\n\n}", "func NewClusterListResultIterator(page ClusterListResultPage) ClusterListResultIterator {\n\treturn ClusterListResultIterator{page: page}\n}", "func (client DeploymentsClient) ListForClusterComplete(ctx context.Context, resourceGroupName string, serviceName string, version []string) (result DeploymentResourceCollectionIterator, err error) {\n\tif tracing.IsEnabled() {\n\t\tctx = tracing.StartSpan(ctx, fqdn+\"/DeploymentsClient.ListForCluster\")\n\t\tdefer func() {\n\t\t\tsc := -1\n\t\t\tif result.Response().Response.Response != nil {\n\t\t\t\tsc = result.page.Response().Response.Response.StatusCode\n\t\t\t}\n\t\t\ttracing.EndSpan(ctx, sc, err)\n\t\t}()\n\t}\n\tresult.page, err = client.ListForCluster(ctx, resourceGroupName, serviceName, version)\n\treturn\n}", "func idcList() []string {\n\tm, _ := idcRegion.Load().(map[string]string)\n\tidcList := make([]string, 0, len(m))\n\tfor dc, _ := range m {\n\t\tidcList = append(idcList, dc)\n\t}\n\treturn idcList\n}", "func (page ClusterListPage) Values() []Cluster {\n\tif page.cl.IsEmpty() {\n\t\treturn nil\n\t}\n\treturn *page.cl.Value\n}", "func (page *ClusterListResultPage) Next() error {\n\treturn page.NextWithContext(context.Background())\n}", "func (c *Client) GetFloatingIPsForCluster(name string) ([]*FloatingIP, error) {\n\treq, err := http.NewRequest(\"GET\", hetznerAPIBaseURL+\"/floating_ips\", nil)\n\tif err != nil {\n\t\treturn nil, fmt.Errorf(\"error creating request: %w\", err)\n\t}\n\tq := req.URL.Query()\n\tq.Add(\"label_selector\", \"cluster==\"+name)\n\treq.URL.RawQuery = q.Encode()\n\treq.Header.Add(\"Authorization\", \"Bearer \"+c.hCloudToken)\n\tresp, err := c.httpClient.Do(req)\n\tif err != nil {\n\t\tvar neterr net.Error\n\t\tif errors.As(err, &neterr) && (neterr.Timeout() || neterr.Temporary()) {\n\t\t\treturn nil, &errorx.RetryableError{Message: \"timeout or temporary error in HTTP request\", Err: neterr}\n\t\t}\n\t\treturn nil, fmt.Errorf(\"error in http request: %w\", err)\n\t}\n\tdefer resp.Body.Close()\n\tif resp.StatusCode != 200 {\n\t\treturn nil, fmt.Errorf(\"unexpected status code %d != 200\", resp.StatusCode)\n\t}\n\tvar rawFloatingIPs struct {\n\t\tFloatingIPs []struct {\n\t\t\tType string `json:\"type\"`\n\t\t\tIP string `json:\"ip\"`\n\t\t} `json:\"floating_ips\"`\n\t}\n\tdecoder := json.NewDecoder(resp.Body)\n\terr = decoder.Decode(&rawFloatingIPs)\n\tif err != nil {\n\t\treturn nil, fmt.Errorf(\"error unmarshalling JSON: %w\", err)\n\t}\n\tvar floatingIPs []*FloatingIP\n\tfor _, rawIP := range rawFloatingIPs.FloatingIPs {\n\t\tip := &FloatingIP{IP: rawIP.IP}\n\t\tswitch rawIP.Type {\n\t\tcase \"ipv4\":\n\t\t\tip.Type = FloatingIPv4\n\t\tcase \"ipv6\":\n\t\t\tip.Type = 
FloatingIPv6\n\t\tdefault:\n\t\t\treturn nil, fmt.Errorf(\"unexpected IP type '%s'\", rawIP.Type)\n\t\t}\n\t\tfloatingIPs = append(floatingIPs, ip)\n\t}\n\treturn floatingIPs, nil\n}", "func parseIPv6AddressInPxctlClusterList(output string, nodeCount int) []string {\n\toption := newIPv6ParserOption(\"ID\\t\", 2, nodeCount)\n\tp := newIPv6Parser([]parserOption{option})\n\treturn p.parse(output)\n}", "func (re *stubRegistrationService) ListByParentID(ctx context.Context, request pb.ParentID) (reply common.RegistrationEntries, err error) {\n\tentries, err := re.registration.ListEntryByParentSpiffeID(request.Id)\n\treply = common.RegistrationEntries{Entries: entries}\n\treturn reply, err\n}", "func (a *Client) VirtualizationClusterTypesRead(params *VirtualizationClusterTypesReadParams) (*VirtualizationClusterTypesReadOK, error) {\n\t// TODO: Validate the params before sending\n\tif params == nil {\n\t\tparams = NewVirtualizationClusterTypesReadParams()\n\t}\n\n\tresult, err := a.transport.Submit(&runtime.ClientOperation{\n\t\tID: \"virtualization_cluster-types_read\",\n\t\tMethod: \"GET\",\n\t\tPathPattern: \"/api/virtualization/cluster-types/{id}/\",\n\t\tProducesMediaTypes: []string{\"application/json\"},\n\t\tConsumesMediaTypes: []string{\"application/json\"},\n\t\tSchemes: []string{\"http\"},\n\t\tParams: params,\n\t\tReader: &VirtualizationClusterTypesReadReader{formats: a.formats},\n\t\tContext: params.Context,\n\t\tClient: params.HTTPClient,\n\t})\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\treturn result.(*VirtualizationClusterTypesReadOK), nil\n\n}", "func (a *HyperflexApiService) GetHyperflexProtectedClusterList(ctx context.Context) ApiGetHyperflexProtectedClusterListRequest {\n\treturn ApiGetHyperflexProtectedClusterListRequest{\n\t\tApiService: a,\n\t\tctx: ctx,\n\t}\n}", "func (m *manager) List() ([]string, error) {\n\tvar igs []*compute.InstanceGroup\n\n\tzones, err := m.ListZones(utils.AllNodesPredicate)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\n\tfor _, zone := range zones {\n\t\tigsForZone, err := m.cloud.ListInstanceGroups(zone)\n\t\tif err != nil {\n\t\t\treturn nil, err\n\t\t}\n\n\t\tfor _, ig := range igsForZone {\n\t\t\tigs = append(igs, ig)\n\t\t}\n\t}\n\n\tvar names []string\n\tfor _, ig := range igs {\n\t\tif m.namer.NameBelongsToCluster(ig.Name) {\n\t\t\tnames = append(names, ig.Name)\n\t\t}\n\t}\n\n\treturn names, nil\n}", "func (c *gcLifecycle) Remove(cluster *v3.Cluster) (runtime.Object, error) {\n\tif err := c.waitForNodeRemoval(cluster); err != nil {\n\t\treturn cluster, err // ErrSkip if we still need to wait\n\t}\n\n\tRESTconfig := c.mgmt.RESTConfig\n\t// due to the large number of api calls, temporary raise the burst limit in order to reduce client throttling\n\tRESTconfig.Burst = 25\n\tdynamicClient, err := dynamic.NewForConfig(&RESTconfig)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\tdecodedMap := resource.GetClusterScopedTypes()\n\t//if map is empty, fall back to checking all Rancher types\n\tif len(decodedMap) == 0 {\n\t\tdecodedMap = resource.Get()\n\t}\n\tvar g errgroup.Group\n\n\tfor key := range decodedMap {\n\t\tactualKey := key // https://golang.org/doc/faq#closures_and_goroutines\n\t\tg.Go(func() error {\n\t\t\tobjList, err := dynamicClient.Resource(actualKey).List(context.TODO(), metav1.ListOptions{})\n\t\t\tif err != nil {\n\t\t\t\tif errors.IsNotFound(err) {\n\t\t\t\t\treturn nil\n\t\t\t\t}\n\t\t\t\treturn err\n\t\t\t}\n\t\t\tfor _, obj := range objList.Items {\n\t\t\t\t_, err = cleanFinalizers(cluster.Name, &obj, 
dynamicClient.Resource(actualKey).Namespace(obj.GetNamespace()))\n\t\t\t\tif err != nil {\n\t\t\t\t\treturn err\n\t\t\t\t}\n\t\t\t}\n\t\t\treturn nil\n\t\t})\n\t}\n\tif err = g.Wait(); err != nil {\n\t\treturn nil, err\n\t}\n\treturn nil, nil\n}", "func (p *Provider) List() ([]string, error) {\n\treturn p.provider.ListClusters()\n}", "func (m *manager) listIGInstances(name string) (sets.String, error) {\n\tnodeNames := sets.NewString()\n\tzones, err := m.ListZones(utils.AllNodesPredicate)\n\tif err != nil {\n\t\treturn nodeNames, err\n\t}\n\n\tfor _, zone := range zones {\n\t\tinstances, err := m.cloud.ListInstancesInInstanceGroup(name, zone, allInstances)\n\t\tif err != nil {\n\t\t\treturn nodeNames, err\n\t\t}\n\t\tfor _, ins := range instances {\n\t\t\tname, err := utils.KeyName(ins.Instance)\n\t\t\tif err != nil {\n\t\t\t\treturn nodeNames, err\n\t\t\t}\n\t\t\tnodeNames.Insert(name)\n\t\t}\n\t}\n\treturn nodeNames, nil\n}", "func GetRedshiftClusterConfig(c *redshift.Cluster) []AWSResourceConfig {\n\tcf := RedshiftClusterConfig{\n\t\tConfig: Config{\n\t\t\tName: c.DBName,\n\t\t\tTags: c.Tags,\n\t\t},\n\t\tKmsKeyID: c.KmsKeyId,\n\t\tEncrypted: c.Encrypted,\n\t\tPubliclyAccessible: c.PubliclyAccessible,\n\t}\n\tif c.LoggingProperties != nil {\n\t\t// if LoggingProperties are mentioned in cft,\n\t\t// its always enabled\n\t\tlogging := make(map[string]bool)\n\t\tlogging[\"enable\"] = true\n\t\tcf.LoggingProperties = []map[string]bool{logging}\n\t}\n\treturn []AWSResourceConfig{{\n\t\tResource: cf,\n\t\tMetadata: c.AWSCloudFormationMetadata,\n\t}}\n}", "func (o CidrCollectionLocationOutput) CidrList() pulumi.StringArrayOutput {\n\treturn o.ApplyT(func(v CidrCollectionLocation) []string { return v.CidrList }).(pulumi.StringArrayOutput)\n}", "func expandPrivateCloudManagementClusterSlice(c *Client, f []PrivateCloudManagementCluster, res *PrivateCloud) ([]map[string]interface{}, error) {\n\tif f == nil {\n\t\treturn nil, nil\n\t}\n\n\titems := []map[string]interface{}{}\n\tfor _, item := range f {\n\t\ti, err := expandPrivateCloudManagementCluster(c, &item, res)\n\t\tif err != nil {\n\t\t\treturn nil, err\n\t\t}\n\n\t\titems = append(items, i)\n\t}\n\n\treturn items, nil\n}", "func GetAllClusterNode(client redigo.Conn, role string, choose string) ([]string, error) {\n\tret, err := client.Do(\"cluster\", \"nodes\")\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\n\tnodeList := ParseClusterNode(ret.([]byte))\n\tnodeListChoose := ClusterNodeChoose(nodeList, role)\n\n\tresult := make([]string, 0, len(nodeListChoose))\n\tfor _, ele := range nodeListChoose {\n\t\tif choose == \"id\" {\n\t\t\tresult = append(result, ele.Id)\n\t\t} else {\n\t\t\tresult = append(result, ele.Address)\n\t\t}\n\t}\n\n\treturn result, nil\n}", "func (r *NodeRegistry) List() []ipfs.NodeInfo {\n\tvar (\n\t\tnodes = make([]ipfs.NodeInfo, len(r.nodes))\n\t\ti = 0\n\t)\n\n\tr.nm.RLock()\n\tfor _, n := range r.nodes {\n\t\tnodes[i] = *n\n\t\ti++\n\t}\n\tr.nm.RUnlock()\n\n\treturn nodes\n}", "func (s *recoveryServicesProtectionContainerLister) List(selector labels.Selector) (ret []*v1alpha1.RecoveryServicesProtectionContainer, err error) {\n\terr = cache.ListAll(s.indexer, selector, func(m interface{}) {\n\t\tret = append(ret, m.(*v1alpha1.RecoveryServicesProtectionContainer))\n\t})\n\treturn ret, err\n}", "func (c *Cluster) List() dcs.List {\n\treturn dcs.List{\n\t\tOptions: c.cfg.DCOptions,\n\t\tDomains: c.domains,\n\t}\n}", "func (page RegistrationDefinitionListPage) Values() []RegistrationDefinition {\n\tif page.rdl.IsEmpty() 
{\n\t\treturn nil\n\t}\n\treturn *page.rdl.Value\n}", "func ListRegisteredValues() []string {\n\tvals := make([]string, len(valueMap))\n\n\ti := 0\n\tfor key := range valueMap {\n\t\tvals[i] = string(key)\n\t\ti++\n\t}\n\n\treturn vals\n}", "func ListAllCluster(c echo.Context) error {\n\tcblog.Info(\"call ListAllCluster()\")\n\n\tvar req struct {\n\t\tNameSpace string\n\t\tConnectionName string\n\t}\n\n\tif err := c.Bind(&req); err != nil {\n\t\treturn echo.NewHTTPError(http.StatusInternalServerError, err.Error())\n\t}\n\n\t// To support for Get-Query Param Type API\n\tif req.ConnectionName == \"\" {\n\t\treq.ConnectionName = c.QueryParam(\"ConnectionName\")\n\t}\n\n\t// Call common-runtime API\n\tallResourceList, err := cmrt.ListAllResource(req.ConnectionName, rsCluster)\n\tif err != nil {\n\t\treturn echo.NewHTTPError(http.StatusInternalServerError, err.Error())\n\t}\n\n\t// To support for Get-Query Param Type API\n\tif req.NameSpace == \"\" {\n\t\treq.NameSpace = c.QueryParam(\"NameSpace\")\n\t}\n\n\t// Resource Name has namespace prefix when from Tumblebug\n\tif req.NameSpace != \"\" {\n\t\tnameSpace := req.NameSpace + \"-\"\n\t\tfor idx, IID := range allResourceList.AllList.MappedList {\n\t\t\tif IID.NameId != \"\" {\n\t\t\t\tallResourceList.AllList.MappedList[idx].NameId = strings.Replace(IID.NameId, nameSpace, \"\", 1)\n\t\t\t}\n\t\t}\n\t\tfor idx, IID := range allResourceList.AllList.OnlySpiderList {\n\t\t\tif IID.NameId != \"\" {\n\t\t\t\tallResourceList.AllList.OnlySpiderList[idx].NameId = strings.Replace(IID.NameId, nameSpace, \"\", 1)\n\t\t\t}\n\t\t}\n\t\tfor idx, IID := range allResourceList.AllList.OnlyCSPList {\n\t\t\tif IID.NameId != \"\" {\n\t\t\t\tallResourceList.AllList.OnlyCSPList[idx].NameId = strings.Replace(IID.NameId, nameSpace, \"\", 1)\n\t\t\t}\n\t\t}\n\t}\n\n\tvar jsonResult struct {\n\t\tConnection string\n\t\tAllResourceList *cmrt.AllResourceList\n\t}\n\tjsonResult.Connection = req.ConnectionName\n\tjsonResult.AllResourceList = &allResourceList\n\n\treturn c.JSON(http.StatusOK, &jsonResult)\n}", "func (c *ConfigCache) FetchAndConvertInsuranceGroupFactorList() (error) {\n log.Println(\"In FetchAndConvertInsuranceGroupFactorList \")\n var temp []models.InsuranceGroupFactor\n\tres, err := c.Fetcher.ReadFileAndGetAsObject(\"insurance-group-factor.json\" , temp)\n\tif err != nil {\n\t\tlog.Println(\"error reading the config file: \", err)\n\t\treturn err\n\t}\n\tlog.Printf(\"List : %+v\", res)\n\ttemp = res.([]models.InsuranceGroupFactor)\n factorMapper := util.FactorMapper{}\n \tc.InsuranceGroupFactorList = factorMapper.InsuranceGroupFactorToRangeConfig(temp)\n log.Println(\"Mapped range config from file: +v\", c.InsuranceGroupFactorList)\n return nil\n}", "func ClusterListOpt(clusterName string) metav1.ListOptions {\n\treturn metav1.ListOptions{\n\t\tLabelSelector: labels.SelectorFromSet(LabelsForCluster(clusterName)).String(),\n\t}\n}", "func ClusterListOpt(clusterName string) metav1.ListOptions {\n\treturn metav1.ListOptions{\n\t\tLabelSelector: labels.SelectorFromSet(LabelsForCluster(clusterName)).String(),\n\t}\n}", "func (s *StorageClusterAPI) List(w http.ResponseWriter, r *http.Request) {\n\tclusters, err := s.storageClusterService.List()\n\tif err != nil {\n\t\tapi.Error(w, err)\n\t\treturn\n\t}\n\tapi.OK(w, clusters)\n}", "func rgwGetGCTaskList(config string, user string) ([]byte, error) {\n\tvar (\n\t\tout []byte\n\t\terr error\n\t)\n\n\tif out, err = exec.Command(radosgwAdminPath, \"-c\", config, \"--user\", user, \"gc\", \"list\", 
\"--include-all\").Output(); err != nil {\n\t\treturn nil, err\n\t}\n\n\treturn out, nil\n}", "func listClusters(w http.ResponseWriter, r *http.Request, t auth.Token) (err error) {\n\tctx := r.Context()\n\tallowed := permission.Check(t, permission.PermClusterRead)\n\tif !allowed {\n\t\treturn permission.ErrUnauthorized\n\t}\n\tclusters, err := servicemanager.Cluster.List(ctx)\n\tif err != nil {\n\t\tif err == provTypes.ErrNoCluster {\n\t\t\tw.WriteHeader(http.StatusNoContent)\n\t\t\treturn nil\n\t\t}\n\t\treturn err\n\t}\n\tadmin := permission.Check(t, permission.PermClusterAdmin)\n\tif !admin {\n\t\tfor i := range clusters {\n\t\t\tclusters[i].CleanUpSensitive()\n\t\t}\n\t}\n\tw.Header().Set(\"Content-Type\", \"application/json\")\n\treturn json.NewEncoder(w).Encode(clusters)\n}", "func ExampleClustersClient_List() {\n\tcred, err := azidentity.NewDefaultAzureCredential(nil)\n\tif err != nil {\n\t\tlog.Fatalf(\"failed to obtain a credential: %v\", err)\n\t}\n\tctx := context.Background()\n\tclientFactory, err := armservicefabric.NewClientFactory(\"<subscription-id>\", cred, nil)\n\tif err != nil {\n\t\tlog.Fatalf(\"failed to create client: %v\", err)\n\t}\n\tres, err := clientFactory.NewClustersClient().List(ctx, nil)\n\tif err != nil {\n\t\tlog.Fatalf(\"failed to finish the request: %v\", err)\n\t}\n\t// You could use response here. We use blank identifier for just demo purposes.\n\t_ = res\n\t// If the HTTP response code is 200 as defined in example definition, your response structure would look as follows. Please pay attention that all the values in the output are fake values for just demo purposes.\n\t// res.ClusterListResult = armservicefabric.ClusterListResult{\n\t// \tValue: []*armservicefabric.Cluster{\n\t// \t\t{\n\t// \t\t\tName: to.Ptr(\"myCluster\"),\n\t// \t\t\tType: to.Ptr(\"Microsoft.ServiceFabric/clusters\"),\n\t// \t\t\tEtag: to.Ptr(\"W/\\\"636462502169240745\\\"\"),\n\t// \t\t\tID: to.Ptr(\"/subscriptions/00000000-0000-0000-0000-000000000000/resourcegroups/resRg/providers/Microsoft.ServiceFabric/clusters/myCluster\"),\n\t// \t\t\tLocation: to.Ptr(\"eastus\"),\n\t// \t\t\tTags: map[string]*string{\n\t// \t\t\t},\n\t// \t\t\tProperties: &armservicefabric.ClusterProperties{\n\t// \t\t\t\tAddOnFeatures: []*armservicefabric.AddOnFeatures{\n\t// \t\t\t\t\tto.Ptr(armservicefabric.AddOnFeaturesRepairManager),\n\t// \t\t\t\t\tto.Ptr(armservicefabric.AddOnFeaturesDNSService),\n\t// \t\t\t\t\tto.Ptr(armservicefabric.AddOnFeaturesBackupRestoreService),\n\t// \t\t\t\t\tto.Ptr(armservicefabric.AddOnFeaturesResourceMonitorService)},\n\t// \t\t\t\t\tAvailableClusterVersions: []*armservicefabric.ClusterVersionDetails{\n\t// \t\t\t\t\t\t{\n\t// \t\t\t\t\t\t\tCodeVersion: to.Ptr(\"6.1.480.9494\"),\n\t// \t\t\t\t\t\t\tEnvironment: to.Ptr(armservicefabric.ClusterEnvironmentWindows),\n\t// \t\t\t\t\t\t\tSupportExpiryUTC: to.Ptr(\"2018-06-15T23:59:59.9999999\"),\n\t// \t\t\t\t\t}},\n\t// \t\t\t\t\tAzureActiveDirectory: &armservicefabric.AzureActiveDirectory{\n\t// \t\t\t\t\t\tClientApplication: to.Ptr(\"d151ad89-4bce-4ae8-b3d1-1dc79679fa75\"),\n\t// \t\t\t\t\t\tClusterApplication: to.Ptr(\"5886372e-7bf4-4878-a497-8098aba608ae\"),\n\t// \t\t\t\t\t\tTenantID: to.Ptr(\"6abcc6a0-8666-43f1-87b8-172cf86a9f9c\"),\n\t// \t\t\t\t\t},\n\t// \t\t\t\t\tCertificateCommonNames: &armservicefabric.ServerCertificateCommonNames{\n\t// \t\t\t\t\t\tCommonNames: []*armservicefabric.ServerCertificateCommonName{\n\t// \t\t\t\t\t\t\t{\n\t// \t\t\t\t\t\t\t\tCertificateCommonName: to.Ptr(\"abc.com\"),\n\t// 
\t\t\t\t\t\t\t\tCertificateIssuerThumbprint: to.Ptr(\"12599211F8F14C90AFA9532AD79A6F2CA1C00622\"),\n\t// \t\t\t\t\t\t}},\n\t// \t\t\t\t\t\tX509StoreName: to.Ptr(armservicefabric.StoreNameMy),\n\t// \t\t\t\t\t},\n\t// \t\t\t\t\tClientCertificateCommonNames: []*armservicefabric.ClientCertificateCommonName{\n\t// \t\t\t\t\t\t{\n\t// \t\t\t\t\t\t\tCertificateCommonName: to.Ptr(\"abc.com\"),\n\t// \t\t\t\t\t\t\tCertificateIssuerThumbprint: to.Ptr(\"5F3660C715EBBDA31DB1FFDCF508302348DE8E7A\"),\n\t// \t\t\t\t\t\t\tIsAdmin: to.Ptr(true),\n\t// \t\t\t\t\t}},\n\t// \t\t\t\t\tClientCertificateThumbprints: []*armservicefabric.ClientCertificateThumbprint{\n\t// \t\t\t\t\t\t{\n\t// \t\t\t\t\t\t\tCertificateThumbprint: to.Ptr(\"5F3660C715EBBDA31DB1FFDCF508302348DE8E7A\"),\n\t// \t\t\t\t\t\t\tIsAdmin: to.Ptr(false),\n\t// \t\t\t\t\t}},\n\t// \t\t\t\t\tClusterCodeVersion: to.Ptr(\"6.1.480.9494\"),\n\t// \t\t\t\t\tClusterEndpoint: to.Ptr(\"https://eastus.servicefabric.azure.com\"),\n\t// \t\t\t\t\tClusterID: to.Ptr(\"92584666-9889-4ae8-8d02-91902923d37f\"),\n\t// \t\t\t\t\tClusterState: to.Ptr(armservicefabric.ClusterStateWaitingForNodes),\n\t// \t\t\t\t\tDiagnosticsStorageAccountConfig: &armservicefabric.DiagnosticsStorageAccountConfig{\n\t// \t\t\t\t\t\tBlobEndpoint: to.Ptr(\"https://diag.blob.core.windows.net/\"),\n\t// \t\t\t\t\t\tProtectedAccountKeyName: to.Ptr(\"StorageAccountKey1\"),\n\t// \t\t\t\t\t\tQueueEndpoint: to.Ptr(\"https://diag.queue.core.windows.net/\"),\n\t// \t\t\t\t\t\tStorageAccountName: to.Ptr(\"diag\"),\n\t// \t\t\t\t\t\tTableEndpoint: to.Ptr(\"https://diag.table.core.windows.net/\"),\n\t// \t\t\t\t\t},\n\t// \t\t\t\t\tFabricSettings: []*armservicefabric.SettingsSectionDescription{\n\t// \t\t\t\t\t\t{\n\t// \t\t\t\t\t\t\tName: to.Ptr(\"UpgradeService\"),\n\t// \t\t\t\t\t\t\tParameters: []*armservicefabric.SettingsParameterDescription{\n\t// \t\t\t\t\t\t\t\t{\n\t// \t\t\t\t\t\t\t\t\tName: to.Ptr(\"AppPollIntervalInSeconds\"),\n\t// \t\t\t\t\t\t\t\t\tValue: to.Ptr(\"60\"),\n\t// \t\t\t\t\t\t\t}},\n\t// \t\t\t\t\t}},\n\t// \t\t\t\t\tManagementEndpoint: to.Ptr(\"https://myCluster.eastus.cloudapp.azure.com:19080\"),\n\t// \t\t\t\t\tNodeTypes: []*armservicefabric.NodeTypeDescription{\n\t// \t\t\t\t\t\t{\n\t// \t\t\t\t\t\t\tName: to.Ptr(\"nt1vm\"),\n\t// \t\t\t\t\t\t\tApplicationPorts: &armservicefabric.EndpointRangeDescription{\n\t// \t\t\t\t\t\t\t\tEndPort: to.Ptr[int32](30000),\n\t// \t\t\t\t\t\t\t\tStartPort: to.Ptr[int32](20000),\n\t// \t\t\t\t\t\t\t},\n\t// \t\t\t\t\t\t\tClientConnectionEndpointPort: to.Ptr[int32](19000),\n\t// \t\t\t\t\t\t\tDurabilityLevel: to.Ptr(armservicefabric.DurabilityLevelBronze),\n\t// \t\t\t\t\t\t\tEphemeralPorts: &armservicefabric.EndpointRangeDescription{\n\t// \t\t\t\t\t\t\t\tEndPort: to.Ptr[int32](64000),\n\t// \t\t\t\t\t\t\t\tStartPort: to.Ptr[int32](49000),\n\t// \t\t\t\t\t\t\t},\n\t// \t\t\t\t\t\t\tHTTPGatewayEndpointPort: to.Ptr[int32](19007),\n\t// \t\t\t\t\t\t\tIsPrimary: to.Ptr(true),\n\t// \t\t\t\t\t\t\tVMInstanceCount: to.Ptr[int32](5),\n\t// \t\t\t\t\t}},\n\t// \t\t\t\t\tProvisioningState: to.Ptr(armservicefabric.ProvisioningStateSucceeded),\n\t// \t\t\t\t\tReliabilityLevel: to.Ptr(armservicefabric.ReliabilityLevelSilver),\n\t// \t\t\t\t\tReverseProxyCertificateCommonNames: &armservicefabric.ServerCertificateCommonNames{\n\t// \t\t\t\t\t\tCommonNames: []*armservicefabric.ServerCertificateCommonName{\n\t// \t\t\t\t\t\t\t{\n\t// \t\t\t\t\t\t\t\tCertificateCommonName: to.Ptr(\"abc.com\"),\n\t// \t\t\t\t\t\t\t\tCertificateIssuerThumbprint: 
to.Ptr(\"12599211F8F14C90AFA9532AD79A6F2CA1C00622\"),\n\t// \t\t\t\t\t\t}},\n\t// \t\t\t\t\t\tX509StoreName: to.Ptr(armservicefabric.StoreNameMy),\n\t// \t\t\t\t\t},\n\t// \t\t\t\t\tUpgradeDescription: &armservicefabric.ClusterUpgradePolicy{\n\t// \t\t\t\t\t\tDeltaHealthPolicy: &armservicefabric.ClusterUpgradeDeltaHealthPolicy{\n\t// \t\t\t\t\t\t\tApplicationDeltaHealthPolicies: map[string]*armservicefabric.ApplicationDeltaHealthPolicy{\n\t// \t\t\t\t\t\t\t\t\"fabric:/myApp1\": &armservicefabric.ApplicationDeltaHealthPolicy{\n\t// \t\t\t\t\t\t\t\t\tDefaultServiceTypeDeltaHealthPolicy: &armservicefabric.ServiceTypeDeltaHealthPolicy{\n\t// \t\t\t\t\t\t\t\t\t\tMaxPercentDeltaUnhealthyServices: to.Ptr[int32](0),\n\t// \t\t\t\t\t\t\t\t\t},\n\t// \t\t\t\t\t\t\t\t\tServiceTypeDeltaHealthPolicies: map[string]*armservicefabric.ServiceTypeDeltaHealthPolicy{\n\t// \t\t\t\t\t\t\t\t\t\t\"myServiceType1\": &armservicefabric.ServiceTypeDeltaHealthPolicy{\n\t// \t\t\t\t\t\t\t\t\t\t\tMaxPercentDeltaUnhealthyServices: to.Ptr[int32](0),\n\t// \t\t\t\t\t\t\t\t\t\t},\n\t// \t\t\t\t\t\t\t\t\t},\n\t// \t\t\t\t\t\t\t\t},\n\t// \t\t\t\t\t\t\t},\n\t// \t\t\t\t\t\t\tMaxPercentDeltaUnhealthyApplications: to.Ptr[int32](0),\n\t// \t\t\t\t\t\t\tMaxPercentDeltaUnhealthyNodes: to.Ptr[int32](0),\n\t// \t\t\t\t\t\t\tMaxPercentUpgradeDomainDeltaUnhealthyNodes: to.Ptr[int32](0),\n\t// \t\t\t\t\t\t},\n\t// \t\t\t\t\t\tForceRestart: to.Ptr(false),\n\t// \t\t\t\t\t\tHealthCheckRetryTimeout: to.Ptr(\"00:05:00\"),\n\t// \t\t\t\t\t\tHealthCheckStableDuration: to.Ptr(\"00:00:30\"),\n\t// \t\t\t\t\t\tHealthCheckWaitDuration: to.Ptr(\"00:00:30\"),\n\t// \t\t\t\t\t\tHealthPolicy: &armservicefabric.ClusterHealthPolicy{\n\t// \t\t\t\t\t\t\tApplicationHealthPolicies: map[string]*armservicefabric.ApplicationHealthPolicy{\n\t// \t\t\t\t\t\t\t\t\"fabric:/myApp1\": &armservicefabric.ApplicationHealthPolicy{\n\t// \t\t\t\t\t\t\t\t\tDefaultServiceTypeHealthPolicy: &armservicefabric.ServiceTypeHealthPolicy{\n\t// \t\t\t\t\t\t\t\t\t\tMaxPercentUnhealthyServices: to.Ptr[int32](0),\n\t// \t\t\t\t\t\t\t\t\t},\n\t// \t\t\t\t\t\t\t\t\tServiceTypeHealthPolicies: map[string]*armservicefabric.ServiceTypeHealthPolicy{\n\t// \t\t\t\t\t\t\t\t\t\t\"myServiceType1\": &armservicefabric.ServiceTypeHealthPolicy{\n\t// \t\t\t\t\t\t\t\t\t\t\tMaxPercentUnhealthyServices: to.Ptr[int32](100),\n\t// \t\t\t\t\t\t\t\t\t\t},\n\t// \t\t\t\t\t\t\t\t\t},\n\t// \t\t\t\t\t\t\t\t},\n\t// \t\t\t\t\t\t\t},\n\t// \t\t\t\t\t\t\tMaxPercentUnhealthyApplications: to.Ptr[int32](0),\n\t// \t\t\t\t\t\t\tMaxPercentUnhealthyNodes: to.Ptr[int32](0),\n\t// \t\t\t\t\t\t},\n\t// \t\t\t\t\t\tUpgradeDomainTimeout: to.Ptr(\"00:15:00\"),\n\t// \t\t\t\t\t\tUpgradeReplicaSetCheckTimeout: to.Ptr(\"00:10:00\"),\n\t// \t\t\t\t\t\tUpgradeTimeout: to.Ptr(\"01:00:00\"),\n\t// \t\t\t\t\t},\n\t// \t\t\t\t\tUpgradeMode: to.Ptr(armservicefabric.UpgradeModeManual),\n\t// \t\t\t\t\tVMImage: to.Ptr(\"Windows\"),\n\t// \t\t\t\t},\n\t// \t\t\t},\n\t// \t\t\t{\n\t// \t\t\t\tName: to.Ptr(\"myCluster2\"),\n\t// \t\t\t\tType: to.Ptr(\"Microsoft.ServiceFabric/clusters\"),\n\t// \t\t\t\tEtag: to.Ptr(\"W/\\\"636462502164040075\\\"\"),\n\t// \t\t\t\tID: to.Ptr(\"/subscriptions/00000000-0000-0000-0000-000000000000/resourcegroups/resRg/providers/Microsoft.ServiceFabric/clusters/myCluster2\"),\n\t// \t\t\t\tLocation: to.Ptr(\"eastus\"),\n\t// \t\t\t\tTags: map[string]*string{\n\t// \t\t\t\t},\n\t// \t\t\t\tProperties: &armservicefabric.ClusterProperties{\n\t// \t\t\t\t\tAddOnFeatures: 
[]*armservicefabric.AddOnFeatures{\n\t// \t\t\t\t\t\tto.Ptr(armservicefabric.AddOnFeaturesRepairManager)},\n\t// \t\t\t\t\t\tAvailableClusterVersions: []*armservicefabric.ClusterVersionDetails{\n\t// \t\t\t\t\t\t\t{\n\t// \t\t\t\t\t\t\t\tCodeVersion: to.Ptr(\"6.1.187.1\"),\n\t// \t\t\t\t\t\t\t\tEnvironment: to.Ptr(armservicefabric.ClusterEnvironmentLinux),\n\t// \t\t\t\t\t\t\t\tSupportExpiryUTC: to.Ptr(\"2018-06-15T23:59:59.9999999\"),\n\t// \t\t\t\t\t\t}},\n\t// \t\t\t\t\t\tClientCertificateCommonNames: []*armservicefabric.ClientCertificateCommonName{\n\t// \t\t\t\t\t\t},\n\t// \t\t\t\t\t\tClientCertificateThumbprints: []*armservicefabric.ClientCertificateThumbprint{\n\t// \t\t\t\t\t\t},\n\t// \t\t\t\t\t\tClusterCodeVersion: to.Ptr(\"6.1.187.1\"),\n\t// \t\t\t\t\t\tClusterEndpoint: to.Ptr(\"https://eastus.servicefabric.azure.com\"),\n\t// \t\t\t\t\t\tClusterID: to.Ptr(\"2747e469-b24e-4039-8a0a-46151419523f\"),\n\t// \t\t\t\t\t\tClusterState: to.Ptr(armservicefabric.ClusterStateWaitingForNodes),\n\t// \t\t\t\t\t\tDiagnosticsStorageAccountConfig: &armservicefabric.DiagnosticsStorageAccountConfig{\n\t// \t\t\t\t\t\t\tBlobEndpoint: to.Ptr(\"https://diag.blob.core.windows.net/\"),\n\t// \t\t\t\t\t\t\tProtectedAccountKeyName: to.Ptr(\"StorageAccountKey1\"),\n\t// \t\t\t\t\t\t\tQueueEndpoint: to.Ptr(\"https://diag.queue.core.windows.net/\"),\n\t// \t\t\t\t\t\t\tStorageAccountName: to.Ptr(\"diag\"),\n\t// \t\t\t\t\t\t\tTableEndpoint: to.Ptr(\"https://diag.table.core.windows.net/\"),\n\t// \t\t\t\t\t\t},\n\t// \t\t\t\t\t\tFabricSettings: []*armservicefabric.SettingsSectionDescription{\n\t// \t\t\t\t\t\t\t{\n\t// \t\t\t\t\t\t\t\tName: to.Ptr(\"UpgradeService\"),\n\t// \t\t\t\t\t\t\t\tParameters: []*armservicefabric.SettingsParameterDescription{\n\t// \t\t\t\t\t\t\t\t\t{\n\t// \t\t\t\t\t\t\t\t\t\tName: to.Ptr(\"AppPollIntervalInSeconds\"),\n\t// \t\t\t\t\t\t\t\t\t\tValue: to.Ptr(\"60\"),\n\t// \t\t\t\t\t\t\t\t}},\n\t// \t\t\t\t\t\t}},\n\t// \t\t\t\t\t\tManagementEndpoint: to.Ptr(\"http://myCluster2.eastus.cloudapp.azure.com:19080\"),\n\t// \t\t\t\t\t\tNodeTypes: []*armservicefabric.NodeTypeDescription{\n\t// \t\t\t\t\t\t\t{\n\t// \t\t\t\t\t\t\t\tName: to.Ptr(\"nt1vm\"),\n\t// \t\t\t\t\t\t\t\tApplicationPorts: &armservicefabric.EndpointRangeDescription{\n\t// \t\t\t\t\t\t\t\t\tEndPort: to.Ptr[int32](30000),\n\t// \t\t\t\t\t\t\t\t\tStartPort: to.Ptr[int32](20000),\n\t// \t\t\t\t\t\t\t\t},\n\t// \t\t\t\t\t\t\t\tClientConnectionEndpointPort: to.Ptr[int32](19000),\n\t// \t\t\t\t\t\t\t\tDurabilityLevel: to.Ptr(armservicefabric.DurabilityLevelBronze),\n\t// \t\t\t\t\t\t\t\tEphemeralPorts: &armservicefabric.EndpointRangeDescription{\n\t// \t\t\t\t\t\t\t\t\tEndPort: to.Ptr[int32](64000),\n\t// \t\t\t\t\t\t\t\t\tStartPort: to.Ptr[int32](49000),\n\t// \t\t\t\t\t\t\t\t},\n\t// \t\t\t\t\t\t\t\tHTTPGatewayEndpointPort: to.Ptr[int32](19007),\n\t// \t\t\t\t\t\t\t\tIsPrimary: to.Ptr(true),\n\t// \t\t\t\t\t\t\t\tVMInstanceCount: to.Ptr[int32](5),\n\t// \t\t\t\t\t\t}},\n\t// \t\t\t\t\t\tProvisioningState: to.Ptr(armservicefabric.ProvisioningStateSucceeded),\n\t// \t\t\t\t\t\tReliabilityLevel: to.Ptr(armservicefabric.ReliabilityLevelSilver),\n\t// \t\t\t\t\t\tUpgradeDescription: &armservicefabric.ClusterUpgradePolicy{\n\t// \t\t\t\t\t\t\tDeltaHealthPolicy: &armservicefabric.ClusterUpgradeDeltaHealthPolicy{\n\t// \t\t\t\t\t\t\t\tMaxPercentDeltaUnhealthyApplications: to.Ptr[int32](0),\n\t// \t\t\t\t\t\t\t\tMaxPercentDeltaUnhealthyNodes: to.Ptr[int32](0),\n\t// \t\t\t\t\t\t\t\tMaxPercentUpgradeDomainDeltaUnhealthyNodes: 
to.Ptr[int32](0),\n\t// \t\t\t\t\t\t\t},\n\t// \t\t\t\t\t\t\tForceRestart: to.Ptr(false),\n\t// \t\t\t\t\t\t\tHealthCheckRetryTimeout: to.Ptr(\"00:05:00\"),\n\t// \t\t\t\t\t\t\tHealthCheckStableDuration: to.Ptr(\"00:00:30\"),\n\t// \t\t\t\t\t\t\tHealthCheckWaitDuration: to.Ptr(\"00:00:30\"),\n\t// \t\t\t\t\t\t\tHealthPolicy: &armservicefabric.ClusterHealthPolicy{\n\t// \t\t\t\t\t\t\t\tMaxPercentUnhealthyApplications: to.Ptr[int32](0),\n\t// \t\t\t\t\t\t\t\tMaxPercentUnhealthyNodes: to.Ptr[int32](0),\n\t// \t\t\t\t\t\t\t},\n\t// \t\t\t\t\t\t\tUpgradeDomainTimeout: to.Ptr(\"00:15:00\"),\n\t// \t\t\t\t\t\t\tUpgradeReplicaSetCheckTimeout: to.Ptr(\"00:10:00\"),\n\t// \t\t\t\t\t\t\tUpgradeTimeout: to.Ptr(\"01:00:00\"),\n\t// \t\t\t\t\t\t},\n\t// \t\t\t\t\t\tUpgradeMode: to.Ptr(armservicefabric.UpgradeModeManual),\n\t// \t\t\t\t\t\tVMImage: to.Ptr(\"Ubuntu\"),\n\t// \t\t\t\t\t},\n\t// \t\t\t}},\n\t// \t\t}\n}", "func (s *ClusterListener) ListNodes(inctx context.Context, in *protocol.Reference) (_ *protocol.ClusterNodeListResponse, err error) {\n\tdefer fail.OnExitConvertToGRPCStatus(inctx, &err)\n\tdefer fail.OnExitWrapError(inctx, &err, \"cannot list cluster nodes\")\n\n\tif s == nil {\n\t\treturn nil, fail.InvalidInstanceError()\n\t}\n\tif inctx == nil {\n\t\treturn nil, fail.InvalidParameterCannotBeNilError(\"inctx\")\n\t}\n\tif in == nil {\n\t\treturn nil, fail.InvalidParameterCannotBeNilError(\"in\")\n\t}\n\n\tref, _ := srvutils.GetReference(in)\n\tif ref == \"\" {\n\t\treturn nil, fail.InvalidRequestError(\"cluster name is missing\")\n\t}\n\n\tjob, err := PrepareJob(inctx, in.GetTenantId(), fmt.Sprintf(\"/cluster/%s/nodes/list\", ref))\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\tdefer job.Close()\n\n\thandler := handlers.NewClusterHandler(job)\n\n\tlist, xerr := handler.ListNodes(ref)\n\tif xerr != nil {\n\t\treturn nil, xerr\n\t}\n\n\tout := &protocol.ClusterNodeListResponse{}\n\tout.Nodes = make([]*protocol.Host, 0, len(list))\n\tfor _, v := range list {\n\t\tout.Nodes = append(out.Nodes, &protocol.Host{\n\t\t\tId: v.ID,\n\t\t\tName: v.Name,\n\t\t})\n\t}\n\treturn out, nil\n}", "func List(c *client.Client) (*rbacv1.ClusterRoleList, error) {\n\tclusterrolelist, err := c.Clientset.RbacV1().ClusterRoles().List(context.TODO(), metav1.ListOptions{})\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\treturn clusterrolelist, nil\n}", "func testListClusterNodes(t *testing.T) {\n\tctx := context.Background()\n\n\t// Init BCE Client\n\tak := \"xxxxxxxx\"\n\tsk := \"xxxxxxxx\"\n\tregion := \"sz\"\n\tendpoint := \"cce.su.baidubce.com\"\n\n\tc := newClient(ak, sk, region, endpoint)\n\n\t// Test ListClusterNodes\n\tnodesResq, err := c.ListClusterNodes(ctx, \"xxxxxx\", nil)\n\tif err != nil {\n\t\tt.Errorf(\"ListClusterNodes failed: %v\", err)\n\t\treturn\n\t}\n\n\tstr, _ := json.Marshal(nodesResq)\n\tt.Errorf(\"ListClusterNodes failed: %v\", string(str))\n}", "func (*RegistrationListRes) Descriptor() ([]byte, []int) {\n\treturn file_registration_proto_rawDescGZIP(), []int{24}\n}", "func (re *stubRegistrationService) ListBySelector(ctx context.Context, request common.Selector) (reply common.RegistrationEntries, err error) {\n\treturn reply, err\n}", "func (client InfraRoleInstancesClient) List(ctx context.Context, location string, filter string) (result InfraRoleInstanceListPage, err error) {\n\tresult.fn = client.listNextResults\n\treq, err := client.ListPreparer(ctx, location, filter)\n\tif err != nil {\n\t\terr = autorest.NewErrorWithError(err, \"fabric.InfraRoleInstancesClient\", \"List\", nil, 
\"Failure preparing request\")\n\t\treturn\n\t}\n\n\tresp, err := client.ListSender(req)\n\tif err != nil {\n\t\tresult.iril.Response = autorest.Response{Response: resp}\n\t\terr = autorest.NewErrorWithError(err, \"fabric.InfraRoleInstancesClient\", \"List\", resp, \"Failure sending request\")\n\t\treturn\n\t}\n\n\tresult.iril, err = client.ListResponder(resp)\n\tif err != nil {\n\t\terr = autorest.NewErrorWithError(err, \"fabric.InfraRoleInstancesClient\", \"List\", resp, \"Failure responding to request\")\n\t}\n\n\treturn\n}", "func deserializeKidList(r io.Reader) ([]*kidOutput, error) {\n\tvar kidOutputs []*kidOutput\n\n\tfor {\n\t\tkidOutput, err := deserializeKidOutput(r)\n\t\tif err != nil {\n\t\t\tif err == io.EOF {\n\t\t\t\tbreak\n\t\t\t} else {\n\t\t\t\treturn nil, err\n\t\t\t}\n\t\t}\n\t\tkidOutputs = append(kidOutputs, kidOutput)\n\t}\n\n\treturn kidOutputs, nil\n}", "func (c ConfigGlyphSet) readJsonClusterMap() []CharSpec {\n\tdata, err := ioutil.ReadFile(c.Index)\n\tif err != nil {\n\t\tpanic(err)\n\t}\n\tvar cji ConfigJsonIndex\n\terr = json.Unmarshal(data, &cji)\n\tif err != nil {\n\t\tpanic(err)\n\t}\n\treturn cji.Map\n}", "func (a *HyperflexApiService) GetHyperflexClusterProfileList(ctx context.Context) ApiGetHyperflexClusterProfileListRequest {\n\treturn ApiGetHyperflexClusterProfileListRequest{\n\t\tApiService: a,\n\t\tctx: ctx,\n\t}\n}", "func (c *Client) Registers() ([]Register, error) {\n\n\tregisters := []Register{}\n\tpage := []Register{}\n\n\t// v is a version that is used to get registers by page.\n\tdata, v, err := c.ResourcePage(0, \"GET\", \"registers\")\n\terr = json.Unmarshal(data, &page)\n\tif err != nil {\n\t\tlog.Printf(\"error while unmarshalling: %s\", err)\n\t}\n\n\tregisters = append(registers, page...)\n\n\t// Use version to paginate through all pages\n\tfor len(page) > 0 {\n\t\tpage = []Register{}\n\t\tdata, v, err = c.ResourcePage(v, \"GET\", \"registers\")\n\t\terr = json.Unmarshal(data, &page)\n\t\tregisters = append(registers, page...)\n\t}\n\n\treturn registers, err\n}", "func (l *SubscriptionRegistrationList) Slice() []*SubscriptionRegistration {\n\tvar slice []*SubscriptionRegistration\n\tif l == nil {\n\t\tslice = make([]*SubscriptionRegistration, 0)\n\t} else {\n\t\tslice = make([]*SubscriptionRegistration, len(l.items))\n\t\tcopy(slice, l.items)\n\t}\n\treturn slice\n}", "func UnmarshalAnalyticsEngineClusterNodeSlice(s []interface{}) (slice []AnalyticsEngineClusterNode, err error) {\n\tfor _, v := range s {\n\t\tobjMap, ok := v.(map[string]interface{})\n\t\tif !ok {\n\t\t\terr = fmt.Errorf(\"slice element should be a map containing an instance of 'AnalyticsEngineClusterNode'\")\n\t\t\treturn\n\t\t}\n\t\tobj, e := UnmarshalAnalyticsEngineClusterNode(objMap)\n\t\tif e != nil {\n\t\t\terr = e\n\t\t\treturn\n\t\t}\n\t\tslice = append(slice, *obj)\n\t}\n\treturn\n}", "func (s *ClusterListener) ListMasters(inctx context.Context, in *protocol.Reference) (_ *protocol.ClusterNodeListResponse, err error) {\n\tdefer fail.OnExitConvertToGRPCStatus(inctx, &err)\n\tdefer fail.OnExitWrapError(inctx, &err, \"cannot list masters\")\n\n\tif s == nil {\n\t\treturn nil, fail.InvalidInstanceError()\n\t}\n\tif in == nil {\n\t\treturn nil, fail.InvalidParameterCannotBeNilError(\"in\")\n\t}\n\tif inctx == nil {\n\t\treturn nil, fail.InvalidParameterCannotBeNilError(\"inctx\")\n\t}\n\n\tclusterName, _ := srvutils.GetReference(in)\n\tif clusterName == \"\" {\n\t\treturn nil, fail.InvalidRequestError(\"cluster name is missing\")\n\t}\n\n\tjob, err := 
PrepareJob(inctx, in.GetTenantId(), fmt.Sprintf(\"/cluster/%s/masters/list\", clusterName))\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\tdefer job.Close()\n\n\thandler := handlers.NewClusterHandler(job)\n\tlist, xerr := handler.ListMasters(clusterName)\n\tif xerr != nil {\n\t\treturn nil, xerr\n\t}\n\n\tout, xerr := converters.IndexedListOfClusterNodesFromResourceToProtocol(list)\n\tif xerr != nil {\n\t\treturn nil, xerr\n\t}\n\n\treturn out, nil\n}", "func ClusterField_Values() []string {\n\treturn []string{\n\t\tClusterFieldAttachments,\n\t\tClusterFieldConfigurations,\n\t\tClusterFieldSettings,\n\t\tClusterFieldStatistics,\n\t\tClusterFieldTags,\n\t}\n}", "func (p *SingletonClientGenerator) NodeList() (*apiv1.NodeList, error) {\n\n\tkubeclient, err := p.KubeClient()\n\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\n\tctx := p.newRequestContext()\n\tdefer ctx.Cancel()\n\n\treturn kubeclient.CoreV1().Nodes().List(ctx, metav1.ListOptions{})\n}", "func (s *daskClusterLister) List(selector labels.Selector) (ret []*v1.DaskCluster, err error) {\n\terr = cache.ListAll(s.indexer, selector, func(m interface{}) {\n\t\tret = append(ret, m.(*v1.DaskCluster))\n\t})\n\treturn ret, err\n}", "func (a *Client) ListRegistry(params *ListRegistryParams) (*ListRegistryOK, error) {\n\t// TODO: Validate the params before sending\n\tif params == nil {\n\t\tparams = NewListRegistryParams()\n\t}\n\n\tresult, err := a.transport.Submit(&runtime.ClientOperation{\n\t\tID: \"listRegistry\",\n\t\tMethod: \"GET\",\n\t\tPathPattern: \"/Registries\",\n\t\tProducesMediaTypes: []string{\"application/json\"},\n\t\tConsumesMediaTypes: []string{\"application/json\"},\n\t\tSchemes: []string{\"http\", \"https\"},\n\t\tParams: params,\n\t\tReader: &ListRegistryReader{formats: a.formats},\n\t\tContext: params.Context,\n\t\tClient: params.HTTPClient,\n\t})\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\treturn result.(*ListRegistryOK), nil\n\n}", "func List(ctx context.Context, filters container.FilterBuilder) ([]*types.Node, error) {\n\tres := []*types.Node{}\n\tvisit := func(ctx context.Context, cluster string, node *types.Node) {\n\t\tres = append(res, node)\n\t}\n\treturn res, list(ctx, visit, filters)\n}", "func (page ClusterListResultPage) Values() []Cluster {\n\tif page.clr.IsEmpty() {\n\t\treturn nil\n\t}\n\treturn *page.clr.Value\n}", "func List(client *golangsdk.ServiceClient, clusterId string) (r ListResult) {\n\t_, r.Err = client.Get(listURL(client, clusterId), &r.Body, nil)\n\treturn\n}", "func (f *FeatureGateClient) GetFeatureGateList(ctx context.Context) (*corev1alpha2.FeatureGateList, error) {\n\tgates := &corev1alpha2.FeatureGateList{}\n\terr := f.crClient.List(ctx, gates)\n\tif err != nil {\n\t\treturn nil, fmt.Errorf(\"could not get featuregates on cluster: %w\", err)\n\t}\n\treturn gates, nil\n}", "func getNatsClusterTypeMeta() metav1.TypeMeta {\n\treturn metav1.TypeMeta{\n\t\tKind: v1alpha2.CRDResourceKind,\n\t\tAPIVersion: v1alpha2.SchemeGroupVersion.String(),\n\t}\n}", "func (client *Client) ListClusterMembersWithCallback(request *ListClusterMembersRequest, callback func(response *ListClusterMembersResponse, err error)) <-chan int {\n\tresult := make(chan int, 1)\n\terr := client.AddAsyncTask(func() {\n\t\tvar response *ListClusterMembersResponse\n\t\tvar err error\n\t\tdefer close(result)\n\t\tresponse, err = client.ListClusterMembers(request)\n\t\tcallback(response, err)\n\t\tresult <- 1\n\t})\n\tif err != nil {\n\t\tdefer close(result)\n\t\tcallback(nil, err)\n\t\tresult <- 0\n\t}\n\treturn result\n}", 
"func GetClusterNames(clusterName string) (lines []string, err error) {\n\t// For now, only supports one server, so server name will be based on th cluster name\n\tcmd := exec.Command(\n\t\t\"docker\",\n\t\t\"ps\",\n\t\t\"-q\", // quiet output for parsing\n\t\t\"-a\", // show stopped nodes\n\t\t\"--no-trunc\", // don't truncate\n\t\t// filter for nodes with the cluster label\n\t\t\"--filter\", fmt.Sprintf(\"label=%s=%s\", clusterconfig.ClusterLabelKey, clusterName),\n\t\t// format to include the cluster name\n\t\t\"--format\", `{{.Names}}`,\n\t)\n\tlines, err = docker.ExecOutput(*cmd, false)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\t// currentlt only supports one server\n\t// if len(lines) != 1 {\n\t// \treturn nil, fmt.Errorf(\"k3scli don't support multiserver now...\")\n\t// }\n\treturn lines, nil\n}" ]
[ "0.67638403", "0.6247784", "0.54614764", "0.51669806", "0.50386775", "0.50192267", "0.49880686", "0.48626015", "0.48383284", "0.4753792", "0.47025165", "0.46732795", "0.4596685", "0.45840594", "0.45759216", "0.4574705", "0.4561554", "0.45477545", "0.4543731", "0.45330185", "0.45327294", "0.4532603", "0.45166948", "0.449373", "0.44783518", "0.4476868", "0.44584766", "0.44569865", "0.4455628", "0.44418418", "0.44388145", "0.4433517", "0.44326344", "0.44259253", "0.44210574", "0.44197664", "0.44194722", "0.4418944", "0.4407947", "0.43910185", "0.43718034", "0.4363244", "0.4346573", "0.43413648", "0.4338844", "0.43385345", "0.4327618", "0.4324333", "0.4323521", "0.43232542", "0.4323211", "0.43031567", "0.4302572", "0.43024206", "0.42927372", "0.42877722", "0.42793545", "0.4275293", "0.4273911", "0.42716756", "0.4262754", "0.42625132", "0.42602825", "0.42546836", "0.4251945", "0.4249414", "0.42334947", "0.4233009", "0.42268082", "0.42264932", "0.422607", "0.422607", "0.42227274", "0.4221072", "0.42202187", "0.42192942", "0.42123666", "0.42031926", "0.41937318", "0.41933772", "0.41905797", "0.41897023", "0.41738975", "0.41666362", "0.41604272", "0.41546917", "0.41454315", "0.41399893", "0.41392982", "0.4135347", "0.41346496", "0.41334522", "0.41330478", "0.4133038", "0.4131497", "0.4127283", "0.41250432", "0.4114661", "0.41142574", "0.41046894" ]
0.8615517
0
GetMetadataFromMetadataService gets metadata from OpenStack Metadata service.
func GetMetadataFromMetadataService() (*Metadata, error) { metadataURL := getMetadataURL(defaultMetadataVersion) resp, err := http.Get(metadataURL) if err != nil { return nil, fmt.Errorf("error fetching metadata from %s: %v", metadataURL, err) } defer resp.Body.Close() if resp.StatusCode != http.StatusOK { err = fmt.Errorf("unexpected status code when reading metadata from %s: %s", metadataURL, resp.Status) return nil, err } return parseMetadata(resp.Body) }
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "func NewMetadataService(DB *gorm.DB) MetadataService {\n\treturn MetadataService{\n\t\tlibrary: db.NewLibraryStore(DB),\n\t\tmedia: db.NewMediaStore(DB),\n\t}\n}", "func Metadata(client *gophercloud.ServiceClient, id string) (r GetMetadataResult) {\n\tresp, err := client.Get(metadataURL(client, id), &r.Body, nil)\n\t_, r.Header, r.Err = gophercloud.ParseResponse(resp, err)\n\treturn\n}", "func NewMetadataService(listener net.Listener, creds credentialsSource, extraAllowedIps []*net.IPNet) (MetadataService, error) {\n\n\tallowIps := []*net.IPNet{}\n\tif extraAllowedIps != nil {\n\t\tallowIps = extraAllowedIps\n\t}\n\n\t// Add default allowed nets to the list\n\tfor _, ip := range []net.IP{net.IPv4(127, 0, 0, 1), net.IPv4(169, 254, 169, 254)} {\n\t\tipNet := &net.IPNet{IP: ip, Mask: net.CIDRMask(32, 32)}\n\t\tallowIps = append(allowIps, ipNet)\n\t}\n\n\treturn &metadataService{\n\t\tlistener: listener,\n\t\tcreds: creds,\n\t\tallowIps: allowIps,\n\t}, nil\n}", "func (s *Service) GetMetadata(ctx context.Context, in *authapi.GetMetadataRequest) (*authapi.GetMetadataResponse, error) {\n\tcurrentUser, ok := user.CurrentUserFromContext(ctx)\n\tif !ok || currentUser == nil {\n\t\treturn nil, errors.New(errors.ErrorUnauthenticated, \"\")\n\t}\n\n\tuserMetadata, err := currentUser.UserMetadata.String()\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\treturn &authapi.GetMetadataResponse{\n\t\tUserMetadata: userMetadata,\n\t}, nil\n}", "func (r *edgeClusterServiceResolver) Metadata(ctx context.Context) (edgecluster.ObjectMetaResolverContract, error) {\n\treturn r.resolverCreator.NewObjectMetaResolver(ctx, r.service.Metadata)\n}", "func (c *jsiiProxy_CfnDeploymentStrategy) GetMetadata(key *string) interface{} {\n\tvar returns interface{}\n\n\t_jsii_.Invoke(\n\t\tc,\n\t\t\"getMetadata\",\n\t\t[]interface{}{key},\n\t\t&returns,\n\t)\n\n\treturn returns\n}", "func (o ServiceOutput) Metadata() ServiceMetadataOutput {\n\treturn o.ApplyT(func(v *Service) ServiceMetadataOutput { return v.Metadata }).(ServiceMetadataOutput)\n}", "func (c *jsiiProxy_CfnDeploymentConfig) GetMetadata(key *string) interface{} {\n\tvar returns interface{}\n\n\t_jsii_.Invoke(\n\t\tc,\n\t\t\"getMetadata\",\n\t\t[]interface{}{key},\n\t\t&returns,\n\t)\n\n\treturn returns\n}", "func (c *jsiiProxy_CfnDeployment) GetMetadata(key *string) interface{} {\n\tvar returns interface{}\n\n\t_jsii_.Invoke(\n\t\tc,\n\t\t\"getMetadata\",\n\t\t[]interface{}{key},\n\t\t&returns,\n\t)\n\n\treturn returns\n}", "func (m *Machine) GetMetadata(ctx context.Context, v interface{}) error {\n\tresp, err := m.client.GetMmds(ctx)\n\tif err != nil {\n\t\tm.logger.Errorf(\"Getting metadata: %s\", err)\n\t\treturn err\n\t}\n\n\tpayloadData, err := json.Marshal(resp.Payload)\n\tif err != nil {\n\t\tm.logger.Errorf(\"Getting metadata failed parsing payload: %s\", err)\n\t\treturn err\n\t}\n\n\tif err := json.Unmarshal(payloadData, v); err != nil {\n\t\tm.logger.Errorf(\"Getting metadata failed parsing payload: %s\", err)\n\t\treturn err\n\t}\n\n\tm.logger.Printf(\"GetMetadata successful\")\n\treturn nil\n}", "func GetMetadata(c *gin.Context) {\n\ttitle := utils.FormatString(c.Param(\"title\"))\n\tmetadataStore := datastore.GetStore()\n\n\tdata, err := metadataStore.GetApplication(title)\n\tif err != nil {\n\t\tc.YAML(http.StatusNotFound, gin.H{\"status\": http.StatusNotFound, \"message\": messages.MetadataNotFound, \"error\": err})\n\t} else {\n\t\tc.YAML(http.StatusOK, gin.H{\"status\": http.StatusOK, \"message\": messages.MetadataFound, \"data\": 
data})\n\t}\n}", "func (c *jsiiProxy_CfnStudio) GetMetadata(key *string) interface{} {\n\tvar returns interface{}\n\n\t_jsii_.Invoke(\n\t\tc,\n\t\t\"getMetadata\",\n\t\t[]interface{}{key},\n\t\t&returns,\n\t)\n\n\treturn returns\n}", "func (c *jsiiProxy_CfnDeploymentGroup) GetMetadata(key *string) interface{} {\n\tvar returns interface{}\n\n\t_jsii_.Invoke(\n\t\tc,\n\t\t\"getMetadata\",\n\t\t[]interface{}{key},\n\t\t&returns,\n\t)\n\n\treturn returns\n}", "func (o LookupServiceResultOutput) Metadata() ObjectMetaResponseOutput {\n\treturn o.ApplyT(func(v LookupServiceResult) ObjectMetaResponse { return v.Metadata }).(ObjectMetaResponseOutput)\n}", "func (o *IamServiceProviderAllOf) GetMetadata() string {\n\tif o == nil || o.Metadata == nil {\n\t\tvar ret string\n\t\treturn ret\n\t}\n\treturn *o.Metadata\n}", "func (c *jsiiProxy_CfnStudioComponent) GetMetadata(key *string) interface{} {\n\tvar returns interface{}\n\n\t_jsii_.Invoke(\n\t\tc,\n\t\t\"getMetadata\",\n\t\t[]interface{}{key},\n\t\t&returns,\n\t)\n\n\treturn returns\n}", "func (d *DescriptorService) GetMetadata(ctx context.Context, payload *models.GetMetadata) (*models.MetadataFile, error) {\n\tfilename := path.Join(payload.Namespace, payload.Name, \"meta.json\")\n\tdata, err := d.Store.Get(ctx, filename)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\tdefer data.Close()\n\tfile := &models.MetadataFile{}\n\tb, err := ioutil.ReadAll(data)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\terr = json.Unmarshal(b, file)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\treturn file, nil\n}", "func (c *jsiiProxy_CfnRepository) GetMetadata(key *string) interface{} {\n\tvar returns interface{}\n\n\t_jsii_.Invoke(\n\t\tc,\n\t\t\"getMetadata\",\n\t\t[]interface{}{key},\n\t\t&returns,\n\t)\n\n\treturn returns\n}", "func (f *TemplateFinder) GetMetadata(template *templatev1.Template, vm *ovirtsdk.Vm) (map[string]string, map[string]string, error) {\n\tos, err := f.osFinder.FindOperatingSystem(vm)\n\tif err != nil {\n\t\treturn map[string]string{}, map[string]string{}, err\n\t}\n\tworkload := getWorkload(vm)\n\tflavor := defaultFlavor\n\tlabels := templates.OSLabelBuilder(&os, &workload, &flavor)\n\n\tkey := fmt.Sprintf(templates.TemplateNameOsAnnotation, os)\n\tannotations := map[string]string{\n\t\tkey: template.GetAnnotations()[key],\n\t}\n\treturn labels, annotations, nil\n}", "func (c *jsiiProxy_CfnConfig) GetMetadata(key *string) interface{} {\n\tvar returns interface{}\n\n\t_jsii_.Invoke(\n\t\tc,\n\t\t\"getMetadata\",\n\t\t[]interface{}{key},\n\t\t&returns,\n\t)\n\n\treturn returns\n}", "func (drc *DummyRegistryClient) GetImageMetadata(in, et string) (md Metadata, err error) {\n\tres := drc.Called(in, et)\n\treturn res.Get(0).(Metadata), res.Error(1)\n}", "func (cli *FakeDatabaseClient) FetchServiceImageMetaData(ctx context.Context, in *dbdpb.FetchServiceImageMetaDataRequest, opts ...grpc.CallOption) (*dbdpb.FetchServiceImageMetaDataResponse, error) {\n\tatomic.AddInt32(&cli.fetchServiceImageMetaDataCnt, 1)\n\tif cli.methodToResp == nil {\n\t\treturn nil, nil\n\t}\n\tmethod := \"FetchServiceImageMetaData\"\n\tif resp, ok := cli.methodToResp[method]; ok {\n\t\treturn resp.(*dbdpb.FetchServiceImageMetaDataResponse), nil\n\t}\n\treturn nil, nil\n\n}", "func (c *jsiiProxy_CfnLayer) GetMetadata(key *string) interface{} {\n\tvar returns interface{}\n\n\t_jsii_.Invoke(\n\t\tc,\n\t\t\"getMetadata\",\n\t\t[]interface{}{key},\n\t\t&returns,\n\t)\n\n\treturn returns\n}", "func (c *jsiiProxy_CfnDataflowEndpointGroup) GetMetadata(key *string) interface{} 
{\n\tvar returns interface{}\n\n\t_jsii_.Invoke(\n\t\tc,\n\t\t\"getMetadata\",\n\t\t[]interface{}{key},\n\t\t&returns,\n\t)\n\n\treturn returns\n}", "func (c *jsiiProxy_CfnMaster) GetMetadata(key *string) interface{} {\n\tvar returns interface{}\n\n\t_jsii_.Invoke(\n\t\tc,\n\t\t\"getMetadata\",\n\t\t[]interface{}{key},\n\t\t&returns,\n\t)\n\n\treturn returns\n}", "func (c *jsiiProxy_CfnFileSystem) GetMetadata(key *string) interface{} {\n\tvar returns interface{}\n\n\t_jsii_.Invoke(\n\t\tc,\n\t\t\"getMetadata\",\n\t\t[]interface{}{key},\n\t\t&returns,\n\t)\n\n\treturn returns\n}", "func (c *jsiiProxy_CfnVolume) GetMetadata(key *string) interface{} {\n\tvar returns interface{}\n\n\t_jsii_.Invoke(\n\t\tc,\n\t\t\"getMetadata\",\n\t\t[]interface{}{key},\n\t\t&returns,\n\t)\n\n\treturn returns\n}", "func (c *jsiiProxy_CfnDevEndpoint) GetMetadata(key *string) interface{} {\n\tvar returns interface{}\n\n\t_jsii_.Invoke(\n\t\tc,\n\t\t\"getMetadata\",\n\t\t[]interface{}{key},\n\t\t&returns,\n\t)\n\n\treturn returns\n}", "func getHostMetadata() (Metadata, error) {\n\tcontent, err := ioutil.ReadFile(metadataFile)\n\tif err == nil {\n\t\tvar metadata Metadata\n\t\tif err = json.Unmarshal(content, &metadata); err == nil {\n\t\t\treturn metadata, nil\n\t\t}\n\t}\n\n\tlog.Logf(\"[Telemetry] Request metadata from wireserver\")\n\n\treq, err := http.NewRequest(\"GET\", metadataURL, nil)\n\tif err != nil {\n\t\treturn Metadata{}, err\n\t}\n\n\treq.Header.Set(\"Metadata\", \"True\")\n\tclient := &http.Client{}\n\tresp, err := client.Do(req)\n\tif err != nil {\n\t\treturn Metadata{}, err\n\t}\n\n\tdefer resp.Body.Close()\n\n\tmetareport := metadataWrapper{}\n\n\tif resp.StatusCode != http.StatusOK {\n\t\terr = fmt.Errorf(\"[Telemetry] Request failed with HTTP error %d\", resp.StatusCode)\n\t} else if resp.Body != nil {\n\t\terr = json.NewDecoder(resp.Body).Decode(&metareport)\n\t\tif err != nil {\n\t\t\terr = fmt.Errorf(\"[Telemetry] Unable to decode response body due to error: %s\", err.Error())\n\t\t}\n\t} else {\n\t\terr = fmt.Errorf(\"[Telemetry] Response body is empty\")\n\t}\n\n\treturn metareport.Metadata, err\n}", "func (c *Client) GetMetaData(ctx context.Context, name string) (*MetaData, error) {\n\tmetadata, err := c.getMetaDataFrom2022API(ctx, name)\n\tif err != nil {\n\t\tlog.Printf(\"[DEBUG] Falling back to ARM Metadata version 2019-05-01 for %s\", c.endpoint)\n\t\tmetadata, err = c.getMetaDataFrom2019API(ctx, name)\n\t\tif err != nil {\n\t\t\treturn nil, fmt.Errorf(\"retrieving metadata from the 2022-09-01 and 2019-05-01 APIs: %+v\", err)\n\t\t}\n\t}\n\n\treturn &MetaData{\n\t\tAuthentication: Authentication{\n\t\t\tAudiences: metadata.Authentication.Audiences,\n\t\t\tLoginEndpoint: metadata.Authentication.LoginEndpoint,\n\t\t\tIdentityProvider: metadata.Authentication.IdentityProvider,\n\t\t\tTenant: metadata.Authentication.Tenant,\n\t\t},\n\t\tDnsSuffixes: DnsSuffixes{\n\t\t\tAttestation: metadata.Suffixes.AttestationEndpoint,\n\t\t\tFrontDoor: metadata.Suffixes.AzureFrontDoorEndpointSuffix,\n\t\t\tKeyVault: metadata.Suffixes.KeyVaultDns,\n\t\t\tManagedHSM: metadata.Suffixes.MhsmDns,\n\t\t\tMariaDB: metadata.Suffixes.MariadbServerEndpoint,\n\t\t\tMySql: metadata.Suffixes.MysqlServerEndpoint,\n\t\t\tPostgresql: metadata.Suffixes.PostgresqlServerEndpoint,\n\t\t\tSqlServer: metadata.Suffixes.SqlServerHostname,\n\t\t\tStorage: metadata.Suffixes.Storage,\n\t\t\tStorageSync: metadata.Suffixes.StorageSyncEndpointSuffix,\n\t\t\tSynapse: metadata.Suffixes.SynapseAnalytics,\n\t\t},\n\t\tName: 
metadata.Name,\n\t\tResourceIdentifiers: ResourceIdentifiers{\n\t\t\tAttestation: normalizeResourceId(metadata.AttestationResourceId),\n\t\t\tBatch: normalizeResourceId(metadata.Batch),\n\t\t\tLogAnalytics: normalizeResourceId(metadata.LogAnalyticsResourceId),\n\t\t\tMedia: normalizeResourceId(metadata.Media),\n\t\t\tMicrosoftGraph: normalizeResourceId(metadata.MicrosoftGraphResourceId),\n\t\t\tOSSRDBMS: normalizeResourceId(metadata.OssrDbmsResourceId),\n\t\t\tSynapse: normalizeResourceId(metadata.SynapseAnalyticsResourceId),\n\t\t},\n\t\tResourceManagerEndpoint: metadata.ResourceManager,\n\t}, nil\n}", "func buildServiceMetadata(host, name, namespace string) *structpb.Value {\n\treturn &structpb.Value{\n\t\tKind: &structpb.Value_StructValue{\n\t\t\tStructValue: &structpb.Struct{\n\t\t\t\tFields: map[string]*structpb.Value{\n\t\t\t\t\t// service fqdn\n\t\t\t\t\t\"host\": {\n\t\t\t\t\t\tKind: &structpb.Value_StringValue{\n\t\t\t\t\t\t\tStringValue: host,\n\t\t\t\t\t\t},\n\t\t\t\t\t},\n\t\t\t\t\t// short name of the service\n\t\t\t\t\t\"name\": {\n\t\t\t\t\t\tKind: &structpb.Value_StringValue{\n\t\t\t\t\t\t\tStringValue: name,\n\t\t\t\t\t\t},\n\t\t\t\t\t},\n\t\t\t\t\t// namespace of the service\n\t\t\t\t\t\"namespace\": {\n\t\t\t\t\t\tKind: &structpb.Value_StringValue{\n\t\t\t\t\t\t\tStringValue: namespace,\n\t\t\t\t\t\t},\n\t\t\t\t\t},\n\t\t\t\t},\n\t\t\t},\n\t\t},\n\t}\n}", "func (c *jsiiProxy_CfnSecurityConfiguration) GetMetadata(key *string) interface{} {\n\tvar returns interface{}\n\n\t_jsii_.Invoke(\n\t\tc,\n\t\t\"getMetadata\",\n\t\t[]interface{}{key},\n\t\t&returns,\n\t)\n\n\treturn returns\n}", "func (a *Client) GetOpenstackMetadata(params *GetOpenstackMetadataParams, authInfo runtime.ClientAuthInfoWriter) (*GetOpenstackMetadataOK, error) {\n\t// TODO: Validate the params before sending\n\tif params == nil {\n\t\tparams = NewGetOpenstackMetadataParams()\n\t}\n\n\tresult, err := a.transport.Submit(&runtime.ClientOperation{\n\t\tID: \"GetOpenstackMetadata\",\n\t\tMethod: \"GET\",\n\t\tPathPattern: \"/api/v1/openstack/metadata\",\n\t\tProducesMediaTypes: []string{\"application/json\"},\n\t\tConsumesMediaTypes: []string{\"application/json\"},\n\t\tSchemes: []string{\"https\"},\n\t\tParams: params,\n\t\tReader: &GetOpenstackMetadataReader{formats: a.formats},\n\t\tAuthInfo: authInfo,\n\t\tContext: params.Context,\n\t\tClient: params.HTTPClient,\n\t})\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\treturn result.(*GetOpenstackMetadataOK), nil\n\n}", "func (c *jsiiProxy_CfnModuleVersion) GetMetadata(key *string) interface{} {\n\tvar returns interface{}\n\n\t_jsii_.Invoke(\n\t\tc,\n\t\t\"getMetadata\",\n\t\t[]interface{}{key},\n\t\t&returns,\n\t)\n\n\treturn returns\n}", "func (c *jsiiProxy_CfnApp) GetMetadata(key *string) interface{} {\n\tvar returns interface{}\n\n\t_jsii_.Invoke(\n\t\tc,\n\t\t\"getMetadata\",\n\t\t[]interface{}{key},\n\t\t&returns,\n\t)\n\n\treturn returns\n}", "func (c *jsiiProxy_CfnDetector) GetMetadata(key *string) interface{} {\n\tvar returns interface{}\n\n\t_jsii_.Invoke(\n\t\tc,\n\t\t\"getMetadata\",\n\t\t[]interface{}{key},\n\t\t&returns,\n\t)\n\n\treturn returns\n}", "func (c *jsiiProxy_CfnInstance) GetMetadata(key *string) interface{} {\n\tvar returns interface{}\n\n\t_jsii_.Invoke(\n\t\tc,\n\t\t\"getMetadata\",\n\t\t[]interface{}{key},\n\t\t&returns,\n\t)\n\n\treturn returns\n}", "func getHostMetadata() (Metadata, error) {\n\tcontent, err := ioutil.ReadFile(metadataFile)\n\tif err == nil {\n\t\tvar metadata Metadata\n\t\tif err = json.Unmarshal(content, 
&metadata); err == nil {\n\t\t\ttelemetryLogger.Printf(\"[Telemetry] Returning hostmetadata from state\")\n\t\t\treturn metadata, nil\n\t\t}\n\t}\n\n\treq, err := http.NewRequest(\"GET\", metadataURL, nil)\n\tif err != nil {\n\t\treturn Metadata{}, err\n\t}\n\n\treq.Header.Set(\"Metadata\", \"True\")\n\tclient := &http.Client{}\n\tresp, err := client.Do(req)\n\tif err != nil {\n\t\treturn Metadata{}, err\n\t}\n\n\tdefer resp.Body.Close()\n\n\tmetareport := metadataWrapper{}\n\n\tif resp.StatusCode != http.StatusOK {\n\t\terr = fmt.Errorf(\"[Telemetry] Request failed with HTTP error %d\", resp.StatusCode)\n\t} else if resp.Body != nil {\n\t\terr = json.NewDecoder(resp.Body).Decode(&metareport)\n\t\tif err != nil {\n\t\t\terr = fmt.Errorf(\"[Telemetry] Unable to decode response body due to error: %s\", err.Error())\n\t\t}\n\t} else {\n\t\terr = fmt.Errorf(\"[Telemetry] Response body is empty\")\n\t}\n\n\treturn metareport.Metadata, err\n}", "func (computeService Service) ServerMetadata(serverID string) (map[string]string, error) {\n\tm := serverMetadataContainer{}\n\treqURL, err := computeService.buildRequestURL(\"/servers/\", serverID, \"/metadata\")\n\tif err != nil {\n\t\treturn m.Metadata, err\n\t}\n\terr = misc.GetJSON(reqURL, computeService.authenticator, &m)\n\treturn m.Metadata, err\n}", "func (c *jsiiProxy_CfnThreatIntelSet) GetMetadata(key *string) interface{} {\n\tvar returns interface{}\n\n\t_jsii_.Invoke(\n\t\tc,\n\t\t\"getMetadata\",\n\t\t[]interface{}{key},\n\t\t&returns,\n\t)\n\n\treturn returns\n}", "func (c *jsiiProxy_CfnDatabase) GetMetadata(key *string) interface{} {\n\tvar returns interface{}\n\n\t_jsii_.Invoke(\n\t\tc,\n\t\t\"getMetadata\",\n\t\t[]interface{}{key},\n\t\t&returns,\n\t)\n\n\treturn returns\n}", "func (c *jsiiProxy_CfnPublicRepository) GetMetadata(key *string) interface{} {\n\tvar returns interface{}\n\n\t_jsii_.Invoke(\n\t\tc,\n\t\t\"getMetadata\",\n\t\t[]interface{}{key},\n\t\t&returns,\n\t)\n\n\treturn returns\n}", "func GetMetadata(p string) (string, error) {\n\tif len(p) > 0 && p[0] == '/' {\n\t\tp = p[1:]\n\t}\n\turl := metadataURL + p\n\ttimeout := time.Duration(metadataTimeout)\n\tclient := http.Client{\n\t\tTimeout: timeout,\n\t}\n\tresp, err := client.Get(url)\n\tif err != nil || resp.StatusCode/200 != 1 {\n\t\treturn \"\", err\n\t}\n\n\tdefer resp.Body.Close()\n\tbody, err := ioutil.ReadAll(resp.Body)\n\treturn string(body), err\n}", "func (a *AdminClient) GetMetadata(topic *string, allTopics bool, timeoutMs int) (*Metadata, error) {\n\terr := a.verifyClient()\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\treturn getMetadata(a, topic, allTopics, timeoutMs)\n}", "func (c *jsiiProxy_CfnStack) GetMetadata(key *string) interface{} {\n\tvar returns interface{}\n\n\t_jsii_.Invoke(\n\t\tc,\n\t\t\"getMetadata\",\n\t\t[]interface{}{key},\n\t\t&returns,\n\t)\n\n\treturn returns\n}", "func (c *jsiiProxy_CfnStack) GetMetadata(key *string) interface{} {\n\tvar returns interface{}\n\n\t_jsii_.Invoke(\n\t\tc,\n\t\t\"getMetadata\",\n\t\t[]interface{}{key},\n\t\t&returns,\n\t)\n\n\treturn returns\n}", "func (c *jsiiProxy_CfnSchema) GetMetadata(key *string) interface{} {\n\tvar returns interface{}\n\n\t_jsii_.Invoke(\n\t\tc,\n\t\t\"getMetadata\",\n\t\t[]interface{}{key},\n\t\t&returns,\n\t)\n\n\treturn returns\n}", "func (c *jsiiProxy_CfnWorkflow) GetMetadata(key *string) interface{} {\n\tvar returns interface{}\n\n\t_jsii_.Invoke(\n\t\tc,\n\t\t\"getMetadata\",\n\t\t[]interface{}{key},\n\t\t&returns,\n\t)\n\n\treturn returns\n}", "func (c 
*jsiiProxy_CfnApplication) GetMetadata(key *string) interface{} {\n\tvar returns interface{}\n\n\t_jsii_.Invoke(\n\t\tc,\n\t\t\"getMetadata\",\n\t\t[]interface{}{key},\n\t\t&returns,\n\t)\n\n\treturn returns\n}", "func (c *jsiiProxy_CfnApplication) GetMetadata(key *string) interface{} {\n\tvar returns interface{}\n\n\t_jsii_.Invoke(\n\t\tc,\n\t\t\"getMetadata\",\n\t\t[]interface{}{key},\n\t\t&returns,\n\t)\n\n\treturn returns\n}", "func GetMetadata(config common.Config) ([]byte, error) {\n\tmetadataOpts := MetadataOptions{\n\t\tID: \"*\",\n\t\tFormat: \"COMPACT\",\n\t\tMType: \"METADATA-SYSTEM\",\n\t}\n\n\t// should we throw an err here too?\n\tsession, err := config.Initialize()\n\tif err != nil {\n\t\treturn make([]byte, 0), err\n\t}\n\n\tctx, cancel := context.WithTimeout(context.Background(), 10*time.Minute)\n\tdefer cancel()\n\n\tcapability, err := rets.Login(ctx, session, rets.LoginRequest{URL: config.URL})\n\tif err != nil {\n\t\treturn make([]byte, 0), err\n\t}\n\tdefer rets.Logout(ctx, session, rets.LogoutRequest{URL: capability.Logout})\n\n\treader, err := rets.MetadataStream(ctx, session, rets.MetadataRequest{\n\t\tURL: capability.GetMetadata,\n\t\tFormat: metadataOpts.Format,\n\t\tMType: metadataOpts.MType,\n\t\tID: metadataOpts.ID,\n\t})\n\n\tdefer reader.Close()\n\tif err != nil {\n\t\treturn make([]byte, 0), err\n\t}\n\treturn ioutil.ReadAll(reader)\n}", "func GetNewMetadataProvider(svc EC2Metadata, clientset kubernetes.Interface) (MetadataProvider, error) {\n\t// check if it is running in ECS otherwise default fall back to ec2\n\tklog.Info(\"getting MetadataService...\")\n\tif isDriverBootedInECS() {\n\t\tklog.Info(\"detected driver is running in ECS, returning task metadata...\")\n\t\treturn taskMetadataProvider{taskMetadataService: &taskMetadata{}}, nil\n\t} else if svc.Available() {\n\t\tklog.Info(\"retrieving metadata from EC2 metadata service\")\n\t\treturn ec2MetadataProvider{ec2MetadataService: svc}, nil\n\t} else if clientset != nil {\n\t\tklog.Info(\"retrieving metadata from Kubernetes API\")\n\t\treturn kubernetesApiMetadataProvider{api: clientset}, nil\n\t} else {\n\t\treturn nil, fmt.Errorf(\"could not create MetadataProvider from any source\")\n\t}\n}", "func (c *jsiiProxy_CfnTable) GetMetadata(key *string) interface{} {\n\tvar returns interface{}\n\n\t_jsii_.Invoke(\n\t\tc,\n\t\t\"getMetadata\",\n\t\t[]interface{}{key},\n\t\t&returns,\n\t)\n\n\treturn returns\n}", "func (c *jsiiProxy_CfnEnvironment) GetMetadata(key *string) interface{} {\n\tvar returns interface{}\n\n\t_jsii_.Invoke(\n\t\tc,\n\t\t\"getMetadata\",\n\t\t[]interface{}{key},\n\t\t&returns,\n\t)\n\n\treturn returns\n}", "func (c *jsiiProxy_CfnSchemaVersionMetadata) GetMetadata(key *string) interface{} {\n\tvar returns interface{}\n\n\t_jsii_.Invoke(\n\t\tc,\n\t\t\"getMetadata\",\n\t\t[]interface{}{key},\n\t\t&returns,\n\t)\n\n\treturn returns\n}", "func (c *jsiiProxy_CfnSchemaVersion) GetMetadata(key *string) interface{} {\n\tvar returns interface{}\n\n\t_jsii_.Invoke(\n\t\tc,\n\t\t\"getMetadata\",\n\t\t[]interface{}{key},\n\t\t&returns,\n\t)\n\n\treturn returns\n}", "func (d *Driver) GetMetadata(id string) (map[string]string, error) {\n\tlogrus.Debugf(\"GetMetadata\")\n\treturn nil, nil\n}", "func (d *Driver) GetMetadata(id string) (map[string]string, error) {\n\treturn map[string]string{\n\t\t\"Mountpoint\": d.mountPath(id),\n\t\t\"Dataset\": d.zfsPath(id),\n\t}, nil\n}", "func (c *jsiiProxy_CfnHostedConfigurationVersion) GetMetadata(key *string) interface{} {\n\tvar returns 
interface{}\n\n\t_jsii_.Invoke(\n\t\tc,\n\t\t\"getMetadata\",\n\t\t[]interface{}{key},\n\t\t&returns,\n\t)\n\n\treturn returns\n}", "func (c *jsiiProxy_CfnStreamingImage) GetMetadata(key *string) interface{} {\n\tvar returns interface{}\n\n\t_jsii_.Invoke(\n\t\tc,\n\t\t\"getMetadata\",\n\t\t[]interface{}{key},\n\t\t&returns,\n\t)\n\n\treturn returns\n}", "func (c *jsiiProxy_CfnInput) GetMetadata(key *string) interface{} {\n\tvar returns interface{}\n\n\t_jsii_.Invoke(\n\t\tc,\n\t\t\"getMetadata\",\n\t\t[]interface{}{key},\n\t\t&returns,\n\t)\n\n\treturn returns\n}", "func GetMetaData(config *GCPConfig) *GCPMetadata {\n\twg := sync.WaitGroup{}\n\twg.Add(3)\n\thttpClient := &http.Client{\n\t\tTimeout: config.timeout,\n\t}\n\tmetadata := &GCPMetadata{}\n\tgo func() {\n\t\tmetadata.ContainerID = getContainerID(httpClient, config)\n\t\twg.Done()\n\t}()\n\tgo func() {\n\t\tmetadata.Region = getRegion(httpClient, config)\n\t\twg.Done()\n\t}()\n\tgo func() {\n\t\tmetadata.ProjectID = getProjectID(httpClient, config)\n\t\twg.Done()\n\t}()\n\twg.Wait()\n\treturn metadata\n}", "func (c *jsiiProxy_CfnFilter) GetMetadata(key *string) interface{} {\n\tvar returns interface{}\n\n\t_jsii_.Invoke(\n\t\tc,\n\t\t\"getMetadata\",\n\t\t[]interface{}{key},\n\t\t&returns,\n\t)\n\n\treturn returns\n}", "func (op *EnableServiceOperation) Metadata() (*serviceusagepb.OperationMetadata, error) {\n\tvar meta serviceusagepb.OperationMetadata\n\tif err := op.lro.Metadata(&meta); err == longrunning.ErrNoMetadata {\n\t\treturn nil, nil\n\t} else if err != nil {\n\t\treturn nil, err\n\t}\n\treturn &meta, nil\n}", "func (legacyStorageImpl) GetMetadata(ctx context.Context, prefix string) ([]*api.PrefixMetadata, error) {\n\tmd, _, err := getMetadataImpl(ctx, prefix)\n\treturn md, err\n}", "func (c *jsiiProxy_CfnRegistry) GetMetadata(key *string) interface{} {\n\tvar returns interface{}\n\n\t_jsii_.Invoke(\n\t\tc,\n\t\t\"getMetadata\",\n\t\t[]interface{}{key},\n\t\t&returns,\n\t)\n\n\treturn returns\n}", "func (c *jsiiProxy_CfnConnection) GetMetadata(key *string) interface{} {\n\tvar returns interface{}\n\n\t_jsii_.Invoke(\n\t\tc,\n\t\t\"getMetadata\",\n\t\t[]interface{}{key},\n\t\t&returns,\n\t)\n\n\treturn returns\n}", "func (c *jsiiProxy_CfnDetectorModel) GetMetadata(key *string) interface{} {\n\tvar returns interface{}\n\n\t_jsii_.Invoke(\n\t\tc,\n\t\t\"getMetadata\",\n\t\t[]interface{}{key},\n\t\t&returns,\n\t)\n\n\treturn returns\n}", "func (c *jsiiProxy_CfnResourceVersion) GetMetadata(key *string) interface{} {\n\tvar returns interface{}\n\n\t_jsii_.Invoke(\n\t\tc,\n\t\t\"getMetadata\",\n\t\t[]interface{}{key},\n\t\t&returns,\n\t)\n\n\treturn returns\n}", "func (c *jsiiProxy_CfnConfigurationProfile) GetMetadata(key *string) interface{} {\n\tvar returns interface{}\n\n\t_jsii_.Invoke(\n\t\tc,\n\t\t\"getMetadata\",\n\t\t[]interface{}{key},\n\t\t&returns,\n\t)\n\n\treturn returns\n}", "func (c *jsiiProxy_CfnStackSet) GetMetadata(key *string) interface{} {\n\tvar returns interface{}\n\n\t_jsii_.Invoke(\n\t\tc,\n\t\t\"getMetadata\",\n\t\t[]interface{}{key},\n\t\t&returns,\n\t)\n\n\treturn returns\n}", "func (c *jsiiProxy_CfnIPSet) GetMetadata(key *string) interface{} {\n\tvar returns interface{}\n\n\t_jsii_.Invoke(\n\t\tc,\n\t\t\"getMetadata\",\n\t\t[]interface{}{key},\n\t\t&returns,\n\t)\n\n\treturn returns\n}", "func (c *jsiiProxy_CfnDataCatalogEncryptionSettings) GetMetadata(key *string) interface{} {\n\tvar returns 
interface{}\n\n\t_jsii_.Invoke(\n\t\tc,\n\t\t\"getMetadata\",\n\t\t[]interface{}{key},\n\t\t&returns,\n\t)\n\n\treturn returns\n}", "func GetMetadataForVersion(c *gin.Context) {\n\ttitle := utils.FormatString(c.Param(\"title\"))\n\tversion := utils.FormatString(c.Param(\"version\"))\n\n\tmetadataStore := datastore.GetStore()\n\tdata, err := metadataStore.GetApplicationWithVersion(title, version)\n\tif err != nil {\n\t\tc.YAML(http.StatusNotFound, gin.H{\"status\": http.StatusNotFound, \"message\": messages.MetadataNotFound, \"error\": err})\n\t} else {\n\t\tc.YAML(http.StatusOK, gin.H{\"status\": http.StatusOK, \"message\": messages.MetadataFound, \"data\": data})\n\t}\n}", "func GetAllMetadata(c *gin.Context) {\n\tmetadataStore := datastore.GetStore()\n\n\tdata, err := metadataStore.GetAllMetadata()\n\tif err != nil {\n\t\tc.YAML(http.StatusNotFound, gin.H{\"status\": http.StatusNotFound, \"message\": messages.MetadataNotFound, \"error\": err})\n\t} else {\n\t\tc.YAML(http.StatusOK, gin.H{\"status\": http.StatusOK, \"message\": messages.MetadataFound, \"data\": data})\n\t}\n}", "func (c *jsiiProxy_CfnContact) GetMetadata(key *string) interface{} {\n\tvar returns interface{}\n\n\t_jsii_.Invoke(\n\t\tc,\n\t\t\"getMetadata\",\n\t\t[]interface{}{key},\n\t\t&returns,\n\t)\n\n\treturn returns\n}", "func NewMetadataClient(commandConfig *config.CommandConfig) Client {\n\tclient := ssm.New(commandConfig.Session)\n\tclient.Handlers.Build.PushBackNamed(clients.CustomUserAgentHandler())\n\treturn &metadataClient{\n\t\tclient: client,\n\t\tregion: aws.StringValue(commandConfig.Session.Config.Region),\n\t}\n}", "func (op *StartPaidServiceOperation) Metadata() (*channelpb.OperationMetadata, error) {\n\tvar meta channelpb.OperationMetadata\n\tif err := op.lro.Metadata(&meta); err == longrunning.ErrNoMetadata {\n\t\treturn nil, nil\n\t} else if err != nil {\n\t\treturn nil, err\n\t}\n\treturn &meta, nil\n}", "func (c *jsiiProxy_CfnMacro) GetMetadata(key *string) interface{} {\n\tvar returns interface{}\n\n\t_jsii_.Invoke(\n\t\tc,\n\t\t\"getMetadata\",\n\t\t[]interface{}{key},\n\t\t&returns,\n\t)\n\n\treturn returns\n}", "func PatchServiceTemplateMetadata(t *testing.T, clients *Clients, svc *v1alpha1.Service, metadata metav1.ObjectMeta) (*v1alpha1.Service, error) {\n\tnewSvc := svc.DeepCopy()\n\tnewSvc.Spec.ConfigurationSpec.Template.ObjectMeta = metadata\n\tLogResourceObject(t, ResourceObjects{Service: newSvc})\n\tpatchBytes, err := createPatch(svc, newSvc)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\treturn clients.ServingClient.Services.Patch(svc.ObjectMeta.Name, types.JSONPatchType, patchBytes, \"\")\n}", "func (c *jsiiProxy_CfnPreset) GetMetadata(key *string) interface{} {\n\tvar returns interface{}\n\n\t_jsii_.Invoke(\n\t\tc,\n\t\t\"getMetadata\",\n\t\t[]interface{}{key},\n\t\t&returns,\n\t)\n\n\treturn returns\n}", "func (a *StartupConfigurationApiService) GetInstallerMetadata(ctx _context.Context) ApiGetInstallerMetadataRequest {\n\treturn ApiGetInstallerMetadataRequest{\n\t\tApiService: a,\n\t\tctx: ctx,\n\t}\n}", "func (m MockKeyring) GetMetadata(key string) (keyring.Metadata, error) {\n\treturn keyring.Metadata{}, nil\n}", "func (p *Pod) GetServiceMeta(n string) map[string]string {\n\tif metaStr, ok := p.Pod.ObjectMeta.Annotations[ConsulServiceMetaOverride+n]; ok {\n\t\treturn ParseMap(metaStr)\n\t}\n\n\tif metaStr, ok := p.Pod.ObjectMeta.Annotations[ConsulServiceMeta]; ok {\n\t\treturn ParseMap(metaStr)\n\t}\n\n\treturn nil\n}", "func (c *jsiiProxy_CfnRegistryPolicy) GetMetadata(key *string) 
interface{} {\n\tvar returns interface{}\n\n\t_jsii_.Invoke(\n\t\tc,\n\t\t\"getMetadata\",\n\t\t[]interface{}{key},\n\t\t&returns,\n\t)\n\n\treturn returns\n}", "func (c *jsiiProxy_CfnClassifier) GetMetadata(key *string) interface{} {\n\tvar returns interface{}\n\n\t_jsii_.Invoke(\n\t\tc,\n\t\t\"getMetadata\",\n\t\t[]interface{}{key},\n\t\t&returns,\n\t)\n\n\treturn returns\n}", "func (op *BatchEnableServicesOperation) Metadata() (*serviceusagepb.OperationMetadata, error) {\n\tvar meta serviceusagepb.OperationMetadata\n\tif err := op.lro.Metadata(&meta); err == longrunning.ErrNoMetadata {\n\t\treturn nil, nil\n\t} else if err != nil {\n\t\treturn nil, err\n\t}\n\treturn &meta, nil\n}", "func (c *jsiiProxy_CfnReplicationConfiguration) GetMetadata(key *string) interface{} {\n\tvar returns interface{}\n\n\t_jsii_.Invoke(\n\t\tc,\n\t\t\"getMetadata\",\n\t\t[]interface{}{key},\n\t\t&returns,\n\t)\n\n\treturn returns\n}", "func (c *jsiiProxy_CfnPartition) GetMetadata(key *string) interface{} {\n\tvar returns interface{}\n\n\t_jsii_.Invoke(\n\t\tc,\n\t\t\"getMetadata\",\n\t\t[]interface{}{key},\n\t\t&returns,\n\t)\n\n\treturn returns\n}", "func (c *jsiiProxy_CfnMLTransform) GetMetadata(key *string) interface{} {\n\tvar returns interface{}\n\n\t_jsii_.Invoke(\n\t\tc,\n\t\t\"getMetadata\",\n\t\t[]interface{}{key},\n\t\t&returns,\n\t)\n\n\treturn returns\n}", "func (o *IamServiceProviderAllOf) SetMetadata(v string) {\n\to.Metadata = &v\n}", "func getSourceMetadata(ctx context.Context, log lg.Log, src *source.Source, db sqlz.DB) (*source.Metadata, error) {\n\tmd := &source.Metadata{SourceType: Type, DBDriverType: Type, Handle: src.Handle, Location: src.Location}\n\n\tg, gctx := errgroup.WithContext(ctx)\n\n\tg.Go(func() error {\n\t\treturn setSourceSummaryMeta(gctx, db, md)\n\t})\n\n\tg.Go(func() error {\n\t\tvar err error\n\t\tmd.DBVars, err = getDBVarsMeta(gctx, log, db)\n\t\treturn err\n\t})\n\n\tg.Go(func() error {\n\t\tvar err error\n\t\tmd.Tables, err = getAllTblMetas(gctx, log, db)\n\t\treturn err\n\t})\n\n\terr := g.Wait()\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\n\treturn md, nil\n}", "func MetadataClient(config *Config) (metadata.ReadWriteClient, error) {\n\tif config.Metadata == constants.GrafeasMetadata {\n\t\treturn grafeas.New(config.Grafeas, config.Certs)\n\t}\n\tif config.Metadata == constants.ContainerAnalysisMetadata {\n\t\treturn containeranalysis.NewCache()\n\t}\n\treturn nil, fmt.Errorf(\"unsupported backend %v\", config.Metadata)\n}", "func extractMetadata(a *expr.MappedAttributeExpr, service *expr.AttributeExpr, scope *codegen.NameScope) []*MetadataData {\n\tvar metadata []*MetadataData\n\tctx := serviceTypeContext(\"\", scope)\n\tcodegen.WalkMappedAttr(a, func(name, elem string, required bool, c *expr.AttributeExpr) error {\n\t\tvar (\n\t\t\tvarn string\n\t\t\tfieldName string\n\t\t\tpointer bool\n\n\t\t\tarr = expr.AsArray(c.Type)\n\t\t\tmp = expr.AsMap(c.Type)\n\t\t\ttypeRef = scope.GoTypeRef(unalias(c))\n\t\t\tft = service.Type\n\t\t)\n\t\t{\n\t\t\tvarn = scope.Name(codegen.Goify(name, false))\n\t\t\tfieldName = codegen.Goify(name, true)\n\t\t\tif !expr.IsObject(service.Type) {\n\t\t\t\tfieldName = \"\"\n\t\t\t} else {\n\t\t\t\tpointer = service.IsPrimitivePointer(name, true)\n\t\t\t\tft = service.Find(name).Type\n\t\t\t}\n\t\t\tif pointer {\n\t\t\t\ttypeRef = \"*\" + typeRef\n\t\t\t}\n\t\t}\n\t\tmetadata = append(metadata, &MetadataData{\n\t\t\tName: elem,\n\t\t\tAttributeName: name,\n\t\t\tDescription: c.Description,\n\t\t\tFieldName: fieldName,\n\t\t\tFieldType: 
ft,\n\t\t\tVarName: varn,\n\t\t\tRequired: required,\n\t\t\tType: c.Type,\n\t\t\tTypeName: scope.GoTypeName(unalias(c)),\n\t\t\tTypeRef: typeRef,\n\t\t\tPointer: pointer,\n\t\t\tSlice: arr != nil,\n\t\t\tStringSlice: arr != nil && arr.ElemType.Type.Kind() == expr.StringKind,\n\t\t\tMap: mp != nil,\n\t\t\tMapStringSlice: mp != nil &&\n\t\t\t\tmp.KeyType.Type.Kind() == expr.StringKind &&\n\t\t\t\tmp.ElemType.Type.Kind() == expr.ArrayKind &&\n\t\t\t\texpr.AsArray(mp.ElemType.Type).ElemType.Type.Kind() == expr.StringKind,\n\t\t\tValidate: codegen.AttributeValidationCode(c, nil, ctx, required, false, varn, name),\n\t\t\tDefaultValue: c.DefaultValue,\n\t\t\tExample: c.Example(expr.Root.API.ExampleGenerator),\n\t\t})\n\t\treturn nil\n\t})\n\treturn metadata\n}", "func (c *jsiiProxy_CfnCrawler) GetMetadata(key *string) interface{} {\n\tvar returns interface{}\n\n\t_jsii_.Invoke(\n\t\tc,\n\t\t\"getMetadata\",\n\t\t[]interface{}{key},\n\t\t&returns,\n\t)\n\n\treturn returns\n}", "func (c *jsiiProxy_CfnMissionProfile) GetMetadata(key *string) interface{} {\n\tvar returns interface{}\n\n\t_jsii_.Invoke(\n\t\tc,\n\t\t\"getMetadata\",\n\t\t[]interface{}{key},\n\t\t&returns,\n\t)\n\n\treturn returns\n}", "func (m *Metadata) GetMetadata() Metadata {\n\treturn *m\n}", "func GetMetadata() (*Metadata, error) {\n\tif data == nil {\n\t\tdate, err := time.Parse(time.RFC1123Z, CompiledOn)\n\t\tif err != nil {\n\t\t\tfmt.Println(err)\n\t\t\tdate = time.Now()\n\t\t}\n\t\tdata = &Metadata{RunningSince: time.Now(), BuildSHA: BuildCommitSHA, CompiledOn: date}\n\t\terr = json.Unmarshal([]byte(VersionInfo), data)\n\t\treturn data, err\n\t}\n\treturn data, nil\n}", "func (d *Driver) GetMetadata(id string) (map[string]string, error) {\n\tlogrus.Debugf(\"secureoverlay2: GetMetadata called w. id: %s\", id)\n\tdir := d.dir(id)\n\tif _, err := os.Stat(dir); err != nil {\n\t\treturn nil, err\n\t}\n\n\tmetadata := map[string]string{\n\t\t\"WorkDir\": path.Join(dir, \"work\"),\n\t\t\"MergedDir\": path.Join(dir, \"merged\"),\n\t\t\"UpperDir\": path.Join(dir, \"diff\"),\n\t}\n\n\tlowerDirs, err := d.getLowerDirs(id)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\tif len(lowerDirs) > 0 {\n\t\tmetadata[\"LowerDir\"] = strings.Join(lowerDirs, \":\")\n\t}\n\n\t// additional data\n\ts, err := d.getSecurityMetaDataForID(id, \"\")\n\tswitch {\n\tcase err == nil:\n\t\tif s.RequiresConfidentiality {\n\t\t\t// embedd security meta-data if it is a secured image.\n\t\t\t// Note: only including it for secured images allows non-secured images still\n\t\t\t// to work with Manifest Schema 1 of registry. 
For secured images, in particular with\n\t\t\t// integrity, Schema 2 is essential to get the secure content-addressable nature of the image.\n\n\t\t\t// do some clean-up of unneeded params to declutter config/docker history\n\t\t\tif !s.RequiresConfidentiality {\n\t\t\t\ts.KeyHandle = \"\"\n\t\t\t\ts.KeyType = \"\"\n\t\t\t\ts.KeyTypeOption = \"\"\n\t\t\t\ts.KeyDesc = \"\"\n\t\t\t\ts.KeySize = \"\"\n\t\t\t\ts.KeyFilePath = \"\"\n\t\t\t\ts.CryptCipher = \"\"\n\t\t\t}\n\t\t\tbytes, _ := s.Encode()\n\t\t\tlogrus.Debugf(\"secureoverlay2: GetMetadata, adding (encoded) security meta-data %s\", s)\n\t\t\tmetadata[\"security-meta-data\"] = string(bytes)\n\t\t} else {\n\t\t\tctx = context.WithValue(context.TODO(), \"\", \"\")\n\t\t\tlogrus.Debug(\"secureoverlay2: GetMetadata, security meta-data indicates unsecured layer, skip security meta data addition\")\n\t\t}\n\tcase os.IsNotExist(err):\n\t\tlogrus.Debugf(\"secureoverlay2: GetMetadata, no security meta-data found to be added: %v\", err)\n\tdefault:\n\t\treturn nil, err\n\t}\n\n\tlogrus.Debugf(\"secureoverlay2: GetMetadata return w. metadata: %v\", metadata)\n\n\treturn metadata, nil\n}", "func retrieveComputeInstanceMetadata() (metadata ComputeInstanceMetadata, err error) {\n\tvar m ComputeInstanceMetadata\n\tc := &http.Client{}\n\n\treq, _ := http.NewRequest(\"GET\", azureInstanceMetadataEndpoint+\"/compute\", nil)\n\treq.Header.Add(\"Metadata\", \"True\")\n\tq := req.URL.Query()\n\tq.Add(\"format\", \"json\")\n\tq.Add(\"api-version\", \"2017-12-01\")\n\treq.URL.RawQuery = q.Encode()\n\n\tresp, err := c.Do(req)\n\tif err != nil {\n\t\treturn m, fmt.Errorf(\"sending Azure Instance Metadata Service request failed: %v\", err)\n\t}\n\tdefer resp.Body.Close()\n\n\trawJSON, err := ioutil.ReadAll(resp.Body)\n\tif err != nil {\n\t\treturn m, fmt.Errorf(\"reading response body failed: %v\", err)\n\t}\n\tif err := json.Unmarshal(rawJSON, &m); err != nil {\n\t\treturn m, fmt.Errorf(\"unmarshaling JSON response failed: %v\", err)\n\t}\n\n\treturn m, nil\n}" ]
[ "0.6673847", "0.6563827", "0.64241195", "0.62847346", "0.622382", "0.61060303", "0.60837245", "0.60770833", "0.6072493", "0.6065173", "0.6050431", "0.6044823", "0.5939721", "0.5935539", "0.59329593", "0.59249985", "0.5895306", "0.5889545", "0.5867091", "0.5848588", "0.583497", "0.5827703", "0.58209735", "0.5819981", "0.5813347", "0.5803211", "0.57830423", "0.5771958", "0.5770793", "0.576484", "0.5749971", "0.5711777", "0.5698469", "0.5686926", "0.5674331", "0.56720084", "0.5655712", "0.5653507", "0.5639328", "0.56356144", "0.5627667", "0.5620923", "0.5613087", "0.56123906", "0.5609036", "0.5609036", "0.5608653", "0.5604163", "0.5604068", "0.5604068", "0.55993176", "0.55974704", "0.55907524", "0.5588392", "0.55820656", "0.5577851", "0.5577467", "0.5575048", "0.5570868", "0.5561587", "0.5560983", "0.554808", "0.55418235", "0.5515348", "0.550721", "0.5503645", "0.54991674", "0.5490968", "0.5462759", "0.5462375", "0.54601586", "0.5455513", "0.54550034", "0.54451424", "0.54406244", "0.5428859", "0.5424903", "0.54200375", "0.54186714", "0.5416603", "0.54037744", "0.539564", "0.5393846", "0.5393694", "0.5383802", "0.5381465", "0.5379598", "0.53781646", "0.5377948", "0.5364017", "0.5348235", "0.5340538", "0.533395", "0.5332547", "0.5326353", "0.53236896", "0.5316136", "0.5306618", "0.52952504", "0.5289964" ]
0.80462337
0
X encodes bytes to string
func X(val []byte) string { return hex.EncodeToString(val) }
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "func Hexlify(inputBytes []byte) string {\r\n\treturn hex.EncodeToString(inputBytes)\r\n}", "func (m *Myself) toxstring() string {\n\tm.mutex.RLock()\n\tdefer m.mutex.RUnlock()\n\treturn m.toNode().toxstring()\n}", "func F(bytes []byte) string {\n\treturn fmt.Sprintf(\"%x\", bytes)\n}", "func (x XID) String() string {\n\tdst := make([]byte, 20)\n\tx.encode(dst)\n\treturn b2s(dst)\n}", "func (me TCryptoBinary) String() string { return xsdt.Base64Binary(me).String() }", "func XMLBytes(w http.ResponseWriter, status int, b []byte) (err error) {\n\tSetContentType(w, ApplicationXMLCharsetUTF8)\n\tw.WriteHeader(status)\n\n\tif _, err = w.Write(xmlHeaderBytes); err == nil {\n\t\t_, err = w.Write(b)\n\t}\n\n\treturn\n}", "func Hexlify(data []byte) string {\n\treturn hex.EncodeToString(data)\n}", "func (n *Node) toxstring() string {\n\treturn strings.Replace(n.Nodestr, \"/\", \"+\", -1)\n}", "func BytesString(hash []byte) string {\n\treturn fmt.Sprintf(\"%X\", hash)\n}", "func HexEncodeToString(b []byte) string {\n\treturn \"0x\" + hex.EncodeToString(b)\n}", "func encodeUxArray(obj *UxArray) ([]byte, error) {\n\tn := encodeSizeUxArray(obj)\n\tbuf := make([]byte, n)\n\n\tif err := encodeUxArrayToBuffer(buf, obj); err != nil {\n\t\treturn nil, err\n\t}\n\n\treturn buf, nil\n}", "func HexEncode(bs []byte) string {\n\treturn fmt.Sprintf(\"0x%s\", hex.EncodeToString(bs))\n}", "func stringEncoder(e *encodeState, v reflect.Value) error {\n\tb := reflect.ValueOf([]byte(v.String()))\n\n\treturn e.marshal(b)\n}", "func Encode(b []byte) string {\n\tenc := make([]byte, len(b)*2+2)\n\tcopy(enc, \"0x\")\n\thex.Encode(enc[2:], b)\n\treturn string(enc)\n}", "func Encode(b []byte) string {\n\tenc := make([]byte, len(b)*2+2)\n\tcopy(enc, \"0x\")\n\thex.Encode(enc[2:], b)\n\treturn string(enc)\n}", "func encodeByteSequence(v [][]byte) []byte {\n\tvar hexstrings []string\n\tfor _, a := range v {\n\t\thexstrings = append(hexstrings, hexutil.Encode(a))\n\t}\n\treturn []byte(strings.Join(hexstrings, \",\"))\n}", "func byteString(b []byte) string {\n\tif len(b) < 1 {\n\t\treturn \"\"\n\t}\n\n\ts := make([]byte, len(b)*3-1)\n\ti, j := 0, 0\n\tfor n := len(b) - 1; i < n; i, j = i+1, j+3 {\n\t\ts[j+0] = hex[(b[i] >> 4)]\n\t\ts[j+1] = hex[(b[i] & 0x0f)]\n\t\ts[j+2] = ' '\n\t}\n\ts[j+0] = hex[(b[i] >> 4)]\n\ts[j+1] = hex[(b[i] & 0x0f)]\n\treturn string(s)\n}", "func (wtgb *WorkerTypeGroupBy) StringX(ctx context.Context) string {\n\tv, err := wtgb.String(ctx)\n\tif err != nil {\n\t\tpanic(err)\n\t}\n\treturn v\n}", "func (s StorageDataRaw) Hex() string {\n\treturn fmt.Sprintf(\"%#x\", s)\n}", "func Encode(data interface{}) []byte {\n v := Value{data}\n return v.Encode()\n}", "func (lbgb *LatestBlockGroupBy) StringX(ctx context.Context) string {\n\tv, err := lbgb.String(ctx)\n\tif err != nil {\n\t\tpanic(err)\n\t}\n\treturn v\n}", "func encodeResponse(response interface{}) []byte {\n\tvar bytesBuffer bytes.Buffer\n\tbytesBuffer.WriteString(xml.Header)\n\te := xml.NewEncoder(&bytesBuffer)\n\te.Encode(response)\n\treturn bytesBuffer.Bytes()\n}", "func (_ XmlEncoder) Encode(v ...interface{}) (string, error) {\n\tvar buf bytes.Buffer\n\tif _, err := buf.Write([]byte(xml.Header)); err != nil {\n\t\treturn \"\", err\n\t}\n\tb, err := xml.Marshal(v)\n\tif err != nil {\n\t\treturn \"\", err\n\t}\n\tif _, err := buf.Write(b); err != nil {\n\t\treturn \"\", err\n\t}\n\treturn buf.String(), err\n}", "func bytesToStr(data []byte) string {\n\treturn string(data)\n}", "func (b Bytes) HexStr(noPrefix ...bool) string { return ToHex(b.Bytes(), 
noPrefix...) }", "func (rgb *ReceiptGroupBy) StringX(ctx context.Context) string {\n\tv, err := rgb.String(ctx)\n\tif err != nil {\n\t\tpanic(err)\n\t}\n\treturn v\n}", "func (wgb *WifiGroupBy) StringX(ctx context.Context) string {\n\tv, err := wgb.String(ctx)\n\tif err != nil {\n\t\tpanic(err)\n\t}\n\treturn v\n}", "func (e Bytes) String() string {\n\treturn fmt.Sprintf(\"%v\", e)\n}", "func RatGobEncode(x *big.Rat,) ([]byte, error)", "func (k *PublicKey) Bytes(compressed bool) []byte {\n\tx := k.X.Bytes()\n\tif len(x) < 32 {\n\t\tfor i := 0; i < 32-len(x); i++ {\n\t\t\tx = append([]byte{0}, x...)\n\t\t}\n\t}\n\n\tif compressed {\n\t\t// If odd\n\t\tif k.Y.Bit(0) != 0 {\n\t\t\treturn bytes.Join([][]byte{{0x03}, x}, nil)\n\t\t}\n\n\t\t// If even\n\t\treturn bytes.Join([][]byte{{0x02}, x}, nil)\n\t}\n\n\ty := k.Y.Bytes()\n\tif len(y) < 32 {\n\t\tfor i := 0; i < 32-len(y); i++ {\n\t\t\ty = append([]byte{0}, y...)\n\t\t}\n\t}\n\n\treturn bytes.Join([][]byte{{0x04}, x, y}, nil)\n}", "func (ougb *OrgUnitGroupBy) StringX(ctx context.Context) string {\n\tv, err := ougb.String(ctx)\n\tif err != nil {\n\t\tpanic(err)\n\t}\n\treturn v\n}", "func (enc *encoder) String() string {\n\treturn enc.buf.String()\n}", "func (c *Ctx) XMLBytes(code int, b []byte) (err error) {\n\n\tc.response.Header().Set(ContentType, ApplicationXMLCharsetUTF8)\n\tc.response.WriteHeader(code)\n\n\tif _, err = c.response.Write([]byte(xml.Header)); err == nil {\n\t\t_, err = c.response.Write(b)\n\t}\n\n\treturn\n}", "func (v ByteVec) String() string { return string([]byte(v)) }", "func hex(data []byte) string {\n\tbuf := make([]byte, 4*len(data))\n\tconst digits = \"0123456789abcdef\"\n\tfor i, b := range data {\n\t\tbuf[i*4] = '\\\\'\n\t\tbuf[(i*4)+1] = 'x'\n\t\tbuf[(i*4)+2] = digits[b>>4]\n\t\tbuf[(i*4)+3] = digits[b&0x0F]\n\t}\n\treturn string(buf)\n}", "func (nimgb *NetInterfaceModeGroupBy) StringX(ctx context.Context) string {\n\tv, err := nimgb.String(ctx)\n\tif err != nil {\n\t\tpanic(err)\n\t}\n\treturn v\n}", "func Bytes(x Value) []byte {\n\treturn constant.Bytes(x)\n}", "func encode(buf *bytes.Buffer, v reflect.Value) error {\n\tswitch v.Kind() {\n\tcase reflect.Invalid: // ignore\n\n\tcase reflect.Float32, reflect.Float64:\n\t\tfmt.Fprintf(buf, \"%f\", v.Float())\n\n\tcase reflect.Complex128, reflect.Complex64:\n\t\tc := v.Complex()\n\t\tfmt.Fprintf(buf, \"#C(%f %f)\", real(c), imag(c))\n\n\tcase reflect.Bool:\n\t\tif v.Bool() {\n\t\t\tfmt.Fprintf(buf, \"t\")\n\t\t}\n\n\tcase reflect.Interface:\n\t\t// type output\n\t\tt := v.Elem().Type()\n\n\t\tleftBuffer := new(bytes.Buffer)\n\t\trightBuffer := new(bytes.Buffer)\n\n\t\tif t.Name() == \"\" { // 名前がつけられてないtypeはそのまま表示する\n\t\t\tfmt.Fprintf(leftBuffer, \"%q\", t)\n\t\t} else {\n\t\t\tfmt.Fprintf(leftBuffer, \"\\\"%s.%s\\\" \", t.PkgPath(), t.Name()) //一意ではないとはこういうことか?\n\t\t}\n\n\t\t// value output\n\t\tif err := encode(rightBuffer, v.Elem()); err != nil {\n\t\t\treturn err\n\t\t}\n\n\t\tif len(rightBuffer.Bytes()) != 0 {\n\t\t\tbuf.WriteByte('(')\n\t\t\tbuf.Write(leftBuffer.Bytes())\n\t\t\tbuf.WriteByte(' ')\n\t\t\tbuf.Write(rightBuffer.Bytes())\n\t\t\tbuf.WriteByte(')')\n\t\t}\n\n\tcase reflect.Int, reflect.Int8, reflect.Int16,\n\t\treflect.Int32, reflect.Int64:\n\t\tfmt.Fprintf(buf, \"%d\", v.Int())\n\n\tcase reflect.Uint, reflect.Uint8, reflect.Uint16,\n\t\treflect.Uint32, reflect.Uint64, reflect.Uintptr:\n\t\tfmt.Fprintf(buf, \"%d\", v.Uint())\n\n\tcase reflect.String:\n\t\tfmt.Fprintf(buf, \"%q\", v.String())\n\n\tcase reflect.Ptr:\n\t\treturn encode(buf, 
v.Elem())\n\n\tcase reflect.Array, reflect.Slice: // (value ...)\n\n\t\tcontent := new(bytes.Buffer)\n\n\t\tisFirst := true\n\n\t\tfor i := 0; i < v.Len(); i++ {\n\t\t\tif isFirst {\n\t\t\t\tisFirst = false\n\t\t\t\tcontent.WriteByte(' ')\n\t\t\t}\n\t\t\tif err := encode(content, v.Index(i)); err != nil {\n\t\t\t\treturn err\n\t\t\t}\n\t\t}\n\n\t\tif len(content.Bytes()) != 0 {\n\t\t\tbuf.WriteByte('(')\n\t\t\tbuf.Write(content.Bytes())\n\t\t\tbuf.WriteByte(')')\n\t\t}\n\n\tcase reflect.Struct: // ((name value) ...)\n\n\t\tcontent := new(bytes.Buffer)\n\n\t\tisFirst := true\n\n\t\tfor i := 0; i < v.NumField(); i++ {\n\t\t\trightBuffer := new(bytes.Buffer)\n\t\t\tif err := encode(rightBuffer, v.Field(i)); err != nil {\n\t\t\t\treturn err\n\t\t\t}\n\n\t\t\tif len(rightBuffer.Bytes()) != 0 {\n\t\t\t\tif isFirst {\n\t\t\t\t\tcontent.WriteByte(' ')\n\t\t\t\t\tisFirst = false\n\t\t\t\t}\n\t\t\t\tcontent.WriteByte('(')\n\t\t\t\tfmt.Fprintf(content, \"%s\", v.Type().Field(i).Name)\n\t\t\t\tcontent.WriteByte(' ')\n\t\t\t\tcontent.Write(rightBuffer.Bytes())\n\t\t\t\tcontent.WriteByte(')')\n\t\t\t}\n\t\t}\n\n\t\tif len(content.Bytes()) != 0 {\n\t\t\tbuf.WriteByte('(')\n\t\t\tbuf.Write(content.Bytes())\n\t\t\tbuf.WriteByte(')')\n\t\t}\n\n\tcase reflect.Map: // ((key value) ...)\n\t\tisFirst := true\n\t\tcontent := new(bytes.Buffer)\n\n\t\tfor _, key := range v.MapKeys() {\n\t\t\tif isFirst {\n\t\t\t\tcontent.WriteByte(' ')\n\t\t\t\tisFirst = false\n\t\t\t}\n\n\t\t\tleftBuffer := new(bytes.Buffer)\n\t\t\trightBuffer := new(bytes.Buffer)\n\n\t\t\tif err := encode(leftBuffer, key); err != nil {\n\t\t\t\treturn err\n\t\t\t}\n\t\t\tif err := encode(rightBuffer, v.MapIndex(key)); err != nil {\n\t\t\t\treturn err\n\t\t\t}\n\n\t\t\tif len(rightBuffer.Bytes()) != 0 {\n\t\t\t\tcontent.WriteByte('(')\n\t\t\t\tcontent.Write(leftBuffer.Bytes())\n\t\t\t\tcontent.WriteByte(' ')\n\t\t\t\tcontent.Write(rightBuffer.Bytes())\n\t\t\t\tcontent.WriteByte(')')\n\t\t\t}\n\t\t}\n\n\t\tif len(content.Bytes()) != 0 {\n\t\t\tbuf.WriteByte('(')\n\t\t\tbuf.Write(content.Bytes())\n\t\t\tbuf.WriteByte(')')\n\t\t}\n\n\tdefault: // float, complex, bool, chan, func, interface\n\t\treturn fmt.Errorf(\"unsupported type: %s\", v.Type())\n\t}\n\treturn nil\n}", "func (g G1) Bytes() []byte { return g.encodeBytes(false) }", "func encodeString(b *bytes.Buffer, s string) error {\n\t// Note that this should have used EncodeUvarint but a glitch happened\n\t// while designing the checkpoint format.\n\tif _, err := EncodeVarint(b, int64(len(s))); err != nil {\n\t\treturn err\n\t}\n\tif _, err := b.WriteString(s); err != nil {\n\t\treturn err\n\t}\n\treturn nil\n}", "func (ksgb *KqiSourceGroupBy) StringX(ctx context.Context) string {\n\tv, err := ksgb.String(ctx)\n\tif err != nil {\n\t\tpanic(err)\n\t}\n\treturn v\n}", "func (buf Hex) String() string {\n\treturn hex.EncodeToString(buf)\n}", "func (this *Codec) serialize(root *TreeNode) string {\n if root == nil {\n return \"x\"\n }\n return strconv.Itoa(root.Val) + \",\" + this.serialize(root.Left)+ \",\" + this.serialize(root.Right)\n}", "func Encode(value string) string {\n\t// Don't encode empty strings\n\tif value == \"\" {\n\t\treturn \"\"\n\t}\n\tvar buf bytes.Buffer\n\tcompressorWriter, err := flate.NewWriter(&buf, 1) // compression level 1 (fastest)\n\tif err != nil {\n\t\tpanic(err.Error())\n\t}\n\tcompressorWriter.Write([]byte(value))\n\tcompressorWriter.Close()\n\treturn hex.EncodeToString(buf.Bytes())\n\t//return base32.StdEncoding.EncodeToString(buf.Bytes())\n}", "func (x *Index) 
Bytes() []byte", "func (rgb *RentGroupBy) StringX(ctx context.Context) string {\n\tv, err := rgb.String(ctx)\n\tif err != nil {\n\t\tpanic(err)\n\t}\n\treturn v\n}", "func StringBytes(b []byte) string { return *(*string)(Pointer(&b)) }", "func (self X) Serialise(enc *gob.Encoder) error {\n\treturn enc.Encode(self)\n}", "func (name PeerName) bytes() []byte {\n\tres, err := hex.DecodeString(string(name))\n\tif err != nil {\n\t\tpanic(\"unable to decode name to bytes: \" + name)\n\t}\n\treturn res\n}", "func BytesToHexSting(bytes []byte) string {\n\n\treturn hex.EncodeToString(bytes)\n\n}", "func (me TCryptoBinary) ToXsdtBase64Binary() xsdt.Base64Binary { return xsdt.Base64Binary(me) }", "func toHex(x []byte) []byte {\n\tz := make([]byte, 2*len(x))\n\thex.Encode(z, x)\n\treturn z\n}", "func (pk _Ed25519PublicKey) _StringRaw() string {\n\treturn hex.EncodeToString(pk.keyData)\n}", "func encode(k Key) ([]byte, error) {\n\tver := k.version()\n\tpsize := ver.PayloadSize()\n\ttsize := 1 + psize + 4\n\traw := k.raw()\n\tif len(raw) > psize {\n\t\treturn nil, errors.New(\"tbd\")\n\t}\n\ttmp := make([]byte, tsize)\n\ttmp[0] = byte(ver)\n\tcopy(tmp[len(tmp)-4-len(raw):], raw)\n\tsum := doublehash.SumDoubleSha256(tmp[:1+psize])\n\tcopy(tmp[1+psize:], sum[:4])\n\treturn rippleEncoding.Encode(tmp)\n}", "func BytetoStrHex(str []byte) string {\n\treturn hex.EncodeToString(str)\n}", "func (lbs *LatestBlockSelect) StringX(ctx context.Context) string {\n\tv, err := lbs.String(ctx)\n\tif err != nil {\n\t\tpanic(err)\n\t}\n\treturn v\n}", "func (egb *EntityGroupBy) StringX(ctx context.Context) string {\n\tv, err := egb.String(ctx)\n\tif err != nil {\n\t\tpanic(err)\n\t}\n\treturn v\n}", "func (t treasure) bytes() []byte {\n\treturn []byte{t.mode, t.param, t.text, t.sprite}\n}", "func RatMarshalText(x *big.Rat,) ([]byte, error)", "func XorByte(buffer []byte, single byte) []byte {\n\toutput := make([]byte, len(buffer))\n\tfor idx := range buffer {\n\t\toutput[idx] = buffer[idx] ^ single\n\t}\n\treturn output\n}", "func encodeStreamBytes(b []byte, buffer bool) ([]byte, error) {\n\treturn encodeStream(dummyBytesReader(b), buffer)\n}", "func (wfgb *WithFieldsGroupBy) StringX(ctx context.Context) string {\n\tv, err := wfgb.String(ctx)\n\tif err != nil {\n\t\tpanic(err)\n\t}\n\treturn v\n}", "func (ws *WifiSelect) StringX(ctx context.Context) string {\n\tv, err := ws.String(ctx)\n\tif err != nil {\n\t\tpanic(err)\n\t}\n\treturn v\n}", "func (ggb *GoodsGroupBy) StringX(ctx context.Context) string {\n\tv, err := ggb.String(ctx)\n\tif err != nil {\n\t\tpanic(err)\n\t}\n\treturn v\n}", "func (hgb *HarborGroupBy) StringX(ctx context.Context) string {\n\tv, err := hgb.String(ctx)\n\tif err != nil {\n\t\tpanic(err)\n\t}\n\treturn v\n}", "func (e *Event) Bytes() []byte {\n\tif e.encoded == nil {\n\t\tvar err error\n\t\tprunedData := make(map[string]interface{})\n\t\tfor key, value := range e.data {\n\t\t\tif key == \"@metadata\" {\n\t\t\t\tcontinue\n\t\t\t}\n\t\t\tprunedData[key] = value\n\t\t}\n\t\tif e.encoded, err = json.Marshal(prunedData); err != nil {\n\t\t\te.encoded = []byte(\"{\\\"tags\\\":[\\\"_encode_failure\\\"]}\")\n\t\t}\n\t}\n\treturn e.encoded\n}", "func (me TDigestValueType) String() string { return xsdt.Base64Binary(me).String() }", "func String(b []byte) string {\n\treturn string(b)\n}", "func Base64Transaction(tx []byte) (bs string) {\n jtx, _ := json.Marshal(tx)\n bs = base64.StdEncoding.EncodeToString(jtx)\n return bs\n}", "func sliceEncoder(e *encodeState, v reflect.Value) {\n\tif v.IsNil() 
{\n\t\te.WriteString(\"le\")\n\t\treturn\n\t}\n\tif v.Type().Elem().Kind() == reflect.Uint8 {\n\t\ts := v.Bytes()\n\t\tb := strconv.AppendInt(e.scratch[:0], int64(len(s)), 10)\n\t\te.Write(b)\n\t\te.WriteString(\":\")\n\t\te.Write(s)\n\t}\n}", "func (irgb *InstanceRuntimeGroupBy) StringX(ctx context.Context) string {\n\tv, err := irgb.String(ctx)\n\tif err != nil {\n\t\tpanic(err)\n\t}\n\treturn v\n}", "func FixedXorHexEncoded(buf1, buf2 string) string {\n\tbuffer1 := decodeHex(buf1)\n\tbuffer2 := decodeHex(buf2)\n\n\txordBuffer := fixedXor(buffer1, buffer2)\n\n\treturn hex.EncodeToString(xordBuffer)\n}", "func HexStringOfBytes(ar []byte) string {\n\treturn fmt.Sprintf(\"%0X\", ar)\n}", "func binaryDemo() {\n\tbin := []byte(\"jackson\")\n\tfmt.Println(bin)\n\n\tstr := string(bin)\n\tfmt.Println(str)\n}", "func (f genHelperEncoder) EncEncode(v interface{}) { f.e.encode(v) }", "func (js jsonString) bytes() []byte {\n\treturn []byte(js)\n}", "func (this *SIPMessage) EncodeAsBytes() []byte {\n\treturn []byte(this.String())\n}", "func (sigb *SubItemGroupBy) StringX(ctx context.Context) string {\n\tv, err := sigb.String(ctx)\n\tif err != nil {\n\t\tpanic(err)\n\t}\n\treturn v\n}", "func (nsgb *NamespaceSecretGroupBy) StringX(ctx context.Context) string {\n\tv, err := nsgb.String(ctx)\n\tif err != nil {\n\t\tpanic(err)\n\t}\n\treturn v\n}", "func (es *EntitySelect) StringX(ctx context.Context) string {\n\tv, err := es.String(ctx)\n\tif err != nil {\n\t\tpanic(err)\n\t}\n\treturn v\n}", "func (rgb *RemedyGroupBy) StringX(ctx context.Context) string {\n\tv, err := rgb.String(ctx)\n\tif err != nil {\n\t\tpanic(err)\n\t}\n\treturn v\n}", "func (rs *ReceiptSelect) StringX(ctx context.Context) string {\n\tv, err := rs.String(ctx)\n\tif err != nil {\n\t\tpanic(err)\n\t}\n\treturn v\n}", "func encode(message interface{}) *bytes.Buffer {\n\tbuffer := &bytes.Buffer{}\n\t// Write struct's data as bytes\n\terr := binary.Write(buffer, binary.BigEndian, message)\n\tif err != nil {\n\t\tlog.Fatal(err)\n\t}\n\n\treturn buffer\n}", "func quoteBytes(in []byte) ([]byte, error) {\n\tdata := []string{string(in)}\n\tout, err := json.Marshal(data)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\treturn out[1 : len(out)-1], nil\n}", "func (bs ByteSlice) String() string {\n\treturn hex.EncodeToString([]byte(bs))\n}", "func (x *Index) Bytes() []byte {}", "func B64EncodeByteToStr(inputBytes []byte) string {\r\n\treturn base64.StdEncoding.EncodeToString(inputBytes)\r\n}", "func (e *Encoder) Bytes() []byte { return e.buf }", "func (s *String) encode() []byte {\n\tb := EncodeObject(&s.Meta.Object)\n\tb = append(b, s.Meta.Value...)\n\treturn b\n}", "func encodeMsgPack(msg interface{}) ([]byte, error) {\n\tvar buf bytes.Buffer\n\terr := codec.NewEncoder(&buf, msgpackHandle).Encode(msg)\n\treturn buf.Bytes(), err\n}", "func encodeMsgPack(msg interface{}) ([]byte, error) {\n\tvar buf bytes.Buffer\n\terr := codec.NewEncoder(&buf, msgpackHandle).Encode(msg)\n\treturn buf.Bytes(), err\n}", "func Hex(data []byte) string {\n\treturn fmt.Sprintf(\"%x\", data)\n}", "func (this *Codec) serialize(root *TreeNode) string {\n\ttmp := []string{}\n\ts(root, &tmp)\n\tthis.SerializeStr = strings.Join(tmp, \",\")\n\treturn this.SerializeStr\n}", "func CastToBytes(varName string, datatypeName string) string {\n\tswitch datatypeName {\n\tcase field.TypeString:\n\t\treturn fmt.Sprintf(\"%[1]vBytes := []byte(%[1]v)\", varName)\n\tcase field.TypeUint:\n\t\treturn fmt.Sprintf(`%[1]vBytes := make([]byte, 8)\n \t\tbinary.BigEndian.PutUint64(%[1]vBytes, 
%[1]v)`, varName)\n\tcase field.TypeInt:\n\t\treturn fmt.Sprintf(`%[1]vBytes := make([]byte, 4)\n \t\tbinary.BigEndian.PutUint32(%[1]vBytes, uint32(%[1]v))`, varName)\n\tcase field.TypeBool:\n\t\treturn fmt.Sprintf(`%[1]vBytes := []byte{0}\n\t\tif %[1]v {\n\t\t\t%[1]vBytes = []byte{1}\n\t\t}`, varName)\n\tcase field.TypeCustom:\n\t\treturn fmt.Sprintf(`%[1]vBufferBytes := new(bytes.Buffer)\n\t\tjson.NewEncoder(%[1]vBytes).Encode(%[1]v)\n\t\t%[1]vBytes := reqBodyBytes.Bytes()`, varName)\n\tdefault:\n\t\tpanic(fmt.Sprintf(\"unknown type %s\", datatypeName))\n\t}\n}", "func (e *Event) packBytes() ([]byte, error) {\n\tdata := make([]interface{}, 2)\n\tdata[0] = e.Header\n\tdata[1] = e.Name\n\n\tfor _, a := range e.Args {\n\t\tdata = append(data, a)\n\t}\n\n\tvar buf []byte\n\n\tenc := codec.NewEncoderBytes(&buf, &mh)\n\tif err := enc.Encode(data); err != nil {\n\t\treturn nil, err\n\t}\n\n\treturn buf, nil\n}", "func (b *Buffer) ToStringHex() string {\n\treturn hex.EncodeToString(b.b)\n}", "func (ecpgb *EntityContactPointGroupBy) StringX(ctx context.Context) string {\n\tv, err := ecpgb.String(ctx)\n\tif err != nil {\n\t\tpanic(err)\n\t}\n\treturn v\n}", "func (res *response) Marshal(buf []byte) ([]byte, error) {\n\tvar size uint64\n\tsize += 10\n\tsize += 10 + uint64(len(res.Error))\n\tsize += 10 + uint64(len(res.Reply))\n\tif uint64(cap(buf)) >= size {\n\t\tbuf = buf[:size]\n\t} else {\n\t\tbuf = make([]byte, size)\n\t}\n\tvar offset uint64\n\tvar n uint64\n\t//n = code.EncodeVarint(buf[offset:], res.Seq)\n\t{\n\t\tvar t = res.Seq\n\t\tvar size = code.SizeofVarint(t)\n\t\tfor i := uint64(0); i < size-1; i++ {\n\t\t\tbuf[offset+i] = byte(t) | 0x80\n\t\t\tt >>= 7\n\t\t}\n\t\tbuf[offset+size-1] = byte(t)\n\t\tn = size\n\t}\n\toffset += n\n\t//n = code.EncodeString(buf[offset:], res.Error)\n\tif len(res.Error) > 127 {\n\t\tvar length = uint64(len(res.Error))\n\t\tvar lengthSize = code.SizeofVarint(length)\n\t\tvar s = lengthSize + length\n\t\tt := length\n\t\tfor i := uint64(0); i < lengthSize-1; i++ {\n\t\t\tbuf[offset+i] = byte(t) | 0x80\n\t\t\tt >>= 7\n\t\t}\n\t\tbuf[offset+lengthSize-1] = byte(t)\n\t\tcopy(buf[offset+lengthSize:], res.Error)\n\t\tn = s\n\t} else if len(res.Error) > 0 {\n\t\tvar length = uint64(len(res.Error))\n\t\tbuf[offset] = byte(length)\n\t\tcopy(buf[offset+1:], res.Error)\n\t\tn = 1 + length\n\t} else {\n\t\tbuf[offset] = 0\n\t\tn = 1\n\t}\n\toffset += n\n\t//n = code.EncodeBytes(buf[offset:], res.Reply)\n\tif len(res.Reply) > 127 {\n\t\tvar length = uint64(len(res.Reply))\n\t\tvar lengthSize = code.SizeofVarint(length)\n\t\tvar s = lengthSize + length\n\t\tt := length\n\t\tfor i := uint64(0); i < lengthSize-1; i++ {\n\t\t\tbuf[offset+i] = byte(t) | 0x80\n\t\t\tt >>= 7\n\t\t}\n\t\tbuf[offset+lengthSize-1] = byte(t)\n\t\tcopy(buf[offset+lengthSize:], res.Reply)\n\t\tn = s\n\t} else if len(res.Reply) > 0 {\n\t\tvar length = uint64(len(res.Reply))\n\t\tbuf[offset] = byte(length)\n\t\tcopy(buf[offset+1:], res.Reply)\n\t\tn = 1 + length\n\t} else {\n\t\tbuf[offset] = 0\n\t\tn = 1\n\t}\n\toffset += n\n\treturn buf[:offset], nil\n}", "func FloatGobEncode(x *big.Float,) ([]byte, error)", "func (vgb *VehicleGroupBy) StringX(ctx context.Context) string {\n\tv, err := vgb.String(ctx)\n\tif err != nil {\n\t\tpanic(err)\n\t}\n\treturn v\n}" ]
[ "0.61320573", "0.60694647", "0.60131705", "0.59500664", "0.59163815", "0.5767117", "0.5757185", "0.57554287", "0.5667976", "0.5657279", "0.5656472", "0.56382847", "0.5624694", "0.55962765", "0.55962765", "0.5578219", "0.55504155", "0.55192083", "0.5516628", "0.55090934", "0.5506715", "0.5486519", "0.54860896", "0.5456805", "0.545348", "0.54375845", "0.54356915", "0.54326665", "0.54215455", "0.54146725", "0.5413539", "0.5402329", "0.53958744", "0.5385883", "0.5383781", "0.53798324", "0.53728276", "0.5371517", "0.5369791", "0.53689784", "0.5365791", "0.5363286", "0.53540295", "0.5345794", "0.53360236", "0.5334033", "0.53216785", "0.53178936", "0.5312357", "0.5308839", "0.5307092", "0.53064936", "0.53055364", "0.53005624", "0.5297343", "0.52963483", "0.52929133", "0.52756417", "0.5275163", "0.5268036", "0.52615", "0.5257817", "0.52526486", "0.5247585", "0.5246058", "0.5245906", "0.52405894", "0.5240273", "0.5235904", "0.5231608", "0.52304983", "0.52281666", "0.5227643", "0.5226286", "0.5221835", "0.521888", "0.52125496", "0.5211991", "0.5206939", "0.52027184", "0.51977783", "0.51972497", "0.51947457", "0.51947004", "0.51920605", "0.5191331", "0.5187769", "0.5180252", "0.5176964", "0.5166362", "0.5166362", "0.5162022", "0.51602316", "0.51590085", "0.5157813", "0.5155335", "0.5154999", "0.51432264", "0.514281", "0.5139856" ]
0.717321
0
SearchContact returns a list of contacts based on email address or first + last name
func (api HatchbuckClient) SearchContact(criteria SearchCriteria) ([]Contact, error) { var c []Contact endpoint := fmt.Sprintf("%v/contact/search?api_key=%v", api.baseURL, api.key) payload, _ := json.Marshal(criteria) res, err := http.Post(endpoint, "application/json", bytes.NewBuffer(payload)) if err != nil { return nil, err } decoder := json.NewDecoder(res.Body) err = decoder.Decode(&c) if err != nil { return nil, err } return c, nil }
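A minimal caller sketch for the SearchContact document above, shown only for illustration. How a HatchbuckClient or a SearchCriteria is actually constructed is not part of this entry, so both values below are stand-ins, and the snippet assumes "fmt" and "log" are imported in the surrounding file.

// Hypothetical usage of the SearchContact method from the document above.
// The initialization of api and criteria is assumed, not taken from this entry.
var api HatchbuckClient     // assumed to already hold baseURL and key
var criteria SearchCriteria // assumed to carry an email or first + last name
contacts, err := api.SearchContact(criteria)
if err != nil {
	log.Printf("contact search failed: %v", err)
	return
}
for _, c := range contacts {
	fmt.Printf("%+v\n", c)
}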
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "func (wechat *WeChat) SearchContact(nickName string, city string, sex int, contactType int) ([]*Contact, error) {\n\twechat.cache.Lock()\n\twechat.cache.Unlock()\n\n\tvar cs []*Contact\n\tfor _, c := range wechat.cache.contacts {\n\t\tif c.NickName == nickName {\n\t\t\tif len(city) > 0 && c.City != city {\n\t\t\t\tcontinue\n\t\t\t}\n\t\t\tif contactType != Any && c.Type != contactType {\n\t\t\t\tcontinue\n\t\t\t}\n\t\t\tif sex == Any {\n\t\t\t\tcs = append(cs, c)\n\t\t\t} else if (sex == Male && c.Sex == 0) || (sex == Female && c.Sex == 1) || (sex == Unknow && c.Sex == 3) {\n\t\t\t\tcs = append(cs, c)\n\t\t\t}\n\t\t}\n\t}\n\tif len(cs) > 0 {\n\t\treturn cs, nil\n\t}\n\treturn nil, errors.New(`not found`)\n}", "func (c *Client) SearchContact(contactName string) (string, error) {\n\tif !c.authenticated {\n\t\treturn \"\", errors.New(\"Not authenticated. Call Authenticate first\")\n\t}\n\n\tpayload := make(map[string]interface{})\n\tpayload[\"contact_name\"] = contactName\n\tmsg := common.NewMessage(c.userId, \"server\",\n\t\t\"control\", \"seach_contact\", time.Time{},\n\t\tcommon.TEXT, payload)\n\n\tencoded, err := msg.Json()\n\tif err != nil {\n\t\tlog.Println(\"Failed to encode search contact message\", err)\n\t\treturn \"\", err\n\t}\n\n\terr = c.transport.SendText(string(encoded))\n\tif err != nil {\n\t\tlog.Println(\"Failed to send search contact message\", err)\n\t\treturn \"\", err\n\t}\n\n\tresp := <-c.Out\n\tif resp.Status() == common.STATUS_ERROR {\n\t\terrMsg := resp.Error()\n\t\tlog.Println(\"Search contact response error\", errMsg)\n\t\treturn \"\", errors.New(errMsg)\n\t}\n\n\tcontactId := resp.GetJsonData(\"contact_id\").(string)\n\treturn contactId, nil\n}", "func (sdk *Sdk) GetContacts(searchKeyword string, field, page, limit string) (string, error) {\n\tsdkC := sdk.connect\n\tparams := map[string]string{\n\t\t\"q\": searchKeyword,\n\t\t\"field\": field,\n\t\t\"page\": page,\n\t\t\"limit\": limit,\n\t}\n\n\treturn sdkC.rq.Get(\"/api/contacts\", params)\n}", "func (us *Users) Search(f string) (*User, error) {\n\texp := fmt.Sprintf(\"username='%v' OR email='%v'\", f, f)\n\n\treturn getUserWhere(exp)\n}", "func (ps *PersonService) Search(qry string, opts ...FuncOption) ([]*Person, error) {\n\turl, err := ps.client.searchURL(PersonEndpoint, qry, opts...)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\n\tvar p []*Person\n\n\terr = ps.client.get(url, &p)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\n\treturn p, nil\n}", "func (h *Handler) FetchContacts(c echo.Context) (err error) {\n\tpage, _ := strconv.Atoi(c.QueryParam(\"page\"))\n\tlimit, _ := strconv.Atoi(c.QueryParam(\"limit\"))\n\n\t// Defaults\n\tif page == 0 {\n\t\tpage = 1\n\t}\n\tif limit == 0 {\n\t\tlimit = 10\n\t}\n\n\tname := c.QueryParam(\"name\")\n\temail := c.QueryParam(\"email\")\n\tquery := bson.M{}\n\n\tif name != \"\" && email != \"\" {\n\t\tquery = bson.M{\"name\": name, \"email\": email}\n\t} else if name != \"\" {\n\t\tquery = bson.M{\"name\": name}\n\t} else if email != \"\" {\n\t\tquery = bson.M{\"email\": email}\n\t}\n\n\t// Retrieve contacts from database\n\tcontacts := []*model.Contact{}\n\tdb := h.DB.Clone()\n\tif err = db.DB(\"sampark\").C(\"contacts\").\n\t\tFind(query).\n\t\tSkip((page - 1) * limit).\n\t\tLimit(limit).\n\t\tAll(&contacts); err != nil {\n\t\treturn\n\t}\n\tdefer db.Close()\n\n\treturn c.JSON(http.StatusOK, contacts)\n}", "func (c client) FetchContactWithParams(params url.Values) (*Contact, error) {\n\tu := urls.Join(c.base, \"/marketing/contacts/search\")\n\treq, err := 
http.NewRequest(\"POST\", fmt.Sprintf(\"%s?%s\", u, params.Encode()), nil)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\n\t_, data, err := c.Send(req)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\n\tres := &struct {\n\t\tContacts []*Contact `json:\"result\"`\n\t}{}\n\n\terr = json.Unmarshal(data, res)\n\tif err != nil {\n\t\treturn nil, err\n\t} else if l := len(res.Contacts); l != 1 {\n\t\treturn nil, ErrNotFound\n\t}\n\n\treturn res.Contacts[0], nil\n}", "func (client *Client) SearchAlertContact(request *SearchAlertContactRequest) (response *SearchAlertContactResponse, err error) {\n\tresponse = CreateSearchAlertContactResponse()\n\terr = client.DoAction(request, response)\n\treturn\n}", "func (srv *UsersService) ListContacts(ctx *gin.Context) {\n\tlogger := srv.logger.New(\"action\", \"ListContacts\")\n\n\tform := forms.ListContacts{}\n\tif err := form.BindJSON(ctx); err != nil {\n\t\t// Returns a \"422 StatusUnprocessableEntity\" response\n\t\tsrv.ResponseService.ValidatorErrorResponse(ctx, responses.UnprocessableEntity, err)\n\t\treturn\n\t}\n\n\tusersList, err := srv.Repository.GetUsersRepository().FindActiveByPhoneNumbers(form.PhoneNumbers)\n\tif err != nil {\n\t\tlogger.Error(\"сan't load list of user\", \"error\", err)\n\t\tsrv.ResponseService.Error(ctx, responses.CannotRetrieveCollection, \"Can't load list of users\")\n\t\treturn\n\t}\n\n\tshortener := serializers.NewShortUsers()\n\tsrv.ResponseService.OkResponse(ctx, shortener.Short(usersList, serializers.ShortContactsFields))\n\treturn\n}", "func FindPeople(c *gin.Context) {\n query := c.Request.URL.Query()\n\n var people []models.Person\n\n if len(query) == 0 {\n models.DB.Find(&people)\n } else if query.Get(\"id\") != \"\" {\n if err := models.DB.Find(&people, \"id = ?\", query.Get(\"id\")).Error; err != nil {\n c.JSON(http.StatusBadRequest, gin.H{\"error\": \"Record not found!\"})\n return\n }\n } else if query.Get(\"created_by\") != \"\" {\n var searchPerson SearchPersonInput\n\n if bindErr := c.BindQuery(&searchPerson); bindErr != nil {\n c.JSON(http.StatusBadRequest, gin.H{\"error\": bindErr.Error()})\n return\n }\n\n if err := models.DB.Where(&models.Person{FirstName: searchPerson.FirstName, LastName: searchPerson.LastName, Email: searchPerson.Email, Phone: searchPerson.Phone, Birthday: searchPerson.Birthday, Title: searchPerson.Title, Department: searchPerson.Department, Self: searchPerson.Self}).Find(&people, \"created_by = ?\", query.Get(\"created_by\")).Error; err != nil {\n c.JSON(http.StatusBadRequest, gin.H{\"error\": \"Record not found!\"})\n return\n }\n } else {\n var searchPerson SearchPersonInput\n\n if bindErr := c.BindQuery(&searchPerson); bindErr != nil {\n c.JSON(http.StatusBadRequest, gin.H{\"error\": bindErr.Error()})\n return\n }\n\n if err := models.DB.Where(&models.Person{FirstName: searchPerson.FirstName, LastName: searchPerson.LastName, Email: searchPerson.Email, Phone: searchPerson.Phone, Birthday: searchPerson.Birthday, Title: searchPerson.Title, Self: searchPerson.Self}).Find(&people).Error; err != nil {\n c.JSON(http.StatusBadRequest, gin.H{\"error\": \"Record not found!\"})\n return\n }\n }\n\n c.JSON(http.StatusOK, gin.H{\"data\": people})\n}", "func (api *API) QueryContacts(params *parameters.QueryContacts) (*types.Contacts, error) {\n\treturn api.queryContacts(\"/contacts\", params)\n}", "func getContacts(w http.ResponseWriter, r *http.Request) {\n\n\tquery := r.URL.Query()\n\tuserID := query.Get(\"user\")\n\n\tinfectionTimestamp, err := time.Parse(time.RFC3339, 
query.Get(\"infection_timestamp\"))\n\n\tif err != nil {\n\t\tfmt.Println(err)\n\t}\n\tvar fourteenDaysBeforeTimestamp = infectionTimestamp.AddDate(0, 0, -14)\n\n\tvar contactsArray []schema.Contact\n\tvar usersArray []string\n\n\tcur, err := contacts.Find(\n\t\tcontext.TODO(),\n\t\tbson.D{ // Filtering out the contacts. ( This filter is made by combining multiple small filters using \"AND\" and \"OR\" operations. )\n\t\t\t{\"$or\", // filter for getting values with -> either \"useridone == userID\" or \"useridtwo == userID\"\n\t\t\t\tbson.A{\n\t\t\t\t\tbson.D{{\"useridone\", userID}},\n\t\t\t\t\tbson.D{{\"useridtwo\", userID}},\n\t\t\t\t},\n\t\t\t},\n\t\t\t{\"timeofcontact\", bson.M{\"$gte\": fourteenDaysBeforeTimestamp}}, // filter to get time >= fourteenDaysBeforeTimestamp\n\t\t\t{\"timeofcontact\", bson.M{\"$lte\": infectionTimestamp}}, // filter to get time <= finfectionTimestamp\n\t\t},\n\t)\n\n\t// Close the cursor once finished\n\tdefer cur.Close(context.TODO())\n\n\tfor cur.Next(context.TODO()) {\n\n\t\t// create a value into which the single document can be decoded\n\t\tvar contact schema.Contact\n\t\t// & character returns the memory address of the following variable.\n\t\terr := cur.Decode(&contact) // decode similar to deserialize process.\n\t\tif err != nil {\n\t\t\tlog.Fatal(err)\n\t\t}\n\n\t\t// add item our array\n\t\tcontactsArray = append(contactsArray, contact)\n\t\tif userID == contact.UserIDTwo {\n\t\t\tusersArray = append(usersArray, contact.UserIDOne)\n\t\t} else {\n\t\t\tusersArray = append(usersArray, contact.UserIDTwo)\n\t\t}\n\n\t}\n\n\tif err := cur.Err(); err != nil {\n\t\tlog.Fatal(err)\n\t}\n\n\tjson.NewEncoder(w).Encode(usersArray) // encode similar to serialize process.\n}", "func (c *Config) UserSearch(w http.ResponseWriter, r *http.Request) {\n\tctx := r.Context()\n\n\tquery := r.URL.Query().Get(\"query\")\n\tif query == \"\" {\n\t\tbjson.WriteJSON(w, map[string]string{\"message\": \"query cannot be empty\"}, http.StatusBadRequest)\n\t\treturn\n\t}\n\n\tcontacts, err := c.UserStore.Search(ctx, query)\n\tif err != nil {\n\t\tbjson.HandleError(w, err)\n\t\treturn\n\t}\n\n\tbjson.WriteJSON(w, map[string]interface{}{\"users\": contacts}, http.StatusOK)\n}", "func DefaultListContact(ctx context.Context, db *gorm.DB) ([]*Contact, error) {\n\tormResponse := []ContactORM{}\n\tdb, err := ops.ApplyCollectionOperators(db, ctx)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\taccountID, err := auth.GetAccountID(ctx, nil)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\tdb = db.Where(&ContactORM{AccountID: accountID})\n\tif err := db.Set(\"gorm:auto_preload\", true).Find(&ormResponse).Error; err != nil {\n\t\treturn nil, err\n\t}\n\tpbResponse := []*Contact{}\n\tfor _, responseEntry := range ormResponse {\n\t\ttemp, err := responseEntry.ToPB(ctx)\n\t\tif err != nil {\n\t\t\treturn nil, err\n\t\t}\n\t\tpbResponse = append(pbResponse, &temp)\n\t}\n\treturn pbResponse, nil\n}", "func Contacts(token string) ([]ContactReturn, error) {\n\n\t// To decode the json data\n\tvar contacts []ContactReturn\n\n\t// To call the pages\n\tpage := 1\n\n\t// Loop over all sites\n\tfor {\n\n\t\t// Set config for new request\n\t\tr := Request{fmt.Sprintf(\"/contacts?page=%d&per_page=25\", page), \"GET\", token, nil}\n\n\t\t// Send new request\n\t\tresponse, err := r.Send()\n\t\tif err != nil {\n\t\t\treturn nil, err\n\t\t}\n\n\t\t// Decode data\n\t\tvar decode []ContactReturn\n\n\t\terr = json.NewDecoder(response.Body).Decode(&decode)\n\t\tif err != nil {\n\t\t\treturn nil, 
err\n\t\t}\n\n\t\t// Close response body\n\t\tresponse.Body.Close()\n\n\t\t// Add contacts\n\t\tfor _, value := range decode {\n\t\t\tcontacts = append(contacts, value)\n\t\t}\n\n\t\t// Check length & break the loop\n\t\tif len(decode) < 25 {\n\t\t\tbreak\n\t\t} else {\n\t\t\tpage++\n\t\t}\n\n\t}\n\n\t// Return data\n\treturn contacts, nil\n\n}", "func ContactGet(id int64) (Contact, error) {\n\tvar contact Contact\n\tif id == 0 {\n\t\treturn contact, nil\n\t}\n\tcontact.ID = id\n\terr := pool.QueryRow(context.Background(), `\n\t\tSELECT\n\t\t\tc.name,\n\t\t\tc.company_id,\n\t\t\tc.department_id,\n\t\t\tc.post_id,\n\t\t\tc.post_go_id,\n\t\t\tc.rank_id,\n\t\t\tc.birthday,\n\t\t\tc.note,\n\t\t\tc.created_at,\n\t\t\tc.updated_at,\n\t\t\tarray_agg(DISTINCT e.email) AS emails,\n\t\t\tarray_agg(DISTINCT ph.phone) AS phones,\n\t\t\tarray_agg(DISTINCT f.phone) AS faxes,\n\t\t\tarray_agg(DISTINCT ed.start_date) AS educations\n\t\tFROM\n\t\t\tcontacts AS c\n\t\tLEFT JOIN\n\t\t\temails AS e ON c.id = e.contact_id\n\t\tLEFT JOIN\n\t\t\tphones AS ph ON c.id = ph.contact_id AND ph.fax = false\n\t\tLEFT JOIN\n\t\t\tphones AS f ON c.id = f.contact_id AND f.fax = true\n\t\tLEFT JOIN\n\t\t\teducations AS ed ON c.id = ed.contact_id\n\t\tWHERE\n\t\t\tc.id = $1\n\t\tGROUP BY\n\t\t\tc.id\n\t`, id).Scan(&contact.Name, &contact.CompanyID, &contact.DepartmentID, &contact.PostID, &contact.PostGOID, &contact.RankID,\n\t\t&contact.Birthday, &contact.Note, &contact.CreatedAt, &contact.UpdatedAt, &contact.Emails, &contact.Phones, &contact.Faxes, &contact.Educations)\n\tif err != nil {\n\t\terrmsg(\"GetContact QueryRow\", err)\n\t\treturn contact, err\n\t}\n\treturn contact, err\n}", "func (c *ContactService) ListByEmail(email string, params PageParams) (ContactList, error) {\n\treturn c.Repository.list(contactListParams{PageParams: params, Email: email})\n}", "func DefaultListContact(ctx context.Context, db *gorm.DB) ([]*Contact, error) {\n\tormResponse := []ContactORM{}\n\tdb, err := ops.ApplyCollectionOperators(db, ctx)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\tif err := db.Set(\"gorm:auto_preload\", true).Find(&ormResponse).Error; err != nil {\n\t\treturn nil, err\n\t}\n\tpbResponse := []*Contact{}\n\tfor _, responseEntry := range ormResponse {\n\t\ttemp, err := ConvertContactFromORM(responseEntry)\n\t\tif err != nil {\n\t\t\treturn nil, err\n\t\t}\n\t\tpbResponse = append(pbResponse, &temp)\n\t}\n\treturn pbResponse, nil\n}", "func (client *Client) SearchAlertContactWithCallback(request *SearchAlertContactRequest, callback func(response *SearchAlertContactResponse, err error)) <-chan int {\n\tresult := make(chan int, 1)\n\terr := client.AddAsyncTask(func() {\n\t\tvar response *SearchAlertContactResponse\n\t\tvar err error\n\t\tdefer close(result)\n\t\tresponse, err = client.SearchAlertContact(request)\n\t\tcallback(response, err)\n\t\tresult <- 1\n\t})\n\tif err != nil {\n\t\tdefer close(result)\n\t\tcallback(nil, err)\n\t\tresult <- 0\n\t}\n\treturn result\n}", "func (c client) FetchContactByEmail(email string) (*Contact, error) {\n\tparams := make(url.Values)\n\tparams.Set(\"email\", email)\n\treturn c.FetchContactWithParams(params)\n}", "func FindContact(condition interface{}) (Contact, error) {\n\tdb := common.GetDB()\n\tvar contactModel Contact\n\terr := db.Limit(10).First(&contactModel, condition).Error\n\treturn contactModel, err\n}", "func (client *Client) SearchAlertContactWithChan(request *SearchAlertContactRequest) (<-chan *SearchAlertContactResponse, <-chan error) {\n\tresponseChan := make(chan 
*SearchAlertContactResponse, 1)\n\terrChan := make(chan error, 1)\n\terr := client.AddAsyncTask(func() {\n\t\tdefer close(responseChan)\n\t\tdefer close(errChan)\n\t\tresponse, err := client.SearchAlertContact(request)\n\t\tif err != nil {\n\t\t\terrChan <- err\n\t\t} else {\n\t\t\tresponseChan <- response\n\t\t}\n\t})\n\tif err != nil {\n\t\terrChan <- err\n\t\tclose(responseChan)\n\t\tclose(errChan)\n\t}\n\treturn responseChan, errChan\n}", "func GetPeople(db *gorm.DB) func(c echo.Context) error {\n return func(c echo.Context) error {\n // get user\n if user, err := GetUser(c, db); err == nil {\n people := []models.User{}\n // get search key\n key := c.QueryParam(\"key\")\n if len(key) > 0 {\n key := \"%\" + key + \"%\"\n // search for other people but user\n db.Where(\"user_name LIKE ? OR email LIKE ? OR first_name LIKE ? OR last_name LIKE ?\", key, key, key, key).\n Not(\"id = ?\", user.ID).\n Find(&people)\n }\n return c.JSON(http.StatusOK, people)\n } else {\n return c.JSON(http.StatusBadRequest, map[string]string{\"message\": err.Error()})\n }\n }\n}", "func (cs *CustomerService) SearchCustomer(name string) []Customer{\n\n\tindex := SuperHash(name, len(customers))\n\n\tfound, _, indexes := Search(customerTable[index], name)\n\n\tif found {\n\t\tcustomers := make([]Customer, 0)\n\n\t\tfor _, vIndex := range indexes {\n\t\t\tfirstName := strings.Split(name, \" \")[0]\n\n\t\t\tlastName := strings.Split(name, \" \")[1]\n\n\t\t\tpoints := pointsTable[index][vIndex]\n\n\t\t\trank := rankTable[index][vIndex]\n\n\t\t\tlevel := 1\n\n\t\t\tif points >= 5000 {\n\t\t\t\tlevel = 3\n\t\t\t} else if points >= 1000 {\n\t\t\t\tlevel = 2\n\t\t\t}\n\n\t\t\tcustomer := Customer{firstName, lastName, rank, level, points}\n\t\t\tcustomers = append(customers, customer)\n\t\t}\n\n\t\treturn customers\n\t}\n\n\n\treturn nil\n\t\n}", "func (repository Users) SearchByEmail(email string) (models.User, error) {\n\tline, error := repository.db.Query(\"SELECT id, password FROM users where email = ?\", email)\n\n\tif error != nil {\n\t\treturn models.User{}, error\n\t}\n\n\tdefer line.Close()\n\n\tvar user models.User\n\n\tif line.Next() {\n\t\tif error = line.Scan(&user.ID, &user.Password); error != nil {\n\t\t\treturn models.User{}, error\n\t\t}\n\t}\n\n\treturn user, nil\n}", "func (p *Parser) SearchPeople(name string, page int) ([]model.PeopleSearch, int, error) {\n\tq := map[string]interface{}{\"q\": name, \"show\": 50 * (page - 1)}\n\tdoc, code, err := p.getDoc(utils.BuildURLWithQuery(q, malURL, \"people.php\"), \"#content\")\n\tif err != nil {\n\t\treturn nil, code, err\n\t}\n\treturn p.search.GetPeople(doc), http.StatusOK, nil\n}", "func FindPeopleByPhoneNumber(phoneNumber string) []*Person {\n\tresult := make([]*Person, 0)\n\n\tfor _, person := range people {\n\t\tif person.PhoneNumber == phoneNumber {\n\t\t\tresult = append(result, person)\n\t\t}\n\t}\n\n\treturn result\n}", "func SearchUserByName(searchTerm string, userID string) ([]*model.User, error) {\n\n\tuserList := []*model.User{}\n\n\t// get user information\n\trows, err := db.SQL.Query(`SELECT id, email, firstName, lastName FROM \"User\" WHERE \"firstName\" || ' ' || \"lastName\" LIKE ? OR \"email\" LIKE lower(?) AND \"id\" != ? 
LIMIT 20;`, \"%\"+searchTerm+\"%\", \"%\"+searchTerm+\"%\", userID)\n\n\tif err != nil {\n\t\tlog.Println(err)\n\t\treturn []*model.User{}, errors.New(\"Internal error.\")\n\t}\n\n\tdefer rows.Close()\n\n\t// map query to user object list\n\tfor rows.Next() {\n\t\tnewUser := &model.User{}\n\t\terr := rows.Scan(&newUser.ID, &newUser.Email, &newUser.FirstName, &newUser.LastName)\n\n\t\tif err != nil {\n\t\t\tlog.Println(err)\n\t\t\treturn []*model.User{}, errors.New(\"Internal error.\")\n\t\t}\n\t\tuserList = append(userList, newUser)\n\t}\n\n\terr = rows.Err()\n\n\tif err != nil {\n\t\tlog.Println(err)\n\t\treturn []*model.User{}, errors.New(\"Internal error.\")\n\t}\n\n\treturn userList, nil\n}", "func (s *Service) ContactsGet(contactID string) *ContactsGetOp {\n\treturn &ContactsGetOp{\n\t\tCredential: s.credential,\n\t\tMethod: \"GET\",\n\t\tPath: strings.Join([]string{\"contacts\", contactID}, \"/\"),\n\t\tAccept: \"application/json\",\n\t\tQueryOpts: make(url.Values),\n\t\tVersion: esign.APIv21,\n\t}\n}", "func ContactListGet() ([]ContactList, error) {\n\tvar contacts []ContactList\n\trows, err := pool.Query(context.Background(), `\n\t\tSELECT\n\t\t\tc.id,\n\t\t\tc.name,\n\t\t\tco.id AS company_id,\n\t\t\tco.name AS company_name,\n\t\t\tpo.name AS post_name,\n\t\t\tarray_agg(DISTINCT ph.phone) AS phones,\n\t\t\tarray_agg(DISTINCT f.phone) AS faxes\n\t\tFROM\n\t\t\tcontacts AS c\n\t\tLEFT JOIN\n\t\t\tcompanies AS co ON c.company_id = co.id\n\t\tLEFT JOIN\n\t\t\tposts AS po ON c.post_id = po.id\n\t\tLEFT JOIN\n\t\t\tphones AS ph ON c.id = ph.contact_id AND ph.fax = false\n\t\tLEFT JOIN\n\t\t\tphones AS f ON c.id = f.contact_id AND f.fax = true\n\t\tGROUP BY\n\t\t\tc.id,\n\t\t\tco.id,\n\t\t\tpo.name\n\t\tORDER BY\n\t\t\tname ASC\n\t`)\n\tif err != nil {\n\t\terrmsg(\"GetContactList Query\", err)\n\t}\n\tfor rows.Next() {\n\t\tvar contact ContactList\n\t\terr := rows.Scan(&contact.ID, &contact.Name, &contact.CompanyID, &contact.CompanyName,\n\t\t\t&contact.PostName, &contact.Phones, &contact.Faxes)\n\t\tif err != nil {\n\t\t\terrmsg(\"GetContactList Scan\", err)\n\t\t\treturn contacts, err\n\t\t}\n\t\tcontacts = append(contacts, contact)\n\t}\n\treturn contacts, rows.Err()\n}", "func IndexContacts(db *sql.DB, elasticURL string, index string, lastModified time.Time) (int, int, error) {\n\tbatch := strings.Builder{}\n\tcreatedCount, deletedCount, processedCount := 0, 0, 0\n\n\tif index == \"\" {\n\t\treturn createdCount, deletedCount, fmt.Errorf(\"empty index passed to IndexContacts\")\n\t}\n\n\tvar modifiedOn time.Time\n\tvar contactJSON string\n\tvar id, orgID int64\n\tvar isActive bool\n\n\tstart := time.Now()\n\n\tfor {\n\t\trows, err := db.Query(contactQuery, lastModified)\n\n\t\tqueryCreated := 0\n\t\tqueryCount := 0\n\t\tqueryModified := lastModified\n\n\t\t// no more rows? 
return\n\t\tif err == sql.ErrNoRows {\n\t\t\treturn 0, 0, nil\n\t\t}\n\t\tif err != nil {\n\t\t\treturn 0, 0, err\n\t\t}\n\t\tdefer rows.Close()\n\n\t\tfor rows.Next() {\n\t\t\terr = rows.Scan(&orgID, &id, &modifiedOn, &isActive, &contactJSON)\n\t\t\tif err != nil {\n\t\t\t\treturn 0, 0, err\n\t\t\t}\n\n\t\t\tqueryCount++\n\t\t\tprocessedCount++\n\t\t\tlastModified = modifiedOn\n\n\t\t\tif isActive {\n\t\t\t\tlog.WithField(\"id\", id).WithField(\"modifiedOn\", modifiedOn).WithField(\"contact\", contactJSON).Debug(\"modified contact\")\n\t\t\t\tbatch.WriteString(fmt.Sprintf(indexCommand, id, modifiedOn.UnixNano(), orgID))\n\t\t\t\tbatch.WriteString(\"\\n\")\n\t\t\t\tbatch.WriteString(contactJSON)\n\t\t\t\tbatch.WriteString(\"\\n\")\n\t\t\t} else {\n\t\t\t\tlog.WithField(\"id\", id).WithField(\"modifiedOn\", modifiedOn).Debug(\"deleted contact\")\n\t\t\t\tbatch.WriteString(fmt.Sprintf(deleteCommand, id, modifiedOn.UnixNano(), orgID))\n\t\t\t\tbatch.WriteString(\"\\n\")\n\t\t\t}\n\n\t\t\t// write to elastic search in batches\n\t\t\tif queryCount%batchSize == 0 {\n\t\t\t\tcreated, deleted, err := IndexBatch(elasticURL, index, batch.String())\n\t\t\t\tif err != nil {\n\t\t\t\t\treturn 0, 0, err\n\t\t\t\t}\n\t\t\t\tbatch.Reset()\n\n\t\t\t\tqueryCreated += created\n\t\t\t\tcreatedCount += created\n\t\t\t\tdeletedCount += deleted\n\t\t\t}\n\t\t}\n\n\t\tif batch.Len() > 0 {\n\t\t\tcreated, deleted, err := IndexBatch(elasticURL, index, batch.String())\n\t\t\tif err != nil {\n\t\t\t\treturn 0, 0, err\n\t\t\t}\n\n\t\t\tqueryCreated += created\n\t\t\tcreatedCount += created\n\t\t\tdeletedCount += deleted\n\t\t\tbatch.Reset()\n\t\t}\n\n\t\t// last modified stayed the same and we didn't add anything, seen it all, break out\n\t\tif lastModified.Equal(queryModified) && queryCreated == 0 {\n\t\t\tbreak\n\t\t}\n\n\t\telapsed := time.Now().Sub(start)\n\t\trate := float32(processedCount) / (float32(elapsed) / float32(time.Second))\n\t\tlog.WithFields(map[string]interface{}{\n\t\t\t\"rate\": int(rate),\n\t\t\t\"added\": createdCount,\n\t\t\t\"deleted\": deletedCount,\n\t\t\t\"elapsed\": elapsed,\n\t\t\t\"index\": index}).Info(\"updated contact index\")\n\n\t\trows.Close()\n\t}\n\n\treturn createdCount, deletedCount, nil\n}", "func FindPeopleByName(firstName, lastName string) []*Person {\n\tresult := make([]*Person, 0)\n\n\tfor _, person := range people {\n\t\tif person.FirstName == firstName && person.LastName == lastName {\n\t\t\tresult = append(result, person)\n\t\t}\n\t}\n\n\treturn result\n}", "func GetContacts(w http.ResponseWriter, r *http.Request) {\n\n\tvar contacts []models.Contact\n\terr := store.GetDB().All(&contacts)\n\n\tif err != nil {\n\t\tsetting.Renderer.JSON(w, http.StatusBadRequest, err)\n\n\t\treturn\n\t}\n\n\tsetting.Renderer.JSON(w, http.StatusOK, contacts)\n}", "func Handle(w http.ResponseWriter, r *http.Request) {\n\tvar err error\n\tvar txtsearch string\n\tvar ctx = context.Background()\n\tvar query *SearchQuery\n\tvar results []*models.User\n\tapi.OPENFAASGetBody(r, &query)\n\tif goscrappy.Debug {\n\t\tfmt.Printf(\"Query : %+v\\n\", query)\n\t}\n\tvar DB = dbservice.Db\n\tvar qtemplate = `\n\tFOR usr IN users \n\tLET a = (\n\t\t\tFOR comp IN companies FILTER comp == usr.company \n\t\t\tRETURN comp\n\t)\n\tLET b = (\n\t\t\tFOR email IN emails FILTER email == usr.email \n\t\t\tRETURN email\n\t)\n\tLET c = (\n\t\t\tFOR phone IN phones FILTER phone == usr.phone \n\t\t\tRETURN phone\n\t)\n\t%s\n\tLIMIT @offset, @limit\n\tRETURN merge(usr, { \n\t \tcompany: FIRST(a),\n\t \temail: 
FIRST(b),\n\t \tphone: FIRST(c)\n\t\t}\n\t)\n\t`\n\tif query != nil && query.Intext != nil {\n\t\ttxtsearch = fmt.Sprintf(\n\t\t\t`\n\t\t\tLET srch = LOWER(\"%s\")\n\t\t\tLET inlastname = LOWER(usr.last_name)\n\t\t\tFILTER CONTAINS(inlastname, srch)\n\t\t\t`,\n\t\t\t*query.Intext,\n\t\t)\n\t}\n\tlimitVal := func() int64 {\n\t\tif query != nil && query.Count != nil &&\n\t\t\t*query.Count > 0 {\n\t\t\treturn *query.Count\n\t\t}\n\t\treturn 10\n\t}()\n\toffsetVal := func() int64 {\n\t\tif query != nil && query.Offset != nil &&\n\t\t\t*query.Offset > 0 {\n\t\t\treturn *query.Offset\n\t\t}\n\t\treturn 0\n\t}()\n\tq := fmt.Sprintf(qtemplate, txtsearch)\n\tcursor, err := DB.Query(ctx, q, map[string]interface{}{\n\t\t\"offset\": offsetVal,\n\t\t\"limit\": limitVal,\n\t})\n\tif err != nil {\n\t\tapi.OPENFAASErrorResponse(w, http.StatusInternalServerError, err)\n\t\treturn\n\t}\n\tdefer cursor.Close()\n\tfor {\n\t\tvar doc models.User\n\t\t_, err := cursor.ReadDocument(ctx, &doc)\n\t\tif driver.IsNoMoreDocuments(err) {\n\t\t\tbreak\n\t\t} else if err != nil {\n\t\t\tapi.OPENFAASErrorResponse(w, http.StatusInternalServerError, err)\n\t\t\treturn\n\t\t}\n\t\tresults = append(results, &doc)\n\t}\n\tapi.OPENFAASJsonResponse(w, http.StatusOK, results)\n}", "func (cl ContactList) FindContact(targetName string) *Contact {\n\tfor _, contact := range cl.directory {\n\t\tif contact.Name == targetName {\n\t\t\treturn &contact\n\t\t}\n\t}\n\n\tpanic(\"No Contact Found!\")\n}", "func (cl *ContactService) GetContacts() (*Contact, *Response, error) {\n\n\treq, err := cl.client.NewRequest(\"GET\", contactsUrl, nil)\n\tif err != nil {\n\t\treturn nil, nil, err\n\t}\n\tnewContactObject := new(Contact)\n\tresp, err := cl.client.Do(req, newContactObject)\n\tif err != nil {\n\t\treturn nil, resp, err\n\t}\n\n\treturn newContactObject, resp, err\n}", "func (r SearchContactsRequest) Send() (*SearchContactsOutput, error) {\n\terr := r.Request.Send()\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\n\treturn r.Request.Data.(*SearchContactsOutput), nil\n}", "func SearchClinicsByName(c *gin.Context) {\n\tlog.Infof(\"Get all clinics associated with admin\")\n\tctx := c.Request.Context()\n\tsearchString := c.Query(\"searchString\")\n\tif searchString == \"\" {\n\t\tc.AbortWithStatusJSON(\n\t\t\thttp.StatusBadRequest,\n\t\t\tgin.H{\n\t\t\t\tconstants.RESPONSE_JSON_DATA: nil,\n\t\t\t\tconstants.RESPONSDE_JSON_ERROR: \"Search string is empty\",\n\t\t\t},\n\t\t)\n\t\treturn\n\t}\n\n\tclinicNameSearch := strings.Title(strings.ToLower(searchString))\n\tctx, span := trace.StartSpan(ctx, \"Get all clinics search\")\n\tdefer span.End()\n\tclinicMetaDB := datastoredb.NewClinicMetaHandler()\n\terr := clinicMetaDB.InitializeDataBase(ctx, \"superdentist\")\n\tif err != nil {\n\t\tc.AbortWithStatusJSON(\n\t\t\thttp.StatusInternalServerError,\n\t\t\tgin.H{\n\t\t\t\tconstants.RESPONSE_JSON_DATA: nil,\n\t\t\t\tconstants.RESPONSDE_JSON_ERROR: err.Error(),\n\t\t\t},\n\t\t)\n\t\treturn\n\t}\n\tregisteredClinics, err := clinicMetaDB.SearchClinics(ctx, clinicNameSearch)\n\tif err != nil {\n\t\tc.AbortWithStatusJSON(\n\t\t\thttp.StatusInternalServerError,\n\t\t\tgin.H{\n\t\t\t\tconstants.RESPONSE_JSON_DATA: nil,\n\t\t\t\tconstants.RESPONSDE_JSON_ERROR: err.Error(),\n\t\t\t},\n\t\t)\n\t\treturn\n\t}\n\tresponseData := contracts.GetClinicAddressResponse{\n\t\tClinicDetails: registeredClinics,\n\t}\n\tc.JSON(http.StatusOK, gin.H{\n\t\tconstants.RESPONSE_JSON_DATA: responseData,\n\t\tconstants.RESPONSDE_JSON_ERROR: nil,\n\t})\n\tclinicMetaDB.Close()\n}", 
"func (s *RegistrarAPI) ListContacts(req *RegistrarAPIListContactsRequest, opts ...scw.RequestOption) (*ListContactsResponse, error) {\n\tvar err error\n\n\tdefaultPageSize, exist := s.client.GetDefaultPageSize()\n\tif (req.PageSize == nil || *req.PageSize == 0) && exist {\n\t\treq.PageSize = &defaultPageSize\n\t}\n\n\tquery := url.Values{}\n\tparameter.AddToQuery(query, \"page\", req.Page)\n\tparameter.AddToQuery(query, \"page_size\", req.PageSize)\n\tparameter.AddToQuery(query, \"domain\", req.Domain)\n\tparameter.AddToQuery(query, \"project_id\", req.ProjectID)\n\tparameter.AddToQuery(query, \"organization_id\", req.OrganizationID)\n\n\tscwReq := &scw.ScalewayRequest{\n\t\tMethod: \"GET\",\n\t\tPath: \"/domain/v2beta1/contacts\",\n\t\tQuery: query,\n\t\tHeaders: http.Header{},\n\t}\n\n\tvar resp ListContactsResponse\n\n\terr = s.client.Do(scwReq, &resp, opts...)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\treturn &resp, nil\n}", "func (tmdb *TMDb) SearchPerson(name string, options map[string]string) (*PersonSearchResults, error) {\n\tvar availableOptions = map[string]struct{}{\n\t\t\"page\": {},\n\t\t\"search_type\": {},\n\t\t\"include_adult\": {}}\n\tvar people PersonSearchResults\n\tsafeName := url.QueryEscape(name)\n\toptionsString := getOptionsString(options, availableOptions)\n\turi := fmt.Sprintf(\"%s/search/person?query=%s&api_key=%s%s\", baseURL, safeName, tmdb.apiKey, optionsString)\n\tresult, err := getTmdb(uri, &people)\n\treturn result.(*PersonSearchResults), err\n}", "func GetContacts(user uint) ([]*Contact){\n\tcontacts := make([]*Contact, 0)\n\terr := GetDB().Table(\"contacts\").Where(\"user_id = ?\", user).Find(&contacts).Error\n\n\tif err != nil{\n\t\tfmt.Println(err)\n\t\treturn nil\n\t}\n\treturn contacts\n}", "func (s *ContactService) Matches(params *MatchesParams) (*Contacts, *http.Response, error) {\n\tcontacts := new(Contacts)\n\tapiError := new(APIError)\n\tresp, err := s.sling.New().Get(\"users_and_uploaded_by.json\").QueryStruct(params).Receive(contacts, apiError)\n\treturn contacts, resp, firstError(err, apiError)\n}", "func (s *API) ListContacts(req *ListContactsRequest, opts ...scw.RequestOption) (*ListContactsResponse, error) {\n\tvar err error\n\n\tquery := url.Values{}\n\tparameter.AddToQuery(query, \"domain\", req.Domain)\n\tparameter.AddToQuery(query, \"organization_id\", req.OrganizationID)\n\n\tscwReq := &scw.ScalewayRequest{\n\t\tMethod: \"GET\",\n\t\tPath: \"/domain/v2alpha2/contacts\",\n\t\tQuery: query,\n\t\tHeaders: http.Header{},\n\t}\n\n\tvar resp ListContactsResponse\n\n\terr = s.client.Do(scwReq, &resp, opts...)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\treturn &resp, nil\n}", "func (db *Database) GetContactByEmail(email string) (*Contact, error) {\n\tvar contact *Contact\n\terr := db.Read(func(tx *Transaction) {\n\t\tcontact = tx.GetContactByEmail(email)\n\t})\n\n\treturn contact, err\n}", "func GetPersonByEmail(w http.ResponseWriter, r *http.Request) {\n\tvars := mux.Vars(r)\n\temail := vars[\"email\"]\n\n\tsession := utils.GetMongoSession()\n\tdefer session.Close()\n\n\tsession.SetMode(mgo.Monotonic, true)\n\n\tvar errDB error\n\tc := session.DB(\"test_db\").C(\"persons\")\n\n\tvar result interface{}\n\terrDB = c.Find(bson.M{\"email\": email}).Select(bson.M{\"_id\": 0}).Sort(\"-name\").One(&result)\n\n\tif errDB != nil {\n\t\tif errDB.Error() == \"not found\" {\n\t\t\tutils.SendJSONResponse(w, 404, \"Not Found\", nil)\n\t\t\treturn\n\t\t}\n\n\t\tpanic(errDB)\n\t}\n\n\tutils.SendJSONResponse(w, 0, \"Success\", result)\n}", "func 
searchSubscriptions(writer http.ResponseWriter, request *http.Request) {\n\tcontactValue := request.FormValue(\"contact\")\n\tsubscriptions, err := controller.GetSubscriptionsByContactValue(database, contactValue)\n\tif err != nil {\n\t\t_ = render.Render(writer, request, err)\n\t\treturn\n\t}\n\tif err := render.Render(writer, request, subscriptions); err != nil {\n\t\t_ = render.Render(writer, request, api.ErrorRender(err))\n\t\treturn\n\t}\n}", "func (b *GetParticipantsQueryBuilder) Contacts(paramQ string) *GetParticipantsQueryBuilder {\n\tb.req.Filter = &tg.ChannelParticipantsContacts{\n\t\tQ: paramQ,\n\t}\n\treturn b\n}", "func (tx *Transaction) GetContactByEmail(email string) *Contact {\n\trow := tx.QueryRow(\n\t\t\"SELECT id, email, name FROM contacts WHERE email = ?\",\n\t\temail,\n\t)\n\n\tvar contact Contact\n\terr := row.Scan(&contact.ID, &contact.Email, &contact.Name)\n\tif err == nil {\n\t\treturn &contact\n\t} else if err == sql.ErrNoRows {\n\t\treturn nil\n\t} else {\n\t\tpanic(err)\n\t}\n}", "func (a *Client) GetContacts(params *GetContactsParams, authInfo runtime.ClientAuthInfoWriter) (*GetContactsOK, error) {\n\t// TODO: Validate the params before sending\n\tif params == nil {\n\t\tparams = NewGetContactsParams()\n\t}\n\n\tresult, err := a.transport.Submit(&runtime.ClientOperation{\n\t\tID: \"getContacts\",\n\t\tMethod: \"GET\",\n\t\tPathPattern: \"/contacts\",\n\t\tProducesMediaTypes: []string{\"application/json\"},\n\t\tConsumesMediaTypes: []string{\"application/json\"},\n\t\tSchemes: []string{\"https\"},\n\t\tParams: params,\n\t\tReader: &GetContactsReader{formats: a.formats},\n\t\tAuthInfo: authInfo,\n\t\tContext: params.Context,\n\t\tClient: params.HTTPClient,\n\t})\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\tsuccess, ok := result.(*GetContactsOK)\n\tif ok {\n\t\treturn success, nil\n\t}\n\t// unexpected success response\n\t// safeguard: normally, absent a default response, unknown success responses return an error above: so this is a codegen issue\n\tmsg := fmt.Sprintf(\"unexpected success response for getContacts: API contract not enforced by server. Client expected to get an error, but got: %T\", result)\n\tpanic(msg)\n}", "func searchEmployee() {\n\tif emptyEmployeeDB() {\n\t\tfmt.Println(\"EmployeeDB Empty. Nothing to search\")\n\t} else {\n\t\tfor {\n\t\t\tname := userInputString(\"SEARCH Enter Employee Name: \") \n \t\t\tnp := findEmployee(name)\n \t\t\tif np == nil {\n \t\t\t\tfmt.Printf(\"\\nEmployee: %s not found\", name)\n \t\t\t\tbreak\t\n \t\t\t} else {\n \t\t\t\tfmt.Printf(\"\\nEmployee Details:\")\n \t\t\t\tfmt.Printf(\"\\nName: %s\", np.emp.name)\n \t\t\t\tfmt.Printf(\"\\nAge: %d\", np.emp.age)\n \t\t\t\tfmt.Printf(\"\\nSalary: %s\", int32InsertComma(np.emp.salary))\n \t\t\t}\n\t\t}\n\t}\t\n}", "func (c *Client) ListContacts() ([]string, error) {\n\tvar contacts []string\n\tif !c.authenticated {\n\t\treturn contacts, errors.New(\"Not authenticated. 
Call Authenticate first\")\n\t}\n\n\tvar payload map[string]interface{}\n\tmsg := common.NewMessage(c.userId, \"server\",\n\t\t\"control\", \"list_contacts\", time.Time{},\n\t\tcommon.TEXT, payload)\n\n\tencoded, err := msg.Json()\n\tif err != nil {\n\t\tlog.Println(\"Failed to encode auth message\", err)\n\t\treturn contacts, err\n\t}\n\n\terr = c.transport.SendText(string(encoded))\n\tif err != nil {\n\t\tlog.Println(\"Failed to send auth message\", err)\n\t\treturn contacts, err\n\t}\n\n\tresp := <-c.Out\n\tif resp.Status() == common.STATUS_ERROR {\n\t\terrMsg := resp.Error()\n\t\tlog.Println(\"List contacts response error\", errMsg)\n\t\treturn contacts, errors.New(errMsg)\n\t}\n\n\tcontactsData := resp.GetJsonData(\"contacts\")\n\trawContacts, ok := contactsData.([]interface{})\n\tif !ok {\n\t\treturn contacts, errors.New(\"Failed to parse contacts response\")\n\t}\n\n\tfor _, rawContact := range rawContacts {\n\t\tcontact, ok := rawContact.(string)\n\t\tif !ok {\n\t\t\tlog.Println(\"Failed to parse contact\", rawContact)\n\t\t\tcontinue\n\t\t}\n\n\t\tcontacts = append(contacts, contact)\n\t}\n\n\treturn contacts, nil\n}", "func (cpr *ContactPostgresRepository) GetContacts(ctx context.Context) ([]Contact, error) {\n\tvar contacts = []Contact{}\n\n\terr := cpr.db.ModelContext(ctx, &contacts).Order(\"id asc\").Select()\n\tif err != nil {\n\t\treturn nil, fmt.Errorf(\"postgres repository: get contacts: %w\", err)\n\t}\n\n\treturn contacts, nil\n}", "func (repository Accounts) SearchByCPF(CPF string) (models.Account, error) {\n\trows, err := repository.db.Query(\n\t\t\"select id, name, cpf, secret, balance, created_at from accounts where cpf = ?\",\n\t\tCPF,\n\t)\n\tif err != nil {\n\t\treturn models.Account{}, err\n\t}\n\tdefer rows.Close()\n\n\tvar account models.Account\n\n\tif rows.Next() {\n\t\tif err = rows.Scan(\n\t\t\t&account.ID,\n\t\t\t&account.Name,\n\t\t\t&account.Cpf,\n\t\t\t&account.Secret,\n\t\t\t&account.Balance,\n\t\t\t&account.CreatedAt,\n\t\t); err != nil {\n\t\t\treturn models.Account{}, err\n\t\t}\n\t} else {\n\t\treturn models.Account{}, errors.New(\"Account not found\")\n\t}\n\n\treturn account, nil\n}", "func (authcl *Client) SearchAccount(query string) ([]gin.Account, error) {\n\tvar accs []gin.Account\n\n\tparams := url.Values{}\n\tparams.Add(\"q\", query)\n\taddress := fmt.Sprintf(\"/api/accounts?%s\", params.Encode())\n\tres, err := authcl.Get(address)\n\tif err != nil {\n\t\treturn accs, err\n\t} else if res.StatusCode != http.StatusOK {\n\t\treturn accs, fmt.Errorf(\"[Account search] Failed. 
Server returned: %s\", res.Status)\n\t}\n\n\tbody, err := ioutil.ReadAll(res.Body)\n\tif err != nil {\n\t\treturn accs, err\n\t}\n\n\terr = json.Unmarshal(body, &accs)\n\treturn accs, err\n}", "func (s *ContactsService) List() ([]Contact, *Response, error) {\n\tpath := contactPath(nil)\n\twrappedContacts := []contactWrapper{}\n\n\tres, err := s.client.get(path, &wrappedContacts)\n\tif err != nil {\n\t\treturn []Contact{}, res, err\n\t}\n\n\tcontacts := []Contact{}\n\tfor _, contact := range wrappedContacts {\n\t\tcontacts = append(contacts, contact.Contact)\n\t}\n\n\treturn contacts, res, nil\n}", "func SearchPeople(db *mgo.Database, keyword string) ([]People, error) {\n\tcol := db.C(\"people\")\n\tvar result []People\n\terr := col.Find(bson.M{\"name\": bson.RegEx{Pattern: keyword, Options: \"i\"}}).Limit(100).All(&result)\n\treturn result, err\n}", "func (c *Client) Search(ctx context.Context, searchPerson *Person) (*Response, error) {\n\n\t// Do we meet the minimum requirements for searching?\n\tif !SearchMeetsMinimumCriteria(searchPerson) {\n\t\treturn nil, ErrDoesNotMeetMinimumCriteria\n\t}\n\n\t// Start the post data\n\tpostData := url.Values{}\n\n\t// Add the API key (always - API is required by default)\n\tpostData.Add(fieldAPIKey, c.options.apiKey)\n\n\t// Option for pretty response\n\tif !c.options.searchOptions.Search.Pretty {\n\t\tpostData.Add(fieldPretty, valueFalse)\n\t}\n\n\t// Should we show sources?\n\tif c.options.searchOptions.Search.ShowSources != ShowSourcesNone {\n\t\tpostData.Add(fieldShowSources, string(c.options.searchOptions.Search.ShowSources))\n\t}\n\n\t// Add match requirements?\n\tif c.options.searchOptions.Search.MatchRequirements != MatchRequirementsNone {\n\t\tpostData.Add(fieldMatchRequirements, string(c.options.searchOptions.Search.MatchRequirements))\n\t}\n\n\t// Add source category requirements?\n\tif c.options.searchOptions.Search.SourceCategoryRequirements != SourceCategoryRequirementsNone {\n\t\tpostData.Add(fieldSourceCategoryRequirements, string(c.options.searchOptions.Search.SourceCategoryRequirements))\n\t}\n\n\t// Custom minimum match\n\tif c.options.searchOptions.Search.MinimumMatch != MinimumMatch {\n\t\tpostData.Add(fieldMinimumMatch, fmt.Sprintf(\"%v\", c.options.searchOptions.Search.MinimumMatch))\n\t}\n\n\t// Set the \"hide sponsors\" flag (default is false)\n\tif c.options.searchOptions.Search.HideSponsored {\n\t\tpostData.Add(fieldHideSponsored, valueTrue)\n\t}\n\n\t// Set the \"infer persons\" flag (default is false)\n\tif c.options.searchOptions.Search.InferPersons {\n\t\tpostData.Add(fieldInferPersons, valueTrue)\n\t}\n\n\t// Ask for the top match?\n\tif c.options.searchOptions.Search.TopMatch {\n\t\tpostData.Add(fieldTopMatch, valueTrue)\n\t}\n\n\t// Set the live feeds flag (default is true)\n\tif !c.options.searchOptions.Search.LiveFeeds {\n\t\tpostData.Add(fieldLiveFeeds, valueFalse)\n\t}\n\n\t// Parse the search object\n\tpersonJSON, err := json.Marshal(searchPerson)\n\tif err != nil { // This should NEVER error out since the struct is being generated\n\t\treturn nil, err\n\t}\n\n\t// Add the person to the request\n\tpostData.Add(fieldPerson, string(personJSON))\n\n\t// Fire the request\n\tvar response *Response\n\tresponse, err = httpRequest(ctx, c, searchAPIEndpoint, &postData)\n\tif err != nil {\n\t\treturn nil, err\n\t} else if len(response.Error) > 0 {\n\t\treturn nil, errors.New(response.Error)\n\t}\n\treturn response, nil\n}", "func GetClientContacts(c *gin.Context) {\n\tvar _contacts []ClientContact\n\tid := 
c.Param(\"id\")\n\n\tdb := dbcontroller.Database(dbName)\n\tdefer db.Close()\n\n\tif err := db.Table(\"client_contacts\").Where(\"client_id = ?\", id).Order(\"last_name asc\").Find(&_contacts).Error; err != nil {\n\t\tpanic(err)\n\t}\n\n\tif len(_contacts) <= 0 {\n\t\tc.JSON(http.StatusNoContent, gin.H{\"status\": http.StatusNoContent, \"message\": \"No contacts found!\"})\n\t\treturn\n\t}\n\n\tc.JSON(http.StatusOK, _contacts)\n}", "func (a *Client) GetAllContacts(params *GetAllContactsParams, authInfo runtime.ClientAuthInfoWriter) (*GetAllContactsOK, error) {\n\t// TODO: Validate the params before sending\n\tif params == nil {\n\t\tparams = NewGetAllContactsParams()\n\t}\n\n\tresult, err := a.transport.Submit(&runtime.ClientOperation{\n\t\tID: \"getAllContacts\",\n\t\tMethod: \"GET\",\n\t\tPathPattern: \"/contacts/internal\",\n\t\tProducesMediaTypes: []string{\"application/json\"},\n\t\tConsumesMediaTypes: []string{\"application/json\"},\n\t\tSchemes: []string{\"https\"},\n\t\tParams: params,\n\t\tReader: &GetAllContactsReader{formats: a.formats},\n\t\tAuthInfo: authInfo,\n\t\tContext: params.Context,\n\t\tClient: params.HTTPClient,\n\t})\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\tsuccess, ok := result.(*GetAllContactsOK)\n\tif ok {\n\t\treturn success, nil\n\t}\n\t// unexpected success response\n\t// safeguard: normally, absent a default response, unknown success responses return an error above: so this is a codegen issue\n\tmsg := fmt.Sprintf(\"unexpected success response for getAllContacts: API contract not enforced by server. Client expected to get an error, but got: %T\", result)\n\tpanic(msg)\n}", "func (tmdb *TMDb) SearchCompany(name string, options map[string]string) (*CompanySearchResults, error) {\n\tvar availableOptions = map[string]struct{}{\n\t\t\"page\": {}}\n\tvar companies CompanySearchResults\n\tsafeName := url.QueryEscape(name)\n\toptionsString := getOptionsString(options, availableOptions)\n\turi := fmt.Sprintf(\"%s/search/company?query=%s&api_key=%s%s\", baseURL, safeName, tmdb.apiKey, optionsString)\n\tresult, err := getTmdb(uri, &companies)\n\treturn result.(*CompanySearchResults), err\n}", "func (c *ContactService) List(params PageParams) (ContactList, error) {\n\treturn c.Repository.list(contactListParams{PageParams: params})\n}", "func (pq *PersonQuery) QueryContacts() *ContactQuery {\n\tquery := &ContactQuery{config: pq.config}\n\tquery.path = func(ctx context.Context) (fromU *sql.Selector, err error) {\n\t\tif err := pq.prepareQuery(ctx); err != nil {\n\t\t\treturn nil, err\n\t\t}\n\t\tstep := sqlgraph.NewStep(\n\t\t\tsqlgraph.From(person.Table, person.FieldID, pq.sqlQuery()),\n\t\t\tsqlgraph.To(contact.Table, contact.FieldID),\n\t\t\tsqlgraph.Edge(sqlgraph.O2M, false, person.ContactsTable, person.ContactsColumn),\n\t\t)\n\t\tfromU = sqlgraph.SetNeighbors(pq.driver.Dialect(), step)\n\t\treturn fromU, nil\n\t}\n\treturn query\n}", "func (a *ClinicalMetadataServiceApiService) SearchConsents(ctx _context.Context, body Ga4ghSearchConsentsRequest) (Ga4ghSearchConsentsResponse, *_nethttp.Response, error) {\n\tvar (\n\t\tlocalVarHttpMethod = _nethttp.MethodPost\n\t\tlocalVarPostBody interface{}\n\t\tlocalVarFormFileName string\n\t\tlocalVarFileName string\n\t\tlocalVarFileBytes []byte\n\t\tlocalVarReturnValue Ga4ghSearchConsentsResponse\n\t)\n\n\t// create path and map variables\n\tlocalVarPath := a.client.cfg.BasePath + \"/consents/search\"\n\n\tlocalVarHeaderParams := make(map[string]string)\n\tlocalVarQueryParams := _neturl.Values{}\n\tlocalVarFormParams := 
_neturl.Values{}\n\n\t// to determine the Content-Type header\n\tlocalVarHttpContentTypes := []string{\"application/json\"}\n\n\t// set Content-Type header\n\tlocalVarHttpContentType := selectHeaderContentType(localVarHttpContentTypes)\n\tif localVarHttpContentType != \"\" {\n\t\tlocalVarHeaderParams[\"Content-Type\"] = localVarHttpContentType\n\t}\n\n\t// to determine the Accept header\n\tlocalVarHttpHeaderAccepts := []string{\"application/json\"}\n\n\t// set Accept header\n\tlocalVarHttpHeaderAccept := selectHeaderAccept(localVarHttpHeaderAccepts)\n\tif localVarHttpHeaderAccept != \"\" {\n\t\tlocalVarHeaderParams[\"Accept\"] = localVarHttpHeaderAccept\n\t}\n\t// body params\n\tlocalVarPostBody = &body\n\tr, err := a.client.prepareRequest(ctx, localVarPath, localVarHttpMethod, localVarPostBody, localVarHeaderParams, localVarQueryParams, localVarFormParams, localVarFormFileName, localVarFileName, localVarFileBytes)\n\tif err != nil {\n\t\treturn localVarReturnValue, nil, err\n\t}\n\n\tlocalVarHttpResponse, err := a.client.callAPI(r)\n\tif err != nil || localVarHttpResponse == nil {\n\t\treturn localVarReturnValue, localVarHttpResponse, err\n\t}\n\n\tlocalVarBody, err := _ioutil.ReadAll(localVarHttpResponse.Body)\n\tlocalVarHttpResponse.Body.Close()\n\tif err != nil {\n\t\treturn localVarReturnValue, localVarHttpResponse, err\n\t}\n\n\tif localVarHttpResponse.StatusCode >= 300 {\n\t\tnewErr := GenericOpenAPIError{\n\t\t\tbody: localVarBody,\n\t\t\terror: localVarHttpResponse.Status,\n\t\t}\n\t\tif localVarHttpResponse.StatusCode == 200 {\n\t\t\tvar v Ga4ghSearchConsentsResponse\n\t\t\terr = a.client.decode(&v, localVarBody, localVarHttpResponse.Header.Get(\"Content-Type\"))\n\t\t\tif err != nil {\n\t\t\t\tnewErr.error = err.Error()\n\t\t\t\treturn localVarReturnValue, localVarHttpResponse, newErr\n\t\t\t}\n\t\t\tnewErr.model = v\n\t\t\treturn localVarReturnValue, localVarHttpResponse, newErr\n\t\t}\n\t\treturn localVarReturnValue, localVarHttpResponse, newErr\n\t}\n\n\terr = a.client.decode(&localVarReturnValue, localVarBody, localVarHttpResponse.Header.Get(\"Content-Type\"))\n\tif err != nil {\n\t\tnewErr := GenericOpenAPIError{\n\t\t\tbody: localVarBody,\n\t\t\terror: err.Error(),\n\t\t}\n\t\treturn localVarReturnValue, localVarHttpResponse, newErr\n\t}\n\n\treturn localVarReturnValue, localVarHttpResponse, nil\n}", "func (c Client) Search() search.Client {\n\treturn search.NewClient(c...)\n}", "func (ec *executionContext) _Contact(ctx context.Context, sel ast.SelectionSet, obj *models.Contact) graphql.Marshaler {\n\tfields := graphql.CollectFields(ctx, sel, contactImplementors)\n\n\tout := graphql.NewOrderedMap(len(fields))\n\tinvalid := false\n\tfor i, field := range fields {\n\t\tout.Keys[i] = field.Alias\n\n\t\tswitch field.Name {\n\t\tcase \"__typename\":\n\t\t\tout.Values[i] = graphql.MarshalString(\"Contact\")\n\t\tcase \"Id\":\n\t\t\tout.Values[i] = ec._Contact_Id(ctx, field, obj)\n\t\tcase \"Name\":\n\t\t\tout.Values[i] = ec._Contact_Name(ctx, field, obj)\n\t\tdefault:\n\t\t\tpanic(\"unknown field \" + strconv.Quote(field.Name))\n\t\t}\n\t}\n\n\tif invalid {\n\t\treturn graphql.Null\n\t}\n\treturn out\n}", "func (c _StoreImpl) PhoneContact_ByPhoneNumber(PhoneNumber string) (*PhoneContact, bool) {\n\to, ok := RowCacheIndex.Get(\"PhoneContact_PhoneNumber:\" + fmt.Sprintf(\"%v\", PhoneNumber))\n\tif ok {\n\t\tif obj, ok := o.(*PhoneContact); ok {\n\t\t\treturn obj, true\n\t\t}\n\t}\n\n\trow, err := NewPhoneContact_Selector().PhoneNumber_Eq(PhoneNumber).GetRow(base.DB)\n\tif err == 
nil {\n\t\tRowCacheIndex.Set(\"PhoneContact_PhoneNumber:\"+fmt.Sprintf(\"%v\", row.PhoneNumber), row, 0)\n\t\treturn row, true\n\t}\n\n\tXOLogErr(err)\n\treturn nil, false\n}", "func SearchMember(w http.ResponseWriter, r *http.Request) {\n\tvar data []SearchRequest\n var payload ResponseValues\n\n enc := json.NewEncoder(w)\n enc.SetIndent(\"\", \" \")\n\n // Decodes the request body\n\terr := json.NewDecoder(r.Body).Decode(&data)\n\tif err != nil {\n writeError(w, &enc, &payload, http.StatusBadRequest, \"Invalid request data format\")\n return\n\t}\n\n // Validates the incoming request data\n validated := validateRequestData(data)\n if !validated {\n writeError(w, &enc, &payload, http.StatusBadRequest, \"Invalid request data format\")\n return\n }\n\n // Uses request body data to build the elastic query body\n\telasticQueryBody, err := buildElasticQuery(data)\n\tif err != nil {\n writeError(w, &enc, &payload, http.StatusBadRequest, err.Error())\n\t\treturn\n\t}\n\n // Queries the elastic service\n payload, err = elasticService.QueryElasticService(elasticQueryBody)\n if err != nil {\n writeError(w, &enc, &payload, http.StatusBadRequest, payload.Error)\n return\n }\n\n w.Header().Set(\"Content-Type\", \"application/json\")\n w.WriteHeader(http.StatusOK)\n enc.Encode(payload)\n\n\treturn\n}", "func (c *CallRequest) Search(columns ...string) string {\n\tsearch := c.Form.Get(\"search\")\n\tif search == \"\" {\n\t\treturn \"deleted_at IS NULL\"\n\t}\n\n\tfor i := range columns {\n\t\tcolumns[i] = columns[i] + \" LIKE ?\"\n\t}\n\n\twords := strings.Split(search, \" \")\n\n\tvar andConditions []string\n\tfor i := 0; i < len(words); i++ {\n\t\tandConditions = append(andConditions, \"(\"+strings.Join(columns, \" OR \")+\")\")\n\t}\n\n\tvar wordsParams []string\n\tfor _, w := range words {\n\t\tfor range columns {\n\t\t\twordsParams = append(wordsParams, \"'%\"+w+\"%'\")\n\t\t}\n\t}\n\n\twhere := \"deleted_at IS NULL AND (\" + strings.Join(andConditions, \" AND \") + \")\"\n\tfor strings.Contains(where, \"?\") {\n\t\twhere = strings.Replace(where, \"?\", wordsParams[0], 1)\n\t\twordsParams = wordsParams[1:]\n\t}\n\n\tfmt.Println(where)\n\n\treturn where\n}", "func (a *Client) SearchCustomers(params *SearchCustomersParams, authInfo runtime.ClientAuthInfoWriter, opts ...ClientOption) (*SearchCustomersOK, error) {\n\t// TODO: Validate the params before sending\n\tif params == nil {\n\t\tparams = NewSearchCustomersParams()\n\t}\n\top := &runtime.ClientOperation{\n\t\tID: \"SearchCustomers\",\n\t\tMethod: \"POST\",\n\t\tPathPattern: \"/v2/customers/search\",\n\t\tProducesMediaTypes: []string{\"application/json\"},\n\t\tConsumesMediaTypes: []string{\"application/json\"},\n\t\tSchemes: []string{\"https\"},\n\t\tParams: params,\n\t\tReader: &SearchCustomersReader{formats: a.formats},\n\t\tAuthInfo: authInfo,\n\t\tContext: params.Context,\n\t\tClient: params.HTTPClient,\n\t}\n\tfor _, opt := range opts {\n\t\topt(op)\n\t}\n\n\tresult, err := a.transport.Submit(op)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\tsuccess, ok := result.(*SearchCustomersOK)\n\tif ok {\n\t\treturn success, nil\n\t}\n\t// unexpected success response\n\t// safeguard: normally, absent a default response, unknown success responses return an error above: so this is a codegen issue\n\tmsg := fmt.Sprintf(\"unexpected success response for SearchCustomers: API contract not enforced by server. 
Client expected to get an error, but got: %T\", result)\n\tpanic(msg)\n}", "func (r *UserRead) search(q *msg.Request, mr *msg.Result) {\n\tvar (\n\t\tuserID, userName string\n\t\trows *sql.Rows\n\t\terr error\n\t)\n\n\tif rows, err = r.stmtSearch.Query(\n\t\tq.Search.User.UserName,\n\t); err != nil {\n\t\tmr.ServerError(err, q.Section)\n\t\treturn\n\t}\n\n\tfor rows.Next() {\n\t\tif err = rows.Scan(\n\t\t\t&userID,\n\t\t\t&userName,\n\t\t); err != nil {\n\t\t\trows.Close()\n\t\t\tmr.ServerError(err, q.Section)\n\t\t\treturn\n\t\t}\n\t\tmr.User = append(mr.User, proto.User{\n\t\t\tID: userID,\n\t\t\tUserName: userName,\n\t\t})\n\t}\n\tif err = rows.Err(); err != nil {\n\t\tmr.ServerError(err, q.Section)\n\t\treturn\n\t}\n\tmr.OK()\n}", "func (a *authenticator) search(\n\toriginalAddr string,\n\ttlsOptions *auth.TLSOptions,\n\toriginalBaseDN string,\n\toriginalScope ScopeType,\n\tderefAliases int,\n\tdefaultFilter string,\n\tattributes []string,\n\tfn processSearchResult) ([]*ldap.Entry, error) {\n\tvar entries []*ldap.Entry\n\treferralQueue := newQueue(\"ldap://\" + originalAddr + \"/\" + originalBaseDN)\n\n\tvisitedReferrals := make(map[string]bool)\n\tfor !referralQueue.isEmpty() {\n\t\treferral := referralQueue.pop()\n\n\t\tldapURL, err := ParseLdapURL(referral, originalBaseDN, originalScope, defaultFilter)\n\t\tif err != nil {\n\t\t\tlog.Errorf(\"Unable to parse ldap referral [%q], Err: %v\", referral, err)\n\t\t\tcontinue\n\t\t}\n\t\tif ldapURL.Addr == \"\" {\n\t\t\t//no ldap host returned in referral\n\t\t\tlog.Errorf(\"No hostname returned in referral [%q]\", referral)\n\t\t\tcontinue\n\t\t}\n\t\tif visitedReferrals[ldapURL.String()] {\n\t\t\tcontinue\n\t\t}\n\t\tvisitedReferrals[ldapURL.String()] = true\n\n\t\tconn, err := a.getConnectionFn(ldapURL.Addr, tlsOptions)\n\t\tif err != nil {\n\t\t\tlog.Errorf(\"Error connecting to LDAP [%q], Err: %v\", ldapURL.Addr, err)\n\t\t\tcontinue\n\t\t}\n\t\tdefer conn.Close()\n\n\t\t// First bind with a bind DN\n\t\tif err := conn.Bind(a.ldapConfig.Domains[0].GetBindDN(), a.ldapConfig.Domains[0].GetBindPassword()); err != nil {\n\t\t\tlog.Errorf(\"LDAP [%q] bind operation failed for bind user [%q]: Err: %v\", referral, a.ldapConfig.Domains[0].GetBindDN(), err)\n\t\t\tcontinue\n\t\t}\n\n\t\t// Search\n\t\tsearchRequest := ldap.NewSearchRequest(\n\t\t\tldapURL.BaseDN,\n\t\t\tldapURL.GetScope(), // if referral doesn't contain scope then use scope from original request (rfc4511 section 4.1.10)\n\t\t\tderefAliases, 0, 0, false,\n\t\t\tldapURL.Filter,\n\t\t\tattributes,\n\t\t\tnil,\n\t\t)\n\t\tsr, err := conn.Search(searchRequest)\n\t\tif err != nil {\n\t\t\tlog.Errorf(\"LDAP [%q] search operation failed for search request [%#v] Err: %v\", referral, searchRequest, err)\n\t\t\tcontinue\n\t\t}\n\t\t// process search result\n\t\tok, err := fn(referral, conn, sr)\n\t\t// stop searching and return nil entries if fn returns false and there is an error\n\t\tif !ok && err != nil {\n\t\t\treturn nil, err\n\t\t}\n\t\tentries = append(entries, sr.Entries...)\n\t\t// stop searching and return accumulated entries if fn returns false and there is no error\n\t\tif !ok {\n\t\t\treturn entries, nil\n\t\t}\n\t\treferralQueue.push(sr.Referrals...)\n\t}\n\n\treturn entries, nil\n}", "func SearchUserT(resp http.ResponseWriter, req *http.Request) {\n\tfirstName, ok := req.URL.Query()[\"firstname\"]\n\tif !ok {\n\t\tfmt.Println(\"Url Param 'firstname' is missing\")\n\t}\n\n\tsecondName, ok := req.URL.Query()[\"secondname\"]\n\tif !ok {\n\t\tfmt.Println(\"Url Param 
'secondname' is missing\")\n\t}\n\t//fmt.Println(firstName, secondName)\n\n\tusers := model.TarantoolUserSearch(svc.Tarantool, firstName[0], secondName[0])\n\n\tjs, err := json.Marshal(users)\n\tif err != nil {\n\t\tfmt.Println(\"Users marshalling error\")\n\t}\n\n\tresp.Write(js)\n}", "func Contact(id string, token string) (ContactReturn, error) {\n\n\t// Set config for new request\n\tr := Request{\"/contacts/\" + id, \"GET\", token, nil}\n\n\t// Send new request\n\tresponse, err := r.Send()\n\tif err != nil {\n\t\treturn ContactReturn{}, err\n\t}\n\n\t// Close response body after function ends\n\tdefer response.Body.Close()\n\n\t// Decode data\n\tvar decode ContactReturn\n\n\terr = json.NewDecoder(response.Body).Decode(&decode)\n\tif err != nil {\n\t\treturn ContactReturn{}, err\n\t}\n\n\t// Return data\n\treturn decode, nil\n\n}", "func (repository Users) Search(userQuery string) ([]models.User, error) {\n\tuserQuery = fmt.Sprintf(\"%%%s%%\", userQuery) // %% serve para o escape de caracteres\n\n\tlines, error := repository.db.Query(\"SELECT id, name, nick, email, createdAt FROM users WHERE name LIKE ? OR nick LIKE ?\",\n\t\tuserQuery, userQuery)\n\n\tif error != nil {\n\t\treturn nil, error\n\t}\n\n\tdefer lines.Close()\n\n\tvar users []models.User\n\n\tfor lines.Next() {\n\t\tvar user models.User\n\n\t\tif error = lines.Scan(\n\t\t\t&user.ID,\n\t\t\t&user.Name,\n\t\t\t&user.Email,\n\t\t\t&user.Nick,\n\t\t\t&user.CreatedAt,\n\t\t); error != nil {\n\t\t\treturn nil, error\n\t\t}\n\n\t\tusers = append(users, user)\n\t}\n\n\treturn users, nil\n}", "func Test_Search(t *testing.T) {\n\tfor _, testCase := range searchTestCases {\n\t\tresult := org.Search(org.CEO, testCase.name)\n\n\t\tif testCase.expected && result == nil {\n\t\t\tt.Errorf(\"Expected to find '%s', but did not find\", testCase.name)\n\t\t\tt.Fail()\n\t\t} else if !testCase.expected && result != nil {\n\t\t\tt.Errorf(\"Expected to not find '%s', but found\", testCase.name)\n\t\t}\n\t}\n}", "func (s *HttpServer) Search(w http.ResponseWriter, r *http.Request) {\n\tvar limit int64 = 201\n\tctx := r.Context()\n\tid := r.URL.Query().Get(\"id\")\n\n\tif id == \"\" {\n\t\tid = \"0\"\n\t}\n\tid64, err := strconv.ParseInt(id, 10, 64)\n\tif err != nil {\n\t\thttp.Error(w, exceptions.IntegerRequired.Error(), 500)\n\t\treturn\n\t}\n\n\tdirection := r.URL.Query().Get(\"direction\")\n\n\tquery := r.FormValue(\"query\")\n\tusers, err := s.UserService.FindByNameUC(ctx, query, id64, limit, direction)\n\tvar firstID, lastID int64\n\n\tdata := map[string]interface{}{\n\t\t\"Users\": users,\n\t\t\"Errors\": \"\",\n\t\t\"Query\": query,\n\t\t\"Next\" : false,\n\t}\n\tif err != nil {\n\t\tdata[\"Errors\"] = err.Error()\n\t\ts.RenderTemplate(ctx, w, \"index\", data)\n\t\treturn\n\t}\n\tcount := int64(len(users))\n\tif count > 0 {\n\t\tlastID = users[count-1].ID\n\t\tfirstID = users[0].ID\n\t\tdata[\"Users\"]=users[:count-1]\n\t}\n\tdata[\"FirstID\"] = firstID\n\tdata[\"LastID\"] = lastID\n\tif count > limit-1 {\n\t\tdata[\"Next\"] = true\n\t}\n\ts.RenderTemplate(ctx, w, \"index\", data)\n}", "func (w *WhatsApp) GetContacts(ctx context.Context, userID int64) ([]entities.Contact, error) {\n\tctx, span := trace.StartSpan(ctx, \"whatsapp.GetContacts\")\n\tdefer span.End()\n\n\treq, err := http.NewRequestWithContext(ctx, http.MethodGet, fmt.Sprintf(\"%s/%d/contacts\", w.apiURL, 10), nil)\n\tif err != nil {\n\t\treturn []entities.Contact{}, errors.FailedToDoHTTPRequest\n\t}\n\n\tresp, err := w.client.Do(req)\n\tif err != nil {\n\t\treturn 
[]entities.Contact{}, errors.FailedToDoHTTPRequest\n\t}\n\tdefer resp.Body.Close()\n\n\tvar rawContacts contacts\n\tif err = json.NewDecoder(resp.Body).Decode(&rawContacts); err != nil {\n\t\treturn []entities.Contact{}, errors.ReceivedMalformedJSON\n\t}\n\n\tcontacts := make([]entities.Contact, len(rawContacts.Content))\n\tfor i, c := range rawContacts.Content {\n\t\tcontacts[i] = entities.Contact{\n\t\t\tID: c.JID,\n\t\t\tName: c.Name,\n\t\t}\n\t}\n\n\treturn contacts, nil\n}", "func (m *MalService) SearchPeople(query string, page ...int) (search.PeopleParser, error) {\n\treturn search.InitPeopleParser(m.Config, query, page...)\n}", "func (cs *ContactService) List() ([]ContactResponse, error) {\n\treq, err := cs.client.NewRequest(\"GET\", \"/api/2.0/notification_contacts\", nil)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\n\tresp, err := cs.client.client.Do(req)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\tdefer resp.Body.Close()\n\n\tif err := validateResponse(resp); err != nil {\n\t\treturn nil, err\n\t}\n\n\tbodyBytes, _ := ioutil.ReadAll(resp.Body)\n\tbodyString := string(bodyBytes)\n\tm := &listContactsJsonResponse{}\n\terr = json.Unmarshal([]byte(bodyString), &m)\n\n\treturn m.Contacts, err\n}", "func GetMemberByEmail(w http.ResponseWriter, r *http.Request) {\r\n\tdb := connect()\r\n\tdefer db.Close()\r\n\r\n\tquery := \"SELECT * FROM user\"\r\n\r\n\temail := r.URL.Query()[\"email\"]\r\n\tif email != nil {\r\n\t\tquery += \" WHERE email='\" + email[0] + \"'\"\r\n\t}\r\n\r\n\trows, err := db.Query(query)\r\n\tif err != nil {\r\n\t\tlog.Println(err)\r\n\t}\r\n\r\n\tvar user models.User\r\n\tvar users []models.User\r\n\tfor rows.Next() {\r\n\t\tif err := rows.Scan(&user.Email, &user.Name, &user.Password, &user.TanggalLahir,\r\n\t\t\t&user.JenisKelamin, &user.AsalNegara, &user.Status, &user.TipeUser); err != nil {\r\n\t\t\tsendErrorResponse(w)\r\n\t\t} else {\r\n\t\t\tusers = append(users, user)\r\n\t\t\tsendUserSuccessResponse(w, users)\r\n\t\t}\r\n\t}\r\n}", "func (w *workerData) actionSearch(task *taskRequest, reply *taskReply) error {\n\tif task.search == nil {\n\t\treply.err = errors.New(\"missing search request\")\n\t\treturn errors.New(\"missing search request\")\n\t}\n\n\tvar ldapConn *ldap.Client\n\tif w.ldapro == nil {\n\t\tldapConn = w.ldaprw\n\t} else {\n\t\tldapConn = w.ldapro\n\t}\n\n\treply.sr, reply.err = ldapConn.Search(task.search)\n\tif reply.err != nil {\n\t\treturn reply.err\n\t}\n\treturn nil\n}", "func (app *application) Contacts(listHashStr string) (contacts.Application, error) {\n\tlist, err := app.Retrieve(listHashStr)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\n\treturn app.contactsAppBuilder.Create().WithName(app.name).WithPassword(app.password).WithSeed(app.seed).WithList(list.Hash().String()).Now()\n}", "func ContactCompanyGet(id int64) ([]ContactShort, error) {\n\tvar contacts []ContactShort\n\tif id == 0 {\n\t\treturn contacts, nil\n\t}\n\trows, err := pool.Query(context.Background(), `\n\t\tSELECT\n\t\t\tc.id,\n\t\t\tc.name,\n\t\t\tpo.name AS post_name,\n\t\t\tpog.name AS post_go_name\n\t\tFROM\n\t\t\tcontacts AS c\n\t\tLEFT JOIN\n\t\t\tposts AS po ON c.post_id = po.id\n\t\tLEFT JOIN\n\t\t\tposts AS pog ON c.post_go_id = pog.id\n\t\tWHERE\n\t\t\tc.company_id = ?\n\t\tORDER BY\n\t\t\tname ASC\n\t`, id)\n\tif err != nil {\n\t\terrmsg(\"GetContactCompany query\", err)\n\t}\n\tfor rows.Next() {\n\t\tvar contact ContactShort\n\t\terr := rows.Scan(&contact.ID, &contact.Name, &contact.PostName, &contact.PostGOName)\n\t\tif err != nil 
{\n\t\t\terrmsg(\"GetCompanyList Scan\", err)\n\t\t\treturn contacts, err\n\t\t}\n\t\tcontacts = append(contacts, contact)\n\t}\n\treturn contacts, rows.Err()\n}", "func (us *UsersService) Search(searchQuery string, queryParams client.QueryParams) (*client.UserSearchResult, error) {\n\tctx := context.Background()\n\tif queryParams == nil || queryParams[\"query\"] == \"\" {\n\t\tqueryParams = make(client.QueryParams)\n\t\tqueryParams[\"query\"] = searchQuery\n\t\treturn us.client.SearchUsers(ctx, queryParams)\n\t}\n\tqueryParams[\"query\"] = searchQuery\n\treturn us.client.SearchUsers(ctx, queryParams)\n}", "func (cl ContactList) DisplayContacts() {\n\tfor _, contact := range cl.directory {\n\t\tfmt.Println(\" Name :\", contact.Name)\n\t\tfmt.Printf(\"Address : %s:%s\\n\", contact.Address, contact.Port)\n\t}\n}", "func (candidates *LookupCandidates) GetContacts(count int) []LookupListItems {\n\treturn candidates.Nodelist[:count]\n}", "func (candidates *LookupCandidates) GetContacts(count int) []LookupListItems {\n\treturn candidates.Nodelist[:count]\n}", "func ListContacts(c messagebird.Client, groupID string, options *messagebird.PaginationRequest) (*contact.Contacts, error) {\n\tformattedPath := fmt.Sprintf(\"%s/%s/%s?%s\", path, groupID, contactPath, options.QueryParams())\n\n\tcontacts := &contact.Contacts{}\n\tif err := c.Request(contacts, http.MethodGet, formattedPath, nil); err != nil {\n\t\treturn nil, err\n\t}\n\n\treturn contacts, nil\n}", "func SearchUsers(name string) ([]User, error) {\n\tname = strings.ToLower(name)\n\tusers, err := ListAllUsers()\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\n\tfiltered := []User{}\n\tfor _, user := range users {\n\t\tif strings.Contains(user.LoginName, name) || strings.Contains(user.FullName, name) {\n\t\t\tfiltered = append(filtered, user)\n\t\t}\n\t}\n\treturn filtered, nil\n}", "func (*UsersController) Contacts(ctx *gin.Context) {\n\tsinceSeconds, err := GetParam(ctx, \"sinceSeconds\")\n\tif err != nil {\n\t\tctx.JSON(http.StatusBadRequest, gin.H{\"error\": \"Error while getting seconds parameter\"})\n\t\treturn\n\t}\n\tseconds, err := strconv.ParseInt(sinceSeconds, 10, 64)\n\tif err != nil {\n\t\tctx.JSON(http.StatusBadRequest, gin.H{\"error\": \"Invalid since parameter : must be an interger\"})\n\t\treturn\n\t}\n\n\tvar user = tat.User{}\n\tfound, err := userDB.FindByUsername(&user, getCtxUsername(ctx))\n\tif !found {\n\t\tctx.JSON(http.StatusInternalServerError, errors.New(\"User unknown\"))\n\t\treturn\n\t} else if err != nil {\n\t\tctx.JSON(http.StatusInternalServerError, errors.New(\"Error while fetching user\"))\n\t\treturn\n\t}\n\tcriteria := tat.PresenceCriteria{}\n\tfor _, contact := range user.Contacts {\n\t\tcriteria.Username = criteria.Username + \",\" + contact.Username\n\t}\n\tcriteria.DateMinPresence = strconv.FormatInt(time.Now().Unix()-seconds, 10)\n\tcount, presences, _ := presenceDB.ListPresences(&criteria)\n\n\tout := &tat.ContactsJSON{\n\t\tContacts: user.Contacts,\n\t\tCountContactsPresences: count,\n\t\tContactsPresences: &presences,\n\t}\n\tctx.JSON(http.StatusOK, out)\n}", "func (o *GetContactsParams) WithPhonenumbers(phonenumbers []string) *GetContactsParams {\n\to.SetPhonenumbers(phonenumbers)\n\treturn o\n}", "func contacts(queries [][]string) []int32 {\n\t// Write your code here\n\tprefixes := map[string]int32{}\n\tvar sol []int32\n\n\tfor _, query := range queries {\n\t\tword := query[1]\n\t\tif query[0] == \"add\" {\n\t\t\taddPrefixes(prefixes, word)\n\t\t} else if query[0] == \"find\" {\n\t\t\tsol = 
append(sol, prefixes[word])\n\t\t}\n\t}\n\n\treturn sol\n}", "func (service *EmployeeService) SearchEmployee(query string) ([]models.Employee, error) {\n\tcollection := service.mongoClient.Database(DbName).Collection(CollectionName)\n\n\tcursor, err := collection.Find(context.Background(), bson.M{\n\t\t\"$text\": bson.M{\n\t\t\t\"$search\": query,\n\t\t},\n\t})\n\n\tif err != nil {\n\t\tlog.Fatal(err)\n\t}\n\n\tvar employees []models.Employee\n\tif err = cursor.All(context.Background(), &employees); err != nil {\n\t\tfmt.Println(err)\n\t\tlog.Fatal(err)\n\t}\n\n\treturn employees, nil\n}", "func (m *User) GetContacts()([]Contactable) {\n return m.contacts\n}", "func (g *Domain) GetContacts(domain string) (contacts Contacts, err error) {\n\t_, err = g.client.Get(\"domains/\"+domain+\"/contacts\", nil, &contacts)\n\treturn\n}", "func (rec *Record) Search(text string) bool {\n\n\tif strings.Contains(strings.ToLower(rec.Title), text) {\n\t\treturn true\n\t}\n\tif strings.Contains(strings.ToLower(rec.Account), text) {\n\t\treturn true\n\t}\n\tif strings.Contains(strings.ToLower(rec.Password), text) {\n\t\treturn true\n\t}\n\tif strings.Contains(strings.ToLower(strings.Join(rec.Tags, \", \")), text) {\n\t\treturn true\n\t}\n\tif strings.Contains(strings.ToLower(rec.Url), text) {\n\t\treturn true\n\t}\n\tif strings.Contains(strings.ToLower(rec.Notes), text) {\n\t\treturn true\n\t}\n\n\treturn false\n}", "func (m *GraphBaseServiceClient) Contacts()(*if51cca2652371587dbc02e65260e291435a6a8f7f2ffb419f26c3b9d2a033f57.ContactsRequestBuilder) {\n return if51cca2652371587dbc02e65260e291435a6a8f7f2ffb419f26c3b9d2a033f57.NewContactsRequestBuilderInternal(m.pathParameters, m.requestAdapter);\n}", "func (m *GraphBaseServiceClient) Contacts()(*if51cca2652371587dbc02e65260e291435a6a8f7f2ffb419f26c3b9d2a033f57.ContactsRequestBuilder) {\n return if51cca2652371587dbc02e65260e291435a6a8f7f2ffb419f26c3b9d2a033f57.NewContactsRequestBuilderInternal(m.pathParameters, m.requestAdapter);\n}", "func (estor *ElasticStore) Search(ctx context.Context, searchTerms []string, searchName bool, mustMap map[string][]string, scrollID string) ([]string, error) {\n\tquery := elastic.NewBoolQuery()\n\n\tfor _, term := range searchTerms {\n\t\tif searchName {\n\t\t\tquery = query.Should(elastic.NewFuzzyQuery(\"FirstName\", term).Fuzziness(\"AUTO\"))\n\t\t\tquery = query.Should(elastic.NewFuzzyQuery(\"LastName\", term).Fuzziness(\"AUTO\"))\n\t\t}\n\t}\n\n\tresults := []string{}\n\n\tscroll := estor.client.Scroll().\n\t\tIndex(estor.eIndex).\n\t\tQuery(query).\n\t\tSize(10).\n\t\tSort(\"_score\", false)\n\n\tif scrollID != \"\" {\n\t\tscroll = scroll.ScrollId(scrollID)\n\t}\n\n\tres, err := scroll.Do(ctx)\n\tif (err != nil && err != io.EOF) || res == nil {\n\t\treturn results, err\n\t}\n\n\tfor _, element := range res.Hits.Hits {\n\t\tresults = append(results, element.Id)\n\t}\n\treturn results, nil\n}", "func (req fedSearchRequest) search(logger log.Logger, searcher *searcher, searchType string) http.HandlerFunc {\n\treturn func(w http.ResponseWriter, r *http.Request) {\n\t\tswitch searchType {\n\t\tcase ACH:\n\t\t\tachP, err := searcher.ACHFind(extractSearchLimit(r), req)\n\t\t\tif err != nil {\n\t\t\t\tmoovhttp.Problem(w, err)\n\t\t\t}\n\t\t\tw = setResponseHeader(w)\n\t\t\tif err := json.NewEncoder(w).Encode(&searchResponse{ACHParticipants: achP}); err != nil {\n\t\t\t\tmoovhttp.Problem(w, err)\n\t\t\t\treturn\n\t\t\t}\n\t\tcase WIRE:\n\t\t\twireP, err := searcher.WIREFind(extractSearchLimit(r), req)\n\t\t\tif err != nil 
{\n\t\t\t\tmoovhttp.Problem(w, err)\n\t\t\t}\n\t\t\tw = setResponseHeader(w)\n\t\t\tif err := json.NewEncoder(w).Encode(&searchResponse{WIREParticipants: wireP}); err != nil {\n\t\t\t\tmoovhttp.Problem(w, err)\n\t\t\t\treturn\n\t\t\t}\n\n\t\t}\n\n\t}\n}" ]
[ "0.7460258", "0.6589947", "0.65563786", "0.6236289", "0.6131786", "0.59814835", "0.5881753", "0.58775383", "0.5791408", "0.575084", "0.57119125", "0.5702953", "0.56781363", "0.5622006", "0.5616035", "0.5596071", "0.5584662", "0.55772245", "0.55752546", "0.5566689", "0.55325955", "0.5532431", "0.55261636", "0.5519902", "0.548993", "0.54719496", "0.54532826", "0.54362935", "0.5436214", "0.54184926", "0.5418319", "0.5412967", "0.53993917", "0.5394594", "0.5389255", "0.53817934", "0.5380783", "0.5374613", "0.5374335", "0.53709805", "0.53462374", "0.53443474", "0.5324858", "0.5308675", "0.53048927", "0.5304197", "0.52991325", "0.52977085", "0.5290476", "0.52903205", "0.5274131", "0.52616143", "0.5261299", "0.5259372", "0.5256211", "0.5242505", "0.52375984", "0.523326", "0.5227135", "0.5222189", "0.5217097", "0.5212411", "0.52046657", "0.51957846", "0.51882356", "0.5184823", "0.5183099", "0.51788634", "0.51787895", "0.5171351", "0.5141194", "0.5120051", "0.5119229", "0.5118356", "0.5118217", "0.5112314", "0.5110032", "0.5104496", "0.51027393", "0.5097517", "0.5096695", "0.50962865", "0.5093624", "0.50865084", "0.50858086", "0.5081155", "0.5081155", "0.5070037", "0.50550765", "0.5053531", "0.50136846", "0.5013607", "0.5011858", "0.50104475", "0.49994373", "0.4996324", "0.49859273", "0.49859273", "0.49650088", "0.49635485" ]
0.768913
0
CreateContact pushes a single contact to the Hatchbuck API
func (api HatchbuckClient) CreateContact(contact Contact) (Contact, error) {
	var c Contact
	endpoint := fmt.Sprintf("%v/contact?api_key=%v", api.baseURL, api.key)
	payload, _ := json.Marshal(contact)
	res, err := http.Post(endpoint, "application/json", bytes.NewBuffer(payload))
	log.Println(res.StatusCode)
	if err != nil {
		return c, err
	}
	decoder := json.NewDecoder(res.Body)
	err = decoder.Decode(&c)
	if err != nil {
		return c, err
	}
	return c, nil
}
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "func CreateContact(w http.ResponseWriter, r *http.Request) {\n\t// Grab the id of the user that send the request\n\tuser := r.Context().Value(middleware.User(\"user\")).(uint)\n\tcontact := &model.Contact{}\n\n\terr := json.NewDecoder(r.Body).Decode(contact)\n\tif err != nil {\n\t\tlog.Println(err)\n\t\tresponse.RespondWithStatus(\n\t\t\tw,\n\t\t\tresponse.Message(false, \"Error while decoding request body\"),\n\t\t\thttp.StatusBadRequest,\n\t\t)\n\t\treturn\n\t}\n\n\tcontact.UserID = user\n\tdata, status, ok := contact.Create()\n\tif !ok {\n\t\tresponse.RespondWithStatus(w, data, status)\n\t}\n\tresponse.RespondWithStatus(w, data, status)\n}", "func CreateContact(w http.ResponseWriter, r *http.Request) {\n\n\terr := r.ParseForm()\n\n\tif err != nil {\n\t\tsetting.Renderer.JSON(w, http.StatusBadRequest, err)\n\t}\n\n\tcontact := new(models.Contact)\n\tcontact.ID = bson.NewObjectId()\n\n\tdecoder := schema.NewDecoder()\n\terr = decoder.Decode(contact, r.PostForm)\n\n\tif err != nil {\n\t\tapi.RenderError(w, http.StatusBadRequest, err)\n\n\t\treturn\n\t}\n\n\terr = store.GetDB().Save(contact)\n\n\tif err != nil {\n\t\tapi.RenderError(w, http.StatusBadRequest, err)\n\n\t\treturn\n\t}\n\n\tsetting.Renderer.JSON(w, http.StatusOK, contact)\n}", "func (a *Client) CreateContact(params *CreateContactParams, authInfo runtime.ClientAuthInfoWriter) error {\n\t// TODO: Validate the params before sending\n\tif params == nil {\n\t\tparams = NewCreateContactParams()\n\t}\n\n\t_, err := a.transport.Submit(&runtime.ClientOperation{\n\t\tID: \"createContact\",\n\t\tMethod: \"POST\",\n\t\tPathPattern: \"/contacts\",\n\t\tProducesMediaTypes: []string{\"application/json\"},\n\t\tConsumesMediaTypes: []string{\"application/json\"},\n\t\tSchemes: []string{\"https\"},\n\t\tParams: params,\n\t\tReader: &CreateContactReader{formats: a.formats},\n\t\tAuthInfo: authInfo,\n\t\tContext: params.Context,\n\t\tClient: params.HTTPClient,\n\t})\n\tif err != nil {\n\t\treturn err\n\t}\n\treturn nil\n}", "func createContact(w http.ResponseWriter, r *http.Request) {\n\tw.Header().Set(\"Content-Type\", \"application/json\")\n\n\tvar contact schema.Contact\n\n\t// we decode our body request params\n\t_ = json.NewDecoder(r.Body).Decode(&contact)\n\n\tresult, err := contacts.InsertOne(context.TODO(), contact)\n\n\tif err != nil {\n\t\tlog.Fatal(err)\n\t}\n\n\tjson.NewEncoder(w).Encode(result)\n}", "func (sdk *Sdk) CreateContact(filePath, attrs, tags string) (string, error) {\n\tsdkC := sdk.connect\n\textraData := fmt.Sprintf(`attrs=%s&&tags=%s`, attrs, tags)\n\n\treturn sdkC.rq.PostFile(\"/api/contacts/upload\", filePath, \"file\", extraData)\n}", "func (h *Handler) CreateContact(c echo.Context) (err error) {\n\tcontact := &model.Contact{\n\t\tID: bson.NewObjectId(),\n\t}\n\tif err = c.Bind(contact); err != nil {\n\t\treturn\n\t}\n\n\t// Name and email are mandatory\n\tif contact.Name == \"\" || contact.Email == \"\" {\n\t\treturn &echo.HTTPError{Code: http.StatusBadRequest, Message: \"Invalid / missing fields\"}\n\t}\n\n\t// Save contact in database\n\tdb := h.DB.Clone()\n\tdefer db.Close()\n\tif err = db.DB(\"sampark\").C(\"contacts\").Insert(contact); err != nil {\n\t\tif mgo.IsDup(err) {\n\t\t\tmsg := \"Contact with given email already exists\"\n\t\t\treturn &echo.HTTPError{Code: http.StatusBadRequest, Message: msg}\n\t\t}\n\t\treturn\n\t}\n\treturn c.JSON(http.StatusCreated, contact)\n}", "func (s *ContactsService) Create(contactAttributes Contact) (Contact, *Response, error) {\n\tpath := contactPath(nil)\n\twrappedContact 
:= contactWrapper{Contact: contactAttributes}\n\treturnedContact := contactWrapper{}\n\n\tres, err := s.client.post(path, wrappedContact, &returnedContact)\n\tif err != nil {\n\t\treturn Contact{}, res, err\n\t}\n\n\treturn returnedContact.Contact, res, nil\n}", "func (cs *ContactService) Create(contact *Contact) (*ContactResponse, error) {\n\tif err := contact.Valid(); err != nil {\n\t\treturn nil, err\n\t}\n\n\treq, err := cs.client.NewRequest(\"POST\", \"/api/2.0/notification_contacts\", contact.PostParams())\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\n\tm := &contactDetailsJsonResponse{}\n\t_, err = cs.client.Do(req, m)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\treturn m.Contact, err\n}", "func (contact *Contact) CreateContact(accountId uint) map[string]interface{} {\n\t// check for empty data\n\tif contact.PhoneNumber == \"\" || contact.Email == \"\" {\n\t\treturn utl.Message(102, \"the following fields are required: phone_number and email\")\n\t}\n\n\t// validate email\n\tif err := checkmail.ValidateFormat(contact.Email); err != nil {\n\t\treturn utl.Message(102, \"email address is not valid\")\n\t}\n\n\t// validate phone number\n\t// should not be less than 9 digits and more than 12 chars\n\t// accepted: 0712345678, 254712345678, 712345678\n\t// store: 712345678\n\tif len(contact.PhoneNumber) > 12 || len(contact.PhoneNumber) < 9 {\n\t\treturn utl.Message(102, \"enter a valid phone number, between 9 to 12 digits.\")\n\t}\n\n\tif strings.HasPrefix(contact.PhoneNumber, \"254\") {\n\t\tphoneNumber_ := contact.PhoneNumber[3:len(contact.PhoneNumber)]\n\t\tcontact.PhoneNumber = phoneNumber_\n\t}\n\n\tif strings.HasPrefix(contact.PhoneNumber, \"0\") {\n\t\tphoneNumber_ := contact.PhoneNumber[1:len(contact.PhoneNumber)]\n\t\tcontact.PhoneNumber = phoneNumber_\n\t}\n\n\t// save the contact in DB\n\tcontact.AccountID = accountId\n\tDBConnection.Table(\"contact\").Create(contact)\n\tif contact.ID <= 0 {\n\t\treturn utl.Message(105, \"failed to save contact, tyr again\")\n\t}\n\n\tresponse := utl.Message(0, \"contact has been created\")\n\tresponse[\"data\"] = contact\n\treturn response\n}", "func (cs *UserService) CreateContact(userID int, contact Contact) (*CreateUserContactResponse, error) {\n\tif err := contact.ValidContact(); err != nil {\n\t\treturn nil, err\n\t}\n\n\treq, err := cs.client.NewRequest(\"POST\", \"/users/\"+strconv.Itoa(userID), contact.PostContactParams())\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\n\tm := &createUserContactJSONResponse{}\n\t_, err = cs.client.Do(req, m)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\treturn m.Contact, err\n}", "func (s *Service) ContactsCreate(contactModRequest *model.ContactModRequest) *ContactsCreateOp {\n\treturn &ContactsCreateOp{\n\t\tCredential: s.credential,\n\t\tMethod: \"POST\",\n\t\tPath: \"contacts\",\n\t\tPayload: contactModRequest,\n\t\tQueryOpts: make(url.Values),\n\t\tVersion: esign.APIv21,\n\t}\n}", "func (r *TCPMonitorRepository) CreateContact(contact MonitoringContact) error {\n\trestRequest := rest.Request{Endpoint: \"/monitoring-contacts\", Body: &contact}\n\n\treturn r.Client.Post(restRequest)\n}", "func (s *Service) Create(c CreateContactData) (*Contact, error) {\n\tcontact, err := s.ContactsRepository.Create(Contact{\n\t\tFirstName: c.FirstName,\n\t\tLastName: c.LastName,\n\t})\n\n\tif err != nil {\n\t\tmsg := fmt.Sprintf(\"error while creating a new contact: %v\", err)\n\t\ts.Logger.Error(msg)\n\t\treturn nil, errors.New(msg)\n\t}\n\n\tif len(c.Emails) != 0 {\n\t\temails, err := 
s.EmailRepository.Create(contact.ID, c.Emails...)\n\t\tif err != nil {\n\t\t\tmsg := fmt.Sprintf(\"error while inserting contact's emails: %v\", err)\n\t\t\ts.Logger.Error(msg)\n\t\t\treturn nil, errors.New(msg)\n\t\t}\n\n\t\tcontact.Emails = emails\n\t}\n\n\tif len(c.Phones) != 0 {\n\t\tphones, err := s.PhoneRepository.Create(contact.ID, c.Phones...)\n\t\tif err != nil {\n\t\t\tmsg := fmt.Sprintf(\"error while inserting contact's phone: %v\", err)\n\t\t\ts.Logger.Error(msg)\n\t\t\treturn nil, errors.New(msg)\n\t\t}\n\n\t\tcontact.Phones = phones\n\t}\n\n\treturn contact, nil\n}", "func (sdk *Sdk) CreateContactWithBody(body string) (string, error) {\n\tsdkC := sdk.connect\n\n\treturn sdkC.rq.PostJSON(\"/api/contacts\", body)\n}", "func (a *Client) SafeContactCreate(params *SafeContactCreateParams) (*SafeContactCreateOK, error) {\n\t// TODO: Validate the params before sending\n\tif params == nil {\n\t\tparams = NewSafeContactCreateParams()\n\t}\n\n\tresult, err := a.transport.Submit(&runtime.ClientOperation{\n\t\tID: \"safeContactCreate\",\n\t\tMethod: \"POST\",\n\t\tPathPattern: \"/domainSafeContact\",\n\t\tProducesMediaTypes: []string{\"application/json\"},\n\t\tConsumesMediaTypes: []string{\"application/json\"},\n\t\tSchemes: []string{\"https\"},\n\t\tParams: params,\n\t\tReader: &SafeContactCreateReader{formats: a.formats},\n\t\tContext: params.Context,\n\t\tClient: params.HTTPClient,\n\t})\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\tsuccess, ok := result.(*SafeContactCreateOK)\n\tif ok {\n\t\treturn success, nil\n\t}\n\t// unexpected success response\n\t// safeguard: normally, absent a default response, unknown success responses return an error above: so this is a codegen issue\n\tmsg := fmt.Sprintf(\"unexpected success response for safeContactCreate: API contract not enforced by server. Client expected to get an error, but got: %T\", result)\n\tpanic(msg)\n}", "func (ats *ActionService) CreateContactAction(request *ContactActionDataInputModel) (*Result, error) {\n\trequestBody, err := json.Marshal(request)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\n\tresponse, err := ats.client.makeRequestWithHeaders(\n\t\t\"POST\",\n\t\t\"/action/contact\",\n\t\tbytes.NewBuffer(requestBody),\n\t\tmap[string]string{\n\t\t\t\"Content-Type\": \"application/json-patch+json\",\n\t\t},\n\t)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\n\tdefer response.Body.Close()\n\tresponseBody := new(Result)\n\tjson.NewDecoder(response.Body).Decode(responseBody)\n\n\tif len(responseBody.ValidationErrors) > 0 {\n\t\terrors := Errors{}\n\t\tfor _, err := range responseBody.ValidationErrors {\n\t\t\terrors = append(errors, fmt.Errorf(\"%s: %s\", err.Field, err.Message))\n\t\t}\n\t\treturn nil, errors\n\t}\n\n\tif _, ok := responseBody.Data.(ActionOutputModel); ok {\n\t\treturn responseBody, nil\n\t}\n\treturn nil, Errors{errors.New(\"response.data is not of type Action\")}\n}", "func Test_AddContact(t *testing.T) {\n\t// SETUP:\n\t// A standard Env. 
defer is used to ensure the env is cleaned up after the test.\n\tenv := test.SetupEnv(t)\n\tdefer env.Close()\n\n\t// TEST: Adding a contact via the API.\n\tcontact, err := env.Client.AddContact(service.AddContactRequest{\n\t\tEmail: \"[email protected]\",\n\t\tName: \"Alice Zulu\",\n\t})\n\n\t// VERIFY: Response contains the contact\n\trequire.NoError(t, err, \"Unable to get contact via API\")\n\trequire.NotEmpty(t, contact, \"Contact not found\")\n\tassert.True(t, contact.Id > 0, \"Contact ID is missing\")\n\tassert.Equal(t, contact.Email, \"[email protected]\")\n\tassert.Equal(t, contact.Name, \"Alice Zulu\")\n\n\t// VERIFY: Contact is added to the database properly.\n\tdbContact := env.ReadContactWithEmail(\"[email protected]\")\n\trequire.NotEmpty(t, dbContact, \"Contact not found\")\n\tassert.Equal(t, dbContact.Email, \"[email protected]\")\n\tassert.Equal(t, dbContact.Name, \"Alice Zulu\")\n}", "func AddContact(body ContactBody, token string) (ContactReturn, error) {\n\n\t// Convert data\n\tconvert, err := json.Marshal(body)\n\tif err != nil {\n\t\treturn ContactReturn{}, err\n\t}\n\n\t// Set config for new request\n\tr := Request{\"/contacts\", \"POST\", token, convert}\n\n\t// Send new request\n\tresponse, err := r.Send()\n\tif err != nil {\n\t\treturn ContactReturn{}, err\n\t}\n\n\t// Close response body after function ends\n\tdefer response.Body.Close()\n\n\t// Decode data\n\tvar decode ContactReturn\n\n\terr = json.NewDecoder(response.Body).Decode(&decode)\n\tif err != nil {\n\t\treturn ContactReturn{}, err\n\t}\n\n\t// Return data\n\treturn decode, nil\n\n}", "func DefaultCreateContact(ctx context.Context, in *Contact, db *gorm.DB) (*Contact, error) {\n\tif in == nil {\n\t\treturn nil, errors.New(\"Nil argument to DefaultCreateContact\")\n\t}\n\tormObj, err := in.ToORM(ctx)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\taccountID, err := auth.GetAccountID(ctx, nil)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\tormObj.AccountID = accountID\n\tif err = db.Create(&ormObj).Error; err != nil {\n\t\treturn nil, err\n\t}\n\tpbResponse, err := ormObj.ToPB(ctx)\n\treturn &pbResponse, err\n}", "func insertContact(r *http.Request, svc *mirror.Service) string {\n\tc := appengine.NewContext(r)\n\tc.Infof(\"Inserting contact\")\n\tname := r.FormValue(\"name\")\n\timageUrl := r.FormValue(\"imageUrl\")\n\tif name == \"\" || imageUrl == \"\" {\n\t\treturn \"Must specify imageUrl and name to insert contact\"\n\t}\n\tif strings.HasPrefix(imageUrl, \"/\") {\n\t\timageUrl = fullURL(r.Host, imageUrl)\n\t}\n\n\tbody := mirror.Contact{\n\t\tDisplayName: name,\n\t\tId: strings.Replace(name, \" \", \"_\", -1),\n\t\tImageUrls: []string{imageUrl},\n\t}\n\n\tif _, err := svc.Contacts.Insert(&body).Do(); err != nil {\n\t\treturn fmt.Sprintf(\"Unable to insert contact: %s\", err)\n\t}\n\treturn fmt.Sprintf(\"Inserted contact: %s\", name)\n}", "func (r CreateContactRequest) Send() (*CreateContactOutput, error) {\n\terr := r.Request.Send()\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\n\treturn r.Request.Data.(*CreateContactOutput), nil\n}", "func DefaultCreateContact(ctx context.Context, in *Contact, db *gorm.DB) (*Contact, error) {\n\tif in == nil {\n\t\treturn nil, errors.New(\"Nil argument to DefaultCreateContact\")\n\t}\n\tormObj, err := ConvertContactToORM(*in)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\tif err = db.Create(&ormObj).Error; err != nil {\n\t\treturn nil, err\n\t}\n\tpbResponse, err := ConvertContactFromORM(ormObj)\n\treturn &pbResponse, err\n}", "func (*UsersController) 
AddContact(ctx *gin.Context) {\n\tcontactIn, err := GetParam(ctx, \"username\")\n\tif err != nil {\n\t\treturn\n\t}\n\tuser, err := PreCheckUser(ctx)\n\tif err != nil {\n\t\treturn\n\t}\n\n\tvar contact = tat.User{}\n\tfound, err := userDB.FindByUsername(&contact, contactIn)\n\tif !found {\n\t\tAbortWithReturnError(ctx, http.StatusBadRequest, fmt.Errorf(\"user with username %s does not exist\", contactIn))\n\t\treturn\n\t} else if err != nil {\n\t\tAbortWithReturnError(ctx, http.StatusInternalServerError, fmt.Errorf(\"Error while fetching user with username %s\", contactIn))\n\t\treturn\n\t}\n\n\tif err := userDB.AddContact(&user, contact.Username, contact.Fullname); err != nil {\n\t\tAbortWithReturnError(ctx, http.StatusInternalServerError, fmt.Errorf(\"Error while add contact %s to user:%s\", contact.Username, user.Username))\n\t\treturn\n\t}\n\tctx.JSON(http.StatusCreated, \"\")\n}", "func (contact *Contact) Create() error {\n\treturn DB().Create(contact).Error\n}", "func TestContactAddCreateUser(t *testing.T) {\n\tdb := database.Connect()\n\tu := models.User{\n\t\tEmail: \"[email protected]\",\n\t}\n\tu.Create(db)\n\tut, _ := u.AddToken(db)\n\n\ttype Data struct {\n\t\tName string\n\t\tEmail string\n\t}\n\td := Data{Name: \"test\", Email: \"[email protected]\"}\n\tj, _ := json.Marshal(d)\n\tb := bytes.NewBuffer(j)\n\n\tr, err := http.NewRequest(\"POST\", \"/\", b)\n\tr.Header.Add(\"Content-Type\", \"application/json\")\n\tr.Header.Add(\"X-Access-Token\", ut.Token)\n\tif err != nil {\n\t\tt.Errorf(\"Unexpected error\", err)\n\t}\n\n\tw := httptest.NewRecorder()\n\tc := SetupWebContext()\n\tContactAdd(c, w, r)\n\tif w.Code != http.StatusAccepted {\n\t\tt.Errorf(\"%v expected, got %v instead\", http.StatusAccepted, w.Code)\n\t}\n}", "func (c *Client) AddContact(contactId string) error {\n\tif !c.authenticated {\n\t\treturn errors.New(\"Not authenticated. 
Call Authenticate first\")\n\t}\n\n\tpayload := make(map[string]interface{})\n\tpayload[\"contact_id\"] = contactId\n\tmsg := common.NewMessage(c.userId, \"server\",\n\t\t\"control\", \"add_contact\", time.Time{},\n\t\tcommon.TEXT, payload)\n\n\tencoded, err := msg.Json()\n\tif err != nil {\n\t\tlog.Println(\"Failed to encode add contact message\", err)\n\t\treturn err\n\t}\n\n\terr = c.transport.SendText(string(encoded))\n\tif err != nil {\n\t\tlog.Println(\"Failed to send add contact message\", err)\n\t\treturn err\n\t}\n\n\tresp := <-c.Out\n\tif resp.Status() == common.STATUS_ERROR {\n\t\terrMsg := resp.Error()\n\t\tlog.Println(\"Add contact response error\", errMsg)\n\t\treturn errors.New(errMsg)\n\t}\n\n\treturn nil\n}", "func (mj *Mojo) AddContact(contacts ...Contact) error {\n\treqbody, err := json.Marshal(contacts)\n\tif err != nil {\n\t\treturn &ErrInvalid{Msg: err.Error()}\n\t}\n\turl := prefixHTTP(mj.URL) + \"/api/contacts/bulk_create/\"\n\tresbody, err := mj.post(url, reqbody)\n\tif err != nil {\n\t\treturn err\n\t}\n\tvar data mojoResponse\n\tif err := json.Unmarshal(resbody, &data); err != nil {\n\t\treturn fmt.Errorf(\"mojo: POST %s %s decoding %s (%v)\", url, string(reqbody), string(resbody), err)\n\t}\n\tif data.isLockedError() {\n\t\treturn fmt.Errorf(\"mojo: %s\", data.errorMsg())\n\t}\n\tif data.isDuplicate() {\n\t\treturn &ErrDuplicate{IDs: data.duplicatedIDs()}\n\t}\n\tif data.isError() {\n\t\treturn &ErrInvalid{Msg: data.errorMsg()}\n\t}\n\treturn nil\n}", "func (m *ContactsRequestBuilder) Post(ctx context.Context, body iadcd81124412c61e647227ecfc4449d8bba17de0380ddda76f641a29edf2b242.Contactable, requestConfiguration *ContactsRequestBuilderPostRequestConfiguration)(iadcd81124412c61e647227ecfc4449d8bba17de0380ddda76f641a29edf2b242.Contactable, error) {\n requestInfo, err := m.CreatePostRequestInformation(ctx, body, requestConfiguration);\n if err != nil {\n return nil, err\n }\n errorMapping := i2ae4187f7daee263371cb1c977df639813ab50ffa529013b7437480d1ec0158f.ErrorMappings {\n \"4XX\": ia572726a95efa92ddd544552cd950653dc691023836923576b2f4bf716cf204a.CreateODataErrorFromDiscriminatorValue,\n \"5XX\": ia572726a95efa92ddd544552cd950653dc691023836923576b2f4bf716cf204a.CreateODataErrorFromDiscriminatorValue,\n }\n res, err := m.requestAdapter.SendAsync(ctx, requestInfo, iadcd81124412c61e647227ecfc4449d8bba17de0380ddda76f641a29edf2b242.CreateContactFromDiscriminatorValue, errorMapping)\n if err != nil {\n return nil, err\n }\n if res == nil {\n return nil, nil\n }\n return res.(iadcd81124412c61e647227ecfc4449d8bba17de0380ddda76f641a29edf2b242.Contactable), nil\n}", "func NewCfnContact(scope awscdk.Construct, id *string, props *CfnContactProps) CfnContact {\n\t_init_.Initialize()\n\n\tj := jsiiProxy_CfnContact{}\n\n\t_jsii_.Create(\n\t\t\"monocdk.aws_ssmcontacts.CfnContact\",\n\t\t[]interface{}{scope, id, props},\n\t\t&j,\n\t)\n\n\treturn &j\n}", "func NewRelatedContact()(*RelatedContact) {\n m := &RelatedContact{\n }\n m.backingStore = ie8677ce2c7e1b4c22e9c3827ecd078d41185424dd9eeb92b7d971ed2d49a392e.BackingStoreFactoryInstance();\n m.SetAdditionalData(make(map[string]any))\n return m\n}", "func PostClientContact(c *gin.Context) {\n\t_contact := ClientContact{\n\t\tID: dbcontroller.UniqueID(11),\n\t\tClientID: c.PostForm(\"clientId\"),\n\t\tFirstName: c.PostForm(\"firstName\"),\n\t\tLastName: c.PostForm(\"lastName\"),\n\t\tEmail: c.PostForm(\"email\"),\n\t\tPhone: c.PostForm(\"phone\"),\n\t\tTitle: c.PostForm(\"title\"),\n\t}\n\n\tdb := 
dbcontroller.Database(dbName)\n\tdefer db.Close()\n\n\tif err := db.Table(\"client_contacts\").Create(&_contact).Error; err != nil {\n\t\tpanic(err)\n\t}\n\n\tc.JSON(http.StatusCreated, gin.H{\"status\": http.StatusCreated,\n\t\t\"message\": \"Contact created successfully!\", \"contactId\": _contact.ID})\n}", "func NewContact(contactName string, contactEmail string, ) *Contact {\n\tthis := Contact{}\n\tthis.ContactName = contactName\n\tthis.ContactEmail = contactEmail\n\treturn &this\n}", "func CreateSearchAlertContactRequest() (request *SearchAlertContactRequest) {\n\trequest = &SearchAlertContactRequest{\n\t\tRpcRequest: &requests.RpcRequest{},\n\t}\n\trequest.InitWithApiInfo(\"ARMS\", \"2019-08-08\", \"SearchAlertContact\", \"arms\", \"openAPI\")\n\trequest.Method = requests.POST\n\treturn\n}", "func (user *User) CreateContactConversation(ctx context.Context, userID string) (*Conversation, error) {\n\treturn user.CreateConversation(ctx, &CreateConversationInput{\n\t\tCategory: ConversationCategoryContact,\n\t\tConversationID: utils.UniqueConversationID(user.UserID, userID),\n\t\tParticipants: []*Participant{{UserID: userID}},\n\t})\n}", "func (handler *Handler) CreateAddress(ctx context.Context, in *candyland_grpc.CreateAddressRequest) (*candyland_grpc.CreateAddressReply, error) {\n\terr := handler.CreateAddressUsecase.Create(in.UserId, in.CountryId, in.StateId, in.CityId, in.StreetId, in.Number, in.Complement.String())\n\n\tif err != nil {\n\t\tlog.Println(err.Error())\n\t\treturn nil, err\n\t}\n\n\tres := &candyland_grpc.CreateAddressReply{\n\t\tWasCreated: true,\n\t}\n\n\treturn res, nil\n}", "func CreateGetContactRequest() (request *GetContactRequest) {\n\trequest = &GetContactRequest{\n\t\tRpcRequest: &requests.RpcRequest{},\n\t}\n\trequest.InitWithApiInfo(\"Subscription\", \"2021-01-15\", \"GetContact\", \"\", \"\")\n\treturn\n}", "func Contact() *ContactInfo { return contact(globalFaker.Rand) }", "func (c *RBController) Contact(w http.ResponseWriter, r *http.Request) (err error) {\n\tc.HTML(w, http.StatusOK, \"contact\", nil)\n\treturn nil\n}", "func (cpr *ContactPostgresRepository) AddContact(ctx context.Context, contact Contact) (Contact, error) {\n\tif _, err := cpr.db.ModelContext(ctx, &contact).Insert(); err != nil {\n\t\treturn Contact{}, fmt.Errorf(\"postgres repository: create contact: %w\", err)\n\t}\n\n\treturn contact, nil\n}", "func (a *ShippingContactsApiService) CreateCustomerShippingContacts(ctx context.Context, id string) ApiCreateCustomerShippingContactsRequest {\n\treturn ApiCreateCustomerShippingContactsRequest{\n\t\tApiService: a,\n\t\tctx: ctx,\n\t\tid: id,\n\t}\n}", "func RegisterContactAPI(router *httprouter.Router, opt *Options) {\n\t// Validation\n\tvar err error\n\tswitch {\n\tcase router == nil:\n\t\terr = errors.New(\"router must not be nil\")\n\tcase strings.TrimSpace(opt.RootDir) == \"\":\n\t\terr = errors.New(\"root directory must not be empty\")\n\tcase strings.TrimSpace(opt.FilePrefix) == \"\":\n\t\terr = errors.New(\"file prefix must not be empty\")\n\tcase opt.Revision == 0:\n\t\terr = errors.New(\"revision must not be 0\")\n\t}\n\thandleError(err)\n\n\tc := &contactAPI{\n\t\trootDir: opt.RootDir,\n\t\tfilePrefix: opt.FilePrefix,\n\t\trevision: opt.Revision,\n\t\tmu: &sync.RWMutex{},\n\t\tcontacts: make(map[int]*ContactData, 0),\n\t\tcountiesMap: make(map[string]int, 0),\n\t}\n\n\t// read from file\n\tfile, err := os.Open(filepath.Join(opt.RootDir, fmt.Sprintf(\"%s-v%d.json\", opt.FilePrefix, opt.Revision)))\n\thandleError(err)\n\tdefer 
file.Close()\n\n\t// update data from file\n\tcontact := &ContactData{}\n\terr = json.NewDecoder(file).Decode(contact)\n\thandleError(err)\n\n\t// get json\n\tbs, err := json.Marshal(contact)\n\thandleError(err)\n\n\t// Update contacts map\n\tfor index, countyHotline := range contact.CountiesHotlines {\n\t\tc.countiesMap[countyHotline.County] = index\n\t}\n\n\t// add the contact only if it doesn't exist\n\t_, err = revisionManager.Get(contactGroup, contact.Revision)\n\tif gorm.IsRecordNotFoundError(err) {\n\t\terr = revisionManager.Add(&revision{\n\t\t\tRevision: contact.Revision,\n\t\t\tResourceGroup: contactGroup,\n\t\t\tData: bs,\n\t\t})\n\t\thandleError(err)\n\t}\n\n\tdur := time.Duration(int(30*time.Minute) + rand.Intn(30))\n\n\tgo updateRevisionWorker(dur, func() {\n\t\t// get new revision\n\t\trevisions, err := revisionManager.List(contactGroup)\n\t\tif err != nil {\n\t\t\tlogrus.Infof(\"failed to list revisions from database: %v\", err)\n\t\t\treturn\n\t\t}\n\n\t\t// Update Map\n\t\tc.mu.Lock()\n\t\tdefer c.mu.Unlock()\n\n\t\tfor _, revision := range revisions {\n\t\t\tcontact := &ContactData{}\n\t\t\terr = json.Unmarshal(revision.Data, contact)\n\t\t\tif err != nil {\n\t\t\t\tlogrus.Infof(\"failed to unmarshal revision: %v\", err)\n\t\t\t\tcontinue\n\t\t\t}\n\n\t\t\tc.contacts[revision.Revision] = contact\n\t\t\tc.revision = revision.Revision\n\t\t}\n\n\t\tlogrus.Infoln(\"Contacts updated\")\n\t})\n\n\t// Update endpoints\n\trouter.GET(\"/rest/v1/contacts\", c.GetContact)\n\trouter.PUT(\"/rest/v1/contacts\", c.UpdateContact)\n\trouter.GET(\"/rest/v1/contacts/hotlines/nearest\", c.GetNearestHotlines)\n\trouter.GET(\"/rest/v1/contacts/hotlines/county\", c.GetCountyHotlines)\n}", "func handleContact(resp http.ResponseWriter, request *http.Request) {\n\tcors := handleCors(resp, request)\n\tif cors {\n\t\treturn\n\t}\n\n\tbody, err := ioutil.ReadAll(request.Body)\n\tif err != nil {\n\t\tresp.WriteHeader(401)\n\t\tresp.Write([]byte(fmt.Sprintf(`{\"success\": false, \"reason\": \"%s\"}`, err)))\n\t\treturn\n\t}\n\n\tvar t Contact\n\terr = json.Unmarshal(body, &t)\n\tif err != nil {\n\t\tresp.WriteHeader(401)\n\t\tresp.Write([]byte(fmt.Sprintf(`{\"success\": false, \"reason\": \"%s\"}`, err)))\n\t\treturn\n\t}\n\n\tif len(t.Email) < 3 || len(t.Message) == 0 {\n\t\tresp.WriteHeader(401)\n\t\tresp.Write([]byte(fmt.Sprintf(`{\"success\": false, \"reason\": \"Please fill a valid email and message\"}`)))\n\t\treturn\n\t}\n\n\tctx := context.Background()\n\tmailContent := fmt.Sprintf(\"Firsname: %s\\nLastname: %s\\nTitle: %s\\nCompanyname: %s\\nPhone: %s\\nEmail: %s\\nMessage: %s\", t.Firstname, t.Lastname, t.Title, t.Companyname, t.Phone, t.Email, t.Message)\n\tlog.Printf(\"Sending contact from %s\", t.Email)\n\n\tmsg := &mail.Message{\n\t\tSender: \"Shuffle <[email protected]>\",\n\t\tTo: []string{\"[email protected]\"},\n\t\tSubject: \"Shuffler.io - New contact form\",\n\t\tBody: mailContent,\n\t}\n\n\tif err := mail.Send(ctx, msg); err != nil {\n\t\tlog.Printf(\"Couldn't send email: %v\", err)\n\t}\n\n\tresp.WriteHeader(200)\n\tresp.Write([]byte(fmt.Sprintf(`{\"success\": true, \"message\": \"Thanks for reaching out. 
We will contact you soon!\"}`)))\n}", "func (kb *Kbucket) AddContact(cont Contact) bool {\n\tnewCont := new(Contact)\n\tnewCont.NodeID = CopyID(cont.NodeID)\n\tnewCont.Host = cont.Host\n\tnewCont.Port = cont.Port\n\n\tkb.ContactList = append(kb.ContactList, cont)\n\n\treturn true\n}", "func (s *Service) Post(w http.ResponseWriter, r *http.Request) {\n\tcontent, err := ioutil.ReadAll(r.Body)\n\tdefer r.Body.Close()\n\tif err != nil {\n\t\tw.WriteHeader(http.StatusBadRequest)\n\t\treturn\n\t}\n\tc := &Contacts{}\n\tif err = json.Unmarshal(content, c); err != nil {\n\t\tw.WriteHeader(http.StatusBadRequest)\n\t\treturn\n\t}\n\n\tif s != nil {\n\t\tif s.Store != nil {\n\t\t\ts.mutex.Lock()\n\t\t\tdefer s.mutex.Unlock()\n\t\t\ts.Store.Contacts = append(s.Store.Contacts, c)\n\t\t}\n\t}\n}", "func (api HatchbuckClient) SearchContact(criteria SearchCriteria) ([]Contact, error) {\n\tvar c []Contact\n\tendpoint := fmt.Sprintf(\"%v/contact/search?api_key=%v\", api.baseURL, api.key)\n\tpayload, _ := json.Marshal(criteria)\n\tres, err := http.Post(endpoint, \"application/json\", bytes.NewBuffer(payload))\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\tdecoder := json.NewDecoder(res.Body)\n\terr = decoder.Decode(&c)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\treturn c, nil\n}", "func (c *client) SendContact(args SendContactArgs) (Message, TelegramError) {\n\treturn c.sendJSONMessage(args)\n}", "func (*CreateContactRequest) Descriptor() ([]byte, []int) {\n\treturn file_google_cloud_essentialcontacts_v1_service_proto_rawDescGZIP(), []int{5}\n}", "func (o *SafeContactCreateParams) WithContext(ctx context.Context) *SafeContactCreateParams {\n\to.SetContext(ctx)\n\treturn o\n}", "func (api HatchbuckClient) UpdateContact(contact Contact) (Contact, error) {\n\tvar c Contact\n\tclient := &http.Client{}\n\tendpoint := fmt.Sprintf(\"%v/contact?api_key=%v\", api.baseURL, api.key)\n\tpayload, _ := json.Marshal(contact)\n\treq, err := http.NewRequest(\"PUT\", endpoint, bytes.NewBuffer(payload))\n\treq.Header.Add(\"Content-Type\", \"application/json\")\n\tres, err := client.Do(req)\n\tlog.Println(res.StatusCode)\n\tif err != nil {\n\t\treturn c, err\n\t}\n\tdecoder := json.NewDecoder(res.Body)\n\terr = decoder.Decode(&c)\n\tif err != nil {\n\t\treturn c, err\n\t}\n\treturn c, nil\n}", "func (o *SafeContactCreateParams) WithSafeContact(safeContact *models.DomainSafeAccount) *SafeContactCreateParams {\n\to.SetSafeContact(safeContact)\n\treturn o\n}", "func (tx *Transaction) AddContact(c Contact) int64 {\n\trs, err := tx.Exec(\n\t\t\"INSERT INTO contacts (email, name) VALUES (?, ?)\",\n\t\tc.Email,\n\t\tc.Name,\n\t)\n\tif err != nil {\n\t\tpanic(err)\n\t}\n\n\tid, err := rs.LastInsertId()\n\tif err != nil {\n\t\tpanic(err)\n\t}\n\n\treturn id\n}", "func Add(ctx context.Context, db *mongo.Database, c Contact) error {\n\t_, err := db.Collection(contactsCollection).InsertOne(ctx, c)\n\tif err != nil {\n\t\treturn status.Errorf(\n\t\t\tcodes.Internal, \"adding contact\",\n\t\t)\n\t}\n\treturn nil\n}", "func (f *Faker) Contact() *ContactInfo { return contact(f.Rand) }", "func (db *Database) AddContact(c Contact) (int64, error) {\n\tvar contactID int64\n\terr := db.Write(func(tx *Transaction) {\n\t\tcontactID = tx.AddContact(c)\n\t})\n\n\treturn contactID, err\n}", "func (client *KeyVaultClient) setCertificateContactsCreateRequest(ctx context.Context, vaultBaseURL string, contacts Contacts, options *KeyVaultClientSetCertificateContactsOptions) (*policy.Request, error) {\n\thost := \"{vaultBaseUrl}\"\n\thost = 
strings.ReplaceAll(host, \"{vaultBaseUrl}\", vaultBaseURL)\n\turlPath := \"/certificates/contacts\"\n\treq, err := runtime.NewRequest(ctx, http.MethodPut, runtime.JoinPaths(host, urlPath))\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\treqQP := req.Raw().URL.Query()\n\treqQP.Set(\"api-version\", \"7.2\")\n\treq.Raw().URL.RawQuery = reqQP.Encode()\n\treq.Raw().Header.Set(\"Accept\", \"application/json\")\n\treturn req, runtime.MarshalAsJSON(req, contacts)\n}", "func TestSetContact(t *testing.T) {\n\tvar client *RedisFakeClient\n\taddressbook := d.AddressBook{}\n\tcontact := c.Contact{\"Jonas\",\"[email protected]\", \"0791234567\", c.Address{\"12 Fake Street\", \"Fake City\", \"FA\"}}\n\tok, errorCode := addressbook.SetContact(client, contact, 1);\n\tif !ok {\n\t\tt.Fail()\n\t}\n\tt.Logf(\"Can set contact: %s, ok: %t\", errorCode, ok)\n}", "func (b *Bot) SendContact(request axon.O) (result axon.O, err error) {\n\tvar response interface{}\n\tif response, err = b.doPost(\"sendContact\", request); err == nil {\n\t\tresult = response.(map[string]interface{})\n\t}\n\treturn\n}", "func (op *ContactsCreateOp) Do(ctx context.Context) (*model.ContactUpdateResponse, error) {\n\tvar res *model.ContactUpdateResponse\n\treturn res, ((*esign.Op)(op)).Do(ctx, &res)\n}", "func (impl controllerImpl) CreateCustomer(w http.ResponseWriter, r *http.Request) {\n\n\t// Read the body\n\tvar customerDto CustomerDTO\n\tdecoder := json.NewDecoder(r.Body)\n\terr := decoder.Decode(&customerDto)\n\tif err != nil {\n\t\thttp2.HandleBadRequest(w, r.URL.Path, err)\n\t\treturn\n\t}\n\n\t// Translate\n\tcustomer := ToEntity(customerDto)\n\n\t// Create\n\tnewCustomer, err := impl.service.CreateCustomer(customer)\n\tif err != nil {\n\t\thttp2.HandleBadRequest(w, r.URL.Path, err)\n\t\treturn\n\t}\n\n\t// Translate\n\tcustomerDto = ToContract(*newCustomer)\n\n\t// Good data, return JSON\n\thttp2.HandleSuccess(w, http.StatusCreated, customerDto)\n}", "func (cl ContactList) Create(contacts []Contact) ContactList {\n\tcl.directory = contacts\n\treturn cl\n}", "func Contact(id string, token string) (ContactReturn, error) {\n\n\t// Set config for new request\n\tr := Request{\"/contacts/\" + id, \"GET\", token, nil}\n\n\t// Send new request\n\tresponse, err := r.Send()\n\tif err != nil {\n\t\treturn ContactReturn{}, err\n\t}\n\n\t// Close response body after function ends\n\tdefer response.Body.Close()\n\n\t// Decode data\n\tvar decode ContactReturn\n\n\terr = json.NewDecoder(response.Body).Decode(&decode)\n\tif err != nil {\n\t\treturn ContactReturn{}, err\n\t}\n\n\t// Return data\n\treturn decode, nil\n\n}", "func PutClientContact(c *gin.Context) {\n\tid := c.Param(\"id\")\n\tvar rowCount int\n\n\tdb := dbcontroller.Database(dbName)\n\tdefer db.Close()\n\n\t// Make sure the client exists first\n\tif err := db.Table(\"client_contacts\").Where(\"id = ?\", id).Count(&rowCount).Error; err != nil {\n\t\tc.JSON(http.StatusNotFound, gin.H{\n\t\t\t\"status\": http.StatusNotFound,\n\t\t\t\"message\": err.Error()})\n\t\treturn\n\t}\n\n\tif rowCount == 0 {\n\t\tc.JSON(http.StatusNotFound, gin.H{\n\t\t\t\"status\": http.StatusNotFound,\n\t\t\t\"message\": \"Could not find contact\"})\n\t\treturn\n\t}\n\n\t// Update the client\n\taffectedRows := db.Table(\"client_contacts\").Where(\"id = ?\", id).Update(map[string]interface{}{\n\t\t\"ClientID\": c.PostForm(\"clientId\"),\n\t\t\"FirstName\": c.PostForm(\"firstName\"),\n\t\t\"LastName\": c.PostForm(\"lastName\"),\n\t\t\"Email\": c.PostForm(\"email\"),\n\t\t\"Phone\": 
c.PostForm(\"phone\"),\n\t\t\"Title\": c.PostForm(\"title\"),\n\t}).RowsAffected\n\n\tc.JSON(http.StatusOK, gin.H{\n\t\t\"status\": http.StatusOK,\n\t\t\"message\": \"Contact has been updated\",\n\t\t\"affectedRows\": affectedRows})\n}", "func NewAddContact(ctx *middleware.Context, handler AddContactHandler) *AddContact {\n\treturn &AddContact{Context: ctx, Handler: handler}\n}", "func Add(timeline string, contact string) *methods.Method {\n\tname := \"rtm.contacts.add\"\n\n\tp := url.Values{}\n\tp.Add(\"method\", name)\n\tp.Add(\"timeline\", timeline)\n\tp.Add(\"contact\", contact)\n\t\n\treturn &methods.Method{Name: name, Params: p}\n}", "func (c client) StoreContacts(contacts []*Contact, lists []string) error {\n\tentity := storeContactsRequest{\n\t\tContacts: contacts,\n\t\tLists: lists,\n\t}\n\n\tdata, err := json.Marshal(entity)\n\tif err != nil {\n\t\treturn err\n\t}\n\n\treq, err := http.NewRequest(\"PUT\", urls.Join(c.base, \"/marketing/contacts\"), bytes.NewReader(data))\n\tif err != nil {\n\t\treturn err\n\t}\n\n\t_, data, err = c.Send(req)\n\tif err != nil {\n\t\treturn err\n\t}\n\n\treturn nil\n}", "func (p *Phonebook) AddContact(contact *Contact) {\n\tp.Contacts = append(p.Contacts, contact)\n}", "func (a *ShippingContactsApiService) CreateCustomerShippingContactsExecute(r ApiCreateCustomerShippingContactsRequest) (*CustomerShippingContactsResponse, *http.Response, error) {\n\tvar (\n\t\tlocalVarHTTPMethod = http.MethodPost\n\t\tlocalVarPostBody interface{}\n\t\tformFiles []formFile\n\t\tlocalVarReturnValue *CustomerShippingContactsResponse\n\t)\n\n\tlocalBasePath, err := a.client.cfg.ServerURLWithContext(r.ctx, \"ShippingContactsApiService.CreateCustomerShippingContacts\")\n\tif err != nil {\n\t\treturn localVarReturnValue, nil, &GenericOpenAPIError{error: err.Error()}\n\t}\n\n\tlocalVarPath := localBasePath + \"/customers/{id}/shipping_contacts\"\n\tlocalVarPath = strings.Replace(localVarPath, \"{\"+\"id\"+\"}\", url.PathEscape(parameterValueToString(r.id, \"id\")), -1)\n\n\tlocalVarHeaderParams := make(map[string]string)\n\tlocalVarQueryParams := url.Values{}\n\tlocalVarFormParams := url.Values{}\n\tif r.customerShippingContacts == nil {\n\t\treturn localVarReturnValue, nil, reportError(\"customerShippingContacts is required and must be specified\")\n\t}\n\n\t// to determine the Content-Type header\n\tlocalVarHTTPContentTypes := []string{\"application/json\"}\n\n\t// set Content-Type header\n\tlocalVarHTTPContentType := selectHeaderContentType(localVarHTTPContentTypes)\n\tif localVarHTTPContentType != \"\" {\n\t\tlocalVarHeaderParams[\"Content-Type\"] = localVarHTTPContentType\n\t}\n\n\t// to determine the Accept header\n\tlocalVarHTTPHeaderAccepts := []string{\"application/vnd.conekta-v2.1.0+json\"}\n\n\t// set Accept header\n\tlocalVarHTTPHeaderAccept := selectHeaderAccept(localVarHTTPHeaderAccepts)\n\tif localVarHTTPHeaderAccept != \"\" {\n\t\tlocalVarHeaderParams[\"Accept\"] = localVarHTTPHeaderAccept\n\t}\n\tif r.acceptLanguage != nil {\n\t\tparameterAddToHeaderOrQuery(localVarHeaderParams, \"Accept-Language\", r.acceptLanguage, \"\")\n\t}\n\tif r.xChildCompanyId != nil {\n\t\tparameterAddToHeaderOrQuery(localVarHeaderParams, \"X-Child-Company-Id\", r.xChildCompanyId, \"\")\n\t}\n\t// body params\n\tlocalVarPostBody = r.customerShippingContacts\n\treq, err := a.client.prepareRequest(r.ctx, localVarPath, localVarHTTPMethod, localVarPostBody, localVarHeaderParams, localVarQueryParams, localVarFormParams, formFiles)\n\tif err != nil {\n\t\treturn localVarReturnValue, 
nil, err\n\t}\n\n\tlocalVarHTTPResponse, err := a.client.callAPI(req)\n\tif err != nil || localVarHTTPResponse == nil {\n\t\treturn localVarReturnValue, localVarHTTPResponse, err\n\t}\n\n\tlocalVarBody, err := io.ReadAll(localVarHTTPResponse.Body)\n\tlocalVarHTTPResponse.Body.Close()\n\tlocalVarHTTPResponse.Body = io.NopCloser(bytes.NewBuffer(localVarBody))\n\tif err != nil {\n\t\treturn localVarReturnValue, localVarHTTPResponse, err\n\t}\n\n\tif localVarHTTPResponse.StatusCode >= 300 {\n\t\tnewErr := &GenericOpenAPIError{\n\t\t\tbody: localVarBody,\n\t\t\terror: localVarHTTPResponse.Status,\n\t\t}\n\t\tif localVarHTTPResponse.StatusCode == 401 {\n\t\t\tvar v ModelError\n\t\t\terr = a.client.decode(&v, localVarBody, localVarHTTPResponse.Header.Get(\"Content-Type\"))\n\t\t\tif err != nil {\n\t\t\t\tnewErr.error = err.Error()\n\t\t\t\treturn localVarReturnValue, localVarHTTPResponse, newErr\n\t\t\t}\n\t\t\t\t\tnewErr.error = formatErrorMessage(localVarHTTPResponse.Status, &v)\n\t\t\t\t\tnewErr.model = v\n\t\t\treturn localVarReturnValue, localVarHTTPResponse, newErr\n\t\t}\n\t\tif localVarHTTPResponse.StatusCode == 404 {\n\t\t\tvar v ModelError\n\t\t\terr = a.client.decode(&v, localVarBody, localVarHTTPResponse.Header.Get(\"Content-Type\"))\n\t\t\tif err != nil {\n\t\t\t\tnewErr.error = err.Error()\n\t\t\t\treturn localVarReturnValue, localVarHTTPResponse, newErr\n\t\t\t}\n\t\t\t\t\tnewErr.error = formatErrorMessage(localVarHTTPResponse.Status, &v)\n\t\t\t\t\tnewErr.model = v\n\t\t\treturn localVarReturnValue, localVarHTTPResponse, newErr\n\t\t}\n\t\tif localVarHTTPResponse.StatusCode == 422 {\n\t\t\tvar v ModelError\n\t\t\terr = a.client.decode(&v, localVarBody, localVarHTTPResponse.Header.Get(\"Content-Type\"))\n\t\t\tif err != nil {\n\t\t\t\tnewErr.error = err.Error()\n\t\t\t\treturn localVarReturnValue, localVarHTTPResponse, newErr\n\t\t\t}\n\t\t\t\t\tnewErr.error = formatErrorMessage(localVarHTTPResponse.Status, &v)\n\t\t\t\t\tnewErr.model = v\n\t\t\treturn localVarReturnValue, localVarHTTPResponse, newErr\n\t\t}\n\t\tif localVarHTTPResponse.StatusCode == 500 {\n\t\t\tvar v ModelError\n\t\t\terr = a.client.decode(&v, localVarBody, localVarHTTPResponse.Header.Get(\"Content-Type\"))\n\t\t\tif err != nil {\n\t\t\t\tnewErr.error = err.Error()\n\t\t\t\treturn localVarReturnValue, localVarHTTPResponse, newErr\n\t\t\t}\n\t\t\t\t\tnewErr.error = formatErrorMessage(localVarHTTPResponse.Status, &v)\n\t\t\t\t\tnewErr.model = v\n\t\t}\n\t\treturn localVarReturnValue, localVarHTTPResponse, newErr\n\t}\n\n\terr = a.client.decode(&localVarReturnValue, localVarBody, localVarHTTPResponse.Header.Get(\"Content-Type\"))\n\tif err != nil {\n\t\tnewErr := &GenericOpenAPIError{\n\t\t\tbody: localVarBody,\n\t\t\terror: err.Error(),\n\t\t}\n\t\treturn localVarReturnValue, localVarHTTPResponse, newErr\n\t}\n\n\treturn localVarReturnValue, localVarHTTPResponse, nil\n}", "func (r CreateAddressBookRequest) Send() (*CreateAddressBookOutput, error) {\n\terr := r.Request.Send()\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\n\treturn r.Request.Data.(*CreateAddressBookOutput), nil\n}", "func (c Candidate) Create() error {\n\n\t// required sanity check\n\tif c.FirstName == \"\" || c.LastName == \"\" || c.Email == \"\" {\n\t\treturn fmt.Errorf(\n\t\t\t\"missing one of these required fields firstname: %q, lastname: %q, email: %q\",\n\t\t\tc.FirstName, c.LastName, c.Email,\n\t\t)\n\t}\n\n\t// marshal the candidate to buffered bytes representing JSON\n\tb, err := json.Marshal(c)\n\tif err != nil {\n\t\treturn 
err\n\t}\n\tbody := bytes.NewBuffer(b)\n\n\t// create a new request\n\turl := URL.String() + candidate\n\treq, err := http.NewRequest(http.MethodPost, url, body)\n\tif err != nil {\n\t\treturn err\n\t}\n\n\t// set API key for authentication and authorization\n\treq.SetBasicAuth(clientID, clientSecret)\n\n\t// send the HTTP request with the default Go client\n\tresp, err := http.DefaultClient.Do(req)\n\tif err != nil {\n\t\treturn err\n\t}\n\n\t// check the HTTP response status code is 201\n\tif resp.StatusCode != http.StatusCreated {\n\n\t\t// read the HTTP response body\n\t\tdefer resp.Body.Close()\n\t\tb, err = ioutil.ReadAll(resp.Body)\n\t\tif err != nil {\n\t\t\treturn err\n\t\t}\n\n\t\t// return the HTTP response body as an error\n\t\treturn errors.New(string(b))\n\t}\n\n\treturn nil\n}", "func ContactInsert(contact Contact) (int64, error) {\n\terr := pool.QueryRow(context.Background(), `\n\t\tINSERT INTO contacts\n\t\t(\n\t\t\tname,\n\t\t\tcompany_id,\n\t\t\tdepartment_id,\n\t\t\tpost_id,\n\t\t\tpost_go_id,\n\t\t\trank_id,\n\t\t\tbirthday,\n\t\t\tnote,\n\t\t\tcreated_at,\n\t\t\tupdated_at\n\t\t)\n\t\tVALUES\n\t\t(\n\t\t\t$1,\n\t\t\t$2,\n\t\t\t$3,\n\t\t\t$4,\n\t\t\t$5,\n\t\t\t$6,\n\t\t\t$7,\n\t\t\t$8,\n\t\t\t$9,\n\t\t\t$10\n\t\t)\n\t\tRETURNING\n\t\t\tid\n\t`, contact.Name, contact.CompanyID, contact.DepartmentID, contact.PostID, contact.PostGOID, contact.RankID, contact.Birthday, contact.Note,\n\t\ttime.Now(), time.Now()).Scan(&contact.ID)\n\tif err != nil {\n\t\terrmsg(\"ContactInsert QueryRow\", err)\n\t\treturn 0, err\n\t}\n\t_ = EmailContactUpdate(contact.ID, contact.Emails)\n\t_ = PhoneContactUpdate(contact.ID, contact.Phones, false)\n\t_ = PhoneContactUpdate(contact.ID, contact.Faxes, true)\n\treturn contact.ID, nil\n}", "func (o *SafeContactCreateParams) WithTimeout(timeout time.Duration) *SafeContactCreateParams {\n\to.SetTimeout(timeout)\n\treturn o\n}", "func (h *campaignHandler) CreateCampaign(c *gin.Context) {\n\tvar input campaign.CreateCampaignInput\n\n\terr := c.ShouldBindJSON(&input)\n\tif err != nil {\n\n\t\terrors := helper.FormatValidationError(err)\n\n\t\terrorMessage := gin.H{\"errors\": errors}\n\n\t\tresponse := helper.APIResponse(\"Failed to create campaign\", http.StatusUnprocessableEntity, \"error\", errorMessage)\n\t\tc.JSON(http.StatusUnprocessableEntity, response)\n\t\treturn\n\t}\n\n\tcurrentUser := c.MustGet(\"currentUser\").(user.User)\n\n\tinput.User = currentUser\n\n\tnewCampaign, err := h.service.CreateCampaign(input)\n\tif err != nil {\n\t\tresponse := helper.APIResponse(\"Failed to create campaign\", http.StatusBadRequest, \"error\", nil)\n\t\tc.JSON(http.StatusBadRequest, response)\n\t\treturn\n\t}\n\n\tresponse := helper.APIResponse(\"Success to create campaign\", http.StatusOK, \"success\", campaign.FormatCampaign(newCampaign))\n\tc.JSON(http.StatusOK, response)\n}", "func (c ClientFake) CreateCampaign(name, destination string) (Campaign, error) {\n\treturn Campaign{}, nil\n}", "func (o *SafeContactCreateParams) WriteToRequest(r runtime.ClientRequest, reg strfmt.Registry) error {\n\n\tif err := r.SetTimeout(o.timeout); err != nil {\n\t\treturn err\n\t}\n\tvar res []error\n\n\tif o.SafeContact != nil {\n\t\tif err := r.SetBodyParam(o.SafeContact); err != nil {\n\t\t\treturn err\n\t\t}\n\t}\n\n\tif len(res) > 0 {\n\t\treturn errors.CompositeValidationError(res...)\n\t}\n\treturn nil\n}", "func (o *SafeContactCreateParams) WithHTTPClient(client *http.Client) *SafeContactCreateParams {\n\to.SetHTTPClient(client)\n\treturn o\n}", "func 
ConvertToContact(from contacts1.Contact) Contact {\n\tto := Contact{}\n\tto.ID = from.Id\n\tto.FirstName = from.FirstName\n\tto.MiddleName = from.MiddleName\n\tto.LastName = from.LastName\n\tto.EmailAddress = from.EmailAddress\n\treturn to\n}", "func (a *API) CreateCompetence(ctx *app.Context, w http.ResponseWriter, r *http.Request) error {\n\tvar input model.Competence\n\n\tdefer r.Body.Close()\n\tbody, err := ioutil.ReadAll(r.Body)\n\tif err != nil {\n\t\treturn err\n\t}\n\n\tif err := json.Unmarshal(body, &input); err != nil {\n\t\treturn err\n\t}\n\n\tid, err := ctx.CreateCompetence(&input)\n\tif err != nil {\n\t\treturn err\n\t}\n\n\tctx.Logger.Infof(\"Competence id: %d\\n\", id)\n\tw.Write([]byte(fmt.Sprintf(\"%d\", id)))\n\n\treturn nil\n}", "func (r *CompaniesLeadsService) Create(companyId string, createleadrequest *CreateLeadRequest) *CompaniesLeadsCreateCall {\n\tc := &CompaniesLeadsCreateCall{s: r.s, urlParams_: make(gensupport.URLParams)}\n\tc.companyId = companyId\n\tc.createleadrequest = createleadrequest\n\treturn c\n}", "func (n *PeernotifyNode) registerContact(key []byte, contact *pb.Contact) error {\n\treturn storeContact(n.Pending, key, contact)\n}", "func (c *ClientWithResponses) CreateanewSystemContactWithBodyWithResponse(ctx context.Context, contentType string, body io.Reader) (*CreateanewSystemContactResponse, error) {\n\trsp, err := c.CreateanewSystemContactWithBody(ctx, contentType, body)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\treturn ParseCreateanewSystemContactResponse(rsp)\n}", "func CreatePerson(c *gin.Context) {\n // Validate input\n var input CreatePersonInput\n if err := c.ShouldBindJSON(&input); err != nil {\n c.JSON(http.StatusBadRequest, gin.H{\"error\": err.Error()})\n return\n }\n\n // Create person\n person := models.Person{CreatedBy: input.CreatedBy, FirstName: input.FirstName, LastName: input.LastName, Email: input.Email, Phone: input.Phone, Birthday: input.Birthday, Title: input.Title, Department: input.Department}\n models.DB.Create(&person)\n\n c.JSON(http.StatusOK, gin.H{\"data\": person})\n}", "func NewContactService(sling *sling.Sling) *ContactService {\n\treturn &ContactService{\n\t\tsling: sling.Path(\"contacts/\"),\n\t}\n}", "func (r *CompaniesService) Create(company *Company) *CompaniesCreateCall {\n\tc := &CompaniesCreateCall{s: r.s, urlParams_: make(gensupport.URLParams)}\n\tc.company = company\n\treturn c\n}", "func (cl ContactList) AddContact(newContact Contact) {\n\tdefer func() {\n\t\tif r := recover(); r != nil {\n\n\t\t\tdir := cl.directory\n\t\t\tlength := len(dir)\n\n\t\t\tif length == cap(dir) {\n\t\t\t\t// Directory is full, needs to expand\n\t\t\t\tnewDir := make([]Contact, len(dir), 2*len(dir)+1)\n\t\t\t\tcopy(newDir, dir)\n\t\t\t\tdir = newDir\n\t\t\t}\n\n\t\t\tdir = dir[0 : length+1]\n\t\t\tdir[length] = newContact\n\n\t\t\tcl.directory = dir\n\n\t\t\tfmt.Println(\"Contact Added\")\n\t\t} else {\n\t\t\tfmt.Println(\"Contact Already Exists\")\n\t\t}\n\t}()\n\n\tcl.FindContact(newContact.Name)\n}", "func (c *Client) SearchContact(contactName string) (string, error) {\n\tif !c.authenticated {\n\t\treturn \"\", errors.New(\"Not authenticated. 
Call Authenticate first\")\n\t}\n\n\tpayload := make(map[string]interface{})\n\tpayload[\"contact_name\"] = contactName\n\tmsg := common.NewMessage(c.userId, \"server\",\n\t\t\"control\", \"seach_contact\", time.Time{},\n\t\tcommon.TEXT, payload)\n\n\tencoded, err := msg.Json()\n\tif err != nil {\n\t\tlog.Println(\"Failed to encode search contact message\", err)\n\t\treturn \"\", err\n\t}\n\n\terr = c.transport.SendText(string(encoded))\n\tif err != nil {\n\t\tlog.Println(\"Failed to send search contact message\", err)\n\t\treturn \"\", err\n\t}\n\n\tresp := <-c.Out\n\tif resp.Status() == common.STATUS_ERROR {\n\t\terrMsg := resp.Error()\n\t\tlog.Println(\"Search contact response error\", errMsg)\n\t\treturn \"\", errors.New(errMsg)\n\t}\n\n\tcontactId := resp.GetJsonData(\"contact_id\").(string)\n\treturn contactId, nil\n}", "func (d *DynamoConn) AddContact(userID string, contact *models.Contact) error {\n\tvar ContactsUpdate struct {\n\t\tContacts []*models.Contact `json:\":c\"`\n\t}\n\n\t// Retrieve user from database\n\tuser, err := d.GetUser(userID)\n\tif err != nil {\n\t\treturn err\n\t}\n\t// Add the new contact to the slice and marshal the update\n\tContactsUpdate.Contacts = append(user.Contacts, contact)\n\n\texpr, err := dynamodbattribute.MarshalMap(ContactsUpdate)\n\tif err != nil {\n\t\treturn err\n\n\t}\n\n\t// Define table schema's key\n\tkey := map[string]*dynamodb.AttributeValue{\n\t\t\"user_id\": {\n\t\t\tS: aws.String(user.UserID),\n\t\t},\n\t}\n\n\t// Use marshalled map for UpdateItemInput\n\titem := &dynamodb.UpdateItemInput{\n\t\tExpressionAttributeValues: expr,\n\t\tTableName: aws.String(common.UsersTableName),\n\t\tKey: key,\n\t\tReturnValues: aws.String(\"UPDATED_NEW\"),\n\t\tUpdateExpression: aws.String(\"set contacts = :c\"),\n\t}\n\n\t// Invoke the update\n\t_, err = d.Client.UpdateItem(item)\n\tif err != nil {\n\t\treturn err\n\t}\n\treturn nil\n}", "func (s *service) CreateConsignment(ctx context.Context, req *pb.Consignment, res *pb.Response) error {\n\t// Save our consignment\n\tconsignment, err := s.repo.Create(req)\n\tif err != nil {\n\t\treturn err\n\t}\n\n\t// Return matching the `Response message we created in our protobuf def.\n\tres.Created = true\n\tres.Consignment = consignment\n\n\treturn nil\n}", "func CreateAddress(address models.RequestAddress) (bool, models.ResponseAddress, error) {\n\n\t//Create request\n\trequest := models.Request{}\n\trequest.AddBody(address)\n\trequest.SetUri(\"https://api.easypost.com/v2/addresses\")\n\trequest.SetMethod(\"POST\")\n\n\t//Send request\n\tresponseBody, err := SendRequest(request)\n\n\t//Initialize response address\n\tresponseAddress := models.ResponseAddress{}\n\n\terr = json.Unmarshal(responseBody, &responseAddress)\n\tif err != nil {\n\t\tfmt.Println(err)\n\t\treturn false, responseAddress, fmt.Errorf(\"Unrecognized response from easypost %s\", err.Error())\n\t}\n\n\treturn responseAddress.Verifications.Delivery.Success, responseAddress, err\n}", "func (s *service) CreateConsignment(ctx context.Context, req *pb.Consignment, res *pb.Response) error {\n\n\t// Save our consignment\n\tconsignment, err := s.repo.Create(req)\n\tif err != nil {\n\t\treturn err\n\t}\n\n\t// Return matching the `Response` message we created in our\n\t// protobuf definition.\n\tres.Created = true\n\tres.Consignment = consignment\n\treturn nil\n}", "func (m *CreateCustomerContactPayload) Validate(formats strfmt.Registry) error {\n\treturn nil\n}", "func (a *ApiDB) CreateContract(w http.ResponseWriter, r *http.Request) 
{\n\tw.Header().Add(\"Content-Type\", \"application/json\")\n\tp := MODELS.CREATE_UPDATE_CONTRACT_REQUEST{}\n\terr := json.NewDecoder(r.Body).Decode(&p)\n\tif err != nil {\n\t\tio.WriteString(w, `{\"message\": \"wrong format!\"}`)\n\t\treturn\n\t}\n\n\tresult := BUSINESS.CreateContract(a.Db, p)\n\tif result {\n\t\tio.WriteString(w, ` { \"status\": 200,\n \"message\": \"Create contract success\",\n \"data\": {\n \"status\": 1\n }\n}\n`)\n\t} else {\n\t\tio.WriteString(w, `{ \"message\": \"Can’t create contract\"}`)\n\t}\n}", "func (c *Culqi) CreateCustomer(params *CustomerParams) (*Customer, error) {\n\n\tif params == nil {\n\t\treturn nil, fmt.Errorf(\"params are empty\")\n\t}\n\n\treqJSON, err := json.Marshal(params)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\n\treq, err := http.NewRequest(\"POST\", defaultBaseURL+\"v2/\"+customerBase, bytes.NewBuffer(reqJSON))\n\treq.Header.Set(\"Content-Type\", \"application/json\")\n\treq.Header.Set(\"Authorization\", \"Bearer \"+c.Conf.APIKey)\n\treq.Header.Set(\"User-Agent\", userAgent)\n\n\tresp, err := c.HTTP.Do(req)\n\n\tif resp.StatusCode >= 400 {\n\t\treturn nil, extractError(resp)\n\t}\n\n\tbody, err := ioutil.ReadAll(resp.Body)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\tdefer resp.Body.Close()\n\n\tt := Customer{}\n\n\tif err := json.Unmarshal(body, &t); err != nil {\n\t\treturn nil, err\n\t}\n\n\treturn &t, nil\n}", "func (a *Client) PostCustomersCustomerIDContacts(params *PostCustomersCustomerIDContactsParams, authInfo runtime.ClientAuthInfoWriter) (*PostCustomersCustomerIDContactsOK, error) {\n\t// TODO: Validate the params before sending\n\tif params == nil {\n\t\tparams = NewPostCustomersCustomerIDContactsParams()\n\t}\n\n\tresult, err := a.transport.Submit(&runtime.ClientOperation{\n\t\tID: \"PostCustomersCustomerIDContacts\",\n\t\tMethod: \"POST\",\n\t\tPathPattern: \"/customers/{customerId}/contacts\",\n\t\tProducesMediaTypes: []string{\"application/json\"},\n\t\tConsumesMediaTypes: []string{\"application/json\"},\n\t\tSchemes: []string{\"https\"},\n\t\tParams: params,\n\t\tReader: &PostCustomersCustomerIDContactsReader{formats: a.formats},\n\t\tAuthInfo: authInfo,\n\t\tContext: params.Context,\n\t\tClient: params.HTTPClient,\n\t})\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\tsuccess, ok := result.(*PostCustomersCustomerIDContactsOK)\n\tif ok {\n\t\treturn success, nil\n\t}\n\t// unexpected success response\n\t// safeguard: normally, absent a default response, unknown success responses return an error above: so this is a codegen issue\n\tmsg := fmt.Sprintf(\"unexpected success response for PostCustomersCustomerIDContacts: API contract not enforced by server. Client expected to get an error, but got: %T\", result)\n\tpanic(msg)\n}", "func (s *SmartContract) createCustomer(APIstub shim.ChaincodeStubInterface, args []string) sc.Response {\n\n\tif len(args) != 6 {\n\t\treturn shim.Error(\"Incorrect number of arguments for creating a customer. 
Expecting 6\")\n\t}\n\tbalance, _ := strconv.ParseFloat(args[4], 64)\n\tcustomers := []Customer{{Name: args[0], CustID: args[1], Country: args[2], Currency: args[3], Balance: balance, CustomerBankID: args[5]}}\n\n\twriteCustomerToLedger(APIstub, customers)\n\n\treturn shim.Success(nil)\n}", "func (m *MailgunImpl) CreateCampaign(name, id string) error {\n\tr := simplehttp.NewHTTPRequest(generateApiUrl(m, campaignsEndpoint))\n\tr.SetClient(m.Client())\n\tr.SetBasicAuth(basicAuthUser, m.ApiKey())\n\n\tpayload := simplehttp.NewUrlEncodedPayload()\n\tpayload.AddValue(\"name\", name)\n\tif id != \"\" {\n\t\tpayload.AddValue(\"id\", id)\n\t}\n\t_, err := makePostRequest(r, payload)\n\treturn err\n}", "func (u *User) SetContact(value bool) {\n\tif value {\n\t\tu.Flags.Set(11)\n\t\tu.Contact = true\n\t} else {\n\t\tu.Flags.Unset(11)\n\t\tu.Contact = false\n\t}\n}", "func (sf speedFixationUsecase) CreateRecord(fixation repo.SpeedFixation) error {\n\treturn sf.contactRepo.CreateRecord(fixation)\n}", "func (s *ContactsServiceImpl) ContactsDeleteContact(ctx context.Context, request *mtproto.TLContactsDeleteContact) (*mtproto.Contacts_Link, error) {\n\tmd := grpc_util.RpcMetadataFromIncoming(ctx)\n\tglog.Infof(\"contacts.deleteContact#8e953744 - metadata: %s, request: %s\", logger.JsonDebugData(md), logger.JsonDebugData(request))\n\n\tvar (\n\t\tdeleteId int32\n\t\tid = request.Id\n\t)\n\n\tswitch id.GetConstructor() {\n\tcase mtproto.TLConstructor_CRC32_inputUserSelf:\n\t\tdeleteId = md.UserId\n\tcase mtproto.TLConstructor_CRC32_inputUser:\n\t\t// Check access hash\n\t\tif ok := s.UserModel.CheckAccessHashByUserId(id.GetData2().GetUserId(), id.GetData2().GetAccessHash()); !ok {\n\t\t\t// TODO(@benqi): Add ACCESS_HASH_INVALID codes\n\t\t\terr := mtproto.NewRpcError2(mtproto.TLRpcErrorCodes_BAD_REQUEST)\n\t\t\tglog.Error(err, \": is access_hash error\")\n\t\t\treturn nil, err\n\t\t}\n\n\t\tdeleteId = id.GetData2().GetUserId()\n\t\t// TODO(@benqi): contact exist\n\tdefault:\n\t\t// mtproto.TLConstructor_CRC32_inputUserEmpty:\n\t\terr := mtproto.NewRpcError2(mtproto.TLRpcErrorCodes_BAD_REQUEST)\n\t\tglog.Error(err, \": is inputUserEmpty\")\n\t\treturn nil, err\n\t}\n\n\t// selfUser := user2.GetUserById(md.UserId, md.UserId)\n\tdeleteUser := s.UserModel.GetUserById(md.UserId, deleteId)\n\n\tcontactLogic := s.ContactModel.MakeContactLogic(md.UserId)\n\tneedUpdate := contactLogic.DeleteContact(deleteId, deleteUser.Data2.MutualContact)\n\n\tselfUpdates := updates2.NewUpdatesLogic(md.UserId)\n\tmyLink, foreignLink := s.UserModel.GetContactLink(md.UserId, deleteId)\n\tcontactLink := &mtproto.TLUpdateContactLink{Data2: &mtproto.Update_Data{\n\t\tUserId: deleteId,\n\t\tMyLink: myLink,\n\t\tForeignLink: foreignLink,\n\t}}\n\tselfUpdates.AddUpdate(contactLink.To_Update())\n\tselfUpdates.AddUser(deleteUser)\n\n\t// TODO(@benqi): handle seq\n\tsync_client.GetSyncClient().SyncUpdatesNotMe(md.UserId, md.AuthId, selfUpdates.ToUpdates())\n\n\t// TODO(@benqi): 推给联系人逻辑需要再考虑考虑\n\tif needUpdate {\n\t\t// TODO(@benqi): push to contact user update contact link\n\t\tcontactUpdates := updates2.NewUpdatesLogic(deleteUser.Data2.Id)\n\t\tmyLink, foreignLink := s.UserModel.GetContactLink(deleteId, md.UserId)\n\t\tcontactLink2 := &mtproto.TLUpdateContactLink{Data2: &mtproto.Update_Data{\n\t\t\tUserId: md.UserId,\n\t\t\tMyLink: myLink,\n\t\t\tForeignLink: foreignLink,\n\t\t}}\n\t\tcontactUpdates.AddUpdate(contactLink2.To_Update())\n\n\t\tselfUser := s.UserModel.GetUserById(deleteId, 
md.UserId)\n\t\tcontactUpdates.AddUser(selfUser)\n\t\tsync_client.GetSyncClient().PushUpdates(deleteId, contactUpdates.ToUpdates())\n\t}\n\n\t////////////////////////////////////////////////////////////////////////////////////////\n\tcontactsLink := &mtproto.TLContactsLink{Data2: &mtproto.Contacts_Link_Data{\n\t\tMyLink: contactLink.Data2.MyLink,\n\t\tForeignLink: contactLink.Data2.ForeignLink,\n\t\tUser: deleteUser,\n\t}}\n\n\tglog.Infof(\"contacts.deleteContact#8e953744 - reply: %s\", logger.JsonDebugData(contactsLink))\n\treturn contactsLink.To_Contacts_Link(), nil\n}", "func CreatePerson(w http.ResponseWriter, r *http.Request) {\n\tvar person model.Person\n\t/*\n\t\tTo print the response to string\n\t*/\n\tbodyBytes, _ := ioutil.ReadAll(r.Body)\n\tbodyString := string(bodyBytes)\n\tfmt.Println(bodyString)\n\n\t/*\n\t\tParse JSON object without struct\n\t*/\n\tm := map[string]interface{}{}\n\terr := json.Unmarshal(bodyBytes, &m)\n\tif err != nil {\n\t\tpanic(err)\n\t}\n\tfmt.Println(m)\n\tfmt.Println(m[\"firstname\"])\n\n\tjson.Unmarshal(bodyBytes, &person) // parse JSON to person struct object\n\tfmt.Println(person.Firstname)\n\tpeople = append(people, person)\n\n\tw.Header().Set(\"Content-Type\", \"application/json; charset=UTF-8\")\n\tjson.NewEncoder(w).Encode(people)\n}", "func (r *RoomFactory) Create(contactList []_interface.IContact, topic string) (_interface.IRoom, error) {\n\tif len(contactList) < 2 {\n\t\treturn nil, errors.New(\"contactList need at least 2 contact to create a new room\")\n\t}\n\tcontactIDList := make([]string, len(contactList))\n\tfor index, contact := range contactList {\n\t\tcontactIDList[index] = contact.ID()\n\t}\n\troomID, err := r.GetPuppet().RoomCreate(contactIDList, topic)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\treturn r.Load(roomID), nil\n}" ]
[ "0.8154985", "0.8095125", "0.8062395", "0.7933438", "0.75778615", "0.7570038", "0.75698894", "0.74707085", "0.73633796", "0.7331577", "0.732853", "0.7290394", "0.7207978", "0.7154919", "0.7104803", "0.7100227", "0.7064629", "0.6741858", "0.6672317", "0.6627906", "0.65964246", "0.65132296", "0.6371403", "0.6301066", "0.6284508", "0.6237714", "0.6225515", "0.614549", "0.60785735", "0.60756576", "0.6060467", "0.6048975", "0.60453564", "0.60383344", "0.59996635", "0.5983509", "0.5979609", "0.5896084", "0.58852303", "0.5848871", "0.5815556", "0.58044666", "0.579231", "0.5791075", "0.57773846", "0.57156277", "0.57133913", "0.57043767", "0.5652256", "0.5648574", "0.56373847", "0.5629311", "0.5618597", "0.56132066", "0.5609001", "0.56062627", "0.5605288", "0.5602408", "0.5545539", "0.5521636", "0.5508369", "0.5505111", "0.54938453", "0.54785454", "0.5474741", "0.54442686", "0.5442988", "0.5435642", "0.54316086", "0.5429603", "0.5425696", "0.54215986", "0.54165715", "0.54161847", "0.5386213", "0.53850764", "0.53752184", "0.5362338", "0.5342787", "0.53403366", "0.534", "0.5326719", "0.53238416", "0.5308559", "0.53005636", "0.52956575", "0.52941316", "0.5292001", "0.52901804", "0.52860296", "0.52794236", "0.52753866", "0.52748644", "0.5264333", "0.5263562", "0.52545094", "0.52474654", "0.52438956", "0.5239495", "0.5233293" ]
0.8757528
0
UpdateContact updates a single contact via the Hatchbuck API
func (api HatchbuckClient) UpdateContact(contact Contact) (Contact, error) {\n\tvar c Contact\n\tclient := &http.Client{}\n\tendpoint := fmt.Sprintf(\"%v/contact?api_key=%v\", api.baseURL, api.key)\n\tpayload, _ := json.Marshal(contact)\n\treq, err := http.NewRequest(\"PUT\", endpoint, bytes.NewBuffer(payload))\n\tif err != nil {\n\t\treturn c, err\n\t}\n\treq.Header.Add(\"Content-Type\", \"application/json\")\n\tres, err := client.Do(req)\n\tif err != nil {\n\t\treturn c, err\n\t}\n\tlog.Println(res.StatusCode)\n\tdecoder := json.NewDecoder(res.Body)\n\terr = decoder.Decode(&c)\n\tif err != nil {\n\t\treturn c, err\n\t}\n\treturn c, nil\n}
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "func (a *Client) UpdateContact(params *UpdateContactParams, authInfo runtime.ClientAuthInfoWriter) error {\n\t// TODO: Validate the params before sending\n\tif params == nil {\n\t\tparams = NewUpdateContactParams()\n\t}\n\n\t_, err := a.transport.Submit(&runtime.ClientOperation{\n\t\tID: \"updateContact\",\n\t\tMethod: \"PUT\",\n\t\tPathPattern: \"/contacts/{contactId}\",\n\t\tProducesMediaTypes: []string{\"application/json\"},\n\t\tConsumesMediaTypes: []string{\"application/json\"},\n\t\tSchemes: []string{\"https\"},\n\t\tParams: params,\n\t\tReader: &UpdateContactReader{formats: a.formats},\n\t\tAuthInfo: authInfo,\n\t\tContext: params.Context,\n\t\tClient: params.HTTPClient,\n\t})\n\tif err != nil {\n\t\treturn err\n\t}\n\treturn nil\n}", "func (s *API) UpdateContact(req *UpdateContactRequest, opts ...scw.RequestOption) (*Contact, error) {\n\tvar err error\n\n\tif fmt.Sprint(req.ContactID) == \"\" {\n\t\treturn nil, errors.New(\"field ContactID cannot be empty in request\")\n\t}\n\n\tscwReq := &scw.ScalewayRequest{\n\t\tMethod: \"PATCH\",\n\t\tPath: \"/domain/v2alpha2/contacts/\" + fmt.Sprint(req.ContactID) + \"\",\n\t\tHeaders: http.Header{},\n\t}\n\n\terr = scwReq.SetBody(req)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\n\tvar resp Contact\n\n\terr = s.client.Do(scwReq, &resp, opts...)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\treturn &resp, nil\n}", "func UpdateContact(body ContactBody, token string) (ContactReturn, error) {\n\n\t// Convert data\n\tconvert, err := json.Marshal(body)\n\tif err != nil {\n\t\treturn ContactReturn{}, err\n\t}\n\n\t// Set config for new request\n\tr := Request{fmt.Sprintf(\"/contacts/%d\", body.Id), \"PUT\", token, convert}\n\n\t// Send new request\n\tresponse, err := r.Send()\n\tif err != nil {\n\t\treturn ContactReturn{}, err\n\t}\n\n\t// Close response body after function ends\n\tdefer response.Body.Close()\n\n\t// Decode data\n\tvar decode ContactReturn\n\n\terr = json.NewDecoder(response.Body).Decode(&decode)\n\tif err != nil {\n\t\treturn ContactReturn{}, err\n\t}\n\n\t// Return data\n\treturn decode, nil\n\n}", "func (s *RegistrarAPI) UpdateContact(req *RegistrarAPIUpdateContactRequest, opts ...scw.RequestOption) (*Contact, error) {\n\tvar err error\n\n\tif fmt.Sprint(req.ContactID) == \"\" {\n\t\treturn nil, errors.New(\"field ContactID cannot be empty in request\")\n\t}\n\n\tscwReq := &scw.ScalewayRequest{\n\t\tMethod: \"PATCH\",\n\t\tPath: \"/domain/v2beta1/contacts/\" + fmt.Sprint(req.ContactID) + \"\",\n\t\tHeaders: http.Header{},\n\t}\n\n\terr = scwReq.SetBody(req)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\n\tvar resp Contact\n\n\terr = s.client.Do(scwReq, &resp, opts...)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\treturn &resp, nil\n}", "func (c *Client) UpdateContact(ctx context.Context, params *UpdateContactInput, optFns ...func(*Options)) (*UpdateContactOutput, error) {\n\tif params == nil {\n\t\tparams = &UpdateContactInput{}\n\t}\n\n\tresult, metadata, err := c.invokeOperation(ctx, \"UpdateContact\", params, optFns, c.addOperationUpdateContactMiddlewares)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\n\tout := result.(*UpdateContactOutput)\n\tout.ResultMetadata = metadata\n\treturn out, nil\n}", "func (cs *UserService) UpdateContact(userID int, contactID int, contact Contact) (*PingdomResponse, error) {\n\tif err := contact.ValidContact(); err != nil {\n\t\treturn nil, err\n\t}\n\n\treq, err := cs.client.NewRequest(\"PUT\", \"/users/\"+strconv.Itoa(userID)+\"/\"+strconv.Itoa(contactID), 
contact.PutContactParams())\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\n\tm := &PingdomResponse{}\n\t_, err = cs.client.Do(req, m)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\treturn m, err\n}", "func (contact *Contact) UpdateContact(contactId uint) map[string]interface{} {\n\t// validate email\n\tif contact.Email != \"\" {\n\t\tif err := checkmail.ValidateFormat(contact.Email); err != nil {\n\t\t\treturn utl.Message(102, \"email address is not valid\")\n\t\t}\n\t}\n\n\t// validate phone number\n\t// should not be less than 9 digits and more than 12 chars\n\t// accepted: 0712345678, 254712345678, 712345678\n\t// store: 712345678\n\tif contact.PhoneNumber != \"\" {\n\t\tif len(contact.PhoneNumber) > 12 || len(contact.PhoneNumber) < 9 {\n\t\t\treturn utl.Message(102, \"enter a valid phone number, between 9 to 12 digits.\")\n\t\t}\n\n\t\tif strings.HasPrefix(contact.PhoneNumber, \"254\") {\n\t\t\tphoneNumber_ := contact.PhoneNumber[3:len(contact.PhoneNumber)]\n\t\t\tcontact.PhoneNumber = phoneNumber_\n\t\t}\n\n\t\tif strings.HasPrefix(contact.PhoneNumber, \"0\") {\n\t\t\tphoneNumber_ := contact.PhoneNumber[1:len(contact.PhoneNumber)]\n\t\t\tcontact.PhoneNumber = phoneNumber_\n\t\t}\n\t}\n\n\t// update contact record\n\terr := DBConnection.Table(\"contact\").Model(contact).Where(\"id=?\", contactId).Updates(contact).Error\n\tif err != nil {\n\t\tlog.Printf(\"WARNING | An error occurred while updating contact: %v\\n\", err.Error())\n\t\treturn utl.Message(105, \"failed to update contact, try again later\")\n\t}\n\n\t// fetch and return updated contact\n\tDBConnection.Table(\"contact\").First(contact, contactId)\n\tresponse := utl.Message(0, \"contact updated successfully\")\n\tresponse[\"data\"] = contact\n\treturn response\n}", "func (s *ContactsService) Update(contactID int, contactAttributes Contact) (Contact, *Response, error) {\n\tpath := contactPath(contactID)\n\twrappedContact := contactWrapper{Contact: contactAttributes}\n\treturnedContact := contactWrapper{}\n\n\tres, err := s.client.put(path, wrappedContact, &returnedContact)\n\tif err != nil {\n\t\treturn Contact{}, res, err\n\t}\n\n\treturn returnedContact.Contact, res, nil\n}", "func ContactUpdate(contact Contact) error {\n\t_, err := pool.Exec(context.Background(), `\n\t\tUPDATE contacts SET\n\t\t\tname = $2,\n\t\t\tcompany_id = $3,\n\t\t\tdepartment_id = $4,\n\t\t\tpost_id = $5,\n\t\t\tpost_go_id = $6,\n\t\t\trank_id = $7,\n\t\t\tbirthday = $8,\n\t\t\tnote = $9,\n\t\t\tupdated_at = $10\n\t\tWHERE\n\t\t\tid = $1\n\t`, contact.ID, contact.Name, contact.CompanyID, contact.DepartmentID, contact.PostID, contact.PostGOID, contact.RankID, contact.Birthday,\n\t\tcontact.Note, time.Now())\n\tif err != nil {\n\t\terrmsg(\"ContactUpdate Exec\", err)\n\t\treturn err\n\t}\n\t_ = EmailContactUpdate(contact.ID, contact.Emails)\n\t_ = PhoneContactUpdate(contact.ID, contact.Phones, false)\n\t_ = PhoneContactUpdate(contact.ID, contact.Faxes, true)\n\treturn nil\n}", "func (r *TCPMonitorRepository) UpdateContact(contact MonitoringContact) error {\n\trequestBody := contactWrapper{Contact: contact}\n\trestRequest := rest.Request{Endpoint: fmt.Sprintf(\"/monitoring-contacts/%d\", contact.ID), Body: &requestBody}\n\n\treturn r.Client.Put(restRequest)\n}", "func (s *Service) ContactsUpdate(contactModRequest *model.ContactModRequest) *ContactsUpdateOp {\n\treturn &ContactsUpdateOp{\n\t\tCredential: s.credential,\n\t\tMethod: \"PUT\",\n\t\tPath: \"contacts\",\n\t\tPayload: contactModRequest,\n\t\tAccept: \"application/json\",\n\t\tQueryOpts: 
make(url.Values),\n\t\tVersion: esign.APIv21,\n\t}\n}", "func (cs *ContactService) Update(id int, contact *Contact) (*PingdomResponse, error) {\n\tif err := contact.Valid(); err != nil {\n\t\treturn nil, err\n\t}\n\n\treq, err := cs.client.NewRequest(\"PUT\", \"/api/2.0/notification_contacts/\"+strconv.Itoa(id), contact.PutParams())\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\n\tm := &PingdomResponse{}\n\t_, err = cs.client.Do(req, m)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\treturn m, err\n}", "func (h *Handler) UpdateContactByID(c echo.Context) (err error) {\n\t// Check if provided id is a valid object id hex\n\tif ok := bson.IsObjectIdHex(c.Param(\"id\")); !ok {\n\t\treturn c.JSON(http.StatusBadRequest, map[string]string{\n\t\t\t\"message\": \"Invalid id\",\n\t\t})\n\t}\n\tid := bson.ObjectIdHex(c.Param(\"id\"))\n\n\tpayload := make(map[string]string)\n\terr = json.NewDecoder(c.Request().Body).Decode(&payload)\n\tif err != nil {\n\t\treturn &echo.HTTPError{Code: http.StatusBadRequest, Message: \"Invalid / missing fields\"}\n\t}\n\n\t// New name\n\tname, ok := payload[\"name\"]\n\tif !ok {\n\t\treturn &echo.HTTPError{Code: http.StatusBadRequest, Message: \"Invalid / missing fields\"}\n\t}\n\n\tquery := bson.M{\"_id\": id}\n\tupdate := bson.M{\"$set\": bson.M{\"name\": name}}\n\n\t// Update contact in database\n\tdb := h.DB.Clone()\n\tdefer db.Close()\n\tif err = db.DB(\"sampark\").C(\"contacts\").Update(query, update); err != nil {\n\t\treturn\n\t}\n\n\treturn c.JSON(http.StatusOK, map[string]string{\n\t\t\"message\": \"Contact updated\",\n\t})\n}", "func (cl ContactList) UpdateContact(nC *Contact) {\n\tfor indx, contact := range cl.directory {\n\t\tif contact.Name == nC.Name {\n\t\t\tcl.directory[indx].Address = nC.Address\n\t\t\tcl.directory[indx].ExPhrase = nC.ExPhrase\n\t\t\tcl.directory[indx].Name = nC.Name\n\t\t\tcl.directory[indx].Port = nC.Port\n\t\t}\n\t}\n}", "func (cpr *ContactPostgresRepository) EditContact(ctx context.Context, contact Contact) (Contact, error) {\n\tresult, err := cpr.db.ModelContext(ctx, &contact).WherePK().Update()\n\tif err != nil {\n\t\treturn Contact{}, fmt.Errorf(\"postgres repository: edit contact: %w\", err)\n\t}\n\n\tif result.RowsAffected() == 0 {\n\t\treturn Contact{}, nil\n\t}\n\n\treturn contact, nil\n}", "func (a *Client) SafeContactUpdate(params *SafeContactUpdateParams) (*SafeContactUpdateOK, error) {\n\t// TODO: Validate the params before sending\n\tif params == nil {\n\t\tparams = NewSafeContactUpdateParams()\n\t}\n\n\tresult, err := a.transport.Submit(&runtime.ClientOperation{\n\t\tID: \"safeContactUpdate\",\n\t\tMethod: \"PUT\",\n\t\tPathPattern: \"/domainSafeContact/user\",\n\t\tProducesMediaTypes: []string{\"application/json\"},\n\t\tConsumesMediaTypes: []string{\"application/json\"},\n\t\tSchemes: []string{\"https\"},\n\t\tParams: params,\n\t\tReader: &SafeContactUpdateReader{formats: a.formats},\n\t\tContext: params.Context,\n\t\tClient: params.HTTPClient,\n\t})\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\tsuccess, ok := result.(*SafeContactUpdateOK)\n\tif ok {\n\t\treturn success, nil\n\t}\n\t// unexpected success response\n\t// safeguard: normally, absent a default response, unknown success responses return an error above: so this is a codegen issue\n\tmsg := fmt.Sprintf(\"unexpected success response for safeContactUpdate: API contract not enforced by server. 
Client expected to get an error, but got: %T\", result)\n\tpanic(msg)\n}", "func (cl ContactList) ModifyContact(targetName string) {\n\tdefer func() {\n\t\tif r := recover(); r != nil {\n\t\t\tfmt.Println(\"Cannot Find Contact!\")\n\t\t}\n\t}()\n\n\ttarget := cl.FindContact(targetName)\n\n\tnewName, err := modQuestion(\"Name\")\n\tif err == nil {\n\t\ttarget.Name = newName\n\t}\n\n\tnewIP, err := modQuestion(\"IP\")\n\tif err == nil {\n\t\ttarget.Address = newIP\n\t}\n\n\tnewPort, err := modQuestion(\"Port\")\n\tif err == nil {\n\t\ttarget.Port = newPort\n\t}\n\n\tcl.DisplayContacts()\n\tcl.SaveContacts()\n}", "func PutClientContact(c *gin.Context) {\n\tid := c.Param(\"id\")\n\tvar rowCount int\n\n\tdb := dbcontroller.Database(dbName)\n\tdefer db.Close()\n\n\t// Make sure the client exists first\n\tif err := db.Table(\"client_contacts\").Where(\"id = ?\", id).Count(&rowCount).Error; err != nil {\n\t\tc.JSON(http.StatusNotFound, gin.H{\n\t\t\t\"status\": http.StatusNotFound,\n\t\t\t\"message\": err.Error()})\n\t\treturn\n\t}\n\n\tif rowCount == 0 {\n\t\tc.JSON(http.StatusNotFound, gin.H{\n\t\t\t\"status\": http.StatusNotFound,\n\t\t\t\"message\": \"Could not find contact\"})\n\t\treturn\n\t}\n\n\t// Update the client\n\taffectedRows := db.Table(\"client_contacts\").Where(\"id = ?\", id).Update(map[string]interface{}{\n\t\t\"ClientID\": c.PostForm(\"clientId\"),\n\t\t\"FirstName\": c.PostForm(\"firstName\"),\n\t\t\"LastName\": c.PostForm(\"lastName\"),\n\t\t\"Email\": c.PostForm(\"email\"),\n\t\t\"Phone\": c.PostForm(\"phone\"),\n\t\t\"Title\": c.PostForm(\"title\"),\n\t}).RowsAffected\n\n\tc.JSON(http.StatusOK, gin.H{\n\t\t\"status\": http.StatusOK,\n\t\t\"message\": \"Contact has been updated\",\n\t\t\"affectedRows\": affectedRows})\n}", "func (sdk *Sdk) UpdateContactAttr(contactID string, body *Contact) (string, error) {\n\tsdkC := sdk.connect\n\tupdateContact := fmt.Sprintf(\"/api/contacts/%s\", contactID)\n\n\treturn sdkC.rq.PutJSON(updateContact, body)\n}", "func BulkUpdateContact(r *http.Request, ctx types.Context) contract.Response {\n\trequest := &contract.BulkUpdateContactRequest{}\n\tresponse := &contract.BulkUpdateContactResponse{}\n\n\tif err := json.NewDecoder(r.Body).Decode(request); err != nil {\n\t\tlogger.Get().Errorf(\"Unable to parse request body. Err : %v\", err)\n\t\ter := error.ErrBadRequestInvalidBody(err)\n\t\treturn util.FailureResponse(ctx, response, er.HTTPCode, er)\n\t}\n\n\tfor _, c := range request.Contacts {\n\t\tcontact := &c\n\t\tif err := validater.Of(contact).Validate(); err != nil {\n\t\t\tlogger.Get().Errorf(\"Validation error. 
Err : %v\", err)\n\t\t\treturn util.FailureResponse(ctx, response, err.HTTPCode, err)\n\t\t}\n\t}\n\n\tif err := core.BulkUpdateContact(ctx, request, response); err != nil {\n\t\treturn util.FailureResponse(ctx, response, err.HTTPCode, err)\n\t}\n\n\treturn util.SuccessResponse(ctx, response, http.StatusCreated)\n}", "func (p *BoteaterServiceClient) UpdateContactSetting(ctx context.Context, reqSeq int32, mid string, flag ContactSettingEnum, value string) (err error) {\r\n var _args103 BoteaterServiceUpdateContactSettingArgs\r\n _args103.ReqSeq = reqSeq\r\n _args103.Mid = mid\r\n _args103.Flag = flag\r\n _args103.Value = value\r\n var _result104 BoteaterServiceUpdateContactSettingResult\r\n if err = p.Client_().Call(ctx, \"updateContactSetting\", &_args103, &_result104); err != nil {\r\n return\r\n }\r\n switch {\r\n case _result104.E!= nil:\r\n return _result104.E\r\n }\r\n\r\n return nil\r\n}", "func UpdateCustomerContactInfo(w http.ResponseWriter, req *http.Request) {\n\tfmt.Fprintf(w, \"UpdateCustomerContactInfo\")\n}", "func (contact *Contact) Update() error {\n\treturn DB().Save(contact).Error\n}", "func (c *Contact) UpdateContactRequest(status string) bool {\n\tc.mutex.Lock()\n\tdefer c.mutex.Unlock()\n\n\tif c.data.Request == nil {\n\t\treturn false\n\t}\n\n\tre := c.updateContactRequest(status)\n\n\tevent := ricochet.ContactEvent{\n\t\tType: ricochet.ContactEvent_UPDATE,\n\t\tSubject: &ricochet.ContactEvent_Contact{\n\t\t\tContact: c.Data(),\n\t\t},\n\t}\n\tc.events.Publish(event)\n\n\treturn re\n}", "func DefaultUpdateContact(ctx context.Context, in *Contact, db *gorm.DB) (*Contact, error) {\n\tif in == nil {\n\t\treturn nil, errors.New(\"Nil argument to DefaultUpdateContact\")\n\t}\n\tif exists, err := DefaultReadContact(ctx, &Contact{Id: in.GetId()}, db); err != nil {\n\t\treturn nil, err\n\t} else if exists == nil {\n\t\treturn nil, errors.New(\"Contact not found\")\n\t}\n\tormObj, err := in.ToORM(ctx)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\tif err = db.Save(&ormObj).Error; err != nil {\n\t\treturn nil, err\n\t}\n\tpbResponse, err := ormObj.ToPB(ctx)\n\treturn &pbResponse, err\n}", "func (r Dns_Domain_Registration) ModifyContact(contact *datatypes.Container_Dns_Domain_Registration_Contact) (resp bool, err error) {\n\tparams := []interface{}{\n\t\tcontact,\n\t}\n\terr = r.Session.DoRequest(\"SoftLayer_Dns_Domain_Registration\", \"modifyContact\", params, &r.Options, &resp)\n\treturn\n}", "func (c *ClientWithResponses) UpdateContactWithBodyWithResponse(ctx context.Context, contactId string, contentType string, body io.Reader) (*UpdateContactResponse, error) {\n\trsp, err := c.UpdateContactWithBody(ctx, contactId, contentType, body)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\treturn ParseUpdateContactResponse(rsp)\n}", "func (api *API) ModifyContact(id string, params *parameters.ModifyContact) (*types.Contacts, error) {\n\tpath := fmt.Sprintf(\"/contacts/%s\", id)\n\n\tbody, err := query.Values(params)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\n\tdata, err := api.put(path, &body)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\n\treturn types.NewContactsFromJSON(data)\n}", "func (r UpdateContactRequest) Send() (*UpdateContactOutput, error) {\n\terr := r.Request.Send()\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\n\treturn r.Request.Data.(*UpdateContactOutput), nil\n}", "func DefaultUpdateContact(ctx context.Context, in *Contact, db *gorm.DB) (*Contact, error) {\n\tif in == nil {\n\t\treturn nil, errors.New(\"Nil argument to DefaultUpdateContact\")\n\t}\n\tormObj, 
err := ConvertContactToORM(*in)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\tif err = db.Save(&ormObj).Error; err != nil {\n\t\treturn nil, err\n\t}\n\tpbResponse, err := ConvertContactFromORM(ormObj)\n\treturn &pbResponse, err\n}", "func (api HatchbuckClient) CreateContact(contact Contact) (Contact, error) {\n\tvar c Contact\n\tendpoint := fmt.Sprintf(\"%v/contact?api_key=%v\", api.baseURL, api.key)\n\tpayload, _ := json.Marshal(contact)\n\tres, err := http.Post(endpoint, \"application/json\", bytes.NewBuffer(payload))\n\tlog.Println(res.StatusCode)\n\tif err != nil {\n\t\treturn c, err\n\t}\n\tdecoder := json.NewDecoder(res.Body)\n\terr = decoder.Decode(&c)\n\tif err != nil {\n\t\treturn c, err\n\t}\n\treturn c, nil\n}", "func (c *Contact) updateContactRequest(status string) bool {\n\tnow := time.Now().Format(time.RFC3339)\n\t// Whether to keep the channel open\n\tvar re bool\n\n\tswitch status {\n\tcase \"Pending\":\n\t\tc.data.Request.WhenDelivered = now\n\t\tre = true\n\n\tcase \"Accepted\":\n\t\tc.data.Request = nil\n\t\tif c.connection != nil {\n\t\t\tc.data.Status = ricochet.Contact_ONLINE\n\t\t} else {\n\t\t\tc.data.Status = ricochet.Contact_UNKNOWN\n\t\t}\n\n\tcase \"Rejected\":\n\t\tc.data.Request.WhenRejected = now\n\n\tcase \"Error\":\n\t\tc.data.Request.WhenRejected = now\n\t\tc.data.Request.RemoteError = \"error occurred\"\n\n\tdefault:\n\t\tlog.Printf(\"Unknown contact request status '%s'\", status)\n\t}\n\n\tconfig := c.core.Config.Lock()\n\tdefer c.core.Config.Unlock()\n\tconfig.Contacts[c.data.Address] = c.data\n\treturn re\n}", "func (u *User) SetContact(value bool) {\n\tif value {\n\t\tu.Flags.Set(11)\n\t\tu.Contact = true\n\t} else {\n\t\tu.Flags.Unset(11)\n\t\tu.Contact = false\n\t}\n}", "func TestSetContact(t *testing.T) {\n\tvar client *RedisFakeClient\n\taddressbook := d.AddressBook{}\n\tcontact := c.Contact{\"Jonas\",\"[email protected]\", \"0791234567\", c.Address{\"12 Fake Street\", \"Fake City\", \"FA\"}}\n\tok, errorCode := addressbook.SetContact(client, contact, 1);\n\tif !ok {\n\t\tt.Fail()\n\t}\n\tt.Logf(\"Can set contact: %s, ok: %t\", errorCode, ok)\n}", "func (b *Bot) SendContact(request axon.O) (result axon.O, err error) {\n\tvar response interface{}\n\tif response, err = b.doPost(\"sendContact\", request); err == nil {\n\t\tresult = response.(map[string]interface{})\n\t}\n\treturn\n}", "func DefaultStrictUpdateContact(ctx context.Context, in *Contact, db *gorm.DB) (*Contact, error) {\n\tif in == nil {\n\t\treturn nil, fmt.Errorf(\"Nil argument to DefaultCascadedUpdateContact\")\n\t}\n\tormObj, err := ConvertContactToORM(*in)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\tif err = db.Save(&ormObj).Error; err != nil {\n\t\treturn nil, err\n\t}\n\tpbResponse, err := ConvertContactFromORM(ormObj)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\treturn &pbResponse, nil\n}", "func NewUpdateContactRequestWithBody(server string, contactId string, contentType string, body io.Reader) (*http.Request, error) {\n\tvar err error\n\n\tvar pathParam0 string\n\n\tpathParam0, err = runtime.StyleParam(\"simple\", false, \"contact_id\", contactId)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\n\tqueryUrl, err := url.Parse(server)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\n\tbasePath := fmt.Sprintf(\"/domain/v2alpha2/contacts/%s\", pathParam0)\n\tif basePath[0] == '/' {\n\t\tbasePath = basePath[1:]\n\t}\n\n\tqueryUrl, err = queryUrl.Parse(basePath)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\n\treq, err := http.NewRequest(\"PATCH\", queryUrl.String(), 
body)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\n\treq.Header.Add(\"Content-Type\", contentType)\n\treturn req, nil\n}", "func (s *ContactsServiceImpl) ContactsDeleteContact(ctx context.Context, request *mtproto.TLContactsDeleteContact) (*mtproto.Contacts_Link, error) {\n\tmd := grpc_util.RpcMetadataFromIncoming(ctx)\n\tglog.Infof(\"contacts.deleteContact#8e953744 - metadata: %s, request: %s\", logger.JsonDebugData(md), logger.JsonDebugData(request))\n\n\tvar (\n\t\tdeleteId int32\n\t\tid = request.Id\n\t)\n\n\tswitch id.GetConstructor() {\n\tcase mtproto.TLConstructor_CRC32_inputUserSelf:\n\t\tdeleteId = md.UserId\n\tcase mtproto.TLConstructor_CRC32_inputUser:\n\t\t// Check access hash\n\t\tif ok := s.UserModel.CheckAccessHashByUserId(id.GetData2().GetUserId(), id.GetData2().GetAccessHash()); !ok {\n\t\t\t// TODO(@benqi): Add ACCESS_HASH_INVALID codes\n\t\t\terr := mtproto.NewRpcError2(mtproto.TLRpcErrorCodes_BAD_REQUEST)\n\t\t\tglog.Error(err, \": is access_hash error\")\n\t\t\treturn nil, err\n\t\t}\n\n\t\tdeleteId = id.GetData2().GetUserId()\n\t\t// TODO(@benqi): contact exist\n\tdefault:\n\t\t// mtproto.TLConstructor_CRC32_inputUserEmpty:\n\t\terr := mtproto.NewRpcError2(mtproto.TLRpcErrorCodes_BAD_REQUEST)\n\t\tglog.Error(err, \": is inputUserEmpty\")\n\t\treturn nil, err\n\t}\n\n\t// selfUser := user2.GetUserById(md.UserId, md.UserId)\n\tdeleteUser := s.UserModel.GetUserById(md.UserId, deleteId)\n\n\tcontactLogic := s.ContactModel.MakeContactLogic(md.UserId)\n\tneedUpdate := contactLogic.DeleteContact(deleteId, deleteUser.Data2.MutualContact)\n\n\tselfUpdates := updates2.NewUpdatesLogic(md.UserId)\n\tmyLink, foreignLink := s.UserModel.GetContactLink(md.UserId, deleteId)\n\tcontactLink := &mtproto.TLUpdateContactLink{Data2: &mtproto.Update_Data{\n\t\tUserId: deleteId,\n\t\tMyLink: myLink,\n\t\tForeignLink: foreignLink,\n\t}}\n\tselfUpdates.AddUpdate(contactLink.To_Update())\n\tselfUpdates.AddUser(deleteUser)\n\n\t// TODO(@benqi): handle seq\n\tsync_client.GetSyncClient().SyncUpdatesNotMe(md.UserId, md.AuthId, selfUpdates.ToUpdates())\n\n\t// TODO(@benqi): 推给联系人逻辑需要再考虑考虑\n\tif needUpdate {\n\t\t// TODO(@benqi): push to contact user update contact link\n\t\tcontactUpdates := updates2.NewUpdatesLogic(deleteUser.Data2.Id)\n\t\tmyLink, foreignLink := s.UserModel.GetContactLink(deleteId, md.UserId)\n\t\tcontactLink2 := &mtproto.TLUpdateContactLink{Data2: &mtproto.Update_Data{\n\t\t\tUserId: md.UserId,\n\t\t\tMyLink: myLink,\n\t\t\tForeignLink: foreignLink,\n\t\t}}\n\t\tcontactUpdates.AddUpdate(contactLink2.To_Update())\n\n\t\tselfUser := s.UserModel.GetUserById(deleteId, md.UserId)\n\t\tcontactUpdates.AddUser(selfUser)\n\t\tsync_client.GetSyncClient().PushUpdates(deleteId, contactUpdates.ToUpdates())\n\t}\n\n\t////////////////////////////////////////////////////////////////////////////////////////\n\tcontactsLink := &mtproto.TLContactsLink{Data2: &mtproto.Contacts_Link_Data{\n\t\tMyLink: contactLink.Data2.MyLink,\n\t\tForeignLink: contactLink.Data2.ForeignLink,\n\t\tUser: deleteUser,\n\t}}\n\n\tglog.Infof(\"contacts.deleteContact#8e953744 - reply: %s\", logger.JsonDebugData(contactsLink))\n\treturn contactsLink.To_Contacts_Link(), nil\n}", "func (s *DescribeContactOutput) SetContact(v *Contact) *DescribeContactOutput {\n\ts.Contact = v\n\treturn s\n}", "func (a *ShippingContactsApiService) UpdateCustomerShippingContacts(ctx context.Context, id string, shippingContactsId string) ApiUpdateCustomerShippingContactsRequest {\n\treturn 
ApiUpdateCustomerShippingContactsRequest{\n\t\tApiService: a,\n\t\tctx: ctx,\n\t\tid: id,\n\t\tshippingContactsId: shippingContactsId,\n\t}\n}", "func (o *ContactResponse) SetContact(v []Contact) {\n\to.Contact = &v\n}", "func (c Client) UpdateAccount(account Account, contact ...string) (Account, error) {\n\tvar updateAccountReq interface{}\n\n\tif !reflect.DeepEqual(account.Contact, contact) {\n\t\t// Only provide a non-nil updateAccountReq when there is an update to be made.\n\t\tupdateAccountReq = struct {\n\t\t\tContact []string `json:\"contact,omitempty\"`\n\t\t}{\n\t\t\tContact: contact,\n\t\t}\n\t} else {\n\t\t// Otherwise use \"\" to trigger a POST-as-GET to fetch up-to-date account\n\t\t// information from the acme service.\n\t\tupdateAccountReq = \"\"\n\t}\n\n\t_, err := c.post(account.URL, account.URL, account.PrivateKey, updateAccountReq, &account, http.StatusOK)\n\tif err != nil {\n\t\treturn account, err\n\t}\n\n\tif account.Thumbprint == \"\" {\n\t\taccount.Thumbprint, err = JWKThumbprint(account.PrivateKey.Public())\n\t\tif err != nil {\n\t\t\treturn account, fmt.Errorf(\"acme: error computing account thumbprint: %v\", err)\n\t\t}\n\t}\n\n\treturn account, nil\n}", "func DefaultStrictUpdateContact(ctx context.Context, in *Contact, db *gorm.DB) (*Contact, error) {\n\tif in == nil {\n\t\treturn nil, fmt.Errorf(\"Nil argument to DefaultCascadedUpdateContact\")\n\t}\n\tormObj, err := in.ToORM(ctx)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\tfilterObjEmail := EmailORM{}\n\tif ormObj.Id == 0 {\n\t\treturn nil, errors.New(\"Can't do overwriting update with no 'Id' value for FK of field 'Emails'\")\n\t}\n\tfilterObjEmail.ContactId = ormObj.Id\n\tif err = db.Where(filterObjEmail).Delete(Email{}).Error; err != nil {\n\t\treturn nil, err\n\t}\n\taccountID, err := auth.GetAccountID(ctx, nil)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\tdb = db.Where(&ContactORM{AccountID: accountID})\n\tif err = db.Save(&ormObj).Error; err != nil {\n\t\treturn nil, err\n\t}\n\tpbResponse, err := ormObj.ToPB(ctx)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\treturn &pbResponse, nil\n}", "func (r *CompanyRequest) Update(ctx context.Context, reqObj *Company) error {\n\treturn r.JSONRequest(ctx, \"PATCH\", \"\", reqObj, nil)\n}", "func NewUpdateContactRequest(server string, contactId string, body UpdateContactJSONRequestBody) (*http.Request, error) {\n\tvar bodyReader io.Reader\n\tbuf, err := json.Marshal(body)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\tbodyReader = bytes.NewReader(buf)\n\treturn NewUpdateContactRequestWithBody(server, contactId, \"application/json\", bodyReader)\n}", "func insertContact(r *http.Request, svc *mirror.Service) string {\n\tc := appengine.NewContext(r)\n\tc.Infof(\"Inserting contact\")\n\tname := r.FormValue(\"name\")\n\timageUrl := r.FormValue(\"imageUrl\")\n\tif name == \"\" || imageUrl == \"\" {\n\t\treturn \"Must specify imageUrl and name to insert contact\"\n\t}\n\tif strings.HasPrefix(imageUrl, \"/\") {\n\t\timageUrl = fullURL(r.Host, imageUrl)\n\t}\n\n\tbody := mirror.Contact{\n\t\tDisplayName: name,\n\t\tId: strings.Replace(name, \" \", \"_\", -1),\n\t\tImageUrls: []string{imageUrl},\n\t}\n\n\tif _, err := svc.Contacts.Insert(&body).Do(); err != nil {\n\t\treturn fmt.Sprintf(\"Unable to insert contact: %s\", err)\n\t}\n\treturn fmt.Sprintf(\"Inserted contact: %s\", name)\n}", "func (r *CompanyInformationRequest) Update(ctx context.Context, reqObj *CompanyInformation) error {\n\treturn r.JSONRequest(ctx, \"PATCH\", \"\", reqObj, nil)\n}", "func (n 
*PeernotifyNode) saveContact(key []byte, contact *pb.Contact) error {\n\treturn storeContact(n.Contacts, key, contact)\n}", "func (*UpdateContactRequest) Descriptor() ([]byte, []int) {\n\treturn file_google_cloud_essentialcontacts_v1_service_proto_rawDescGZIP(), []int{6}\n}", "func (d *DynamoConn) AddContact(userID string, contact *models.Contact) error {\n\tvar ContactsUpdate struct {\n\t\tContacts []*models.Contact `json:\":c\"`\n\t}\n\n\t// Retrieve user from database\n\tuser, err := d.GetUser(userID)\n\tif err != nil {\n\t\treturn err\n\t}\n\t// Add the new contact to the slice and marshal the update\n\tContactsUpdate.Contacts = append(user.Contacts, contact)\n\n\texpr, err := dynamodbattribute.MarshalMap(ContactsUpdate)\n\tif err != nil {\n\t\treturn err\n\n\t}\n\n\t// Define table schema's key\n\tkey := map[string]*dynamodb.AttributeValue{\n\t\t\"user_id\": {\n\t\t\tS: aws.String(user.UserID),\n\t\t},\n\t}\n\n\t// Use marshalled map for UpdateItemInput\n\titem := &dynamodb.UpdateItemInput{\n\t\tExpressionAttributeValues: expr,\n\t\tTableName: aws.String(common.UsersTableName),\n\t\tKey: key,\n\t\tReturnValues: aws.String(\"UPDATED_NEW\"),\n\t\tUpdateExpression: aws.String(\"set contacts = :c\"),\n\t}\n\n\t// Invoke the update\n\t_, err = d.Client.UpdateItem(item)\n\tif err != nil {\n\t\treturn err\n\t}\n\treturn nil\n}", "func (o *UpdateContactMethodParams) WithContext(ctx context.Context) *UpdateContactMethodParams {\n\to.SetContext(ctx)\n\treturn o\n}", "func Test_AddContact(t *testing.T) {\n\t// SETUP:\n\t// A standard Env. defer is used to ensure the env is cleaned up after the test.\n\tenv := test.SetupEnv(t)\n\tdefer env.Close()\n\n\t// TEST: Adding a contact via the API.\n\tcontact, err := env.Client.AddContact(service.AddContactRequest{\n\t\tEmail: \"[email protected]\",\n\t\tName: \"Alice Zulu\",\n\t})\n\n\t// VERIFY: Response contains the contact\n\trequire.NoError(t, err, \"Unable to get contact via API\")\n\trequire.NotEmpty(t, contact, \"Contact not found\")\n\tassert.True(t, contact.Id > 0, \"Contact ID is missing\")\n\tassert.Equal(t, contact.Email, \"[email protected]\")\n\tassert.Equal(t, contact.Name, \"Alice Zulu\")\n\n\t// VERIFY: Contact is added to the database properly.\n\tdbContact := env.ReadContactWithEmail(\"[email protected]\")\n\trequire.NotEmpty(t, dbContact, \"Contact not found\")\n\tassert.Equal(t, dbContact.Email, \"[email protected]\")\n\tassert.Equal(t, dbContact.Name, \"Alice Zulu\")\n}", "func AddContact(body ContactBody, token string) (ContactReturn, error) {\n\n\t// Convert data\n\tconvert, err := json.Marshal(body)\n\tif err != nil {\n\t\treturn ContactReturn{}, err\n\t}\n\n\t// Set config for new request\n\tr := Request{\"/contacts\", \"POST\", token, convert}\n\n\t// Send new request\n\tresponse, err := r.Send()\n\tif err != nil {\n\t\treturn ContactReturn{}, err\n\t}\n\n\t// Close response body after function ends\n\tdefer response.Body.Close()\n\n\t// Decode data\n\tvar decode ContactReturn\n\n\terr = json.NewDecoder(response.Body).Decode(&decode)\n\tif err != nil {\n\t\treturn ContactReturn{}, err\n\t}\n\n\t// Return data\n\treturn decode, nil\n\n}", "func RegisterContactAPI(router *httprouter.Router, opt *Options) {\n\t// Validation\n\tvar err error\n\tswitch {\n\tcase router == nil:\n\t\terr = errors.New(\"router must not be nil\")\n\tcase strings.TrimSpace(opt.RootDir) == \"\":\n\t\terr = errors.New(\"root directory must not be empty\")\n\tcase strings.TrimSpace(opt.FilePrefix) == \"\":\n\t\terr = errors.New(\"file prefix must not be 
empty\")\n\tcase opt.Revision == 0:\n\t\terr = errors.New(\"revision must not be 0\")\n\t}\n\thandleError(err)\n\n\tc := &contactAPI{\n\t\trootDir: opt.RootDir,\n\t\tfilePrefix: opt.FilePrefix,\n\t\trevision: opt.Revision,\n\t\tmu: &sync.RWMutex{},\n\t\tcontacts: make(map[int]*ContactData, 0),\n\t\tcountiesMap: make(map[string]int, 0),\n\t}\n\n\t// read from file\n\tfile, err := os.Open(filepath.Join(opt.RootDir, fmt.Sprintf(\"%s-v%d.json\", opt.FilePrefix, opt.Revision)))\n\thandleError(err)\n\tdefer file.Close()\n\n\t// update data from file\n\tcontact := &ContactData{}\n\terr = json.NewDecoder(file).Decode(contact)\n\thandleError(err)\n\n\t// get json\n\tbs, err := json.Marshal(contact)\n\thandleError(err)\n\n\t// Update contacts map\n\tfor index, countyHotline := range contact.CountiesHotlines {\n\t\tc.countiesMap[countyHotline.County] = index\n\t}\n\n\t// add the contact only if it doesn't exist\n\t_, err = revisionManager.Get(contactGroup, contact.Revision)\n\tif gorm.IsRecordNotFoundError(err) {\n\t\terr = revisionManager.Add(&revision{\n\t\t\tRevision: contact.Revision,\n\t\t\tResourceGroup: contactGroup,\n\t\t\tData: bs,\n\t\t})\n\t\thandleError(err)\n\t}\n\n\tdur := time.Duration(int(30*time.Minute) + rand.Intn(30))\n\n\tgo updateRevisionWorker(dur, func() {\n\t\t// get new revision\n\t\trevisions, err := revisionManager.List(contactGroup)\n\t\tif err != nil {\n\t\t\tlogrus.Infof(\"failed to list revisions from database: %v\", err)\n\t\t\treturn\n\t\t}\n\n\t\t// Update Map\n\t\tc.mu.Lock()\n\t\tdefer c.mu.Unlock()\n\n\t\tfor _, revision := range revisions {\n\t\t\tcontact := &ContactData{}\n\t\t\terr = json.Unmarshal(revision.Data, contact)\n\t\t\tif err != nil {\n\t\t\t\tlogrus.Infof(\"failed to unmarshal revision: %v\", err)\n\t\t\t\tcontinue\n\t\t\t}\n\n\t\t\tc.contacts[revision.Revision] = contact\n\t\t\tc.revision = revision.Revision\n\t\t}\n\n\t\tlogrus.Infoln(\"Contacts updated\")\n\t})\n\n\t// Update endpoints\n\trouter.GET(\"/rest/v1/contacts\", c.GetContact)\n\trouter.PUT(\"/rest/v1/contacts\", c.UpdateContact)\n\trouter.GET(\"/rest/v1/contacts/hotlines/nearest\", c.GetNearestHotlines)\n\trouter.GET(\"/rest/v1/contacts/hotlines/county\", c.GetCountyHotlines)\n}", "func handleContact(resp http.ResponseWriter, request *http.Request) {\n\tcors := handleCors(resp, request)\n\tif cors {\n\t\treturn\n\t}\n\n\tbody, err := ioutil.ReadAll(request.Body)\n\tif err != nil {\n\t\tresp.WriteHeader(401)\n\t\tresp.Write([]byte(fmt.Sprintf(`{\"success\": false, \"reason\": \"%s\"}`, err)))\n\t\treturn\n\t}\n\n\tvar t Contact\n\terr = json.Unmarshal(body, &t)\n\tif err != nil {\n\t\tresp.WriteHeader(401)\n\t\tresp.Write([]byte(fmt.Sprintf(`{\"success\": false, \"reason\": \"%s\"}`, err)))\n\t\treturn\n\t}\n\n\tif len(t.Email) < 3 || len(t.Message) == 0 {\n\t\tresp.WriteHeader(401)\n\t\tresp.Write([]byte(fmt.Sprintf(`{\"success\": false, \"reason\": \"Please fill a valid email and message\"}`)))\n\t\treturn\n\t}\n\n\tctx := context.Background()\n\tmailContent := fmt.Sprintf(\"Firsname: %s\\nLastname: %s\\nTitle: %s\\nCompanyname: %s\\nPhone: %s\\nEmail: %s\\nMessage: %s\", t.Firstname, t.Lastname, t.Title, t.Companyname, t.Phone, t.Email, t.Message)\n\tlog.Printf(\"Sending contact from %s\", t.Email)\n\n\tmsg := &mail.Message{\n\t\tSender: \"Shuffle <[email protected]>\",\n\t\tTo: []string{\"[email protected]\"},\n\t\tSubject: \"Shuffler.io - New contact form\",\n\t\tBody: mailContent,\n\t}\n\n\tif err := mail.Send(ctx, msg); err != nil {\n\t\tlog.Printf(\"Couldn't send email: %v\", 
err)\n\t}\n\n\tresp.WriteHeader(200)\n\tresp.Write([]byte(fmt.Sprintf(`{\"success\": true, \"message\": \"Thanks for reaching out. We will contact you soon!\"}`)))\n}", "func (a *ShippingContactsApiService) UpdateCustomerShippingContactsExecute(r ApiUpdateCustomerShippingContactsRequest) (*CustomerShippingContactsResponse, *http.Response, error) {\n\tvar (\n\t\tlocalVarHTTPMethod = http.MethodPut\n\t\tlocalVarPostBody interface{}\n\t\tformFiles []formFile\n\t\tlocalVarReturnValue *CustomerShippingContactsResponse\n\t)\n\n\tlocalBasePath, err := a.client.cfg.ServerURLWithContext(r.ctx, \"ShippingContactsApiService.UpdateCustomerShippingContacts\")\n\tif err != nil {\n\t\treturn localVarReturnValue, nil, &GenericOpenAPIError{error: err.Error()}\n\t}\n\n\tlocalVarPath := localBasePath + \"/customers/{id}/shipping_contacts/{shipping_contacts_id}\"\n\tlocalVarPath = strings.Replace(localVarPath, \"{\"+\"id\"+\"}\", url.PathEscape(parameterValueToString(r.id, \"id\")), -1)\n\tlocalVarPath = strings.Replace(localVarPath, \"{\"+\"shipping_contacts_id\"+\"}\", url.PathEscape(parameterValueToString(r.shippingContactsId, \"shippingContactsId\")), -1)\n\n\tlocalVarHeaderParams := make(map[string]string)\n\tlocalVarQueryParams := url.Values{}\n\tlocalVarFormParams := url.Values{}\n\tif r.customerUpdateShippingContacts == nil {\n\t\treturn localVarReturnValue, nil, reportError(\"customerUpdateShippingContacts is required and must be specified\")\n\t}\n\n\t// to determine the Content-Type header\n\tlocalVarHTTPContentTypes := []string{\"application/json\"}\n\n\t// set Content-Type header\n\tlocalVarHTTPContentType := selectHeaderContentType(localVarHTTPContentTypes)\n\tif localVarHTTPContentType != \"\" {\n\t\tlocalVarHeaderParams[\"Content-Type\"] = localVarHTTPContentType\n\t}\n\n\t// to determine the Accept header\n\tlocalVarHTTPHeaderAccepts := []string{\"application/vnd.conekta-v2.1.0+json\"}\n\n\t// set Accept header\n\tlocalVarHTTPHeaderAccept := selectHeaderAccept(localVarHTTPHeaderAccepts)\n\tif localVarHTTPHeaderAccept != \"\" {\n\t\tlocalVarHeaderParams[\"Accept\"] = localVarHTTPHeaderAccept\n\t}\n\tif r.acceptLanguage != nil {\n\t\tparameterAddToHeaderOrQuery(localVarHeaderParams, \"Accept-Language\", r.acceptLanguage, \"\")\n\t}\n\tif r.xChildCompanyId != nil {\n\t\tparameterAddToHeaderOrQuery(localVarHeaderParams, \"X-Child-Company-Id\", r.xChildCompanyId, \"\")\n\t}\n\t// body params\n\tlocalVarPostBody = r.customerUpdateShippingContacts\n\treq, err := a.client.prepareRequest(r.ctx, localVarPath, localVarHTTPMethod, localVarPostBody, localVarHeaderParams, localVarQueryParams, localVarFormParams, formFiles)\n\tif err != nil {\n\t\treturn localVarReturnValue, nil, err\n\t}\n\n\tlocalVarHTTPResponse, err := a.client.callAPI(req)\n\tif err != nil || localVarHTTPResponse == nil {\n\t\treturn localVarReturnValue, localVarHTTPResponse, err\n\t}\n\n\tlocalVarBody, err := io.ReadAll(localVarHTTPResponse.Body)\n\tlocalVarHTTPResponse.Body.Close()\n\tlocalVarHTTPResponse.Body = io.NopCloser(bytes.NewBuffer(localVarBody))\n\tif err != nil {\n\t\treturn localVarReturnValue, localVarHTTPResponse, err\n\t}\n\n\tif localVarHTTPResponse.StatusCode >= 300 {\n\t\tnewErr := &GenericOpenAPIError{\n\t\t\tbody: localVarBody,\n\t\t\terror: localVarHTTPResponse.Status,\n\t\t}\n\t\tif localVarHTTPResponse.StatusCode == 401 {\n\t\t\tvar v ModelError\n\t\t\terr = a.client.decode(&v, localVarBody, localVarHTTPResponse.Header.Get(\"Content-Type\"))\n\t\t\tif err != nil {\n\t\t\t\tnewErr.error = 
err.Error()\n\t\t\t\treturn localVarReturnValue, localVarHTTPResponse, newErr\n\t\t\t}\n\t\t\t\t\tnewErr.error = formatErrorMessage(localVarHTTPResponse.Status, &v)\n\t\t\t\t\tnewErr.model = v\n\t\t\treturn localVarReturnValue, localVarHTTPResponse, newErr\n\t\t}\n\t\tif localVarHTTPResponse.StatusCode == 404 {\n\t\t\tvar v ModelError\n\t\t\terr = a.client.decode(&v, localVarBody, localVarHTTPResponse.Header.Get(\"Content-Type\"))\n\t\t\tif err != nil {\n\t\t\t\tnewErr.error = err.Error()\n\t\t\t\treturn localVarReturnValue, localVarHTTPResponse, newErr\n\t\t\t}\n\t\t\t\t\tnewErr.error = formatErrorMessage(localVarHTTPResponse.Status, &v)\n\t\t\t\t\tnewErr.model = v\n\t\t\treturn localVarReturnValue, localVarHTTPResponse, newErr\n\t\t}\n\t\tif localVarHTTPResponse.StatusCode == 422 {\n\t\t\tvar v ModelError\n\t\t\terr = a.client.decode(&v, localVarBody, localVarHTTPResponse.Header.Get(\"Content-Type\"))\n\t\t\tif err != nil {\n\t\t\t\tnewErr.error = err.Error()\n\t\t\t\treturn localVarReturnValue, localVarHTTPResponse, newErr\n\t\t\t}\n\t\t\t\t\tnewErr.error = formatErrorMessage(localVarHTTPResponse.Status, &v)\n\t\t\t\t\tnewErr.model = v\n\t\t\treturn localVarReturnValue, localVarHTTPResponse, newErr\n\t\t}\n\t\tif localVarHTTPResponse.StatusCode == 500 {\n\t\t\tvar v ModelError\n\t\t\terr = a.client.decode(&v, localVarBody, localVarHTTPResponse.Header.Get(\"Content-Type\"))\n\t\t\tif err != nil {\n\t\t\t\tnewErr.error = err.Error()\n\t\t\t\treturn localVarReturnValue, localVarHTTPResponse, newErr\n\t\t\t}\n\t\t\t\t\tnewErr.error = formatErrorMessage(localVarHTTPResponse.Status, &v)\n\t\t\t\t\tnewErr.model = v\n\t\t}\n\t\treturn localVarReturnValue, localVarHTTPResponse, newErr\n\t}\n\n\terr = a.client.decode(&localVarReturnValue, localVarBody, localVarHTTPResponse.Header.Get(\"Content-Type\"))\n\tif err != nil {\n\t\tnewErr := &GenericOpenAPIError{\n\t\t\tbody: localVarBody,\n\t\t\terror: err.Error(),\n\t\t}\n\t\treturn localVarReturnValue, localVarHTTPResponse, newErr\n\t}\n\n\treturn localVarReturnValue, localVarHTTPResponse, nil\n}", "func deleteContact(r *http.Request, svc *mirror.Service) string {\n\tid := strings.Replace(r.FormValue(\"id\"), \" \", \"_\", -1)\n\n\tif err := svc.Contacts.Delete(id).Do(); err != nil {\n\t\treturn fmt.Sprintf(\"Unable to delete contact: %s\", err)\n\t}\n\treturn \"Contact has been deleted.\"\n}", "func UpdateCustomer(c *gin.Context) {\n\t// Get model if exist\n\tvar customer models.Customer\n\tif err := utils.DB.Where(\"id = ?\", c.Param(\"id\")).First(&customer).Error; err != nil {\n\t\tc.JSON(http.StatusBadRequest, gin.H{\"error\": \"Record not found!\"})\n\t\treturn\n\t}\n\n\t// Validate input\n\tvar input models.Customer\n\tif err := c.ShouldBindJSON(&input); err != nil {\n\t\tfmt.Println(err)\n\t\tc.JSON(http.StatusBadRequest, gin.H{\"error\": \"could not decode data\"})\n\t\treturn\n\t}\n\n\tutils.DB.Model(&customer).Updates(input)\n\n\tc.JSON(http.StatusOK, gin.H{\"data\": customer})\n}", "func CreateContact(w http.ResponseWriter, r *http.Request) {\n\t// Grab the id of the user that send the request\n\tuser := r.Context().Value(middleware.User(\"user\")).(uint)\n\tcontact := &model.Contact{}\n\n\terr := json.NewDecoder(r.Body).Decode(contact)\n\tif err != nil {\n\t\tlog.Println(err)\n\t\tresponse.RespondWithStatus(\n\t\t\tw,\n\t\t\tresponse.Message(false, \"Error while decoding request body\"),\n\t\t\thttp.StatusBadRequest,\n\t\t)\n\t\treturn\n\t}\n\n\tcontact.UserID = user\n\tdata, status, ok := contact.Create()\n\tif !ok 
{\n\t\tresponse.RespondWithStatus(w, data, status)\n\t}\n\tresponse.RespondWithStatus(w, data, status)\n}", "func (app *Application) ContactDelete(w http.ResponseWriter, r *http.Request) {\n\tvar err error\n\tvar id string\n\tvar stmt *sql.Stmt\n\n\t// NOTES(cixtor): r.ParseForm doesn’t works with DELETE requests.\n\tif id = r.URL.Query().Get(\"id\"); id == \"\" {\n\t\tfail(w, r, fmt.Errorf(\"missing `id` form value\"))\n\t\treturn\n\t}\n\n\tif stmt, err = app.db.Prepare(\"DELETE FROM contacts WHERE id=?\"); err != nil {\n\t\tfail(w, r, err)\n\t\treturn\n\t}\n\n\tif _, err = stmt.Exec(id); err != nil {\n\t\tfail(w, r, err)\n\t\treturn\n\t}\n\n\tif _, err = contactRead(app.db, id); err == nil {\n\t\tfail(w, r, fmt.Errorf(\"account was not deleted\"))\n\t\treturn\n\t}\n\n\twrite(w, r, Response{Ok: true})\n}", "func (d *DynamoConn) UpdateTier(userID string, phoneNumber string, tier common.AlertTier) error {\n\tvar ContactsUpdate struct {\n\t\tContacts []*models.Contact `json:\":c\"`\n\t}\n\n\t// Retrieve user from database\n\tuser, err := d.GetUser(userID)\n\tif err != nil {\n\t\treturn err\n\t}\n\n\t// Search for the contact matching the provided phone number\n\tseen := false\n\tfor _, contact := range user.Contacts {\n\t\tif contact.PhoneNumber == phoneNumber {\n\t\t\tcontact.Tier = tier\n\t\t\tseen = true\n\t\t\tbreak\n\t\t}\n\t}\n\n\t// Hacky way to check if the contact was found\n\tif !seen {\n\t\treturn fmt.Errorf(\"unable to find contact with phone number %v for user %v\", phoneNumber, userID)\n\t}\n\n\t// Good enough until we figure out a way to update a single contact\n\tContactsUpdate.Contacts = user.Contacts\n\n\texpr, err := dynamodbattribute.MarshalMap(ContactsUpdate)\n\tif err != nil {\n\t\treturn err\n\n\t}\n\n\t// Define table schema's key\n\tkey := map[string]*dynamodb.AttributeValue{\n\t\t\"user_id\": {\n\t\t\tS: aws.String(user.UserID),\n\t\t},\n\t}\n\n\t// Use marshalled map for UpdateItemInput\n\t// TODO: update contacts to be map[string]Contact where\n\t// the key is the phone number. 
This way we wont need to update the entire attribute\n\titem := &dynamodb.UpdateItemInput{\n\t\tExpressionAttributeValues: expr,\n\t\tTableName: aws.String(common.UsersTableName),\n\t\tKey: key,\n\t\tReturnValues: aws.String(\"UPDATED_NEW\"),\n\t\tUpdateExpression: aws.String(\"set contacts = :c\"),\n\t}\n\n\t// Invoke the update\n\t_, err = d.Client.UpdateItem(item)\n\tif err != nil {\n\t\treturn err\n\t}\n\treturn nil\n}", "func (a *Account) Update(b Account) {\n\tif len(b.Contact) > 0 {\n\t\ta.Contact = b.Contact\n\t}\n}", "func (s UserSet) SetContactAddress(value string) {\n\ts.RecordCollection.Set(models.NewFieldName(\"ContactAddress\", \"contact_address\"), value)\n}", "func (api HatchbuckClient) SearchContact(criteria SearchCriteria) ([]Contact, error) {\n\tvar c []Contact\n\tendpoint := fmt.Sprintf(\"%v/contact/search?api_key=%v\", api.baseURL, api.key)\n\tpayload, _ := json.Marshal(criteria)\n\tres, err := http.Post(endpoint, \"application/json\", bytes.NewBuffer(payload))\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\tdecoder := json.NewDecoder(res.Body)\n\terr = decoder.Decode(&c)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\treturn c, nil\n}", "func (c *cloudChannelRESTClient) UpdateCustomer(ctx context.Context, req *channelpb.UpdateCustomerRequest, opts ...gax.CallOption) (*channelpb.Customer, error) {\n\tm := protojson.MarshalOptions{AllowPartial: true, UseEnumNumbers: true}\n\tbody := req.GetCustomer()\n\tjsonReq, err := m.Marshal(body)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\n\tbaseUrl, err := url.Parse(c.endpoint)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\tbaseUrl.Path += fmt.Sprintf(\"/v1/%v\", req.GetCustomer().GetName())\n\n\tparams := url.Values{}\n\tparams.Add(\"$alt\", \"json;enum-encoding=int\")\n\tif req.GetUpdateMask() != nil {\n\t\tupdateMask, err := protojson.Marshal(req.GetUpdateMask())\n\t\tif err != nil {\n\t\t\treturn nil, err\n\t\t}\n\t\tparams.Add(\"updateMask\", string(updateMask[1:len(updateMask)-1]))\n\t}\n\n\tbaseUrl.RawQuery = params.Encode()\n\n\t// Build HTTP headers from client and context metadata.\n\thds := []string{\"x-goog-request-params\", fmt.Sprintf(\"%s=%v\", \"customer.name\", url.QueryEscape(req.GetCustomer().GetName()))}\n\n\thds = append(c.xGoogHeaders, hds...)\n\thds = append(hds, \"Content-Type\", \"application/json\")\n\theaders := gax.BuildHeaders(ctx, hds...)\n\topts = append((*c.CallOptions).UpdateCustomer[0:len((*c.CallOptions).UpdateCustomer):len((*c.CallOptions).UpdateCustomer)], opts...)\n\tunm := protojson.UnmarshalOptions{AllowPartial: true, DiscardUnknown: true}\n\tresp := &channelpb.Customer{}\n\te := gax.Invoke(ctx, func(ctx context.Context, settings gax.CallSettings) error {\n\t\tif settings.Path != \"\" {\n\t\t\tbaseUrl.Path = settings.Path\n\t\t}\n\t\thttpReq, err := http.NewRequest(\"PATCH\", baseUrl.String(), bytes.NewReader(jsonReq))\n\t\tif err != nil {\n\t\t\treturn err\n\t\t}\n\t\thttpReq = httpReq.WithContext(ctx)\n\t\thttpReq.Header = headers\n\n\t\thttpRsp, err := c.httpClient.Do(httpReq)\n\t\tif err != nil {\n\t\t\treturn err\n\t\t}\n\t\tdefer httpRsp.Body.Close()\n\n\t\tif err = googleapi.CheckResponse(httpRsp); err != nil {\n\t\t\treturn err\n\t\t}\n\n\t\tbuf, err := io.ReadAll(httpRsp.Body)\n\t\tif err != nil {\n\t\t\treturn err\n\t\t}\n\n\t\tif err := unm.Unmarshal(buf, resp); err != nil {\n\t\t\treturn err\n\t\t}\n\n\t\treturn nil\n\t}, opts...)\n\tif e != nil {\n\t\treturn nil, e\n\t}\n\treturn resp, nil\n}", "func CreateContact(w http.ResponseWriter, r *http.Request) {\n\n\terr := 
r.ParseForm()\n\n\tif err != nil {\n\t\tsetting.Renderer.JSON(w, http.StatusBadRequest, err)\n\t}\n\n\tcontact := new(models.Contact)\n\tcontact.ID = bson.NewObjectId()\n\n\tdecoder := schema.NewDecoder()\n\terr = decoder.Decode(contact, r.PostForm)\n\n\tif err != nil {\n\t\tapi.RenderError(w, http.StatusBadRequest, err)\n\n\t\treturn\n\t}\n\n\terr = store.GetDB().Save(contact)\n\n\tif err != nil {\n\t\tapi.RenderError(w, http.StatusBadRequest, err)\n\n\t\treturn\n\t}\n\n\tsetting.Renderer.JSON(w, http.StatusOK, contact)\n}", "func (o *UpdateContactMethodParams) WithHTTPClient(client *http.Client) *UpdateContactMethodParams {\n\to.SetHTTPClient(client)\n\treturn o\n}", "func updateCustomer(c *gin.Context) {\r\n\tt0 := time.Now().UnixNano()\r\n\tvar customer customerModel\r\n\tcustID := c.Param(\"id\")\r\n\r\n\tdb.First(&customer, custID)\r\n\r\n\tif customer.ID == 0 {\r\n\t\tc.JSON(http.StatusNotFound, gin.H{\"status\": http.StatusNotFound, \"message\": \"No customer found!\"})\r\n\t\treturn\r\n\t}\r\n\r\n\tif c.Query(\"firstName\") != \"\" {\r\n\t\tdb.Model(&customer).Update(\"firstName\", c.Query(\"firstName\"))\r\n\t}\r\n\tif c.Query(\"lastName\") != \"\" {\r\n\t\tdb.Model(&customer).Update(\"lastName\", c.Query(\"lastName\"))\r\n\t}\r\n\tif c.Query(\"email\") != \"\" {\r\n\t\tdb.Model(&customer).Update(\"email\", c.Query(\"email\"))\r\n\t}\r\n\r\n\t//create in mongo\r\n\tr := c.Request\r\n\tr.ParseForm()\r\n\tfile, handler, err := r.FormFile(\"addressProof\")\r\n\tif err != nil {\r\n\t\tfmt.Println(\"Error Retrieving the File\")\r\n\t\tfmt.Println(err)\r\n\t} else {\r\n\r\n\t\tfmt.Printf(\"Uploaded File: %+v\\n\", handler.Filename)\r\n\t\tfmt.Printf(\"File Size: %+v\\n\", handler.Size)\r\n\t\tfmt.Printf(\"MIME Header: %+v\\n\", handler.Header)\r\n\r\n\t\t// read all of the contents of our uploaded file into a\r\n\t\t// byte array\r\n\t\tfileBytes, err := ioutil.ReadAll(file)\r\n\t\tif err != nil {\r\n\t\t\tfmt.Println(err)\r\n\t\t}\r\n\r\n\t\tdeleteFileFromMongo(fmt.Sprint(customer.ID))\r\n\t\tsaveFileToMongo(http.DetectContentType(fileBytes), handler.Filename, fmt.Sprint(customer.ID), fileBytes)\r\n\t\tdefer file.Close()\r\n\t}\r\n\r\n\tdeleteHandler(\"customer_\" + custID)\r\n\tc.JSON(http.StatusOK, gin.H{\"status\": http.StatusOK, \"message\": \"Customer updated successfully!\",\r\n\t\t\"time\": fmt.Sprint(timeSpent(t0)) + \" ms\"})\r\n}", "func (c ClientFake) UpdateCampaign(id, name string) (Campaign, error) {\n\treturn Campaign{}, nil\n}", "func (c *client) SendContact(args SendContactArgs) (Message, TelegramError) {\n\treturn c.sendJSONMessage(args)\n}", "func (d UserData) SetContactAddress(value string) m.UserData {\n\td.ModelData.Set(models.NewFieldName(\"ContactAddress\", \"contact_address\"), value)\n\treturn d\n}", "func (o *UpdateContactMethodParams) WithUser(user *models.UpdateContactMethodParamsBody) *UpdateContactMethodParams {\n\to.SetUser(user)\n\treturn o\n}", "func (u *GameServerUpsertOne) UpdateLastContactAt() *GameServerUpsertOne {\n\treturn u.Update(func(s *GameServerUpsert) {\n\t\ts.UpdateLastContactAt()\n\t})\n}", "func (op *ContactsUpdateOp) Do(ctx context.Context) (*model.ContactUpdateResponse, error) {\n\tvar res *model.ContactUpdateResponse\n\treturn res, ((*esign.Op)(op)).Do(ctx, &res)\n}", "func Updatecustomer(c *gin.Context) {\r\n\tid := c.Params.ByName(\"id\")\r\n\tvar customer models.Customer\r\n\tvar resp models.Response\r\n\tvar flag bool = false\r\n\tif id != \"\" {\r\n\t\tc.BindJSON(&customer)\r\n\t\tp, err := models.Askdata()\r\n\t\tif err != nil 
{\r\n\t\t\tc.AbortWithStatus(http.StatusInternalServerError)\r\n\t\t} else {\r\n\r\n\t\t\tfor i, val := range p {\r\n\t\t\t\tif val.Id == id {\r\n\t\t\t\t\tp[i].Name = customer.Name\r\n\t\t\t\t\tp[i].City = customer.City\r\n\t\t\t\t\tp[i].Branch = customer.Branch\r\n\t\t\t\t\tflag = true\r\n\t\t\t\t}\r\n\t\t\t}\r\n\t\t\tif flag == true {\r\n\t\t\t\tmodels.Setdata(p)\r\n\t\t\t\tresp.Status = \"success\"\r\n\t\t\t\tresp.Message = \"The customer entry is updated successfully\"\r\n\t\t\t\tresp.Data = append(resp.Data, customer)\r\n\t\t\t\tc.JSON(http.StatusOK, resp)\r\n\t\t\t}\r\n\r\n\t\t}\r\n\r\n\t} else {\r\n\t\tresp.Status = \"Error\"\r\n\t\tresp.Message = \"The request is not correct\"\r\n\t\tc.JSON(http.StatusBadRequest, resp)\r\n\t}\r\n\r\n}", "func (c *RBController) Contact(w http.ResponseWriter, r *http.Request) (err error) {\n\tc.HTML(w, http.StatusOK, \"contact\", nil)\n\treturn nil\n}", "func (c client) StoreContacts(contacts []*Contact, lists []string) error {\n\tentity := storeContactsRequest{\n\t\tContacts: contacts,\n\t\tLists: lists,\n\t}\n\n\tdata, err := json.Marshal(entity)\n\tif err != nil {\n\t\treturn err\n\t}\n\n\treq, err := http.NewRequest(\"PUT\", urls.Join(c.base, \"/marketing/contacts\"), bytes.NewReader(data))\n\tif err != nil {\n\t\treturn err\n\t}\n\n\t_, data, err = c.Send(req)\n\tif err != nil {\n\t\treturn err\n\t}\n\n\treturn nil\n}", "func (c *Client) SearchContact(contactName string) (string, error) {\n\tif !c.authenticated {\n\t\treturn \"\", errors.New(\"Not authenticated. Call Authenticate first\")\n\t}\n\n\tpayload := make(map[string]interface{})\n\tpayload[\"contact_name\"] = contactName\n\tmsg := common.NewMessage(c.userId, \"server\",\n\t\t\"control\", \"seach_contact\", time.Time{},\n\t\tcommon.TEXT, payload)\n\n\tencoded, err := msg.Json()\n\tif err != nil {\n\t\tlog.Println(\"Failed to encode search contact message\", err)\n\t\treturn \"\", err\n\t}\n\n\terr = c.transport.SendText(string(encoded))\n\tif err != nil {\n\t\tlog.Println(\"Failed to send search contact message\", err)\n\t\treturn \"\", err\n\t}\n\n\tresp := <-c.Out\n\tif resp.Status() == common.STATUS_ERROR {\n\t\terrMsg := resp.Error()\n\t\tlog.Println(\"Search contact response error\", errMsg)\n\t\treturn \"\", errors.New(errMsg)\n\t}\n\n\tcontactId := resp.GetJsonData(\"contact_id\").(string)\n\treturn contactId, nil\n}", "func (u *GameServerUpsert) UpdateLastContactAt() *GameServerUpsert {\n\tu.SetExcluded(gameserver.FieldLastContactAt)\n\treturn u\n}", "func (contact *Contact) CreateContact(accountId uint) map[string]interface{} {\n\t// check for empty data\n\tif contact.PhoneNumber == \"\" || contact.Email == \"\" {\n\t\treturn utl.Message(102, \"the following fields are required: phone_number and email\")\n\t}\n\n\t// validate email\n\tif err := checkmail.ValidateFormat(contact.Email); err != nil {\n\t\treturn utl.Message(102, \"email address is not valid\")\n\t}\n\n\t// validate phone number\n\t// should not be less than 9 digits and more than 12 chars\n\t// accepted: 0712345678, 254712345678, 712345678\n\t// store: 712345678\n\tif len(contact.PhoneNumber) > 12 || len(contact.PhoneNumber) < 9 {\n\t\treturn utl.Message(102, \"enter a valid phone number, between 9 to 12 digits.\")\n\t}\n\n\tif strings.HasPrefix(contact.PhoneNumber, \"254\") {\n\t\tphoneNumber_ := contact.PhoneNumber[3:len(contact.PhoneNumber)]\n\t\tcontact.PhoneNumber = phoneNumber_\n\t}\n\n\tif strings.HasPrefix(contact.PhoneNumber, \"0\") {\n\t\tphoneNumber_ := 
contact.PhoneNumber[1:len(contact.PhoneNumber)]\n\t\tcontact.PhoneNumber = phoneNumber_\n\t}\n\n\t// save the contact in DB\n\tcontact.AccountID = accountId\n\tDBConnection.Table(\"contact\").Create(contact)\n\tif contact.ID <= 0 {\n\t\treturn utl.Message(105, \"failed to save contact, tyr again\")\n\t}\n\n\tresponse := utl.Message(0, \"contact has been created\")\n\tresponse[\"data\"] = contact\n\treturn response\n}", "func (client AccountClient) Update(ctx context.Context, resourceGroupName string, accountName string, body AccountResourcePatchDescription) (result AccountUpdateFuture, err error) {\n if tracing.IsEnabled() {\n ctx = tracing.StartSpan(ctx, fqdn + \"/AccountClient.Update\")\n defer func() {\n sc := -1\n if result.Response() != nil {\n sc = result.Response().StatusCode\n }\n tracing.EndSpan(ctx, sc, err)\n }()\n }\n req, err := client.UpdatePreparer(ctx, resourceGroupName, accountName, body)\n if err != nil {\n err = autorest.NewErrorWithError(err, \"microsoftazuremanagementaisupercomputer.AccountClient\", \"Update\", nil , \"Failure preparing request\")\n return\n }\n\n result, err = client.UpdateSender(req)\n if err != nil {\n err = autorest.NewErrorWithError(err, \"microsoftazuremanagementaisupercomputer.AccountClient\", \"Update\", result.Response(), \"Failure sending request\")\n return\n }\n\n return\n}", "func ParseUpdateContactResponse(rsp *http.Response) (*UpdateContactResponse, error) {\n\tbodyBytes, err := ioutil.ReadAll(rsp.Body)\n\tdefer rsp.Body.Close()\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\n\tresponse := &UpdateContactResponse{\n\t\tBody: bodyBytes,\n\t\tHTTPResponse: rsp,\n\t}\n\n\tswitch {\n\tcase strings.Contains(rsp.Header.Get(\"Content-Type\"), \"json\") && rsp.StatusCode == 200:\n\t\tvar dest ScalewayDomainV2alpha2Contact\n\t\tif err := json.Unmarshal(bodyBytes, &dest); err != nil {\n\t\t\treturn nil, err\n\t\t}\n\t\tresponse.JSON200 = &dest\n\n\t}\n\n\treturn response, nil\n}", "func (r *CustomersService) Update(customerId string, customer *Customer) *CustomersUpdateCall {\n\treturn &CustomersUpdateCall{\n\t\ts: r.s,\n\t\tcustomerId: customerId,\n\t\tcustomer: customer,\n\t\tcaller_: googleapi.JSONCall{},\n\t\tparams_: make(map[string][]string),\n\t\tpathTemplate_: \"customers/{customerId}\",\n\t\tcontext_: googleapi.NoContext,\n\t}\n}", "func (a *Client) DeleteContact(params *DeleteContactParams, authInfo runtime.ClientAuthInfoWriter) error {\n\t// TODO: Validate the params before sending\n\tif params == nil {\n\t\tparams = NewDeleteContactParams()\n\t}\n\n\t_, err := a.transport.Submit(&runtime.ClientOperation{\n\t\tID: \"deleteContact\",\n\t\tMethod: \"DELETE\",\n\t\tPathPattern: \"/contacts/{contactid}\",\n\t\tProducesMediaTypes: []string{\"application/json\"},\n\t\tConsumesMediaTypes: []string{\"application/json\"},\n\t\tSchemes: []string{\"https\"},\n\t\tParams: params,\n\t\tReader: &DeleteContactReader{formats: a.formats},\n\t\tAuthInfo: authInfo,\n\t\tContext: params.Context,\n\t\tClient: params.HTTPClient,\n\t})\n\tif err != nil {\n\t\treturn err\n\t}\n\treturn nil\n}", "func (*UsersController) AddContact(ctx *gin.Context) {\n\tcontactIn, err := GetParam(ctx, \"username\")\n\tif err != nil {\n\t\treturn\n\t}\n\tuser, err := PreCheckUser(ctx)\n\tif err != nil {\n\t\treturn\n\t}\n\n\tvar contact = tat.User{}\n\tfound, err := userDB.FindByUsername(&contact, contactIn)\n\tif !found {\n\t\tAbortWithReturnError(ctx, http.StatusBadRequest, fmt.Errorf(\"user with username %s does not exist\", contactIn))\n\t\treturn\n\t} else if err != nil 
{\n\t\tAbortWithReturnError(ctx, http.StatusInternalServerError, fmt.Errorf(\"Error while fetching user with username %s\", contactIn))\n\t\treturn\n\t}\n\n\tif err := userDB.AddContact(&user, contact.Username, contact.Fullname); err != nil {\n\t\tAbortWithReturnError(ctx, http.StatusInternalServerError, fmt.Errorf(\"Error while add contact %s to user:%s\", contact.Username, user.Username))\n\t\treturn\n\t}\n\tctx.JSON(http.StatusCreated, \"\")\n}", "func UpdateCustomer(ctx context.Context, customerClient proto.CustomerSrvService, cust *proto.Customer) (*proto.Customer, error) {\n\t_, validThru := timeStringToTimestamp(\"2021-06-26\")\n\n\tcust.Name = \"Just Ok Customer\"\n\tcust.ValidityDates.ValidFrom = ptypes.TimestampNow()\n\tcust.ValidityDates.ValidThru = validThru\n\tcust.Modifications.UpdateDate = ptypes.TimestampNow()\n\tcust.Modifications.ModifiedBy = \"3308341401806443521\"\n\n\tresp, err := customerClient.UpdateCustomer(ctx, cust)\n\n\tif err != nil {\n\t\tlog.Printf(\"Unable to update customer. Error: %v\", err)\n\t\treturn nil, err\n\t}\n\tfmt.Printf(\"Updated customer %v\\n\", resp.GetCustomer())\n\n\tif len(resp.GetValidationErr().GetFailureDesc()) > 0 {\n\t\tfmt.Printf(\"Update customer validations %v\\n\", resp.GetValidationErr().GetFailureDesc())\n\t}\n\n\treturn resp.GetCustomer(), nil\n}", "func createContact(w http.ResponseWriter, r *http.Request) {\n\tw.Header().Set(\"Content-Type\", \"application/json\")\n\n\tvar contact schema.Contact\n\n\t// we decode our body request params\n\t_ = json.NewDecoder(r.Body).Decode(&contact)\n\n\tresult, err := contacts.InsertOne(context.TODO(), contact)\n\n\tif err != nil {\n\t\tlog.Fatal(err)\n\t}\n\n\tjson.NewEncoder(w).Encode(result)\n}", "func (h *campaignHandler) UpdateCampaign(c *gin.Context) {\n\tvar inputID campaign.GetCampaignDetailInput\n\n\terr := c.ShouldBindUri(&inputID)\n\tif err != nil {\n\t\tresponse := helper.APIResponse(\"Failed to update campaign ID\", http.StatusBadRequest, \"error\", nil)\n\t\tc.JSON(http.StatusBadRequest, response)\n\t\treturn\n\t}\n\n\tvar inputData campaign.CreateCampaignInput\n\n\terr = c.ShouldBindJSON(&inputData)\n\tif err != nil {\n\t\terrors := helper.FormatValidationError(err)\n\n\t\terrorMessage := gin.H{\"errors\": errors}\n\t\tresponse := helper.APIResponse(\"Failed to update campaign Data\", http.StatusBadRequest, \"error\", errorMessage)\n\t\tc.JSON(http.StatusBadRequest, response)\n\t\treturn\n\t}\n\n\tcurrentUser := c.MustGet(\"currentUser\").(user.User)\n\n\tinputData.User = currentUser\n\n\tupdatedCampaign, err := h.service.UpdateCampaign(inputID, inputData)\n\tif err != nil {\n\t\tresponse := helper.APIResponse(\"Failed to update campaign Service\", http.StatusBadRequest, \"error\", nil)\n\t\tc.JSON(http.StatusBadRequest, response)\n\t\treturn\n\t}\n\n\tresponse := helper.APIResponse(\"Success to update campaign\", http.StatusOK, \"success\", campaign.FormatCampaign(updatedCampaign))\n\tc.JSON(http.StatusOK, response)\n}", "func Update(w http.ResponseWriter, r *http.Request) {\n\n\t// Read the request body\n\tbody, err := ioutil.ReadAll(r.Body)\n\tif err != nil {\n\t\thttp.Error(w, fmt.Sprintf(\"failed to read request: %s\", err), http.StatusInternalServerError)\n\t\treturn\n\t}\n\t//fmt.Printf(\"Body: %s\\n\", body)\n\n\t// Unmarshal request body\n\tbytes := []byte(string(body))\n\tvar registration Registration\n\terr = json.Unmarshal(bytes, &registration)\n\tif err != nil {\n\t\thttp.Error(w, fmt.Sprintf(\"cannot unmarshal JSON input: %s\", err), 
http.StatusInternalServerError)\n\t\treturn\n\t}\n\n\t// Get a Firestore client.\n\tctx := context.Background()\n\tclient, err := firestore.NewClient(ctx, projectID)\n\tif err != nil {\n\t\thttp.Error(w, fmt.Sprintf(\"failed to create Firestore client: %s\", err), http.StatusInternalServerError)\n\t\treturn\n\t}\n\n\t// Close client when done.\n\tdefer func() {\n\t\t_ = client.Close() // ignore error\n\t}()\n\n\t// Get document to be updated\n\tdocRef := client.Doc(fmt.Sprintf(\"contacts/%v\", registration.ReporterID))\n\tif docRef == nil {\n\t\thttp.Error(w, fmt.Sprintf(\"failed to get Firestore document %q: %s\", registration.ReporterID, err), http.StatusInternalServerError)\n\t\treturn\n\t}\n\n\t// Update \"contagious\" as bool\n\t_, err = docRef.Update(ctx, []firestore.Update{\n\t\t{Path: \"contagious\", Value: registration.Contagious},\n\t})\n\tif err != nil {\n\t\thttp.Error(w, fmt.Sprintf(\"failed to update %q: %s\", \"contagious\", err), http.StatusInternalServerError)\n\t\treturn\n\t}\n\n\t// Set \"time-contagion-updated\" to current timestamp, if empty\n\tif registration.timeContagionUpdated.IsZero() {\n\t\tregistration.timeContagionUpdated = time.Now()\n\t}\n\n\t// Update \"time-contagion-updated\" as timestamp\n\t_, err = docRef.Update(ctx, []firestore.Update{\n\t\t{Path: \"time-contagion-updated\", Value: registration.timeContagionUpdated},\n\t})\n\tif err != nil {\n\t\thttp.Error(w, fmt.Sprintf(\"failed to update %q: %s\", \"time-contagion-updated\", err), http.StatusInternalServerError)\n\t\treturn\n\t}\n\n\t// Marshall updated data\n\tregistrationJson, err := json.Marshal(registration)\n\tif err != nil {\n\t\tlog.Fatal(err)\n\t}\n\t//fmt.Println(string(registrationJson))\n\n\t// Response updated data\n\t_, err = fmt.Fprint(w, string(registrationJson))\n\tif err != nil {\n\t\thttp.Error(w, fmt.Sprintf(\"failed to write response: %s\", err), http.StatusInternalServerError)\n\t}\n\treturn\n}", "func (a *Client) SafeContactInfo(params *SafeContactInfoParams) (*SafeContactInfoOK, error) {\n\t// TODO: Validate the params before sending\n\tif params == nil {\n\t\tparams = NewSafeContactInfoParams()\n\t}\n\n\tresult, err := a.transport.Submit(&runtime.ClientOperation{\n\t\tID: \"safeContactInfo\",\n\t\tMethod: \"GET\",\n\t\tPathPattern: \"/domainSafeContact/{id}\",\n\t\tProducesMediaTypes: []string{\"application/json\"},\n\t\tConsumesMediaTypes: []string{\"application/json\"},\n\t\tSchemes: []string{\"https\"},\n\t\tParams: params,\n\t\tReader: &SafeContactInfoReader{formats: a.formats},\n\t\tContext: params.Context,\n\t\tClient: params.HTTPClient,\n\t})\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\tsuccess, ok := result.(*SafeContactInfoOK)\n\tif ok {\n\t\treturn success, nil\n\t}\n\t// unexpected success response\n\t// safeguard: normally, absent a default response, unknown success responses return an error above: so this is a codegen issue\n\tmsg := fmt.Sprintf(\"unexpected success response for safeContactInfo: API contract not enforced by server. 
Client expected to get an error, but got: %T\", result)\n\tpanic(msg)\n}", "func (r ApiUpdateCustomerShippingContactsRequest) CustomerUpdateShippingContacts(customerUpdateShippingContacts CustomerUpdateShippingContacts) ApiUpdateCustomerShippingContactsRequest {\n\tr.customerUpdateShippingContacts = &customerUpdateShippingContacts\n\treturn r\n}", "func (c *Client) UpdatePerson(\n\tpersonGroupId string,\n\tpersonId string,\n\tname string,\n\tuserData string,\n) (err error) {\n\treturn cognitive.FaceUpdatePerson(\n\t\tc.Location,\n\t\tc.ApiKey,\n\t\tpersonGroupId,\n\t\tpersonId,\n\t\tname,\n\t\tuserData,\n\t)\n}", "func (prod *Customer) UpdateCustomer(ctx context.Context, req *custProto.CustomerUpdateRequest) (*custProto.CustomerUpdateResponse, error) {\n\n\tlog.Printf(\" UpdateProduct Method - Started\")\n\n\tsession := dbconn.SessionSetUp()\n\n\t//log.Println(\" product request :- \", req)\n\n\terr := session.Query(\"update customer set mobile = ? where cust_id = ?\", req.Mobile, req.CustId).Exec()\n\tif err != nil {\n\t\tlog.Println(\" Error while updating product record in the repository\")\n\t\tlog.Println(err)\n\t}\n\n\tresponse := &custProto.CustomerUpdateResponse{Updated: true}\n\n\tlog.Printf(\" UpdateProduct Method - Ended\")\n\n\treturn response, nil\n}", "func (m *DevelopperMutation) SetContactID(id int) {\n\tm.contact = &id\n}", "func (sdk *Sdk) UpdateCampaign(id string, body *CampaignUpdateBody) (string, error) {\n\tsdkC := sdk.cms\n\tendpoint := fmt.Sprintf(\"/campaigns/%s\", id)\n\n\treturn sdkC.rq.PutJSON(endpoint, body)\n}", "func (a *Client) UpdateCustomer(params *UpdateCustomerParams, authInfo runtime.ClientAuthInfoWriter, opts ...ClientOption) (*UpdateCustomerOK, error) {\n\t// TODO: Validate the params before sending\n\tif params == nil {\n\t\tparams = NewUpdateCustomerParams()\n\t}\n\top := &runtime.ClientOperation{\n\t\tID: \"UpdateCustomer\",\n\t\tMethod: \"PUT\",\n\t\tPathPattern: \"/v2/customers/{customer_id}\",\n\t\tProducesMediaTypes: []string{\"application/json\"},\n\t\tConsumesMediaTypes: []string{\"application/json\"},\n\t\tSchemes: []string{\"https\"},\n\t\tParams: params,\n\t\tReader: &UpdateCustomerReader{formats: a.formats},\n\t\tAuthInfo: authInfo,\n\t\tContext: params.Context,\n\t\tClient: params.HTTPClient,\n\t}\n\tfor _, opt := range opts {\n\t\topt(op)\n\t}\n\n\tresult, err := a.transport.Submit(op)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\tsuccess, ok := result.(*UpdateCustomerOK)\n\tif ok {\n\t\treturn success, nil\n\t}\n\t// unexpected success response\n\t// safeguard: normally, absent a default response, unknown success responses return an error above: so this is a codegen issue\n\tmsg := fmt.Sprintf(\"unexpected success response for UpdateCustomer: API contract not enforced by server. 
Client expected to get an error, but got: %T\", result)\n\tpanic(msg)\n}", "func Contact(id string, token string) (ContactReturn, error) {\n\n\t// Set config for new request\n\tr := Request{\"/contacts/\" + id, \"GET\", token, nil}\n\n\t// Send new request\n\tresponse, err := r.Send()\n\tif err != nil {\n\t\treturn ContactReturn{}, err\n\t}\n\n\t// Close response body after function ends\n\tdefer response.Body.Close()\n\n\t// Decode data\n\tvar decode ContactReturn\n\n\terr = json.NewDecoder(response.Body).Decode(&decode)\n\tif err != nil {\n\t\treturn ContactReturn{}, err\n\t}\n\n\t// Return data\n\treturn decode, nil\n\n}", "func NewCfnContact_Override(c CfnContact, scope awscdk.Construct, id *string, props *CfnContactProps) {\n\t_init_.Initialize()\n\n\t_jsii_.Create(\n\t\t\"monocdk.aws_ssmcontacts.CfnContact\",\n\t\t[]interface{}{scope, id, props},\n\t\tc,\n\t)\n}", "func (o *UpdateContactMethodParams) WriteToRequest(r runtime.ClientRequest, reg strfmt.Registry) error {\n\n\tif err := r.SetTimeout(o.timeout); err != nil {\n\t\treturn err\n\t}\n\tvar res []error\n\n\t// path param contact_method_id\n\tif err := r.SetPathParam(\"contact_method_id\", o.ContactMethodID); err != nil {\n\t\treturn err\n\t}\n\n\t// path param id\n\tif err := r.SetPathParam(\"id\", o.ID); err != nil {\n\t\treturn err\n\t}\n\n\tif o.User != nil {\n\t\tif err := r.SetBodyParam(o.User); err != nil {\n\t\t\treturn err\n\t\t}\n\t}\n\n\tif len(res) > 0 {\n\t\treturn errors.CompositeValidationError(res...)\n\t}\n\treturn nil\n}", "func UpdatePerson(c *gin.Context) {\n // Get the person to be updated\n var person models.Person\n if err := models.DB.First(&person, \"id = ?\", c.Param(\"id\")).Error; err != nil {\n c.JSON(http.StatusBadRequest, gin.H{\"error\": err.Error()})\n return\n }\n\n // Validate input\n var input UpdatePersonInput\n if err := c.ShouldBindJSON(&input); err != nil {\n c.JSON(http.StatusBadRequest, gin.H{\"error\": err.Error()})\n return\n }\n\n models.DB.Model(&person).Updates(input)\n\n c.JSON(http.StatusOK, gin.H{\"data\": person})\n}" ]
[ "0.80027974", "0.7936315", "0.78863955", "0.78558725", "0.7480933", "0.74498266", "0.72730446", "0.7260066", "0.72459537", "0.720468", "0.7017452", "0.6891905", "0.6837758", "0.675005", "0.66673815", "0.6621675", "0.6616846", "0.6556992", "0.6531903", "0.64473265", "0.63968956", "0.63299507", "0.6258559", "0.6240635", "0.61946785", "0.6161515", "0.6143053", "0.613945", "0.6131891", "0.6116912", "0.6062296", "0.5982182", "0.59604007", "0.59417886", "0.58126456", "0.5795379", "0.57923216", "0.578202", "0.57580674", "0.5730902", "0.5690687", "0.56656927", "0.5644735", "0.5635275", "0.56226116", "0.5617015", "0.5613034", "0.55888826", "0.5564775", "0.5562624", "0.5556846", "0.55536354", "0.551167", "0.55099523", "0.5472245", "0.5466062", "0.54654086", "0.5432967", "0.5424698", "0.5414315", "0.539797", "0.5390724", "0.53882176", "0.53702796", "0.5349356", "0.53452015", "0.53348607", "0.5330495", "0.5328269", "0.52989674", "0.5291908", "0.52899015", "0.52777535", "0.5266215", "0.5253278", "0.5239431", "0.5230804", "0.5228584", "0.52111226", "0.5207245", "0.5203903", "0.5194542", "0.5180205", "0.5168899", "0.5168828", "0.51622874", "0.5150694", "0.51425904", "0.5123929", "0.512179", "0.5115732", "0.5114383", "0.5106758", "0.50910085", "0.50758123", "0.5073761", "0.5071138", "0.5065356", "0.5064925", "0.50643545" ]
0.861424
0
Validate validates this sovren resume bimetric match request
func (m *SovrenResumeBimetricMatchRequest) Validate(formats strfmt.Registry) error {
	var res []error

	if err := m.validatePreferredCategoryWeights(formats); err != nil {
		res = append(res, err)
	}

	if err := m.validateSettings(formats); err != nil {
		res = append(res, err)
	}

	if err := m.validateSourceResume(formats); err != nil {
		res = append(res, err)
	}

	if err := m.validateTargetJobs(formats); err != nil {
		res = append(res, err)
	}

	if err := m.validateTargetResumes(formats); err != nil {
		res = append(res, err)
	}

	if len(res) > 0 {
		return errors.CompositeValidationError(res...)
	}
	return nil
}
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "func (a *BookAppointment) VaccineCenterValidation() *apierrors.RestErr{\r\n\r\n\t//Loop to check the booking VC in available VC array\r\n\tfor _, value := range AvailableVaccinationCenter {\r\n\t\tif a.UserVC == value {\r\n\t\t\treturn nil\r\n\t\t}\r\n\t}\r\n\r\n\treturn apierrors.NewBadRequestError(\r\n\t\tfmt.Sprintf(\"Vaccination is not available %s,Please choose between 1.Nungambakkam 2.Tambaram 3.Velachery 4.Shozhinganallur\",a.UserVC))\r\n\r\n}", "func (mt *EasypostScanform) Validate() (err error) {\n\tif mt.Address != nil {\n\t\tif err2 := mt.Address.Validate(); err2 != nil {\n\t\t\terr = goa.MergeErrors(err, err2)\n\t\t}\n\t}\n\tif mt.ID != nil {\n\t\tif ok := goa.ValidatePattern(`^sf_`, *mt.ID); !ok {\n\t\t\terr = goa.MergeErrors(err, goa.InvalidPatternError(`response.id`, *mt.ID, `^sf_`))\n\t\t}\n\t}\n\tif ok := goa.ValidatePattern(`^ScanForm$`, mt.Object); !ok {\n\t\terr = goa.MergeErrors(err, goa.InvalidPatternError(`response.object`, mt.Object, `^ScanForm$`))\n\t}\n\tif mt.Status != nil {\n\t\tif !(*mt.Status == \"creating\" || *mt.Status == \"created\" || *mt.Status == \"failed\") {\n\t\t\terr = goa.MergeErrors(err, goa.InvalidEnumValueError(`response.status`, *mt.Status, []interface{}{\"creating\", \"created\", \"failed\"}))\n\t\t}\n\t}\n\treturn\n}", "func (m *SovrenResumeBimetricMatchRequest) ContextValidate(ctx context.Context, formats strfmt.Registry) error {\n\tvar res []error\n\n\tif err := m.contextValidatePreferredCategoryWeights(ctx, formats); err != nil {\n\t\tres = append(res, err)\n\t}\n\n\tif err := m.contextValidateSettings(ctx, formats); err != nil {\n\t\tres = append(res, err)\n\t}\n\n\tif err := m.contextValidateSourceResume(ctx, formats); err != nil {\n\t\tres = append(res, err)\n\t}\n\n\tif err := m.contextValidateTargetJobs(ctx, formats); err != nil {\n\t\tres = append(res, err)\n\t}\n\n\tif err := m.contextValidateTargetResumes(ctx, formats); err != nil {\n\t\tres = append(res, err)\n\t}\n\n\tif len(res) > 0 {\n\t\treturn errors.CompositeValidationError(res...)\n\t}\n\treturn nil\n}", "func (m *VerifyCVVRequest) Validate() error {\n\tif m == nil {\n\t\treturn nil\n\t}\n\n\t// no validation rules for AccessToken\n\n\t// no validation rules for CardId\n\n\t// no validation rules for Cvv\n\n\treturn nil\n}", "func (o *Virtualserver) validate(dbRecord *common.DbRecord) (ok bool, err error) {\n\t////////////////////////////////////////////////////////////////////////////\n\t// Marshal data interface.\n\t////////////////////////////////////////////////////////////////////////////\n\tvar data virtualserver.Data\n\terr = shared.MarshalInterface(dbRecord.Data, &data)\n\tif err != nil {\n\t\treturn\n\t}\n\t////////////////////////////////////////////////////////////////////////////\n\t// Test required fields.\n\t////////////////////////////////////////////////////////////////////////////\n\tok = true\n\trequired := make(map[string]bool)\n\trequired[\"ProductCode\"] = false\n\trequired[\"IP\"] = false\n\trequired[\"Port\"] = false\n\trequired[\"LoadBalancerIP\"] = false\n\trequired[\"Name\"] = false\n\t////////////////////////////////////////////////////////////////////////////\n\tif data.ProductCode != 0 {\n\t\trequired[\"ProductCode\"] = true\n\t}\n\tif len(dbRecord.LoadBalancerIP) > 0 {\n\t\trequired[\"LoadBalancerIP\"] = true\n\t}\n\tif len(data.Ports) != 0 {\n\t\trequired[\"Port\"] = true\n\t}\n\tif data.IP != \"\" {\n\t\trequired[\"IP\"] = true\n\t}\n\tif data.Name != \"\" {\n\t\trequired[\"Name\"] = true\n\t}\n\tfor _, val := range required {\n\t\tif val 
== false {\n\t\t\tok = false\n\t\t}\n\t}\n\tif !ok {\n\t\terr = fmt.Errorf(\"missing required fields - %+v\", required)\n\t}\n\treturn\n}", "func (r *DownloadDiffRequest) Validate() error {\n\tif err := requireProject(r.GetProject()); err != nil {\n\t\treturn err\n\t}\n\tif err := requireCommittish(\"committish\", r.GetCommittish()); err != nil {\n\t\treturn err\n\t}\n\tif base := r.GetBase(); base != \"\" {\n\t\tif err := requireCommittish(\"base\", base); err != nil {\n\t\t\treturn err\n\t\t}\n\t}\n\tif strings.HasPrefix(r.Path, \"/\") {\n\t\treturn errors.New(\"path must not start with /\")\n\t}\n\treturn nil\n}", "func (mt *EasypostParcel) Validate() (err error) {\n\tif mt.ID == \"\" {\n\t\terr = goa.MergeErrors(err, goa.MissingAttributeError(`response`, \"id\"))\n\t}\n\tif mt.Object == \"\" {\n\t\terr = goa.MergeErrors(err, goa.MissingAttributeError(`response`, \"object\"))\n\t}\n\n\tif ok := goa.ValidatePattern(`^prcl_`, mt.ID); !ok {\n\t\terr = goa.MergeErrors(err, goa.InvalidPatternError(`response.id`, mt.ID, `^prcl_`))\n\t}\n\tif !(mt.Mode == \"test\" || mt.Mode == \"production\") {\n\t\terr = goa.MergeErrors(err, goa.InvalidEnumValueError(`response.mode`, mt.Mode, []interface{}{\"test\", \"production\"}))\n\t}\n\tif ok := goa.ValidatePattern(`^Parcel$`, mt.Object); !ok {\n\t\terr = goa.MergeErrors(err, goa.InvalidPatternError(`response.object`, mt.Object, `^Parcel$`))\n\t}\n\treturn\n}", "func (lm MatchReleaseLimiter) validate() error {\n\tif lm.QPS <= 0 {\n\t\treturn errors.New(\"invalid matchReleaseLimiter.qps value, should >= 1\")\n\t}\n\n\tif lm.Burst <= 0 {\n\t\treturn errors.New(\"invalid matchReleaseLimiter.burst value, should >= 1\")\n\t}\n\n\tif lm.WaitTimeMil <= 0 {\n\t\treturn errors.New(\"invalid matchReleaseLimiter.waitTimeMil value, should >= 1\")\n\t}\n\n\treturn nil\n}", "func (r *createRequest) Validate() *validation.Output {\n\to := &validation.Output{Valid: true}\n\tvar err error\n\tvar PartnerID int64\n\tvar partner *model.Partnership\n\tvar sorder *model.SalesOrder\n\tvar code string\n\n\t// cek partner id ada atau tidak\n\tPartnerID, err = common.Decrypt(r.CustomerID)\n\tif err != nil {\n\t\to.Failure(\"customer_id.invalid\", \"Partnership id is invalid\")\n\t}\n\n\t// cek partner ecist or not\n\tpartner, err = partnership.GetPartnershipByField(\"id\", PartnerID)\n\n\tif err != nil || partner == nil || partner.IsDeleted == 1 || partner.IsArchived == 1 {\n\t\to.Failure(\"customer_id.invalid\", \"Partnership id is not found\")\n\t} else {\n\t\tcode, _ = CodeGen(partner.IsDefault == 1)\n\n\t\tif partner.PartnershipType != \"customer\" {\n\t\t\to.Failure(\"customer_id\", \"Customer needed to have partner type customer not supplier\")\n\t\t}\n\n\t\tif partner.IsDefault == int8(1) {\n\t\t\tif r.AutoFullfilment == int8(0) {\n\t\t\t\to.Failure(\"auto_fullfilment\", \"auto fullfilment need to be filled if Customer is walk in\")\n\t\t\t}\n\t\t\tif r.AutoInvoice == int8(0) {\n\t\t\t\to.Failure(\"auto_invoice\", \"auto invoice need to be filled if Customer is walk in\")\n\t\t\t}\n\t\t\tif r.EtaDate.IsZero() {\n\t\t\t\to.Failure(\"eta_date\", \"ETA Date need to be filled if Customer is walk in\")\n\t\t\t}\n\t\t} else {\n\t\t\tif r.ShipmentAddress == \"\" {\n\t\t\t\to.Failure(\"shipment_address\", \"Shipment address is required\")\n\t\t\t}\n\t\t}\n\n\t\tif partner.OrderRule == \"one_bill\" {\n\t\t\tvar soContainer *model.SalesOrder\n\t\t\torm.NewOrm().Raw(\"SELECT * FROM sales_order WHERE customer_id = ? 
AND document_status = 'new' OR document_status = 'active' AND invoice_status = 'active';\", PartnerID).QueryRow(&soContainer)\n\t\t\tif soContainer != nil {\n\t\t\t\to.Failure(\"customer_id\", \"Partner still have unfinished invoice\")\n\t\t\t}\n\t\t} else if partner.OrderRule == \"plafon\" {\n\t\t\tcurrent := partner.TotalDebt + r.TotalCharge\n\t\t\tif current >= partner.MaxPlafon {\n\t\t\t\to.Failure(\"customer_id\", \"Partnership has already reached given max plafon\")\n\t\t\t}\n\t\t}\n\t}\n\n\tif r.IsPaid == 1 {\n\t\tif r.AutoInvoice != 1 {\n\t\t\to.Failure(\"auto_invoice\", \"Auto invoice must be checked if Auto Paid is checked\")\n\t\t}\n\t}\n\n\tso := &model.SalesOrder{Code: code}\n\n\tif err := so.Read(\"Code\"); err == nil {\n\t\to.Failure(\"code\", \"Code sales order is already being used\")\n\t} else {\n\t\tr.Code = code\n\t}\n\n\ttz := time.Time{}\n\tif r.EtaDate == tz {\n\t\to.Failure(\"eta_date\", \"Field is required.\")\n\t}\n\n\t// cek status reference id\n\tif r.ReferencesID != \"\" {\n\t\trefID, err := common.Decrypt(r.ReferencesID)\n\t\tif err != nil {\n\t\t\to.Failure(\"references_id\", \"References id is not valid\")\n\t\t}\n\t\tvar emptyLoad []string\n\t\tsorder, err = GetDetailSalesOrder(refID, emptyLoad)\n\t\tif err != nil {\n\t\t\to.Failure(\"references_id\", \"References is not found\")\n\t\t} else {\n\t\t\tif sorder.DocumentStatus != \"approved_cancel\" {\n\t\t\t\to.Failure(\"references_id\", \"References document status is not cancel\")\n\t\t\t}\n\t\t}\n\t}\n\n\tcheckDuplicate := make(map[string]bool)\n\tvar checkVariant = make(map[int64]*model.ItemVariant)\n\n\tfor _, row := range r.SalesOrderItem {\n\t\tIVariantID, _ := common.Decrypt(row.ItemVariantID)\n\t\tivar := &model.ItemVariant{ID: IVariantID}\n\t\tivar.Read(\"ID\")\n\t\t////////////////////////////////\n\t\tif checkVariant[ivar.ID] == nil {\n\t\t\tivar.CommitedStock -= row.Quantity\n\t\t\tcheckVariant[ivar.ID] = ivar\n\t\t} else {\n\t\t\tvariant := checkVariant[ivar.ID]\n\t\t\tvariant.CommitedStock -= row.Quantity\n\t\t\tcheckVariant[ivar.ID] = variant\n\t\t}\n\t\t////////////////////////////////\n\t}\n\n\t// cek setiap sales order item\n\tfor i, row := range r.SalesOrderItem {\n\t\tvar UnitA float64\n\t\tvar PricingID, IVariantID int64\n\t\tvar ItemVariant *model.ItemVariant\n\t\tvar IVPrice *model.ItemVariantPrice\n\t\t// cek item variant,pricing type dan item variant price\n\t\tPricingID, err = common.Decrypt(row.PricingType)\n\n\t\tif err != nil {\n\t\t\to.Failure(fmt.Sprintf(\"sales_order_item.%d.pricing_type.invalid\", i), \"Pricing type id is invalid\")\n\t\t}\n\n\t\tIVariantID, err = common.Decrypt(row.ItemVariantID)\n\t\tif err != nil {\n\t\t\to.Failure(fmt.Sprintf(\"sales_order_item.%d.item_variant.invalid\", i), \"Item Variant id is invalid\")\n\t\t}\n\n\t\tvar pt = &model.PricingType{ID: PricingID}\n\t\tpt.Read(\"ID\")\n\t\tvar iv = &model.ItemVariant{ID: IVariantID}\n\t\tiv.Read(\"ID\")\n\n\t\tIVPrice, err = getItemVariantPricing(pt, iv)\n\t\tif err == nil {\n\t\t\tif pt.ParentType != nil {\n\t\t\t\tif pt.RuleType == \"increment\" {\n\t\t\t\t\tif pt.IsPercentage == int8(1) {\n\t\t\t\t\t\ttemp := (pt.Nominal * IVPrice.UnitPrice) / float64(100)\n\t\t\t\t\t\tUnitA = IVPrice.UnitPrice + temp\n\t\t\t\t\t} else {\n\t\t\t\t\t\tUnitA = IVPrice.UnitPrice + pt.Nominal\n\t\t\t\t\t}\n\n\t\t\t\t\tif row.UnitPrice < UnitA {\n\t\t\t\t\t\to.Failure(fmt.Sprintf(\"sales_order_item.%d.unit_price.invalid\", i), \"Unit price is too small\")\n\t\t\t\t\t}\n\t\t\t\t} else {\n\t\t\t\t\tif pt.IsPercentage == 
int8(1) {\n\n\t\t\t\t\t\ttemp := (pt.Nominal * IVPrice.UnitPrice) / float64(100)\n\t\t\t\t\t\tUnitA = IVPrice.UnitPrice - temp\n\t\t\t\t\t} else {\n\t\t\t\t\t\tUnitA = IVPrice.UnitPrice - pt.Nominal\n\t\t\t\t\t}\n\n\t\t\t\t\tif UnitA < 0 {\n\t\t\t\t\t\to.Failure(fmt.Sprintf(\"sales_order_item.%d.pricing_type.invalid\", i), \"Pricing type can make price become zero\")\n\t\t\t\t\t}\n\t\t\t\t\tif row.UnitPrice < UnitA {\n\t\t\t\t\t\to.Failure(fmt.Sprintf(\"sales_order_item.%d.unit_price.invalid\", i), \"Unit price is too small\")\n\t\t\t\t\t}\n\t\t\t\t}\n\t\t\t} else {\n\t\t\t\tif row.UnitPrice < IVPrice.UnitPrice {\n\t\t\t\t\to.Failure(fmt.Sprintf(\"sales_order_item.%d.unit_price.invalid\", i), \"Unit price is too small\")\n\t\t\t\t}\n\t\t\t}\n\t\t} else {\n\t\t\to.Failure(fmt.Sprintf(\"sales_order_item.%d.unit_price.invalid\", i), \"item variant price doesn't exist\")\n\t\t}\n\n\t\tItemVariant, err = inventory.GetDetailItemVariant(\"id\", IVariantID)\n\t\tif err != nil || ItemVariant == nil || ItemVariant.IsDeleted == int8(1) || ItemVariant.IsArchived == int8(1) {\n\t\t\to.Failure(fmt.Sprintf(\"sales_order_item.%d.item_variant_id.invalid\", i), \"Item variant id not found\")\n\t\t} else {\n\n\t\t\t// cek stock dari item variant sama quantity soi\n\t\t\tif (checkVariant[ItemVariant.ID].AvailableStock - ItemVariant.CommitedStock) < row.Quantity {\n\t\t\t\to.Failure(fmt.Sprintf(\"sales_order_item.%d.quantity.invalid\", i), \"Stock item is not enough to be sold\")\n\t\t\t}\n\n\t\t\t//check duplicate item variant id\n\t\t\tif checkDuplicate[row.ItemVariantID] == true {\n\t\t\t\to.Failure(fmt.Sprintf(\"sales_order_item.%d.item_variant_id.invalid\", i), \" item variant id duplicate\")\n\t\t\t} else {\n\t\t\t\tcheckDuplicate[row.ItemVariantID] = true\n\t\t\t}\n\t\t}\n\n\t\tdiscamount := (row.UnitPrice * float64(row.Discount)) / float64(100)\n\t\tcuramount := row.UnitPrice * float64(row.Quantity)\n\t\tsubtotal := common.FloatPrecision(curamount-discamount, 0)\n\n\t\tr.TotalPrice += subtotal\n\t}\n\n\tif r.IsPercentageDiscount == int8(1) {\n\t\tif r.Discount < 0 || r.Discount > float32(100) {\n\t\t\to.Failure(\"discount\", \"discount is less than and equal 0 or greater than 100\")\n\t\t}\n\t}\n\n\treturn o\n}", "func (mt *EasypostInsurance) Validate() (err error) {\n\tif mt.ID == \"\" {\n\t\terr = goa.MergeErrors(err, goa.MissingAttributeError(`response`, \"id\"))\n\t}\n\tif mt.Object == \"\" {\n\t\terr = goa.MergeErrors(err, goa.MissingAttributeError(`response`, \"object\"))\n\t}\n\n\tif mt.Fee != nil {\n\t\tif err2 := mt.Fee.Validate(); err2 != nil {\n\t\t\terr = goa.MergeErrors(err, err2)\n\t\t}\n\t}\n\tif mt.FromAddress != nil {\n\t\tif err2 := mt.FromAddress.Validate(); err2 != nil {\n\t\t\terr = goa.MergeErrors(err, err2)\n\t\t}\n\t}\n\tif ok := goa.ValidatePattern(`^ins_`, mt.ID); !ok {\n\t\terr = goa.MergeErrors(err, goa.InvalidPatternError(`response.id`, mt.ID, `^ins_`))\n\t}\n\tif !(mt.Mode == \"test\" || mt.Mode == \"production\") {\n\t\terr = goa.MergeErrors(err, goa.InvalidEnumValueError(`response.mode`, mt.Mode, []interface{}{\"test\", \"production\"}))\n\t}\n\tif ok := goa.ValidatePattern(`^Insurance$`, mt.Object); !ok {\n\t\terr = goa.MergeErrors(err, goa.InvalidPatternError(`response.object`, mt.Object, `^Insurance$`))\n\t}\n\tif mt.Status != nil {\n\t\tif !(*mt.Status == \"cancelled\" || *mt.Status == \"failed\" || *mt.Status == \"purchased\" || *mt.Status == \"pending\" || *mt.Status == \"new\") {\n\t\t\terr = goa.MergeErrors(err, goa.InvalidEnumValueError(`response.status`, 
*mt.Status, []interface{}{\"cancelled\", \"failed\", \"purchased\", \"pending\", \"new\"}))\n\t\t}\n\t}\n\tif mt.ToAddress != nil {\n\t\tif err2 := mt.ToAddress.Validate(); err2 != nil {\n\t\t\terr = goa.MergeErrors(err, err2)\n\t\t}\n\t}\n\tif mt.Tracker != nil {\n\t\tif err2 := mt.Tracker.Validate(); err2 != nil {\n\t\t\terr = goa.MergeErrors(err, err2)\n\t\t}\n\t}\n\treturn\n}", "func (r *DownloadFileRequest) Validate() error {\n\tif err := requireProject(r.GetProject()); err != nil {\n\t\treturn err\n\t}\n\tif err := requireCommittish(\"committish\", r.GetCommittish()); err != nil {\n\t\treturn err\n\t}\n\tif strings.HasPrefix(r.Path, \"/\") {\n\t\treturn errors.New(\"path must not start with /\")\n\t}\n\treturn nil\n}", "func (v GetApiHostnameCoverageMatchTargetsRequest) Validate() error {\n\treturn validation.Errors{\n\t\t\"ConfigID\": validation.Validate(v.ConfigID, validation.Required),\n\t\t\"Version\": validation.Validate(v.Version, validation.Required),\n\t}.Filter()\n}", "func (r SyncMailRequest) Validate() error {\n\tif r.Limit == 0 {\n\t\treturn errors.New(\"invalid 'Limit' value, expected value greater than 0\")\n\t}\n\n\tif r.Limit > MaxLimitInSyncMailRequest {\n\t\treturn fmt.Errorf(\"invalid 'Limit' value, expected value lower than %d\", MaxLimitInSyncMailRequest)\n\t}\n\n\tif r.Lower > r.Upper {\n\t\treturn errors.New(\"invalid 'Lower' value, can't be greater than 'Upper'\")\n\t}\n\n\treturn nil\n}", "func (a *ADKProver) Valid(proof uint32) error {\n\tp, err := a.Proof()\n\tif err != nil {\n\t\treturn errors.Wrap(err, \"proof generation\")\n\t}\n\n\tif p != proof {\n\t\treturn ErrADKProofMismatch\n\t}\n\n\treturn nil\n}", "func (er *AuthECRecoveryRequest) Validate() error {\n\t// TODO: brush up the validiation e.g. byte string should begin with 0x or so...\n\tif len(er.SigR) != 32 {\n\t\treturn errors.New(\"sig_r's length should be 32\")\n\t}\n\tif len(er.SigS) != 32 {\n\t\treturn errors.New(\"sig_s's length should be 32\")\n\t}\n\tif len(er.Data) < 0 {\n\t\treturn errors.New(\"raw_tx should be non empty\")\n\t}\n\tif len(er.OriginalSigner) < 0 {\n\t\treturn errors.New(\"original_signer should be non empty string\")\n\t}\n\n\treturn nil\n}", "func (m *AssessEvidenceRequest) Validate() error {\n\treturn m.validate(false)\n}", "func (h txApproveHandler) validate() error {\n\tif h.issuerKP == nil {\n\t\treturn errors.New(\"issuer keypair cannot be nil\")\n\t}\n\tif h.assetCode == \"\" {\n\t\treturn errors.New(\"asset code cannot be empty\")\n\t}\n\tif h.horizonClient == nil {\n\t\treturn errors.New(\"horizon client cannot be nil\")\n\t}\n\tif h.networkPassphrase == \"\" {\n\t\treturn errors.New(\"network passphrase cannot be empty\")\n\t}\n\tif h.db == nil {\n\t\treturn errors.New(\"database cannot be nil\")\n\t}\n\tif h.kycThreshold <= 0 {\n\t\treturn errors.New(\"kyc threshold cannot be less than or equal to zero\")\n\t}\n\tif h.baseURL == \"\" {\n\t\treturn errors.New(\"base url cannot be empty\")\n\t}\n\treturn nil\n}", "func (m *ConfigureAssessmentRequest) Validate() error {\n\treturn m.validate(false)\n}", "func (arc *AppointmentResultsCreate) check() error {\n\tif _, ok := arc.mutation.CauseAppoint(); !ok {\n\t\treturn &ValidationError{Name: \"causeAppoint\", err: errors.New(\"ent: missing required field \\\"causeAppoint\\\"\")}\n\t}\n\tif v, ok := arc.mutation.CauseAppoint(); ok {\n\t\tif err := appointmentresults.CauseAppointValidator(v); err != nil {\n\t\t\treturn &ValidationError{Name: \"causeAppoint\", err: fmt.Errorf(\"ent: validator failed for field 
\\\"causeAppoint\\\": %w\", err)}\n\t\t}\n\t}\n\tif _, ok := arc.mutation.Advice(); !ok {\n\t\treturn &ValidationError{Name: \"advice\", err: errors.New(\"ent: missing required field \\\"advice\\\"\")}\n\t}\n\tif v, ok := arc.mutation.Advice(); ok {\n\t\tif err := appointmentresults.AdviceValidator(v); err != nil {\n\t\t\treturn &ValidationError{Name: \"advice\", err: fmt.Errorf(\"ent: validator failed for field \\\"advice\\\": %w\", err)}\n\t\t}\n\t}\n\tif _, ok := arc.mutation.DateAppoint(); !ok {\n\t\treturn &ValidationError{Name: \"dateAppoint\", err: errors.New(\"ent: missing required field \\\"dateAppoint\\\"\")}\n\t}\n\tif _, ok := arc.mutation.TimeAppoint(); !ok {\n\t\treturn &ValidationError{Name: \"timeAppoint\", err: errors.New(\"ent: missing required field \\\"timeAppoint\\\"\")}\n\t}\n\tif _, ok := arc.mutation.AddtimeSave(); !ok {\n\t\treturn &ValidationError{Name: \"addtimeSave\", err: errors.New(\"ent: missing required field \\\"addtimeSave\\\"\")}\n\t}\n\tif _, ok := arc.mutation.HourBeforeAppoint(); !ok {\n\t\treturn &ValidationError{Name: \"hourBeforeAppoint\", err: errors.New(\"ent: missing required field \\\"hourBeforeAppoint\\\"\")}\n\t}\n\tif v, ok := arc.mutation.HourBeforeAppoint(); ok {\n\t\tif err := appointmentresults.HourBeforeAppointValidator(v); err != nil {\n\t\t\treturn &ValidationError{Name: \"hourBeforeAppoint\", err: fmt.Errorf(\"ent: validator failed for field \\\"hourBeforeAppoint\\\": %w\", err)}\n\t\t}\n\t}\n\tif _, ok := arc.mutation.MinuteBeforeAppoint(); !ok {\n\t\treturn &ValidationError{Name: \"minuteBeforeAppoint\", err: errors.New(\"ent: missing required field \\\"minuteBeforeAppoint\\\"\")}\n\t}\n\tif v, ok := arc.mutation.MinuteBeforeAppoint(); ok {\n\t\tif err := appointmentresults.MinuteBeforeAppointValidator(v); err != nil {\n\t\t\treturn &ValidationError{Name: \"minuteBeforeAppoint\", err: fmt.Errorf(\"ent: validator failed for field \\\"minuteBeforeAppoint\\\": %w\", err)}\n\t\t}\n\t}\n\treturn nil\n}", "func (m *RearrangeRequest) Validate(formats strfmt.Registry) error {\n\treturn nil\n}", "func (r *RouteClaim) Validate(ctx context.Context) (errs *apis.FieldError) {\n\t// If we're specifically updating status, don't reject the change because\n\t// of a spec issue.\n\tif apis.IsInStatusUpdate(ctx) {\n\t\treturn\n\t}\n\n\tif r.Name == \"\" {\n\t\terrs = errs.Also(apis.ErrMissingField(\"name\"))\n\t}\n\n\terrs = errs.Also(r.Spec.Validate(apis.WithinSpec(ctx)).ViaField(\"spec\"))\n\n\t// If we have errors, bail. 
No need to do the network call.\n\tif errs.Error() != \"\" {\n\t\treturn errs\n\t}\n\n\treturn checkVirtualServiceCollision(ctx, r.Spec.Hostname, r.Spec.Domain, r.GetNamespace(), errs)\n}", "func (r *Request) Validate() error {\n\tif r.Start != StartOffset && r.Offset != nil {\n\t\treturn fmt.Errorf(\"only specify one of Start or Offset: start=%s offset=%d\", r.Start, *r.Offset)\n\t}\n\tif r.Start != StartOffset && r.Start != StartFirst && r.Start != StartLast {\n\t\treturn fmt.Errorf(\"start must be one of %q, %q, or %q\", StartFirst, StartLast, StartOffset)\n\t}\n\tif err := r.Subset.Validate(); err != nil {\n\t\treturn err\n\t}\n\treturn nil\n}", "func (ut *AccountVerificationInputPayload) Validate() (err error) {\n\tif ut.VerificationToken != nil {\n\t\tif utf8.RuneCountInString(*ut.VerificationToken) < 108 {\n\t\t\terr = goa.MergeErrors(err, goa.InvalidLengthError(`response.verification_token`, *ut.VerificationToken, utf8.RuneCountInString(*ut.VerificationToken), 108, true))\n\t\t}\n\t}\n\treturn\n}", "func (p *PesaLinkRequest) Validate() {\n\t///err = s.Validate.Struct(p)\n\t//Start of source validation\n\t///if IsEmpty(p.Source.AccountNumber) {\n\n\t//}\n\t//if IsEmpty(p.Source.Name) {\n\n\t//}\n\t//if IsEmpty(p.Source.CountryCode) {\n\n\t//}\n\t///End of source validation\n\t//if IsEmpty(p.Destination.CountryCode) {\n\n\t//}\n\t//if IsEmpty(p.Destination.Name) {\n\n\t//}\n\t//if IsEmpty(p.Destination.CountryCode) {\n\n\t//}\n\n}", "func (api *Api) isValidCalculateRequest() bool {\n\treturn api.calculateBody.A != nil && api.calculateBody.B != nil\n}", "func (r *CreateWalletRequest) Validate(approver Rule) error {\n\terrs := Errors{\n\t\t\"/data/\": Validate(r.Data, Required),\n\t\t\"/data/relationships/kdf\": Validate(r.Data.Relationships.KDF, Required),\n\t\t\"/data/relationships/factor\": Validate(r.Data.Relationships.Factor, Required),\n\t\t\"/data/relationships/recovery\": Validate(r.Data.Relationships.Recovery, Required),\n\t\t\"/data/attributes/email\": Validate(r.Data.Attributes.Email, approver, is.Email, Required),\n\t}\n\n\tif r.Data.Relationships.Recovery != nil {\n\t\terrs[\"/data/relationships/recovery/account_id\"] = Validate(\n\t\t\tr.Data.Relationships.Recovery.Data.Attributes.AccountID, Required)\n\t}\n\tif r.Data.Relationships.Referrer != nil {\n\t\terrs[\"/data/relationships/referrer\"] = Validate(r.Data.Relationships.Referrer)\n\t}\n\treturn errs.Filter()\n}", "func (ut *accountVerificationInputPayload) Validate() (err error) {\n\tif ut.VerificationToken != nil {\n\t\tif utf8.RuneCountInString(*ut.VerificationToken) < 108 {\n\t\t\terr = goa.MergeErrors(err, goa.InvalidLengthError(`response.verification_token`, *ut.VerificationToken, utf8.RuneCountInString(*ut.VerificationToken), 108, true))\n\t\t}\n\t}\n\treturn\n}", "func (m *MaxofRanparameters) Validate() error {\n\treturn m.validate(false)\n}", "func (m Match) IsValid() *ValidationError {\n\tecase := func(f, msg string) ErrorCase {\n\t\treturn ErrorCase{f, msg}\n\t}\n\n\terrs := &ValidationError{}\n\n\tvalidKind := m.Kind == CookieMatchKind ||\n\t\tm.Kind == HeaderMatchKind ||\n\t\tm.Kind == QueryMatchKind\n\n\tif !validKind {\n\t\terrs.AddNew(ecase(\n\t\t\t\"kind\",\n\t\t\tfmt.Sprintf(\"%q is not a valid match kind\", m.Kind)))\n\t}\n\n\tvalidBehavior := m.Behavior == ExactMatchBehavior ||\n\t\tm.Behavior == RegexMatchBehavior ||\n\t\tm.Behavior == RangeMatchBehavior ||\n\t\tm.Behavior == PrefixMatchBehavior ||\n\t\tm.Behavior == SuffixMatchBehavior\n\n\tif !validBehavior 
{\n\t\terrs.AddNew(ecase(\n\t\t\t\"behavior\",\n\t\t\tfmt.Sprintf(\"%q is not a valid behavior kind\", m.Behavior)))\n\t}\n\n\terrCheckIndex(m.From.Key, errs, \"from.key\")\n\n\tif m.To.Value != \"\" && m.To.Key == \"\" {\n\t\terrs.AddNew(ecase(\"to.key\", \"must not be empty if to.value is set\"))\n\t}\n\n\t// The only time it's ok to not have a specific matched value is with\n\t// exact behavior kind, to indicate that all values should be matched.\n\tif validBehavior && m.From.Value == \"\" && m.Behavior != ExactMatchBehavior {\n\t\terrs.AddNew(\n\t\t\tecase(\n\t\t\t\t\"from.value\",\n\t\t\t\tfmt.Sprintf(\"must not be empty if behavior is %q\", m.Behavior),\n\t\t\t),\n\t\t)\n\t}\n\n\tfor _, combo := range invalidKindBehaviorCombinations {\n\t\tif m.Kind == combo.kind && m.Behavior == combo.behavior {\n\t\t\terrs.AddNew(\n\t\t\t\tecase(\n\t\t\t\t\t\"kind\",\n\t\t\t\t\tfmt.Sprintf(\n\t\t\t\t\t\t`%q kind not supported with %q behavior`,\n\t\t\t\t\t\tm.Kind,\n\t\t\t\t\t\tm.Behavior,\n\t\t\t\t\t),\n\t\t\t\t),\n\t\t\t)\n\t\t\tbreak\n\t\t}\n\t}\n\n\tif m.Behavior == RegexMatchBehavior && m.From.Value != \"\" {\n\t\tif re, e := regexp.Compile(m.From.Value); e != nil {\n\t\t\terrs.AddNew(\n\t\t\t\tecase(\n\t\t\t\t\t\"from.value\",\n\t\t\t\t\te.Error(),\n\t\t\t\t),\n\t\t\t)\n\t\t} else if m.To.Value == \"\" && len(re.SubexpNames()) > 2 {\n\t\t\terrs.AddNew(\n\t\t\t\tecase(\n\t\t\t\t\t\"from.value\",\n\t\t\t\t\t\"must have exactly one subgroup when to.value is not set\",\n\t\t\t\t),\n\t\t\t)\n\t\t}\n\t}\n\n\tif m.Behavior == RangeMatchBehavior && m.From.Value != \"\" {\n\t\t_, _, err := ParseRangeBoundaries(m.From.Value)\n\t\tif err != nil {\n\t\t\terrs.AddNew(\n\t\t\t\tecase(\n\t\t\t\t\t\"from.value\",\n\t\t\t\t\terr.Error(),\n\t\t\t\t),\n\t\t\t)\n\t\t}\n\t}\n\n\treturn errs.OrNil()\n}", "func (p Passport) IsValidV2() bool {\n if !p.IsValid() {\n return false\n }\n\n data := p.innerMap\n\n birthYear, _ := strconv.Atoi(data[\"byr\"])\n if birthYear < 1920 || birthYear > 2002 {\n return false\n }\n\n issueYear, _ := strconv.Atoi(data[\"iyr\"])\n if issueYear < 2010 || issueYear > 2020 {\n return false\n }\n\n expirationYear, _ := strconv.Atoi(data[\"eyr\"])\n if expirationYear < 2020 || expirationYear > 2030 {\n return false\n }\n\n heightIsValid := p.hasValidHeight()\n if !heightIsValid {\n return false\n }\n\n hairColorIsValid := p.hasValidHairColor()\n if !hairColorIsValid {\n return false\n }\n\n eyeColorIsValid := p.hasValidEyeColor()\n if !eyeColorIsValid {\n return false\n }\n\n passportIdIsValid := p.hasValidPassportId()\n if !passportIdIsValid {\n return false\n }\n\n return true\n}", "func (r *LogRequest) Validate() error {\n\tif err := requireProject(r.GetProject()); err != nil {\n\t\treturn err\n\t}\n\tif err := requireCommittish(\"committish\", r.GetCommittish()); err != nil {\n\t\treturn err\n\t}\n\tswitch {\n\tcase strings.Contains(r.Committish, \"..\"):\n\t\treturn errors.New(\"committish cannot contain \\\"..\\\"; use Ancestor instead\")\n\tcase r.PageSize < 0:\n\t\treturn errors.New(\"page size must not be negative\")\n\tdefault:\n\t\treturn nil\n\t}\n}", "func (usr *UpdateSysnonymRequest) Validate(r *http.Request) (bool, *UpdateSysnonymResponse) {\n\n\tupdateSysnonymResponse := new(UpdateSysnonymResponse)\n\n\t// Check if body is empty, because we expect some input\n\tif r.Body == nil {\n\n\t\tupdateSysnonymResponse.Code = status.EmptyBody\n\t\tupdateSysnonymResponse.Errors = append(updateSysnonymResponse.Errors, Error{Code: status.EmptyBody, Message: 
status.Text(status.EmptyBody)})\n\t\treturn false, updateSysnonymResponse\n\t}\n\n\t// Decode request\n\terr := json.NewDecoder(r.Body).Decode(&usr)\n\n\tdefer r.Body.Close()\n\n\tif err != nil {\n\t\tupdateSysnonymResponse.Code = status.IncorrectBodyFormat\n\t\tupdateSysnonymResponse.Errors = append(updateSysnonymResponse.Errors, Error{Code: status.IncorrectBodyFormat, Message: status.Text(status.IncorrectBodyFormat)})\n\t\treturn false, updateSysnonymResponse\n\t}\n\n\tif len(usr.Word.Word) == 0 {\n\t\tupdateSysnonymResponse.Code = status.ErrorMissingWord\n\t\tupdateSysnonymResponse.Errors = append(updateSysnonymResponse.Errors, Error{Code: status.ErrorMissingWord, Message: status.Text(status.ErrorMissingWord)})\n\t\treturn false, updateSysnonymResponse\n\t}\n\n\tif len(usr.UpdatedWord.Word) == 0 {\n\t\tupdateSysnonymResponse.Code = status.ErrorMissingWord\n\t\tupdateSysnonymResponse.Errors = append(updateSysnonymResponse.Errors, Error{Code: status.ErrorMissingWord, Message: status.Text(status.ErrorMissingWord)})\n\t\treturn false, updateSysnonymResponse\n\t}\n\treturn true, updateSysnonymResponse\n}", "func (m *ModelsMatchmakingResult) Validate(formats strfmt.Registry) error {\n\tvar res []error\n\n\tif err := m.validateChannel(formats); err != nil {\n\t\tres = append(res, err)\n\t}\n\tif err := m.validateClientVersion(formats); err != nil {\n\t\tres = append(res, err)\n\t}\n\tif err := m.validateDeployment(formats); err != nil {\n\t\tres = append(res, err)\n\t}\n\tif err := m.validateErrorCode(formats); err != nil {\n\t\tres = append(res, err)\n\t}\n\tif err := m.validateErrorMessage(formats); err != nil {\n\t\tres = append(res, err)\n\t}\n\tif err := m.validateGameMode(formats); err != nil {\n\t\tres = append(res, err)\n\t}\n\tif err := m.validateIsMock(formats); err != nil {\n\t\tres = append(res, err)\n\t}\n\tif err := m.validateMatchID(formats); err != nil {\n\t\tres = append(res, err)\n\t}\n\tif err := m.validateMatchingAllies(formats); err != nil {\n\t\tres = append(res, err)\n\t}\n\tif err := m.validateNamespace(formats); err != nil {\n\t\tres = append(res, err)\n\t}\n\tif err := m.validateQueuedAt(formats); err != nil {\n\t\tres = append(res, err)\n\t}\n\tif err := m.validateRegion(formats); err != nil {\n\t\tres = append(res, err)\n\t}\n\tif err := m.validateServerName(formats); err != nil {\n\t\tres = append(res, err)\n\t}\n\tif err := m.validateStatus(formats); err != nil {\n\t\tres = append(res, err)\n\t}\n\tif err := m.validateTicketID(formats); err != nil {\n\t\tres = append(res, err)\n\t}\n\tif err := m.validateUpdatedAt(formats); err != nil {\n\t\tres = append(res, err)\n\t}\n\n\tif len(res) > 0 {\n\t\treturn errors.CompositeValidationError(res...)\n\t}\n\treturn nil\n}", "func (m *AssessEvidencesResponse) Validate() error {\n\treturn m.validate(false)\n}", "func (dto *UpdateTaskRequest) IsValid() *base.ErrorResponse {\n\terrorResp := base.NewErrorResponseUnknownPropertyValue()\n\tif dto.Percentage != nil && *dto.Percentage > 100 {\n\t\treturn errorResp\n\t}\n\tif dto.ExecutionState != nil && !model.IsValidExecutionState(*dto.ExecutionState) {\n\t\treturn errorResp\n\t}\n\tif dto.ExecutionResult != nil && dto.ExecutionResult.State != nil && !model.IsValidExecutionResultState(*dto.ExecutionResult.State) {\n\t\treturn errorResp\n\t}\n\treturn nil\n}", "func validateAFTrafficInfluenceData(ti TrafficInfluSub) (rsp nefSBRspData,\n\tstatus bool) {\n\n\tif len(ti.AfTransID) == 0 {\n\t\trsp.errorCode = 400\n\t\trsp.pd.Title = \"Missing AfTransID atttribute\"\n\t\treturn rsp, 
false\n\t}\n\n\t//In case AfServiceID is not present then DNN has to be included in TI\n\tif len(ti.AfServiceID) == 0 && len(ti.Dnn) == 0 {\n\n\t\trsp.errorCode = 400\n\t\trsp.pd.Title = \"Missing afServiceId atttribute\"\n\t\treturn rsp, false\n\t}\n\n\tif len(ti.AfAppID) == 0 && ti.TrafficFilters == nil &&\n\t\tti.EthTrafficFilters == nil {\n\t\trsp.errorCode = 400\n\t\trsp.pd.Title = \"missing one of afAppId, trafficFilters,\" +\n\t\t\t\"ethTrafficFilters\"\n\t\treturn rsp, false\n\t}\n\treturn rsp, true\n}", "func (m *CalculateComplianceRequest) Validate() error {\n\treturn m.validate(false)\n}", "func (o *GetIPAMSuggestSubnetIDOKBody) Validate(formats strfmt.Registry) error {\n\treturn nil\n}", "func (r *updateRequest) Validate() *validation.Output {\n\to := &validation.Output{Valid: true}\n\tvar err error\n\t// cek document status so\n\tif r.SalesOrder.DocumentStatus != \"new\" {\n\t\to.Failure(\"document_status\", \"document_status should be new\")\n\t}\n\n\tcheckDuplicate := make(map[string]bool)\n\tcheckVariant := make(map[int64]*model.ItemVariant)\n\n\ttz := time.Time{}\n\tif r.EtaDate == tz {\n\t\to.Failure(\"eta_date\", \"Field is required.\")\n\t}\n\n\tfor _, row := range r.SalesOrderItem {\n\t\tIVariantID, _ := common.Decrypt(row.ItemVariantID)\n\t\tivar := &model.ItemVariant{ID: IVariantID}\n\t\tivar.Read(\"ID\")\n\t\t////////////////////////////////\n\t\tif checkVariant[ivar.ID] == nil {\n\t\t\tivar.CommitedStock -= row.Quantity\n\t\t\tcheckVariant[ivar.ID] = ivar\n\t\t} else {\n\t\t\tvariant := checkVariant[ivar.ID]\n\t\t\tvariant.CommitedStock -= row.Quantity\n\t\t\tcheckVariant[ivar.ID] = variant\n\t\t}\n\t\t////////////////////////////////\n\t}\n\n\t// cek setiap sales order item\n\tfor i, row := range r.SalesOrderItem {\n\t\t// validasi id\n\t\tif row.ID != \"\" {\n\t\t\tif ID, err := common.Decrypt(row.ID); err != nil {\n\t\t\t\to.Failure(fmt.Sprintf(\"sales_order_item.%d.id.invalid\", i), \"id is not valid\")\n\t\t\t} else {\n\t\t\t\tsoItem := &model.SalesOrderItem{ID: ID}\n\t\t\t\tif err := soItem.Read(); err != nil {\n\t\t\t\t\to.Failure(fmt.Sprintf(\"sales_order_item.%d.id.invalid\", i), \"id is not found\")\n\t\t\t\t}\n\n\t\t\t\t//check duplicate sales order item id\n\t\t\t\tif checkDuplicate[row.ID] == true {\n\t\t\t\t\to.Failure(fmt.Sprintf(\"sales_order_item.%d.id.invalid\", i), \"id duplicate\")\n\t\t\t\t} else {\n\t\t\t\t\tcheckDuplicate[row.ID] = true\n\t\t\t\t}\n\t\t\t}\n\t\t}\n\n\t\tvar UnitA float64\n\t\tvar PricingID, IVariantID int64\n\t\tvar ItemVariant *model.ItemVariant\n\t\tvar IVPrice *model.ItemVariantPrice\n\t\t// cek pricing type\n\t\tPricingID, err = common.Decrypt(row.PricingType)\n\t\tif err != nil {\n\t\t\to.Failure(fmt.Sprintf(\"sales_order_item.%d.pricing_type.invalid\", i), \"Pricing type id is invalid\")\n\t\t} else {\n\t\t\tif _, err = pricingType.GetPricingTypeByID(PricingID); err != nil {\n\t\t\t\to.Failure(fmt.Sprintf(\"sales_order_item.%d.pricing_type.invalid\", i), \"Pricing type id is not found\")\n\t\t\t}\n\t\t}\n\n\t\tIVariantID, err = common.Decrypt(row.ItemVariantID)\n\t\tif err != nil {\n\t\t\to.Failure(fmt.Sprintf(\"sales_order_item.%d.item_variant.invalid\", i), \"Item Variant id is invalid\")\n\t\t}\n\n\t\tvar pt = &model.PricingType{ID: PricingID}\n\t\tpt.Read(\"ID\")\n\t\tvar iv = &model.ItemVariant{ID: IVariantID}\n\t\tiv.Read(\"ID\")\n\n\t\tIVPrice, err = getItemVariantPricing(pt, iv)\n\t\tif err == nil {\n\t\t\tif pt.ParentType != nil {\n\t\t\t\tif pt.RuleType == \"increment\" {\n\t\t\t\t\tif pt.IsPercentage == 
int8(1) {\n\t\t\t\t\t\ttemp := (pt.Nominal * IVPrice.UnitPrice) / float64(100)\n\t\t\t\t\t\tUnitA = IVPrice.UnitPrice + temp\n\t\t\t\t\t} else {\n\t\t\t\t\t\tUnitA = IVPrice.UnitPrice + pt.Nominal\n\t\t\t\t\t}\n\n\t\t\t\t\tif row.UnitPrice < UnitA {\n\t\t\t\t\t\to.Failure(fmt.Sprintf(\"sales_order_item.%d.unit_price.invalid\", i), \"Unit price is too small\")\n\t\t\t\t\t}\n\t\t\t\t} else {\n\t\t\t\t\tif pt.IsPercentage == int8(1) {\n\t\t\t\t\t\ttemp := (pt.Nominal * IVPrice.UnitPrice) / float64(100)\n\t\t\t\t\t\tUnitA = IVPrice.UnitPrice - temp\n\t\t\t\t\t} else {\n\t\t\t\t\t\tUnitA = IVPrice.UnitPrice - pt.Nominal\n\t\t\t\t\t}\n\n\t\t\t\t\tif UnitA < 0 {\n\t\t\t\t\t\to.Failure(fmt.Sprintf(\"sales_order_item.%d.pricing_type.invalid\", i), \"Pricing type can make price become zero\")\n\t\t\t\t\t}\n\n\t\t\t\t\tif row.UnitPrice < UnitA {\n\t\t\t\t\t\to.Failure(fmt.Sprintf(\"sales_order_item.%d.unit_price.invalid\", i), \"Unit price is too small\")\n\t\t\t\t\t}\n\t\t\t\t}\n\t\t\t} else {\n\t\t\t\tif row.UnitPrice < IVPrice.UnitPrice {\n\t\t\t\t\to.Failure(fmt.Sprintf(\"sales_order_item.%d.unit_price.invalid\", i), \"Unit price is too small\")\n\t\t\t\t}\n\t\t\t}\n\t\t} else {\n\t\t\to.Failure(fmt.Sprintf(\"sales_order_item.%d.unit_price.invalid\", i), \"item variant price doesn't exist\")\n\t\t}\n\n\t\tItemVariant, err = inventory.GetDetailItemVariant(\"id\", IVariantID)\n\t\tif err != nil || ItemVariant == nil || ItemVariant.IsDeleted == int8(1) || ItemVariant.IsArchived == int8(1) {\n\t\t\to.Failure(fmt.Sprintf(\"sales_order_item.%d.item_variant_id.invalid\", i), \"Item variant id not found\")\n\t\t} else {\n\n\t\t\tSoItemID, _ := common.Decrypt(row.ID)\n\t\t\tSoItem := &model.SalesOrderItem{ID: SoItemID}\n\t\t\tif e := SoItem.Read(\"ID\"); e != nil {\n\t\t\t\tSoItem.Quantity = 0\n\t\t\t}\n\n\t\t\t// cek stock item variant\n\t\t\tif ((checkVariant[ItemVariant.ID].AvailableStock - checkVariant[ItemVariant.ID].CommitedStock) + SoItem.Quantity) < row.Quantity {\n\t\t\t\to.Failure(fmt.Sprintf(\"sales_order_item.%d.quantity.invalid\", i), \"Stock item is not enough to be sold\")\n\t\t\t}\n\n\t\t\tcheckVariant[ItemVariant.ID].CommitedStock += row.Quantity\n\n\t\t\t//check duplicate item variant id\n\t\t\tif checkDuplicate[row.ItemVariantID] == true {\n\t\t\t\to.Failure(fmt.Sprintf(\"sales_order_item.%d.item_variant_id.invalid\", i), \" item variant id duplicate\")\n\t\t\t} else {\n\t\t\t\tcheckDuplicate[row.ItemVariantID] = true\n\t\t\t}\n\n\t\t}\n\n\t\t// Calculate total price\n\t\tdiscamount := (row.UnitPrice * float64(row.Discount)) / float64(100)\n\t\tcuramount := row.UnitPrice * float64(row.Quantity)\n\t\tsubtotal := common.FloatPrecision(curamount-discamount, 0)\n\n\t\tr.TotalPrice += subtotal\n\t}\n\n\tif r.IsPercentageDiscount == int8(1) {\n\t\tif r.Discount < 0 || r.Discount > float32(100) {\n\t\t\to.Failure(\"discount\", \"discount is less than and equal 0 or greater than 100\")\n\t\t}\n\t}\n\n\treturn o\n}", "func (e *RetrieveBalance) Validate(\n\tr *http.Request,\n) error {\n\tctx := r.Context()\n\n\t// Validate id.\n\tid, owner, token, err := ValidateID(ctx, pat.Param(r, \"balance\"))\n\tif err != nil {\n\t\treturn errors.Trace(err)\n\t}\n\te.ID = *id\n\te.Token = *token\n\te.Owner = *owner\n\n\treturn nil\n}", "func Validate(ctx http.IContext, vld *validator.Validate, arg interface{}) bool {\n\n\tif err := ctx.GetRequest().GetBodyAs(arg); err != nil {\n\t\thttp.InternalServerException(ctx)\n\t\treturn false\n\t}\n\n\tswitch err := vld.Struct(arg); err.(type) {\n\tcase 
validator.ValidationErrors:\n\t\thttp.FailedValidationException(ctx, err.(validator.ValidationErrors))\n\t\treturn false\n\n\tcase nil:\n\t\tbreak\n\n\tdefault:\n\t\thttp.InternalServerException(ctx)\n\t\treturn false\n\t}\n\n\treturn true\n}", "func (p Provisioner) Validate(req json.RawMessage) error {\n\tmodel := &CreateInstanceRequest{}\n\tif err := json.Unmarshal(req, &model); err != nil {\n\t\treturn err\n\t}\n\treturn nil\n}", "func (r *Response) Validate(lastID int64) error {\n\tif !(r.StatusManager == ResponseStatusReview ||\n\t\tr.StatusManager == ResponseStatusDenied ||\n\t\tr.StatusManager == ResponseStatusAccepted) {\n\t\treturn errors.New(\"wrong manager response status\")\n\t}\n\tif !(r.StatusFreelancer == ResponseStatusReview ||\n\t\tr.StatusFreelancer == ResponseStatusDenied ||\n\t\tr.StatusFreelancer == ResponseStatusAccepted ||\n\t\tr.StatusFreelancer == ResponseStatusBlock) {\n\t\treturn errors.New(\"wrong freelancer response status\")\n\t}\n\tif r.Date.IsZero() {\n\t\treturn errors.New(\"wrong date\")\n\t}\n\tif r.ID != lastID {\n\t\treturn errors.New(\"current id does not match last id\")\n\t}\n\tif r.FreelancerId == 0 || r.JobId == 0 {\n\t\treturn errors.New(\"wrong relationships between tables\")\n\t}\n\treturn nil\n}", "func (swr *SearchWordsRequest) Validate(r *http.Request) (bool, *SearchWordsRespnse) {\n\n\tsearchWordsRespnse := new(SearchWordsRespnse)\n\n\t// Check if body is empty, because we expect some input\n\tif r.Body == nil {\n\n\t\tsearchWordsRespnse.Code = status.EmptyBody\n\t\tsearchWordsRespnse.Errors = append(searchWordsRespnse.Errors, Error{Code: status.EmptyBody, Message: status.Text(status.EmptyBody)})\n\t\treturn false, searchWordsRespnse\n\t}\n\n\t// Decode request\n\terr := json.NewDecoder(r.Body).Decode(&swr)\n\n\tdefer r.Body.Close()\n\n\tif err != nil {\n\t\tsearchWordsRespnse.Code = status.IncorrectBodyFormat\n\t\tsearchWordsRespnse.Errors = append(searchWordsRespnse.Errors, Error{Code: status.IncorrectBodyFormat, Message: status.Text(status.IncorrectBodyFormat)})\n\t\treturn false, searchWordsRespnse\n\t}\n\n\treturn true, searchWordsRespnse\n}", "func validateVufResult(email string, vufResult []byte) error {\n\tdb, err := GetDB()\n\tif err != nil {\n\t\treturn err\n\t}\n\n\tvar vuf []byte\n\terr = db.View(func(tx *bolt.Tx) error {\n\t\tb := tx.Bucket([]byte(\"conf\"))\n\t\tvuf = copySlice(b.Get([]byte(\"vufKey\")))\n\t\treturn nil\n\t})\n\tif err != nil {\n\t\treturn err\n\t}\n\n\tpkey, err := x509.ParsePKIXPublicKey(vuf)\n\tif err != nil {\n\t\treturn err\n\t}\n\n\tppkey, ok := pkey.(*rsa.PublicKey)\n\tif !ok {\n\t\treturn errors.New(\"Public key format for the VUF appears incorrect. 
Should be rsa.PublicKey but unable to cast as such.\")\n\t}\n\n\thashed := sha256.Sum256([]byte(email))\n\treturn rsa.VerifyPKCS1v15(ppkey, crypto.SHA256, hashed[:], vufResult)\n}", "func (r *uploadRequest) Validate(maxFileSizeBytes config.FileSizeBytes) *util.JSONResponse {\n\tif maxFileSizeBytes > 0 && r.MediaMetadata.FileSizeBytes > types.FileSizeBytes(maxFileSizeBytes) {\n\t\treturn requestEntityTooLargeJSONResponse(maxFileSizeBytes)\n\t}\n\tif strings.HasPrefix(string(r.MediaMetadata.UploadName), \"~\") {\n\t\treturn &util.JSONResponse{\n\t\t\tCode: http.StatusBadRequest,\n\t\t\tJSON: spec.Unknown(\"File name must not begin with '~'.\"),\n\t\t}\n\t}\n\t// TODO: Validate filename - what are the valid characters?\n\tif r.MediaMetadata.UserID != \"\" {\n\t\t// TODO: We should put user ID parsing code into gomatrixserverlib and use that instead\n\t\t// (see https://github.com/matrix-org/gomatrixserverlib/blob/3394e7c7003312043208aa73727d2256eea3d1f6/eventcontent.go#L347 )\n\t\t// It should be a struct (with pointers into a single string to avoid copying) and\n\t\t// we should update all refs to use UserID types rather than strings.\n\t\t// https://github.com/matrix-org/synapse/blob/v0.19.2/synapse/types.py#L92\n\t\tif _, _, err := gomatrixserverlib.SplitID('@', string(r.MediaMetadata.UserID)); err != nil {\n\t\t\treturn &util.JSONResponse{\n\t\t\t\tCode: http.StatusBadRequest,\n\t\t\t\tJSON: spec.BadJSON(\"user id must be in the form @localpart:domain\"),\n\t\t\t}\n\t\t}\n\t}\n\treturn nil\n}", "func (upvc *UnsavedPostVideoCreate) check() error {\n\tif _, ok := upvc.mutation.UUID(); !ok {\n\t\treturn &ValidationError{Name: \"uuid\", err: errors.New(\"ent: missing required field \\\"uuid\\\"\")}\n\t}\n\tif v, ok := upvc.mutation.UUID(); ok {\n\t\tif err := unsavedpostvideo.UUIDValidator(v); err != nil {\n\t\t\treturn &ValidationError{Name: \"uuid\", err: fmt.Errorf(\"ent: validator failed for field \\\"uuid\\\": %w\", err)}\n\t\t}\n\t}\n\tif _, ok := upvc.mutation.Validity(); !ok {\n\t\treturn &ValidationError{Name: \"validity\", err: errors.New(\"ent: missing required field \\\"validity\\\"\")}\n\t}\n\tif v, ok := upvc.mutation.Validity(); ok {\n\t\tif err := unsavedpostvideo.ValidityValidator(v); err != nil {\n\t\t\treturn &ValidationError{Name: \"validity\", err: fmt.Errorf(\"ent: validator failed for field \\\"validity\\\": %w\", err)}\n\t\t}\n\t}\n\tif v, ok := upvc.mutation.Title(); ok {\n\t\tif err := unsavedpostvideo.TitleValidator(v); err != nil {\n\t\t\treturn &ValidationError{Name: \"title\", err: fmt.Errorf(\"ent: validator failed for field \\\"title\\\": %w\", err)}\n\t\t}\n\t}\n\tif v, ok := upvc.mutation.URL(); ok {\n\t\tif err := unsavedpostvideo.URLValidator(v); err != nil {\n\t\t\treturn &ValidationError{Name: \"url\", err: fmt.Errorf(\"ent: validator failed for field \\\"url\\\": %w\", err)}\n\t\t}\n\t}\n\tif _, ok := upvc.mutation.CreatedAt(); !ok {\n\t\treturn &ValidationError{Name: \"created_at\", err: errors.New(\"ent: missing required field \\\"created_at\\\"\")}\n\t}\n\tif _, ok := upvc.mutation.UnsavedPostID(); !ok {\n\t\treturn &ValidationError{Name: \"unsaved_post\", err: errors.New(\"ent: missing required edge \\\"unsaved_post\\\"\")}\n\t}\n\treturn nil\n}", "func (v GetApiHostnameCoverageOverlappingRequest) Validate() error {\n\treturn validation.Errors{\n\t\t\"ConfigID\": validation.Validate(v.ConfigID, validation.Required),\n\t\t\"Version\": validation.Validate(v.Version, validation.Required),\n\t}.Filter()\n}", "func (r *ReconcileCanary) IsValid(obj 
metav1.Object) (bool, error) {\n\t//log.Info(fmt.Sprintf(\"IsValid? %s\", obj))\n\n\tcanary, ok := obj.(*kharonv1alpha1.Canary)\n\tif !ok {\n\t\terr := errors.NewBadRequest(errorNotACanaryObject)\n\t\tlog.Error(err, errorNotACanaryObject)\n\t\treturn false, err\n\t}\n\n\t// Check if TargetRef is empty\n\tif (kharonv1alpha1.Ref{}) == canary.Spec.TargetRef {\n\t\terr := errors.NewBadRequest(errorTargetRefEmpty)\n\t\tlog.Error(err, errorTargetRefEmpty)\n\t\treturn false, err\n\t}\n\n\t// Check if TargetRefContainerPort is empty\n\tif len(canary.Spec.TargetRefContainerPort.StrVal) <= 0 && canary.Spec.TargetRefContainerPort.IntVal <= 0 {\n\t\terr := errors.NewBadRequest(errorTargetRefContainerPortEmpty)\n\t\tlog.Error(err, errorTargetRefContainerPortEmpty)\n\t\treturn false, err\n\t}\n\n\t// Check kind of target\n\tif canary.Spec.TargetRef.Kind != \"Deployment\" && canary.Spec.TargetRef.Kind != \"DeploymentConfig\" {\n\t\terr := errors.NewBadRequest(errorTargetRefKind)\n\t\tlog.Error(err, errorTargetRefKind)\n\t\treturn false, err\n\t}\n\n\t// Check if ServiceName is empty\n\tif len(canary.Spec.ServiceName) <= 0 {\n\t\terr := errors.NewBadRequest(errorServiceNameEmpty)\n\t\tlog.Error(err, errorServiceNameEmpty)\n\t\treturn false, err\n\t}\n\n\t// Check if CanaryAnalysis is empty\n\tif (kharonv1alpha1.CanaryAnalysis{}) == canary.Spec.CanaryAnalysis {\n\t\terr := errors.NewBadRequest(errorCanaryAnalysisEmpty)\n\t\tlog.Error(err, errorCanaryAnalysisEmpty)\n\t\treturn false, err\n\t}\n\n\treturn true, nil\n}", "func (p *BaseChannelProposal) Valid() error {\n\tif p.InitBals == nil {\n\t\treturn errors.New(\"invalid nil fields\")\n\t} else if err := channel.ValidateProposalParameters(\n\t\tp.ChallengeDuration, p.NumPeers(), p.App); err != nil {\n\t\treturn errors.WithMessage(err, \"invalid channel parameters\")\n\t} else if err := p.InitBals.Valid(); err != nil {\n\t\treturn err\n\t} else if len(p.InitBals.Locked) != 0 {\n\t\treturn errors.New(\"initial allocation cannot have locked funds\")\n\t}\n\treturn nil\n}", "func (o *AcceptInvitationBody) Validate(formats strfmt.Registry) error {\n\tvar res []error\n\n\tif err := o.validateAlias(formats); err != nil {\n\t\tres = append(res, err)\n\t}\n\n\tif len(res) > 0 {\n\t\treturn errors.CompositeValidationError(res...)\n\t}\n\treturn nil\n}", "func (mt *Bottle) Validate() (err error) {\n\tif mt.ID == \"\" {\n\t\terr = goa.MergeErrors(err, goa.MissingAttributeError(`response`, \"id\"))\n\t}\n\tif mt.Name == \"\" {\n\t\terr = goa.MergeErrors(err, goa.MissingAttributeError(`response`, \"name\"))\n\t}\n\n\tif utf8.RuneCountInString(mt.Name) < 1 {\n\t\terr = goa.MergeErrors(err, goa.InvalidLengthError(`response.name`, mt.Name, utf8.RuneCountInString(mt.Name), 1, true))\n\t}\n\tif mt.Rating < 1 {\n\t\terr = goa.MergeErrors(err, goa.InvalidRangeError(`response.rating`, mt.Rating, 1, true))\n\t}\n\tif mt.Rating > 5 {\n\t\terr = goa.MergeErrors(err, goa.InvalidRangeError(`response.rating`, mt.Rating, 5, false))\n\t}\n\tif mt.Vintage < 1900 {\n\t\terr = goa.MergeErrors(err, goa.InvalidRangeError(`response.vintage`, mt.Vintage, 1900, true))\n\t}\n\treturn\n}", "func (ec *ExperienceCreate) check() error {\n\tif _, ok := ec.mutation.UUID(); !ok {\n\t\treturn &ValidationError{Name: \"uuid\", err: errors.New(`ent: missing required field \"uuid\"`)}\n\t}\n\tif _, ok := ec.mutation.CreatedAt(); !ok {\n\t\treturn &ValidationError{Name: \"created_at\", err: errors.New(`ent: missing required field \"created_at\"`)}\n\t}\n\tif _, ok := ec.mutation.Title(); !ok {\n\t\treturn 
&ValidationError{Name: \"title\", err: errors.New(`ent: missing required field \"title\"`)}\n\t}\n\tif v, ok := ec.mutation.Title(); ok {\n\t\tif err := experience.TitleValidator(v); err != nil {\n\t\t\treturn &ValidationError{Name: \"title\", err: fmt.Errorf(`ent: validator failed for field \"title\": %w`, err)}\n\t\t}\n\t}\n\tif _, ok := ec.mutation.Content(); !ok {\n\t\treturn &ValidationError{Name: \"content\", err: errors.New(`ent: missing required field \"content\"`)}\n\t}\n\tif v, ok := ec.mutation.Content(); ok {\n\t\tif err := experience.ContentValidator(v); err != nil {\n\t\t\treturn &ValidationError{Name: \"content\", err: fmt.Errorf(`ent: validator failed for field \"content\": %w`, err)}\n\t\t}\n\t}\n\tif _, ok := ec.mutation.Views(); !ok {\n\t\treturn &ValidationError{Name: \"views\", err: errors.New(`ent: missing required field \"views\"`)}\n\t}\n\tif v, ok := ec.mutation.Views(); ok {\n\t\tif err := experience.ViewsValidator(v); err != nil {\n\t\t\treturn &ValidationError{Name: \"views\", err: fmt.Errorf(`ent: validator failed for field \"views\": %w`, err)}\n\t\t}\n\t}\n\tif _, ok := ec.mutation.Likes(); !ok {\n\t\treturn &ValidationError{Name: \"likes\", err: errors.New(`ent: missing required field \"likes\"`)}\n\t}\n\tif v, ok := ec.mutation.Likes(); ok {\n\t\tif err := experience.LikesValidator(v); err != nil {\n\t\t\treturn &ValidationError{Name: \"likes\", err: fmt.Errorf(`ent: validator failed for field \"likes\": %w`, err)}\n\t\t}\n\t}\n\treturn nil\n}", "func (m *APIMatchFunctionRequest) Validate(formats strfmt.Registry) error {\n\tvar res []error\n\n\tif err := m.validateMatchFunction(formats); err != nil {\n\t\tres = append(res, err)\n\t}\n\tif err := m.validateURL(formats); err != nil {\n\t\tres = append(res, err)\n\t}\n\n\tif len(res) > 0 {\n\t\treturn errors.CompositeValidationError(res...)\n\t}\n\treturn nil\n}", "func (o Request) Valid() error {\n\tif len(o.GroupBy) == 0 {\n\t\treturn skerr.Fmt(\"at least one GroupBy value must be supplied.\")\n\t}\n\n\tvalid := false\n\tfor _, op := range AllOperations {\n\t\tif op == o.Operation {\n\t\t\tvalid = true\n\t\t\tbreak\n\t\t}\n\t}\n\tif !valid {\n\t\treturn skerr.Fmt(\"invalid Operation value: %q\", o.Operation)\n\t}\n\n\tvalid = false\n\tfor _, incomingOp := range o.Summary {\n\t\tfor _, op := range AllOperations {\n\t\t\tif op == incomingOp {\n\t\t\t\tvalid = true\n\t\t\t\tbreak\n\t\t\t}\n\t\t}\n\t\tif !valid {\n\t\t\treturn skerr.Fmt(\"invalid Summary value: %q\", incomingOp)\n\t\t}\n\t}\n\treturn nil\n}", "func (payload *CreateVerificationPayload) Validate() (err error) {\n\n\treturn\n}", "func validateCreateTestExonerationRequest(req *pb.CreateTestExonerationRequest, requireInvocation bool) error {\n\tif requireInvocation || req.Invocation != \"\" {\n\t\tif err := pbutil.ValidateInvocationName(req.Invocation); err != nil {\n\t\t\treturn errors.Annotate(err, \"invocation\").Err()\n\t\t}\n\t}\n\n\tex := req.GetTestExoneration()\n\tif err := pbutil.ValidateTestID(ex.GetTestId()); err != nil {\n\t\treturn errors.Annotate(err, \"test_exoneration: test_id\").Err()\n\t}\n\tif err := pbutil.ValidateVariant(ex.GetVariant()); err != nil {\n\t\treturn errors.Annotate(err, \"test_exoneration: variant\").Err()\n\t}\n\n\thasVariant := len(ex.GetVariant().GetDef()) != 0\n\thasVariantHash := ex.VariantHash != \"\"\n\tif hasVariant && hasVariantHash {\n\t\tcomputedHash := pbutil.VariantHash(ex.GetVariant())\n\t\tif computedHash != ex.VariantHash {\n\t\t\treturn errors.Reason(\"computed and supplied variant hash don't 
match\").Err()\n\t\t}\n\t}\n\n\tif err := pbutil.ValidateRequestID(req.RequestId); err != nil {\n\t\treturn errors.Annotate(err, \"request_id\").Err()\n\t}\n\n\tif ex.ExplanationHtml == \"\" {\n\t\treturn errors.Reason(\"test_exoneration: explanation_html: unspecified\").Err()\n\t}\n\tif ex.Reason == pb.ExonerationReason_EXONERATION_REASON_UNSPECIFIED {\n\t\treturn errors.Reason(\"test_exoneration: reason: unspecified\").Err()\n\t}\n\treturn nil\n}", "func (e *CancelTransaction) Validate(\n\tr *http.Request,\n) error {\n\tctx := r.Context()\n\n\tswitch authentication.Get(ctx).Status {\n\tcase authentication.AutStSkipped:\n\t\t// Validate hop.\n\t\thop, err := ValidateHop(ctx, r.PostFormValue(\"hop\"))\n\t\tif err != nil {\n\t\t\treturn errors.Trace(err)\n\t\t}\n\t\te.Hop = *hop\n\t}\n\n\t// Validate id.\n\tid, owner, token, err := ValidateID(ctx, pat.Param(r, \"transaction\"))\n\tif err != nil {\n\t\treturn errors.Trace(err)\n\t}\n\te.ID = *id\n\te.Token = *token\n\te.Owner = *owner\n\n\treturn nil\n}", "func (m *UpdateMegaParProfileResponse) Validate() error {\n\treturn nil\n}", "func (m *FalconxMITREAttackParent) Validate(formats strfmt.Registry) error {\n\treturn nil\n}", "func (m *Request) Validate() error {\n\tif m == nil {\n\t\treturn nil\n\t}\n\n\tif utf8.RuneCountInString(m.GetS1()) < 1 {\n\t\treturn RequestValidationError{\n\t\t\tfield: \"S1\",\n\t\t\treason: \"value length must be at least 1 runes\",\n\t\t}\n\t}\n\n\treturn nil\n}", "func (sel SelRoulette) Validate() error {\n\treturn nil\n}", "func (r HTTP) validate() error {\n\tif r.IsEmpty() {\n\t\treturn nil\n\t}\n\t// we consider the fact that primary routing rule is mandatory before you write any additional routing rules.\n\tif err := r.Main.validate(); err != nil {\n\t\treturn err\n\t}\n\tif r.Main.TargetContainer != nil && r.TargetContainerCamelCase != nil {\n\t\treturn &errFieldMutualExclusive{\n\t\t\tfirstField: \"target_container\",\n\t\t\tsecondField: \"targetContainer\",\n\t\t}\n\t}\n\n\tfor idx, rule := range r.AdditionalRoutingRules {\n\t\tif err := rule.validate(); err != nil {\n\t\t\treturn fmt.Errorf(`validate \"additional_rules[%d]\": %w`, idx, err)\n\t\t}\n\t}\n\treturn nil\n}", "func (m *RequestAgent) Validate(formats strfmt.Registry) error {\n\treturn nil\n}", "func (t *SimpleChaincode) validate(stub shim.ChaincodeStubInterface, args []string) ([]byte, error) {\n\tvar key, jsonResp string\n\tvar err,err_state,err_contract error\n\n\tif len(args) != 3 {\n\t\treturn nil, errors.New(\"Incorrect number of arguments. 
Expecting name of the key to query\")\n\t}\n\n //parameters productid,contractkey,bytearray\n //data := `{\"product_id\":\"IOT1124s\",\"Contractid\":\"232241123\",\"stake_holders\":[\"Saurabh_id123\",\"Vinit_Ajay123\"],\"sensor_value\":\"24\",\"payment_percent\":\"20\"}`\n \t\n\tStateJsonAsbytes := []byte(args[0])\n\tcontractkey := args[1]\n\t//productid := args[2]\n\n\tContractvalAsbytes, err := stub.GetState(contractkey)\n\tif err != nil {\n\t\tjsonResp = \"{\\\"Error\\\":\\\"Failed to get state for \" + key + \"\\\"}\"\n\n\t\treturn nil, errors.New(jsonResp)\n\t}\n\n var f interface{} //Interface for marshalling the data received from blockchain contract used for comparison.\n var g interface{} //Interface for receiving and marshalling the received data \n\n\t\terr_contract = json.Unmarshal(ContractvalAsbytes, &f)\n\t\tif (err_contract!=nil) {\n\t\t\tos.Exit(1)\n\t\t}\n\n\t\terr_state = json.Unmarshal(StateJsonAsbytes, &g)\n\t\tif (err_state!=nil) {\n\t\t\tos.Exit(1)\n\t\t}\n\n\t\tcontract_json := f.(map[string]interface{})\n\n\t state_json := g.(map[string]interface{})\n\n\t // The Key value iteration can be done better for dynamicity as a seperate function. to loop over the two structs. \n\n var sensor_value,sensor_contract string \n\n\t\tfor k, v := range contract_json {\n \t if k == \"sensor_value\" {\n \n fmt.Println(k, \"is to be compared\", v)\n sensor_value=v.(string) \n \n }\n\n\t\t}\n \n for k, v := range state_json {\n \t if k == \"sensor_value\" {\n \n fmt.Println(k, \"is to be compared\", v)\n sensor_contract=v.(string) \n }\n\n\t\t}\n \n\n val1,_ := strconv.Atoi(sensor_value)\n val2,_ := strconv.Atoi(sensor_contract)\n\n var exception string\n\n if ( val1 < val2 ) {\n exception = `{\"result\":\"Exception: value Not acceptable\",\"status\":\"failed\"}`\n }else{\n exception = `{\"result\":\"Success\",\"status\":\"success\"}`\n }\n\n\t\texceptionAsBytes := []byte(exception)\n\n /*Section to validate the two jsons and put state only if data is validated*/ \n \n //Smart Contract Rules :\n\n // case : blockchain.sensor_value==received.sensor_value\n \n // case : blockchain.expiry_max== received.expiry\n\n //if true : insert in to blockchain.\n \n\n\treturn exceptionAsBytes, nil\n}", "func Validator(request events.APIGatewayProxyRequest) (bool, string, int, searchutil.SearchPeriod) {\n\tif request.HTTPMethod != \"GET\" {\n\t\treturn false, \"{\\\"msg\\\": \\\"only HTTP GET is allowed on this resource\\\"}\", -1, searchutil.Day\n\t}\n\tnumber, pres := request.QueryStringParameters[\"number\"]\n\tif !pres {\n\t\tnumber = \"10\"\n\t}\n\tnum, _ := strconv.Atoi(number)\n\tif num <= 0 {\n\t\treturn false, \"{\\\"msg\\\": \\\"number nmust be greater than 0\\\"}\", -1, searchutil.Day\n\t}\n\tduration, _ := request.QueryStringParameters[\"duration\"]\n\treturn true, \"\", num, ConvertDurationToSearchPeriod(duration)\n}", "func (r *TransferRequest) Validate() error {\n\tif r.Amount <= 0 {\n\t\treturn fmt.Errorf(\"invalid parameter: amount of the deposit must be positive\")\n\t}\n\tif r.WalletFrom == 0 {\n\t\treturn fmt.Errorf(\"invalid parameter: wallet_from required\")\n\t}\n\tif r.WalletTo == 0 {\n\t\treturn fmt.Errorf(\"invalid parameter: wallet_to required\")\n\t}\n\tif r.WalletFrom == r.WalletTo {\n\t\treturn fmt.Errorf(\"source wallet must not match destination wallet\")\n\t}\n\treturn nil\n}", "func (r *Route) Validate(ctx context.Context) (errs *apis.FieldError) {\n\t// If we're specifically updating status, don't reject the change because\n\t// of a spec issue.\n\tif 
apis.IsInStatusUpdate(ctx) {\n\t\treturn\n\t}\n\n\tif r.Name == \"\" {\n\t\terrs = errs.Also(apis.ErrMissingField(\"name\"))\n\t}\n\n\terrs = errs.Also(r.Spec.Validate(apis.WithinSpec(ctx)).ViaField(\"spec\"))\n\n\t// If we have errors, bail. No need to do the network call.\n\tif errs.Error() != \"\" {\n\t\treturn errs\n\t}\n\n\treturn checkVirtualServiceCollision(ctx, r.Spec.Hostname, r.Spec.Domain, r.GetNamespace(), errs)\n}", "func (m *PTXServiceDTOBusSpecificationV2N1Estimate) Validate(formats strfmt.Registry) error {\n\treturn nil\n}", "func (o *PostPunchInOKBody) Validate(formats strfmt.Registry) error {\n\tvar res []error\n\n\tif err := o.validateResponse(formats); err != nil {\n\t\tres = append(res, err)\n\t}\n\n\tif len(res) > 0 {\n\t\treturn errors.CompositeValidationError(res...)\n\t}\n\treturn nil\n}", "func (mt *Vironpage) Validate() (err error) {\n\tif mt.ID == \"\" {\n\t\terr = goa.MergeErrors(err, goa.MissingAttributeError(`response`, \"id\"))\n\t}\n\tif mt.Name == \"\" {\n\t\terr = goa.MergeErrors(err, goa.MissingAttributeError(`response`, \"name\"))\n\t}\n\tif mt.Section == \"\" {\n\t\terr = goa.MergeErrors(err, goa.MissingAttributeError(`response`, \"section\"))\n\t}\n\tif mt.Group == \"\" {\n\t\terr = goa.MergeErrors(err, goa.MissingAttributeError(`response`, \"group\"))\n\t}\n\tif mt.Components == nil {\n\t\terr = goa.MergeErrors(err, goa.MissingAttributeError(`response`, \"components\"))\n\t}\n\tfor _, e := range mt.Components {\n\t\tif e != nil {\n\t\t\tif err2 := e.Validate(); err2 != nil {\n\t\t\t\terr = goa.MergeErrors(err, err2)\n\t\t\t}\n\t\t}\n\t}\n\treturn\n}", "func (m *JsonToMetadata_MatchRules) Validate() error {\n\treturn m.validate(false)\n}", "func (m *HeaderMatch) Validate() error {\n\treturn m.validate(false)\n}", "func (r *ArchiveRequest) Validate() error {\n\tif err := requireProject(r.GetProject()); err != nil {\n\t\treturn err\n\t}\n\tswitch {\n\tcase r.Format == ArchiveRequest_Invalid:\n\t\treturn errors.New(\"format must be valid\")\n\tcase r.Ref == \"\":\n\t\treturn errors.New(\"ref is required\")\n\tdefault:\n\t\treturn nil\n\t}\n}", "func (mt *EasypostTracker) Validate() (err error) {\n\tif mt.ID == \"\" {\n\t\terr = goa.MergeErrors(err, goa.MissingAttributeError(`response`, \"id\"))\n\t}\n\tif mt.Object == \"\" {\n\t\terr = goa.MergeErrors(err, goa.MissingAttributeError(`response`, \"object\"))\n\t}\n\n\tfor _, e := range mt.CarrierDetail {\n\t\tif e.Object == \"\" {\n\t\t\terr = goa.MergeErrors(err, goa.MissingAttributeError(`response.carrier_detail[*]`, \"object\"))\n\t\t}\n\n\t\tif ok := goa.ValidatePattern(`^CarrierDetail$`, e.Object); !ok {\n\t\t\terr = goa.MergeErrors(err, goa.InvalidPatternError(`response.carrier_detail[*].object`, e.Object, `^CarrierDetail$`))\n\t\t}\n\t}\n\tfor _, e := range mt.Fees {\n\t\tif ok := goa.ValidatePattern(`^Fee$`, e.Object); !ok {\n\t\t\terr = goa.MergeErrors(err, goa.InvalidPatternError(`response.fees[*].object`, e.Object, `^Fee$`))\n\t\t}\n\t\tif e.Type != nil {\n\t\t\tif !(*e.Type == \"LabelFee\" || *e.Type == \"PostageFee\" || *e.Type == \"InsuranceFee\" || *e.Type == \"TrackerFee\") {\n\t\t\t\terr = goa.MergeErrors(err, goa.InvalidEnumValueError(`response.fees[*].type`, *e.Type, []interface{}{\"LabelFee\", \"PostageFee\", \"InsuranceFee\", \"TrackerFee\"}))\n\t\t\t}\n\t\t}\n\t}\n\tif ok := goa.ValidatePattern(`^trk_`, mt.ID); !ok {\n\t\terr = goa.MergeErrors(err, goa.InvalidPatternError(`response.id`, mt.ID, `^trk_`))\n\t}\n\tif !(mt.Mode == \"test\" || mt.Mode == \"production\") {\n\t\terr = 
goa.MergeErrors(err, goa.InvalidEnumValueError(`response.mode`, mt.Mode, []interface{}{\"test\", \"production\"}))\n\t}\n\tif ok := goa.ValidatePattern(`^Tracker$`, mt.Object); !ok {\n\t\terr = goa.MergeErrors(err, goa.InvalidPatternError(`response.object`, mt.Object, `^Tracker$`))\n\t}\n\tif mt.Status != nil {\n\t\tif !(*mt.Status == \"pre_transit\" || *mt.Status == \"in_transit\" || *mt.Status == \"out_for_delivery\" || *mt.Status == \"delivered\" || *mt.Status == \"available_for_pickup\" || *mt.Status == \"return_to_sender\" || *mt.Status == \"failure\" || *mt.Status == \"cancelled\" || *mt.Status == \"error\" || *mt.Status == \"unknown\") {\n\t\t\terr = goa.MergeErrors(err, goa.InvalidEnumValueError(`response.status`, *mt.Status, []interface{}{\"pre_transit\", \"in_transit\", \"out_for_delivery\", \"delivered\", \"available_for_pickup\", \"return_to_sender\", \"failure\", \"cancelled\", \"error\", \"unknown\"}))\n\t\t}\n\t}\n\tfor _, e := range mt.TrackingDetails {\n\t\tif e.Object == \"\" {\n\t\t\terr = goa.MergeErrors(err, goa.MissingAttributeError(`response.tracking_details[*]`, \"object\"))\n\t\t}\n\n\t\tif ok := goa.ValidatePattern(`^TrackingDetail$`, e.Object); !ok {\n\t\t\terr = goa.MergeErrors(err, goa.InvalidPatternError(`response.tracking_details[*].object`, e.Object, `^TrackingDetail$`))\n\t\t}\n\t\tif e.Status != nil {\n\t\t\tif !(*e.Status == \"pre_transit\" || *e.Status == \"in_transit\" || *e.Status == \"out_for_delivery\" || *e.Status == \"delivered\" || *e.Status == \"available_for_pickup\" || *e.Status == \"return_to_sender\" || *e.Status == \"failure\" || *e.Status == \"cancelled\" || *e.Status == \"error\" || *e.Status == \"unknown\") {\n\t\t\t\terr = goa.MergeErrors(err, goa.InvalidEnumValueError(`response.tracking_details[*].status`, *e.Status, []interface{}{\"pre_transit\", \"in_transit\", \"out_for_delivery\", \"delivered\", \"available_for_pickup\", \"return_to_sender\", \"failure\", \"cancelled\", \"error\", \"unknown\"}))\n\t\t\t}\n\t\t}\n\t\tif e.TrackingLocation != nil {\n\t\t\tif err2 := e.TrackingLocation.Validate(); err2 != nil {\n\t\t\t\terr = goa.MergeErrors(err, err2)\n\t\t\t}\n\t\t}\n\t}\n\treturn\n}", "func (m *HostedPageUpdateCardRequest) Validate(formats strfmt.Registry) error {\n\tvar res []error\n\n\tif err := m.validateCardGateway(formats); err != nil {\n\t\t// prop\n\t\tres = append(res, err)\n\t}\n\n\tif len(res) > 0 {\n\t\treturn errors.CompositeValidationError(res...)\n\t}\n\treturn nil\n}", "func (mt *Vironapi) Validate() (err error) {\n\tif mt.Method == \"\" {\n\t\terr = goa.MergeErrors(err, goa.MissingAttributeError(`response`, \"method\"))\n\t}\n\tif mt.Path == \"\" {\n\t\terr = goa.MergeErrors(err, goa.MissingAttributeError(`response`, \"path\"))\n\t}\n\treturn\n}", "func (r *SignupRequest) Validate(body string) *SignupResponse {\n\tresponse := new(SignupResponse)\n\tresponse.Code = 0\n\tresponse.RequestID = strconv.FormatInt(time.Now().Unix(), 10)\n\n\terr := json.Unmarshal([]byte(body), r)\n\tif err != nil {\n\t\terrData := es.ErrRegistrationIncorrectRequest\n\t\terrData.Data = err.Error()\n\t\tresponse.Errors = append(response.Errors, errData)\n\n\t\tresponse.Code = es.StatusRegistrationError\n\t\treturn response\n\t}\n\n\tif len(r.Gender) < 1 {\n\t\tr.Gender = \"male\"\n\t}\n\n\tif !validateEmail(r.Email) {\n\t\tresponse.Errors = append(response.Errors, es.ErrRegistrationMissingEmail)\n\t\tresponse.Code = es.StatusRegistrationError\n\t}\n\n\tif !validatePassword(r.Password) {\n\t\tif len(r.Password) < 1 {\n\t\t\tr.Password = 
\"@TempPass1\" + h.RandSeq(5)\n\t\t} else {\n\t\t\tresponse.Errors = append(response.Errors, es.ErrRegistrationMissingPass)\n\t\t\tresponse.Code = es.StatusRegistrationError\n\t\t}\n\n\t}\n\n\treturn response\n}", "func (mt *ComJossemargtSaoDraft) Validate() (err error) {\n\n\tif mt.Href == \"\" {\n\t\terr = goa.MergeErrors(err, goa.MissingAttributeError(`response`, \"href\"))\n\t}\n\tif ok := goa.ValidatePattern(`[_a-zA-Z0-9\\-]+`, mt.ContestSlug); !ok {\n\t\terr = goa.MergeErrors(err, goa.InvalidPatternError(`response.contestSlug`, mt.ContestSlug, `[_a-zA-Z0-9\\-]+`))\n\t}\n\tif ok := goa.ValidatePattern(`[_a-zA-Z0-9\\-]+`, mt.TaskSlug); !ok {\n\t\terr = goa.MergeErrors(err, goa.InvalidPatternError(`response.taskSlug`, mt.TaskSlug, `[_a-zA-Z0-9\\-]+`))\n\t}\n\treturn\n}", "func (mt *ComJossemargtSaoDraft) Validate() (err error) {\n\n\tif mt.Href == \"\" {\n\t\terr = goa.MergeErrors(err, goa.MissingAttributeError(`response`, \"href\"))\n\t}\n\tif ok := goa.ValidatePattern(`[_a-zA-Z0-9\\-]+`, mt.ContestSlug); !ok {\n\t\terr = goa.MergeErrors(err, goa.InvalidPatternError(`response.contestSlug`, mt.ContestSlug, `[_a-zA-Z0-9\\-]+`))\n\t}\n\tif ok := goa.ValidatePattern(`[_a-zA-Z0-9\\-]+`, mt.TaskSlug); !ok {\n\t\terr = goa.MergeErrors(err, goa.InvalidPatternError(`response.taskSlug`, mt.TaskSlug, `[_a-zA-Z0-9\\-]+`))\n\t}\n\treturn\n}", "func (x *fastReflection_Bech32PrefixRequest) IsValid() bool {\n\treturn x != nil\n}", "func (f Downstream) validate() error {\n\tif f.BounceIntervalHour < 1 && f.BounceIntervalHour != 0 {\n\t\treturn errors.New(\"invalid downstream.bounceIntervalHour value, should >= 1\")\n\t}\n\n\tif f.BounceIntervalHour > 48 {\n\t\treturn errors.New(\"invalid downstream.bounceIntervalHour value, should <= 48(two days)\")\n\t}\n\n\tif f.NotifyMaxLimit < 10 && f.NotifyMaxLimit != 0 {\n\t\treturn errors.New(\"invalid downstream.notifyMaxLimit value, should >= 10\")\n\t}\n\n\treturn nil\n}", "func (r ResendConfirmationRequest) Validate() error {\n\treturn validation.ValidateStruct(&r,\n\t\tvalidUsername(&r.Username),\n\t)\n}", "func (profileImport *ProfileImportRequest) Validate(tx *sql.Tx) error {\n\n\tprofile := profileImport.Profile\n\n\t// Profile fields are valid\n\terrs := tovalidate.ToErrors(validation.Errors{\n\t\t\"name\": validation.Validate(profile.Name, validation.By(\n\t\t\tfunc(value interface{}) error {\n\t\t\t\tname, ok := value.(*string)\n\t\t\t\tif !ok {\n\t\t\t\t\treturn fmt.Errorf(\"wrong type, need: string, got: %T\", value)\n\t\t\t\t}\n\t\t\t\tif name == nil || *name == \"\" {\n\t\t\t\t\treturn errors.New(\"required and cannot be blank\")\n\t\t\t\t}\n\t\t\t\tif strings.Contains(*name, \" \") {\n\t\t\t\t\treturn errors.New(\"cannot contain spaces\")\n\t\t\t\t}\n\t\t\t\treturn nil\n\t\t\t},\n\t\t)),\n\t\t\"description\": validation.Validate(profile.Description, validation.Required),\n\t\t\"cdnName\": validation.Validate(profile.CDNName, validation.Required),\n\t\t\"type\": validation.Validate(profile.Type, validation.Required),\n\t})\n\n\t// Validate CDN exist\n\tif profile.CDNName != nil {\n\t\tif ok, err := CDNExistsByName(*profile.CDNName, tx); err != nil {\n\t\t\terrString := fmt.Sprintf(\"checking cdn name %v existence\", *profile.CDNName)\n\t\t\tlog.Errorf(\"%v: %v\", errString, err.Error())\n\t\t\terrs = append(errs, errors.New(errString))\n\t\t} else if !ok {\n\t\t\terrs = append(errs, fmt.Errorf(\"%v CDN does not exist\", *profile.CDNName))\n\t\t}\n\t}\n\n\t// Validate profile does not already exist\n\tif profile.Name != nil {\n\t\tif ok, err := 
ProfileExistsByName(*profile.Name, tx); err != nil {\n\t\t\terrString := fmt.Sprintf(\"checking profile name %v existence\", *profile.Name)\n\t\t\tlog.Errorf(\"%v: %v\", errString, err.Error())\n\t\t\terrs = append(errs, errors.New(errString))\n\t\t} else if ok {\n\t\t\terrs = append(errs, fmt.Errorf(\"a profile with the name \\\"%s\\\" already exists\", *profile.Name))\n\t\t}\n\t}\n\n\t// Validate all parameters\n\t// export/import does not include secure flag\n\t// default value to not flag on validation\n\tsecure := 1\n\tfor i, pp := range profileImport.Parameters {\n\t\tif ppErrs := validateProfileParamPostFields(pp.ConfigFile, pp.Name, pp.Value, &secure); len(ppErrs) > 0 {\n\t\t\tfor _, err := range ppErrs {\n\t\t\t\terrs = append(errs, errors.New(\"parameter \"+strconv.Itoa(i)+\": \"+err.Error()))\n\t\t\t}\n\t\t}\n\t}\n\n\tif len(errs) > 0 {\n\t\treturn util.JoinErrs(errs)\n\t}\n\n\treturn nil\n}", "func (pt MDTurbo) Validate() bool {\n\tif pt.Magic != 52426 {\n\t\treturn false\n\t}\n\tif pt.Partitions1[0].Start != 256 {\n\t\treturn false\n\t}\n\treturn true\n}", "func (m *RequestDTO) Validate(formats strfmt.Registry) error {\n\treturn nil\n}", "func queryValidatorCheck(expStatus sdk.BondStatus, expJailed bool, expDS *sdk.Dec, expMsd *sdk.Dec, expUnbdHght *int64) actResChecker {\n\treturn func(t *testing.T, beforeStatus, afterStatus IValidatorStatus, resultCtx *ActionResultCtx) bool {\n\t\tq := keeper.NewQuerier(resultCtx.tc.mockKeeper.Keeper)\n\t\tctx := getNewContext(resultCtx.tc.mockKeeper.MountedStore, resultCtx.tc.currentHeight)\n\n\t\tbasicParams := types.NewQueryValidatorParams(afterStatus.getValidator().OperatorAddress)\n\t\tbz, _ := amino.MarshalJSON(basicParams)\n\t\tres, err := q(ctx, []string{types.QueryValidator}, abci.RequestQuery{Data: bz})\n\t\trequire.Nil(t, err)\n\n\t\tvalidator := types.Validator{}\n\t\trequire.NoError(t, amino.UnmarshalJSON(res, &validator))\n\n\t\tb1 := assert.Equal(t, validator.GetStatus(), expStatus, validator.Standardize().String())\n\t\tb2 := assert.Equal(t, validator.IsJailed(), expJailed, validator.Standardize().String())\n\n\t\tb3 := true\n\t\tif expDS != nil {\n\t\t\tb3 = assert.Equal(t, *expDS, validator.GetDelegatorShares(), validator.Standardize().String())\n\t\t}\n\n\t\tb4 := true\n\t\tif expMsd != nil {\n\t\t\tb4 = assert.Equal(t, *expMsd, validator.GetMinSelfDelegation(), validator.Standardize().String())\n\t\t}\n\n\t\tb5 := true\n\t\tif expUnbdHght != nil {\n\t\t\tb5 = assert.Equal(t, *expUnbdHght, validator.UnbondingHeight, validator.Standardize().String())\n\t\t}\n\n\t\tb6 := assert.True(t, validator.DelegatorShares.GTE(sdk.ZeroDec()), validator)\n\t\tb7 := assert.True(t, validatorConstraintCheck(validator)(t, beforeStatus, afterStatus, resultCtx), validator)\n\n\t\treturn b1 && b2 && b3 && b4 && b5 && b6 && b7\n\t}\n}", "func (r *QueryRequest) Valid() error {\n\tif !r.OrganizationID.Valid() {\n\t\treturn &errors.Error{\n\t\t\tMsg: \"organization_id is not valid\",\n\t\t\tCode: errors.EInvalid,\n\t\t}\n\t}\n\treturn r.Authorization.Valid()\n}", "func (r *ListFilesRequest) Validate() error {\n\tif err := requireProject(r.GetProject()); err != nil {\n\t\treturn err\n\t}\n\tif err := requireCommittish(\"committish\", r.GetCommittish()); err != nil {\n\t\treturn err\n\t}\n\tif strings.HasSuffix(r.GetCommittish(), \"/\") {\n\t\treturn errors.New(\"committish must not end with /\")\n\t}\n\tif strings.HasPrefix(r.Path, \"/\") {\n\t\treturn errors.New(\"path must not start with /\")\n\t}\n\treturn nil\n}", "func (client 
RoleAssignmentScheduleRequestsClient) ValidateResponder(resp *http.Response) (result RoleAssignmentScheduleRequest, err error) {\n\terr = autorest.Respond(\n\t\tresp,\n\t\tazure.WithErrorUnlessStatusCode(http.StatusOK),\n\t\tautorest.ByUnmarshallingJSON(&result),\n\t\tautorest.ByClosing())\n\tresult.Response = autorest.Response{Response: resp}\n\treturn\n}", "func (m *ConfigureAssessmentResponse) Validate() error {\n\treturn m.validate(false)\n}", "func (mt *Vironcomponent) Validate() (err error) {\n\tif mt.Name == \"\" {\n\t\terr = goa.MergeErrors(err, goa.MissingAttributeError(`response`, \"name\"))\n\t}\n\tif mt.Style == \"\" {\n\t\terr = goa.MergeErrors(err, goa.MissingAttributeError(`response`, \"style\"))\n\t}\n\tif mt.API == nil {\n\t\terr = goa.MergeErrors(err, goa.MissingAttributeError(`response`, \"api\"))\n\t}\n\tif mt.API != nil {\n\t\tif err2 := mt.API.Validate(); err2 != nil {\n\t\t\terr = goa.MergeErrors(err, err2)\n\t\t}\n\t}\n\tfor _, e := range mt.Query {\n\t\tif e != nil {\n\t\t\tif err2 := e.Validate(); err2 != nil {\n\t\t\t\terr = goa.MergeErrors(err, err2)\n\t\t\t}\n\t\t}\n\t}\n\treturn\n}", "func (req *RestartVbucketsRequest) Validate() bool {\n\treturn validateMapping(req.GetRestartTimestamps(), req.GetKeyspaceIds())\n}", "func (c Chain) Validate() error {\n\t_, err := c.Program()\n\treturn err\n}", "func (drc *DeviceRequestCreate) check() error {\n\tif _, ok := drc.mutation.UserCode(); !ok {\n\t\treturn &ValidationError{Name: \"user_code\", err: errors.New(`db: missing required field \"DeviceRequest.user_code\"`)}\n\t}\n\tif v, ok := drc.mutation.UserCode(); ok {\n\t\tif err := devicerequest.UserCodeValidator(v); err != nil {\n\t\t\treturn &ValidationError{Name: \"user_code\", err: fmt.Errorf(`db: validator failed for field \"DeviceRequest.user_code\": %w`, err)}\n\t\t}\n\t}\n\tif _, ok := drc.mutation.DeviceCode(); !ok {\n\t\treturn &ValidationError{Name: \"device_code\", err: errors.New(`db: missing required field \"DeviceRequest.device_code\"`)}\n\t}\n\tif v, ok := drc.mutation.DeviceCode(); ok {\n\t\tif err := devicerequest.DeviceCodeValidator(v); err != nil {\n\t\t\treturn &ValidationError{Name: \"device_code\", err: fmt.Errorf(`db: validator failed for field \"DeviceRequest.device_code\": %w`, err)}\n\t\t}\n\t}\n\tif _, ok := drc.mutation.ClientID(); !ok {\n\t\treturn &ValidationError{Name: \"client_id\", err: errors.New(`db: missing required field \"DeviceRequest.client_id\"`)}\n\t}\n\tif v, ok := drc.mutation.ClientID(); ok {\n\t\tif err := devicerequest.ClientIDValidator(v); err != nil {\n\t\t\treturn &ValidationError{Name: \"client_id\", err: fmt.Errorf(`db: validator failed for field \"DeviceRequest.client_id\": %w`, err)}\n\t\t}\n\t}\n\tif _, ok := drc.mutation.ClientSecret(); !ok {\n\t\treturn &ValidationError{Name: \"client_secret\", err: errors.New(`db: missing required field \"DeviceRequest.client_secret\"`)}\n\t}\n\tif v, ok := drc.mutation.ClientSecret(); ok {\n\t\tif err := devicerequest.ClientSecretValidator(v); err != nil {\n\t\t\treturn &ValidationError{Name: \"client_secret\", err: fmt.Errorf(`db: validator failed for field \"DeviceRequest.client_secret\": %w`, err)}\n\t\t}\n\t}\n\tif _, ok := drc.mutation.Expiry(); !ok {\n\t\treturn &ValidationError{Name: \"expiry\", err: errors.New(`db: missing required field \"DeviceRequest.expiry\"`)}\n\t}\n\treturn nil\n}", "func (aru *AppointmentResultsUpdate) check() error {\n\tif v, ok := aru.mutation.CauseAppoint(); ok {\n\t\tif err := appointmentresults.CauseAppointValidator(v); err != nil 
{\n\t\t\treturn &ValidationError{Name: \"causeAppoint\", err: fmt.Errorf(\"ent: validator failed for field \\\"causeAppoint\\\": %w\", err)}\n\t\t}\n\t}\n\tif v, ok := aru.mutation.Advice(); ok {\n\t\tif err := appointmentresults.AdviceValidator(v); err != nil {\n\t\t\treturn &ValidationError{Name: \"advice\", err: fmt.Errorf(\"ent: validator failed for field \\\"advice\\\": %w\", err)}\n\t\t}\n\t}\n\tif v, ok := aru.mutation.HourBeforeAppoint(); ok {\n\t\tif err := appointmentresults.HourBeforeAppointValidator(v); err != nil {\n\t\t\treturn &ValidationError{Name: \"hourBeforeAppoint\", err: fmt.Errorf(\"ent: validator failed for field \\\"hourBeforeAppoint\\\": %w\", err)}\n\t\t}\n\t}\n\tif v, ok := aru.mutation.MinuteBeforeAppoint(); ok {\n\t\tif err := appointmentresults.MinuteBeforeAppointValidator(v); err != nil {\n\t\t\treturn &ValidationError{Name: \"minuteBeforeAppoint\", err: fmt.Errorf(\"ent: validator failed for field \\\"minuteBeforeAppoint\\\": %w\", err)}\n\t\t}\n\t}\n\treturn nil\n}", "func (m *AssessEvidenceResponse) Validate() error {\n\treturn m.validate(false)\n}", "func (p PingreqPacket) Validate() error { return nil }", "func (c Config) Validate() error {\n\tif len(c.ClientID) == 0 {\n\t\treturn fmt.Errorf(\"error: ClientID missing\")\n\t}\n\n\tif c.VodID < 1 {\n\t\treturn fmt.Errorf(\"error: VodID missing\")\n\t}\n\n\ttimePattern := `\\d+ \\d+ \\d+`\n\ttimeRegex := regexp.MustCompile(timePattern)\n\tif c.StartTime != \"start\" && !timeRegex.MatchString(c.StartTime) {\n\t\treturn fmt.Errorf(\"error: StartTime must be 'start' or in format '%s'; got '%s'\", timePattern, c.StartTime)\n\t}\n\tif c.EndTime == \"\" && c.Length == \"\" {\n\t\treturn errors.New(\"error: must specify either EndTime or Length\")\n\t}\n\tif c.Length == \"\" && c.EndTime != \"end\" && !timeRegex.MatchString(c.EndTime) {\n\t\treturn fmt.Errorf(\"error: EndTime must be 'end' or in format '%s'; got '%s'\", timePattern, c.EndTime)\n\t}\n\tif c.EndTime == \"\" && c.Length != \"full\" && !timeRegex.MatchString(c.Length) {\n\t\treturn fmt.Errorf(\"error: Length must be 'full' or in format '%s'; got '%s'\", timePattern, c.Length)\n\t}\n\n\tqualityPattern := `\\d{3,4}p[36]0`\n\tqualityRegex := regexp.MustCompile(qualityPattern)\n\tif c.Quality != \"best\" && c.Quality != \"chunked\" && !qualityRegex.MatchString(c.Quality) {\n\t\treturn fmt.Errorf(\"error: Quality must be 'best', 'chunked', or in format '%s'; got '%s'\", qualityPattern, c.Quality)\n\t}\n\n\tif c.FilePrefix != \"\" && !isValidFilename(c.FilePrefix) {\n\t\treturn fmt.Errorf(\"error: FilePrefix contains invalid characters; got '%s'\", c.FilePrefix)\n\t}\n\n\tif c.Workers < 1 {\n\t\treturn fmt.Errorf(\"error: Worker must be an integer greater than 0; got '%d'\", c.Workers)\n\t}\n\n\treturn nil\n}", "func validateRequest(logger logrus.FieldLogger, validators []Validator, r tySugRequest) bool {\n\tresult := true\n\tfor _, v := range validators {\n\t\tif vErr := v(r); vErr != nil {\n\t\t\tlogger.WithError(vErr).Error(\"Request validation failed\")\n\t\t\tresult = false\n\t\t}\n\t}\n\n\treturn result\n}" ]
[ "0.5981857", "0.5847573", "0.5823529", "0.5758432", "0.57161176", "0.56851274", "0.5593656", "0.55681103", "0.55303085", "0.5464157", "0.5461278", "0.5422691", "0.5400596", "0.53858685", "0.5366737", "0.53252876", "0.5324508", "0.5321295", "0.5315503", "0.5315133", "0.5300573", "0.52858037", "0.5273122", "0.5272048", "0.5270681", "0.52674043", "0.5265728", "0.5263977", "0.5253185", "0.5243132", "0.52414364", "0.5232898", "0.5224189", "0.52138275", "0.52130264", "0.5212228", "0.5211179", "0.520578", "0.52010226", "0.51970184", "0.51952726", "0.5191684", "0.5180815", "0.5178534", "0.51784843", "0.5175924", "0.5167366", "0.51645327", "0.51626647", "0.5160887", "0.51599157", "0.5156849", "0.5156488", "0.5153374", "0.51502085", "0.5149454", "0.51476604", "0.5145796", "0.5137264", "0.5135127", "0.51331604", "0.5132287", "0.5132033", "0.5131705", "0.5130857", "0.51306593", "0.5123962", "0.512185", "0.510523", "0.51030415", "0.5101107", "0.5097365", "0.50964797", "0.5086334", "0.5085427", "0.50774384", "0.50762445", "0.5063929", "0.50636137", "0.50636137", "0.5061554", "0.50602674", "0.50594896", "0.5057059", "0.50552213", "0.5045918", "0.50427616", "0.5033658", "0.503158", "0.503111", "0.5029972", "0.502737", "0.502286", "0.5020542", "0.50167394", "0.50155705", "0.50147706", "0.501226", "0.50073457", "0.5007212" ]
0.69006234
0
ContextValidate validate this sovren resume bimetric match request based on the context it is used
func (m *SovrenResumeBimetricMatchRequest) ContextValidate(ctx context.Context, formats strfmt.Registry) error { var res []error if err := m.contextValidatePreferredCategoryWeights(ctx, formats); err != nil { res = append(res, err) } if err := m.contextValidateSettings(ctx, formats); err != nil { res = append(res, err) } if err := m.contextValidateSourceResume(ctx, formats); err != nil { res = append(res, err) } if err := m.contextValidateTargetJobs(ctx, formats); err != nil { res = append(res, err) } if err := m.contextValidateTargetResumes(ctx, formats); err != nil { res = append(res, err) } if len(res) > 0 { return errors.CompositeValidationError(res...) } return nil }
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "func (m *FalconxMITREAttackParent) ContextValidate(ctx context.Context, formats strfmt.Registry) error {\n\treturn nil\n}", "func (o *AcceptInvitationBody) ContextValidate(ctx context.Context, formats strfmt.Registry) error {\n\treturn nil\n}", "func (m *AuthRequest) ContextValidate(ctx context.Context, formats strfmt.Registry) error {\n\treturn nil\n}", "func (m *executionResponse) ContextValidate(ctx context.Context, formats strfmt.Registry) error {\n\treturn nil\n}", "func (m *IngressVip) ContextValidate(ctx context.Context, formats strfmt.Registry) error {\n\tvar res []error\n\n\tif err := m.contextValidateIP(ctx, formats); err != nil {\n\t\tres = append(res, err)\n\t}\n\n\tif err := m.contextValidateVerification(ctx, formats); err != nil {\n\t\tres = append(res, err)\n\t}\n\n\tif len(res) > 0 {\n\t\treturn errors.CompositeValidationError(res...)\n\t}\n\treturn nil\n}", "func (o *PostSilencesOKBody) ContextValidate(ctx context.Context, formats strfmt.Registry) error {\n\treturn nil\n}", "func (o *GetSearchbyIDOKBody) ContextValidate(ctx context.Context, formats strfmt.Registry) error {\n\treturn nil\n}", "func (m *RedeemLoyaltyRewardRequest) ContextValidate(ctx context.Context, formats strfmt.Registry) error {\n\treturn nil\n}", "func (o *PostPunchInOKBody) ContextValidate(ctx context.Context, formats strfmt.Registry) error {\n\tvar res []error\n\n\tif err := o.contextValidateResponse(ctx, formats); err != nil {\n\t\tres = append(res, err)\n\t}\n\n\tif len(res) > 0 {\n\t\treturn errors.CompositeValidationError(res...)\n\t}\n\treturn nil\n}", "func (o *GetInteractionsInteractionFidOKBody) ContextValidate(ctx context.Context, formats strfmt.Registry) error {\n\tvar res []error\n\n\t// validation for a type composition with models.Envelope\n\tif err := o.Envelope.ContextValidate(ctx, formats); err != nil {\n\t\tres = append(res, err)\n\t}\n\n\tif err := o.contextValidateData(ctx, formats); err != nil {\n\t\tres = append(res, err)\n\t}\n\n\tif len(res) > 0 {\n\t\treturn errors.CompositeValidationError(res...)\n\t}\n\treturn nil\n}", "func (m *VMTemplateWhereInput) ContextValidate(ctx context.Context, formats strfmt.Registry) error {\n\tvar res []error\n\n\tif err := m.contextValidateAND(ctx, formats); err != nil {\n\t\tres = append(res, err)\n\t}\n\n\tif err := m.contextValidateNOT(ctx, formats); err != nil {\n\t\tres = append(res, err)\n\t}\n\n\tif err := m.contextValidateOR(ctx, formats); err != nil {\n\t\tres = append(res, err)\n\t}\n\n\tif err := m.contextValidateClockOffset(ctx, formats); err != nil {\n\t\tres = append(res, err)\n\t}\n\n\tif err := m.contextValidateClockOffsetIn(ctx, formats); err != nil {\n\t\tres = append(res, err)\n\t}\n\n\tif err := m.contextValidateClockOffsetNot(ctx, formats); err != nil {\n\t\tres = append(res, err)\n\t}\n\n\tif err := m.contextValidateClockOffsetNotIn(ctx, formats); err != nil {\n\t\tres = append(res, err)\n\t}\n\n\tif err := m.contextValidateCluster(ctx, formats); err != nil {\n\t\tres = append(res, err)\n\t}\n\n\tif err := m.contextValidateContentLibraryVMTemplate(ctx, formats); err != nil {\n\t\tres = append(res, err)\n\t}\n\n\tif err := m.contextValidateEntityAsyncStatus(ctx, formats); err != nil {\n\t\tres = append(res, err)\n\t}\n\n\tif err := m.contextValidateEntityAsyncStatusIn(ctx, formats); err != nil {\n\t\tres = append(res, err)\n\t}\n\n\tif err := m.contextValidateEntityAsyncStatusNot(ctx, formats); err != nil {\n\t\tres = append(res, err)\n\t}\n\n\tif err := m.contextValidateEntityAsyncStatusNotIn(ctx, formats); err != nil 
{\n\t\tres = append(res, err)\n\t}\n\n\tif err := m.contextValidateFirmware(ctx, formats); err != nil {\n\t\tres = append(res, err)\n\t}\n\n\tif err := m.contextValidateFirmwareIn(ctx, formats); err != nil {\n\t\tres = append(res, err)\n\t}\n\n\tif err := m.contextValidateFirmwareNot(ctx, formats); err != nil {\n\t\tres = append(res, err)\n\t}\n\n\tif err := m.contextValidateFirmwareNotIn(ctx, formats); err != nil {\n\t\tres = append(res, err)\n\t}\n\n\tif err := m.contextValidateIoPolicy(ctx, formats); err != nil {\n\t\tres = append(res, err)\n\t}\n\n\tif err := m.contextValidateIoPolicyIn(ctx, formats); err != nil {\n\t\tres = append(res, err)\n\t}\n\n\tif err := m.contextValidateIoPolicyNot(ctx, formats); err != nil {\n\t\tres = append(res, err)\n\t}\n\n\tif err := m.contextValidateIoPolicyNotIn(ctx, formats); err != nil {\n\t\tres = append(res, err)\n\t}\n\n\tif err := m.contextValidateLabelsEvery(ctx, formats); err != nil {\n\t\tres = append(res, err)\n\t}\n\n\tif err := m.contextValidateLabelsNone(ctx, formats); err != nil {\n\t\tres = append(res, err)\n\t}\n\n\tif err := m.contextValidateLabelsSome(ctx, formats); err != nil {\n\t\tres = append(res, err)\n\t}\n\n\tif err := m.contextValidateMaxBandwidthPolicy(ctx, formats); err != nil {\n\t\tres = append(res, err)\n\t}\n\n\tif err := m.contextValidateMaxBandwidthPolicyIn(ctx, formats); err != nil {\n\t\tres = append(res, err)\n\t}\n\n\tif err := m.contextValidateMaxBandwidthPolicyNot(ctx, formats); err != nil {\n\t\tres = append(res, err)\n\t}\n\n\tif err := m.contextValidateMaxBandwidthPolicyNotIn(ctx, formats); err != nil {\n\t\tres = append(res, err)\n\t}\n\n\tif err := m.contextValidateMaxIopsPolicy(ctx, formats); err != nil {\n\t\tres = append(res, err)\n\t}\n\n\tif err := m.contextValidateMaxIopsPolicyIn(ctx, formats); err != nil {\n\t\tres = append(res, err)\n\t}\n\n\tif err := m.contextValidateMaxIopsPolicyNot(ctx, formats); err != nil {\n\t\tres = append(res, err)\n\t}\n\n\tif err := m.contextValidateMaxIopsPolicyNotIn(ctx, formats); err != nil {\n\t\tres = append(res, err)\n\t}\n\n\tif len(res) > 0 {\n\t\treturn errors.CompositeValidationError(res...)\n\t}\n\treturn nil\n}", "func (m *Router) ContextValidate(ctx context.Context, formats strfmt.Registry) error {\n\treturn nil\n}", "func (o *TogglePacketGeneratorsBody) ContextValidate(ctx context.Context, formats strfmt.Registry) error {\n\treturn nil\n}", "func (m *APIPatternV1) ContextValidate(ctx context.Context, formats strfmt.Registry) error {\n\treturn nil\n}", "func (m *PTXServiceDTOBusSpecificationV2N1Estimate) ContextValidate(ctx context.Context, formats strfmt.Registry) error {\n\treturn nil\n}", "func (o *ScanProductsBody) ContextValidate(ctx context.Context, formats strfmt.Registry) error {\n\treturn nil\n}", "func (o *UtilTestBody) ContextValidate(ctx context.Context, formats strfmt.Registry) error {\n\treturn nil\n}", "func (m HTTPMethod) ContextValidate(ctx context.Context, formats strfmt.Registry) error {\n\treturn nil\n}", "func (m *WireguardSpec) ContextValidate(ctx context.Context, formats strfmt.Registry) error {\n\treturn nil\n}", "func (m Validstringresponse) ContextValidate(ctx context.Context, formats strfmt.Registry) error {\n\treturn nil\n}", "func (m *DataResponseV1) ContextValidate(ctx context.Context, formats strfmt.Registry) error {\n\tvar res []error\n\n\tif err := m.contextValidateArchive(ctx, formats); err != nil {\n\t\tres = append(res, err)\n\t}\n\n\tif err := m.contextValidateCurrent(ctx, formats); err != nil {\n\t\tres = append(res, 
err)\n\t}\n\n\tif len(res) > 0 {\n\t\treturn errors.CompositeValidationError(res...)\n\t}\n\treturn nil\n}", "func (o *UtilTestOKBodyResponse) ContextValidate(ctx context.Context, formats strfmt.Registry) error {\n\treturn nil\n}", "func (m FreeAddressesRequest) ContextValidate(ctx context.Context, formats strfmt.Registry) error {\n\treturn nil\n}", "func (o *GetMeOKBody) ContextValidate(ctx context.Context, formats strfmt.Registry) error {\n\tvar res []error\n\n\t// validation for a type composition with models.Envelope\n\tif err := o.Envelope.ContextValidate(ctx, formats); err != nil {\n\t\tres = append(res, err)\n\t}\n\n\tif err := o.contextValidateData(ctx, formats); err != nil {\n\t\tres = append(res, err)\n\t}\n\n\tif len(res) > 0 {\n\t\treturn errors.CompositeValidationError(res...)\n\t}\n\treturn nil\n}", "func (m *UniverseResourceDetails) ContextValidate(ctx context.Context, formats strfmt.Registry) error {\n\treturn nil\n}", "func (o *PostPunchInOKBodyResponse) ContextValidate(ctx context.Context, formats strfmt.Registry) error {\n\tvar res []error\n\n\tif err := o.contextValidateAttendance(ctx, formats); err != nil {\n\t\tres = append(res, err)\n\t}\n\n\tif len(res) > 0 {\n\t\treturn errors.CompositeValidationError(res...)\n\t}\n\treturn nil\n}", "func (m *MinGasPriceResponse) ContextValidate(ctx context.Context, formats strfmt.Registry) error {\n\treturn nil\n}", "func (m *CloudInitNetWorkRoute) ContextValidate(ctx context.Context, formats strfmt.Registry) error {\n\treturn nil\n}", "func (m *TrafficDefinition) ContextValidate(ctx context.Context, formats strfmt.Registry) error {\n\tvar res []error\n\n\tif err := m.contextValidateLength(ctx, formats); err != nil {\n\t\tres = append(res, err)\n\t}\n\n\tif err := m.contextValidatePacket(ctx, formats); err != nil {\n\t\tres = append(res, err)\n\t}\n\n\tif err := m.contextValidateSignature(ctx, formats); err != nil {\n\t\tres = append(res, err)\n\t}\n\n\tif len(res) > 0 {\n\t\treturn errors.CompositeValidationError(res...)\n\t}\n\treturn nil\n}", "func (m *Zzz) ContextValidate(ctx context.Context, formats strfmt.Registry) error {\n\tvar res []error\n\n\tif err := m.contextValidateMeta(ctx, formats); err != nil {\n\t\tres = append(res, err)\n\t}\n\n\tif err := m.contextValidateNullableMeta(ctx, formats); err != nil {\n\t\tres = append(res, err)\n\t}\n\n\tif err := m.contextValidateReqMeta(ctx, formats); err != nil {\n\t\tres = append(res, err)\n\t}\n\n\tif len(res) > 0 {\n\t\treturn errors.CompositeValidationError(res...)\n\t}\n\treturn nil\n}", "func (m PinPolicy) ContextValidate(ctx context.Context, formats strfmt.Registry) error {\n\treturn nil\n}", "func (o *GetPresetChatSessionIDOKBody) ContextValidate(ctx context.Context, formats strfmt.Registry) error {\n\tvar res []error\n\n\t// validation for a type composition with models.Envelope\n\tif err := o.Envelope.ContextValidate(ctx, formats); err != nil {\n\t\tres = append(res, err)\n\t}\n\n\tif err := o.contextValidateData(ctx, formats); err != nil {\n\t\tres = append(res, err)\n\t}\n\n\tif len(res) > 0 {\n\t\treturn errors.CompositeValidationError(res...)\n\t}\n\treturn nil\n}", "func (m *RetrieveCatalogObjectRequest) ContextValidate(ctx context.Context, formats strfmt.Registry) error {\n\treturn nil\n}", "func (m *Constraint) ContextValidate(ctx context.Context, formats strfmt.Registry) error {\n\treturn nil\n}", "func (m *SentEmail) ContextValidate(ctx context.Context, formats strfmt.Registry) error {\n\treturn nil\n}", "func (m EndpointState) ContextValidate(ctx 
context.Context, formats strfmt.Registry) error {\n\treturn nil\n}", "func (m VipType) ContextValidate(ctx context.Context, formats strfmt.Registry) error {\n\treturn nil\n}", "func (m VMStatus) ContextValidate(ctx context.Context, formats strfmt.Registry) error {\n\treturn nil\n}", "func (o *GetPastUsageBody) ContextValidate(ctx context.Context, formats strfmt.Registry) error {\n\treturn nil\n}", "func (m *PaymentRequest) ContextValidate(ctx context.Context, formats strfmt.Registry) error {\n\tvar res []error\n\n\tif err := m.contextValidateID(ctx, formats); err != nil {\n\t\tres = append(res, err)\n\t}\n\n\tif err := m.contextValidateMoveTaskOrder(ctx, formats); err != nil {\n\t\tres = append(res, err)\n\t}\n\n\tif err := m.contextValidatePaymentRequestNumber(ctx, formats); err != nil {\n\t\tres = append(res, err)\n\t}\n\n\tif err := m.contextValidateProofOfServiceDocs(ctx, formats); err != nil {\n\t\tres = append(res, err)\n\t}\n\n\tif err := m.contextValidateRecalculationOfPaymentRequestID(ctx, formats); err != nil {\n\t\tres = append(res, err)\n\t}\n\n\tif err := m.contextValidateServiceItems(ctx, formats); err != nil {\n\t\tres = append(res, err)\n\t}\n\n\tif err := m.contextValidateStatus(ctx, formats); err != nil {\n\t\tres = append(res, err)\n\t}\n\n\tif len(res) > 0 {\n\t\treturn errors.CompositeValidationError(res...)\n\t}\n\treturn nil\n}", "func (o *GetInteractionsInviteInviteCodeStatusOKBody) ContextValidate(ctx context.Context, formats strfmt.Registry) error {\n\tvar res []error\n\n\t// validation for a type composition with models.Envelope\n\tif err := o.Envelope.ContextValidate(ctx, formats); err != nil {\n\t\tres = append(res, err)\n\t}\n\n\tif err := o.contextValidateData(ctx, formats); err != nil {\n\t\tres = append(res, err)\n\t}\n\n\tif len(res) > 0 {\n\t\treturn errors.CompositeValidationError(res...)\n\t}\n\treturn nil\n}", "func (m *Gy) ContextValidate(ctx context.Context, formats strfmt.Registry) error {\n\tvar res []error\n\n\tif err := m.contextValidateServer(ctx, formats); err != nil {\n\t\tres = append(res, err)\n\t}\n\n\tif err := m.contextValidateServers(ctx, formats); err != nil {\n\t\tres = append(res, err)\n\t}\n\n\tif err := m.contextValidateVirtualApnRules(ctx, formats); err != nil {\n\t\tres = append(res, err)\n\t}\n\n\tif len(res) > 0 {\n\t\treturn errors.CompositeValidationError(res...)\n\t}\n\treturn nil\n}", "func (m PolicyMode) ContextValidate(ctx context.Context, formats strfmt.Registry) error {\n\treturn nil\n}", "func (o *PostWalletSiafundsOKBody) ContextValidate(ctx context.Context, formats strfmt.Registry) error {\n\treturn nil\n}", "func (m VMFirmware) ContextValidate(ctx context.Context, formats strfmt.Registry) error {\n\treturn nil\n}", "func (m TenderCardDetailsEntryMethod) ContextValidate(ctx context.Context, formats strfmt.Registry) error {\n\treturn nil\n}", "func (m *EnableTotpAuth) ContextValidate(ctx context.Context, formats strfmt.Registry) error {\n\treturn nil\n}", "func (m *RekorVersion) ContextValidate(ctx context.Context, formats strfmt.Registry) error {\n\treturn nil\n}", "func (o *GetHelloOKBody) ContextValidate(ctx context.Context, formats strfmt.Registry) error {\n\treturn nil\n}", "func (o *GetRelationTuplesInternalServerErrorBody) ContextValidate(ctx context.Context, formats strfmt.Registry) error {\n\treturn nil\n}", "func (m IPSECPolicyAuthentication) ContextValidate(ctx context.Context, formats strfmt.Registry) error {\n\treturn nil\n}", "func (m TenderCardDetailsStatus) ContextValidate(ctx context.Context, formats 
strfmt.Registry) error {\n\treturn nil\n}", "func (m *TMCEndpointProperties) ContextValidate(ctx context.Context, formats strfmt.Registry) error {\n\treturn nil\n}", "func (m *TCPLbDetail) ContextValidate(ctx context.Context, formats strfmt.Registry) error {\n\tvar res []error\n\n\tif err := m.contextValidateAcceptorLoopGroup(ctx, formats); err != nil {\n\t\tres = append(res, err)\n\t}\n\n\tif err := m.contextValidateBackend(ctx, formats); err != nil {\n\t\tres = append(res, err)\n\t}\n\n\tif err := m.contextValidateListOfCertKey(ctx, formats); err != nil {\n\t\tres = append(res, err)\n\t}\n\n\tif err := m.contextValidateProtocol(ctx, formats); err != nil {\n\t\tres = append(res, err)\n\t}\n\n\tif err := m.contextValidateSecurityGroup(ctx, formats); err != nil {\n\t\tres = append(res, err)\n\t}\n\n\tif err := m.contextValidateWorkerLoopGroup(ctx, formats); err != nil {\n\t\tres = append(res, err)\n\t}\n\n\tif len(res) > 0 {\n\t\treturn errors.CompositeValidationError(res...)\n\t}\n\treturn nil\n}", "func (m *IgnoredValidations) ContextValidate(ctx context.Context, formats strfmt.Registry) error {\n\treturn nil\n}", "func (m *SnippetDTO) ContextValidate(ctx context.Context, formats strfmt.Registry) error {\n\tvar res []error\n\n\tif err := m.contextValidateConnections(ctx, formats); err != nil {\n\t\tres = append(res, err)\n\t}\n\n\tif err := m.contextValidateFunnels(ctx, formats); err != nil {\n\t\tres = append(res, err)\n\t}\n\n\tif err := m.contextValidateInputPorts(ctx, formats); err != nil {\n\t\tres = append(res, err)\n\t}\n\n\tif err := m.contextValidateLabels(ctx, formats); err != nil {\n\t\tres = append(res, err)\n\t}\n\n\tif err := m.contextValidateOutputPorts(ctx, formats); err != nil {\n\t\tres = append(res, err)\n\t}\n\n\tif err := m.contextValidateProcessGroups(ctx, formats); err != nil {\n\t\tres = append(res, err)\n\t}\n\n\tif err := m.contextValidateProcessors(ctx, formats); err != nil {\n\t\tres = append(res, err)\n\t}\n\n\tif err := m.contextValidateRemoteProcessGroups(ctx, formats); err != nil {\n\t\tres = append(res, err)\n\t}\n\n\tif len(res) > 0 {\n\t\treturn errors.CompositeValidationError(res...)\n\t}\n\treturn nil\n}", "func (o *DeleteVMTempNotFoundBody) ContextValidate(ctx context.Context, formats strfmt.Registry) error {\n\treturn nil\n}", "func (m *ParsedOVF) ContextValidate(ctx context.Context, formats strfmt.Registry) error {\n\tvar res []error\n\n\tif err := m.contextValidateCPU(ctx, formats); err != nil {\n\t\tres = append(res, err)\n\t}\n\n\tif err := m.contextValidateDisks(ctx, formats); err != nil {\n\t\tres = append(res, err)\n\t}\n\n\tif err := m.contextValidateFirmware(ctx, formats); err != nil {\n\t\tres = append(res, err)\n\t}\n\n\tif err := m.contextValidateNics(ctx, formats); err != nil {\n\t\tres = append(res, err)\n\t}\n\n\tif len(res) > 0 {\n\t\treturn errors.CompositeValidationError(res...)\n\t}\n\treturn nil\n}", "func (o *UtilTestOKBody) ContextValidate(ctx context.Context, formats strfmt.Registry) error {\n\tvar res []error\n\n\tif err := o.contextValidateResponse(ctx, formats); err != nil {\n\t\tres = append(res, err)\n\t}\n\n\tif len(res) > 0 {\n\t\treturn errors.CompositeValidationError(res...)\n\t}\n\treturn nil\n}", "func (m *JumboMtu) ContextValidate(ctx context.Context, formats strfmt.Registry) error {\n\treturn nil\n}", "func (m ExcludeStrategy) ContextValidate(ctx context.Context, formats strfmt.Registry) error {\n\treturn nil\n}", "func (m *PostUpdatePushTokenPayload) ContextValidate(ctx context.Context, formats strfmt.Registry) 
error {\n\treturn nil\n}", "func (o *DeleteVMTempOKBody) ContextValidate(ctx context.Context, formats strfmt.Registry) error {\n\treturn nil\n}", "func (m *ConsistencyProof) ContextValidate(ctx context.Context, formats strfmt.Registry) error {\n\treturn nil\n}", "func (m DNSServers1) ContextValidate(ctx context.Context, formats strfmt.Registry) error {\n\treturn nil\n}", "func (m *V1Validation) ContextValidate(ctx context.Context, formats strfmt.Registry) error {\n\treturn nil\n}", "func (m *JobJobFilament) ContextValidate(ctx context.Context, formats strfmt.Registry) error {\n\treturn nil\n}", "func (m *GetUserInfoResponse) ContextValidate(ctx context.Context, formats strfmt.Registry) error {\n\treturn nil\n}", "func (m *VlanVds) ContextValidate(ctx context.Context, formats strfmt.Registry) error {\n\treturn nil\n}", "func (m *AzureKeyVaultKey) ContextValidate(ctx context.Context, formats strfmt.Registry) error {\n\tvar res []error\n\n\tif err := m.contextValidateScope(ctx, formats); err != nil {\n\t\tres = append(res, err)\n\t}\n\n\tif err := m.contextValidateSvm(ctx, formats); err != nil {\n\t\tres = append(res, err)\n\t}\n\n\tif len(res) > 0 {\n\t\treturn errors.CompositeValidationError(res...)\n\t}\n\treturn nil\n}", "func (m *ListInvoicesRequest) ContextValidate(ctx context.Context, formats strfmt.Registry) error {\n\treturn nil\n}", "func (tmpl *FixtureTemplate) ValidateContext(context *TemplateBuildContext) *axerror.AXError {\n\t// for each action make sure template exists\n\tif tmpl.Actions != nil {\n\t\t// Build up a scope of fixture attributes which we will use to validate any %%attributes.name%% usages in the action parameters\n\t\tscopedParams := make(paramMap)\n\t\tattributeNames := map[string]bool{}\n\t\tfor k := range FixtureAttributeReservedNamesMap {\n\t\t\tattributeNames[k] = true\n\t\t\tp := param{\n\t\t\t\tname: fmt.Sprintf(\"%s.%s\", KeywordAttributes, k),\n\t\t\t\tparamType: paramTypeString,\n\t\t\t}\n\t\t\tscopedParams[p.name] = p\n\t\t}\n\t\tfor k := range tmpl.Attributes {\n\t\t\tattributeNames[k] = true\n\t\t\tp := param{\n\t\t\t\tname: fmt.Sprintf(\"%s.%s\", KeywordAttributes, k),\n\t\t\t\tparamType: paramTypeString,\n\t\t\t}\n\t\t\tscopedParams[p.name] = p\n\t\t}\n\n\t\tfor actionName, action := range tmpl.Actions {\n\t\t\tst, exists := context.Templates[action.Template]\n\t\t\tif !exists {\n\t\t\t\t// template should exist\n\t\t\t\treturn axerror.ERR_API_INVALID_PARAM.NewWithMessagef(\"actions.%s template %s does not exist\", actionName, action.Template)\n\t\t\t}\n\t\t\trcvrInputs := st.GetInputs()\n\t\t\tswitch st.GetType() {\n\t\t\tcase TemplateTypeWorkflow, TemplateTypeContainer:\n\t\t\t\tbreak\n\t\t\tdefault:\n\t\t\t\treturn axerror.ERR_API_INVALID_PARAM.NewWithMessagef(\"actions.%s template %s must be of type container or workflow\", actionName, action.Template)\n\t\t\t}\n\t\t\tvar axErr *axerror.AXError\n\t\t\tif actionName == \"create\" || actionName == \"delete\" {\n\t\t\t\t// for create and delete actions, all reciever params need to be resolved because these actions cannot accept inputs\n\t\t\t\t// since we trigger these actions automatically on fixture create/delete.\n\t\t\t\taxErr = validateReceiverParams(st.GetName(), rcvrInputs, action.Arguments, scopedParams)\n\t\t\t} else {\n\t\t\t\t// otherwise, we just need to check the parameter type matches. 
unresolved attributes will become UI input parameters\n\t\t\t\t_, axErr = validateReceiverParamsPartial(st.GetName(), rcvrInputs, action.Arguments, scopedParams)\n\t\t\t}\n\t\t\tif axErr != nil {\n\t\t\t\treturn axerror.ERR_API_INVALID_PARAM.NewWithMessagef(\"actions.%s %v\", actionName, axErr)\n\t\t\t}\n\t\t}\n\t}\n\treturn nil\n}", "func (m GatewayName) ContextValidate(ctx context.Context, formats strfmt.Registry) error {\n\treturn nil\n}", "func (m *CifsServiceInlineSvm) ContextValidate(ctx context.Context, formats strfmt.Registry) error {\n\tvar res []error\n\n\tif err := m.contextValidateLinks(ctx, formats); err != nil {\n\t\tres = append(res, err)\n\t}\n\n\tif len(res) > 0 {\n\t\treturn errors.CompositeValidationError(res...)\n\t}\n\treturn nil\n}", "func (o *GetVersionOKBody) ContextValidate(ctx context.Context, formats strfmt.Registry) error {\n\tvar res []error\n\n\t// validation for a type composition with models.Envelope\n\tif err := o.Envelope.ContextValidate(ctx, formats); err != nil {\n\t\tres = append(res, err)\n\t}\n\n\tif len(res) > 0 {\n\t\treturn errors.CompositeValidationError(res...)\n\t}\n\treturn nil\n}", "func (m *IsolationPolicyWhereInput) ContextValidate(ctx context.Context, formats strfmt.Registry) error {\n\tvar res []error\n\n\tif err := m.contextValidateAND(ctx, formats); err != nil {\n\t\tres = append(res, err)\n\t}\n\n\tif err := m.contextValidateNOT(ctx, formats); err != nil {\n\t\tres = append(res, err)\n\t}\n\n\tif err := m.contextValidateOR(ctx, formats); err != nil {\n\t\tres = append(res, err)\n\t}\n\n\tif err := m.contextValidateEverouteCluster(ctx, formats); err != nil {\n\t\tres = append(res, err)\n\t}\n\n\tif err := m.contextValidateLabelsEvery(ctx, formats); err != nil {\n\t\tres = append(res, err)\n\t}\n\n\tif err := m.contextValidateLabelsNone(ctx, formats); err != nil {\n\t\tres = append(res, err)\n\t}\n\n\tif err := m.contextValidateLabelsSome(ctx, formats); err != nil {\n\t\tres = append(res, err)\n\t}\n\n\tif err := m.contextValidateMode(ctx, formats); err != nil {\n\t\tres = append(res, err)\n\t}\n\n\tif err := m.contextValidateModeIn(ctx, formats); err != nil {\n\t\tres = append(res, err)\n\t}\n\n\tif err := m.contextValidateModeNot(ctx, formats); err != nil {\n\t\tres = append(res, err)\n\t}\n\n\tif err := m.contextValidateModeNotIn(ctx, formats); err != nil {\n\t\tres = append(res, err)\n\t}\n\n\tif err := m.contextValidateVM(ctx, formats); err != nil {\n\t\tres = append(res, err)\n\t}\n\n\tif len(res) > 0 {\n\t\treturn errors.CompositeValidationError(res...)\n\t}\n\treturn nil\n}", "func (o *PostEntitiesEntityFidAttachmentsUploadURLOKBody) ContextValidate(ctx context.Context, formats strfmt.Registry) error {\n\tvar res []error\n\n\t// validation for a type composition with models.Envelope\n\tif err := o.Envelope.ContextValidate(ctx, formats); err != nil {\n\t\tres = append(res, err)\n\t}\n\n\tif err := o.contextValidateData(ctx, formats); err != nil {\n\t\tres = append(res, err)\n\t}\n\n\tif len(res) > 0 {\n\t\treturn errors.CompositeValidationError(res...)\n\t}\n\treturn nil\n}", "func (m *VersionedFlow) ContextValidate(ctx context.Context, formats strfmt.Registry) error {\n\tvar res []error\n\n\tif err := m.contextValidateBucketName(ctx, formats); err != nil {\n\t\tres = append(res, err)\n\t}\n\n\tif err := m.contextValidateCreatedTimestamp(ctx, formats); err != nil {\n\t\tres = append(res, err)\n\t}\n\n\tif err := m.contextValidateIdentifier(ctx, formats); err != nil {\n\t\tres = append(res, err)\n\t}\n\n\tif err := 
m.contextValidateLink(ctx, formats); err != nil {\n\t\tres = append(res, err)\n\t}\n\n\tif err := m.contextValidateModifiedTimestamp(ctx, formats); err != nil {\n\t\tres = append(res, err)\n\t}\n\n\tif err := m.contextValidatePermissions(ctx, formats); err != nil {\n\t\tres = append(res, err)\n\t}\n\n\tif err := m.contextValidateRevision(ctx, formats); err != nil {\n\t\tres = append(res, err)\n\t}\n\n\tif err := m.contextValidateVersionCount(ctx, formats); err != nil {\n\t\tres = append(res, err)\n\t}\n\n\tif len(res) > 0 {\n\t\treturn errors.CompositeValidationError(res...)\n\t}\n\treturn nil\n}", "func (o *GetLighthouseOKBody) ContextValidate(ctx context.Context, formats strfmt.Registry) error {\n\treturn nil\n}", "func (m *APIIndicatorsReportRequest) ContextValidate(ctx context.Context, formats strfmt.Registry) error {\n\tvar res []error\n\n\tif err := m.contextValidateSearch(ctx, formats); err != nil {\n\t\tres = append(res, err)\n\t}\n\n\tif len(res) > 0 {\n\t\treturn errors.CompositeValidationError(res...)\n\t}\n\treturn nil\n}", "func (m *ConfirmDownloadPayload) ContextValidate(ctx context.Context, formats strfmt.Registry) error {\n\treturn nil\n}", "func (m *KibanaDeeplink) ContextValidate(ctx context.Context, formats strfmt.Registry) error {\n\treturn nil\n}", "func (m *FalconxScriptCall) ContextValidate(ctx context.Context, formats strfmt.Registry) error {\n\tvar res []error\n\n\tif err := m.contextValidateParameters(ctx, formats); err != nil {\n\t\tres = append(res, err)\n\t}\n\n\tif len(res) > 0 {\n\t\treturn errors.CompositeValidationError(res...)\n\t}\n\treturn nil\n}", "func (m *AppSearchPlanInfo) ContextValidate(ctx context.Context, formats strfmt.Registry) error {\n\tvar res []error\n\n\tif err := m.contextValidateError(ctx, formats); err != nil {\n\t\tres = append(res, err)\n\t}\n\n\tif err := m.contextValidatePlan(ctx, formats); err != nil {\n\t\tres = append(res, err)\n\t}\n\n\tif err := m.contextValidatePlanAttemptLog(ctx, formats); err != nil {\n\t\tres = append(res, err)\n\t}\n\n\tif err := m.contextValidateSource(ctx, formats); err != nil {\n\t\tres = append(res, err)\n\t}\n\n\tif err := m.contextValidateWarnings(ctx, formats); err != nil {\n\t\tres = append(res, err)\n\t}\n\n\tif len(res) > 0 {\n\t\treturn errors.CompositeValidationError(res...)\n\t}\n\treturn nil\n}", "func (m *MetroclusterInlineMediator) ContextValidate(ctx context.Context, formats strfmt.Registry) error {\n\tvar res []error\n\n\tif err := m.contextValidateDrGroup(ctx, formats); err != nil {\n\t\tres = append(res, err)\n\t}\n\n\tif err := m.contextValidatePeerCluster(ctx, formats); err != nil {\n\t\tres = append(res, err)\n\t}\n\n\tif err := m.contextValidatePeerMediatorConnectivity(ctx, formats); err != nil {\n\t\tres = append(res, err)\n\t}\n\n\tif err := m.contextValidateReachable(ctx, formats); err != nil {\n\t\tres = append(res, err)\n\t}\n\n\tif err := m.contextValidateUUID(ctx, formats); err != nil {\n\t\tres = append(res, err)\n\t}\n\n\tif len(res) > 0 {\n\t\treturn errors.CompositeValidationError(res...)\n\t}\n\treturn nil\n}", "func (o *PostOauth2TokenOKBody) ContextValidate(ctx context.Context, formats strfmt.Registry) error {\n\treturn nil\n}", "func (m *DomainAPIVulnerabilityV2) ContextValidate(ctx context.Context, formats strfmt.Registry) error {\n\tvar res []error\n\n\tif err := m.contextValidateApp(ctx, formats); err != nil {\n\t\tres = append(res, err)\n\t}\n\n\tif err := m.contextValidateApps(ctx, formats); err != nil {\n\t\tres = append(res, err)\n\t}\n\n\tif err := 
m.contextValidateCve(ctx, formats); err != nil {\n\t\tres = append(res, err)\n\t}\n\n\tif err := m.contextValidateHostInfo(ctx, formats); err != nil {\n\t\tres = append(res, err)\n\t}\n\n\tif err := m.contextValidateRemediation(ctx, formats); err != nil {\n\t\tres = append(res, err)\n\t}\n\n\tif len(res) > 0 {\n\t\treturn errors.CompositeValidationError(res...)\n\t}\n\treturn nil\n}", "func (m *EncryptionAtRestConfig) ContextValidate(ctx context.Context, formats strfmt.Registry) error {\n\treturn nil\n}", "func (m *UserUpdatePayload) ContextValidate(ctx context.Context, formats strfmt.Registry) error {\n\treturn nil\n}", "func (m MXJobMode) ContextValidate(ctx context.Context, formats strfmt.Registry) error {\n\treturn nil\n}", "func (m *ClusterHostRequirementsDetails) ContextValidate(ctx context.Context, formats strfmt.Registry) error {\n\treturn nil\n}", "func (m *BackupWPA) ContextValidate(ctx context.Context, formats strfmt.Registry) error {\n\treturn nil\n}", "func (m *METJSONForecast) ContextValidate(ctx context.Context, formats strfmt.Registry) error {\n\tvar res []error\n\n\tif err := m.contextValidateGeometry(ctx, formats); err != nil {\n\t\tres = append(res, err)\n\t}\n\n\tif err := m.contextValidateProperties(ctx, formats); err != nil {\n\t\tres = append(res, err)\n\t}\n\n\tif len(res) > 0 {\n\t\treturn errors.CompositeValidationError(res...)\n\t}\n\treturn nil\n}", "func ValidateRequestFromContext(c *fiber.Ctx, router routers.Router, options *Options) error {\n\n\tr, err := adaptor.ConvertRequest(c, false)\n\tif err != nil {\n\t\treturn err\n\t}\n\n\troute, pathParams, err := router.FindRoute(r)\n\n\t// We failed to find a matching route for the request.\n\tif err != nil {\n\t\tswitch e := err.(type) {\n\t\tcase *routers.RouteError:\n\t\t\t// We've got a bad request, the path requested doesn't match\n\t\t\t// either server, or path, or something.\n\t\t\treturn errors.New(e.Reason)\n\t\tdefault:\n\t\t\t// This should never happen today, but if our upstream code changes,\n\t\t\t// we don't want to crash the server, so handle the unexpected error.\n\t\t\treturn fmt.Errorf(\"error validating route: %s\", err.Error())\n\t\t}\n\t}\n\n\t// Validate request\n\trequestValidationInput := &openapi3filter.RequestValidationInput{\n\t\tRequest: r,\n\t\tPathParams: pathParams,\n\t\tRoute: route,\n\t}\n\n\t// Pass the fiber context into the request validator, so that any callbacks\n\t// which it invokes make it available.\n\trequestContext := context.WithValue(context.Background(), ctxKeyFiberContext{}, c) //nolint:staticcheck\n\n\tif options != nil {\n\t\trequestValidationInput.Options = &options.Options\n\t\trequestValidationInput.ParamDecoder = options.ParamDecoder\n\t\trequestContext = context.WithValue(requestContext, ctxKeyUserData{}, options.UserData) //nolint:staticcheck\n\t}\n\n\terr = openapi3filter.ValidateRequest(requestContext, requestValidationInput)\n\tif err != nil {\n\t\tme := openapi3.MultiError{}\n\t\tif errors.As(err, &me) {\n\t\t\terrFunc := getMultiErrorHandlerFromOptions(options)\n\t\t\treturn errFunc(me)\n\t\t}\n\n\t\tswitch e := err.(type) {\n\t\tcase *openapi3filter.RequestError:\n\t\t\t// We've got a bad request\n\t\t\t// Split up the verbose error by lines and return the first one\n\t\t\t// openapi errors seem to be multi-line with a decent message on the first\n\t\t\terrorLines := strings.Split(e.Error(), \"\\n\")\n\t\t\treturn fmt.Errorf(\"error in openapi3filter.RequestError: %s\", errorLines[0])\n\t\tcase *openapi3filter.SecurityRequirementsError:\n\t\t\treturn 
fmt.Errorf(\"error in openapi3filter.SecurityRequirementsError: %s\", e.Error())\n\t\tdefault:\n\t\t\t// This should never happen today, but if our upstream code changes,\n\t\t\t// we don't want to crash the server, so handle the unexpected error.\n\t\t\treturn fmt.Errorf(\"error validating request: %w\", err)\n\t\t}\n\t}\n\treturn nil\n}", "func (m *AlertNotifierWhereInput) ContextValidate(ctx context.Context, formats strfmt.Registry) error {\n\tvar res []error\n\n\tif err := m.contextValidateAND(ctx, formats); err != nil {\n\t\tres = append(res, err)\n\t}\n\n\tif err := m.contextValidateNOT(ctx, formats); err != nil {\n\t\tres = append(res, err)\n\t}\n\n\tif err := m.contextValidateOR(ctx, formats); err != nil {\n\t\tres = append(res, err)\n\t}\n\n\tif err := m.contextValidateClustersEvery(ctx, formats); err != nil {\n\t\tres = append(res, err)\n\t}\n\n\tif err := m.contextValidateClustersNone(ctx, formats); err != nil {\n\t\tres = append(res, err)\n\t}\n\n\tif err := m.contextValidateClustersSome(ctx, formats); err != nil {\n\t\tres = append(res, err)\n\t}\n\n\tif err := m.contextValidateEntityAsyncStatus(ctx, formats); err != nil {\n\t\tres = append(res, err)\n\t}\n\n\tif err := m.contextValidateEntityAsyncStatusIn(ctx, formats); err != nil {\n\t\tres = append(res, err)\n\t}\n\n\tif err := m.contextValidateEntityAsyncStatusNot(ctx, formats); err != nil {\n\t\tres = append(res, err)\n\t}\n\n\tif err := m.contextValidateEntityAsyncStatusNotIn(ctx, formats); err != nil {\n\t\tres = append(res, err)\n\t}\n\n\tif err := m.contextValidateLanguageCode(ctx, formats); err != nil {\n\t\tres = append(res, err)\n\t}\n\n\tif err := m.contextValidateLanguageCodeIn(ctx, formats); err != nil {\n\t\tres = append(res, err)\n\t}\n\n\tif err := m.contextValidateLanguageCodeNot(ctx, formats); err != nil {\n\t\tres = append(res, err)\n\t}\n\n\tif err := m.contextValidateLanguageCodeNotIn(ctx, formats); err != nil {\n\t\tres = append(res, err)\n\t}\n\n\tif err := m.contextValidateSecurityMode(ctx, formats); err != nil {\n\t\tres = append(res, err)\n\t}\n\n\tif err := m.contextValidateSecurityModeIn(ctx, formats); err != nil {\n\t\tres = append(res, err)\n\t}\n\n\tif err := m.contextValidateSecurityModeNot(ctx, formats); err != nil {\n\t\tres = append(res, err)\n\t}\n\n\tif err := m.contextValidateSecurityModeNotIn(ctx, formats); err != nil {\n\t\tres = append(res, err)\n\t}\n\n\tif err := m.contextValidateSMTPServerConfig(ctx, formats); err != nil {\n\t\tres = append(res, err)\n\t}\n\n\tif len(res) > 0 {\n\t\treturn errors.CompositeValidationError(res...)\n\t}\n\treturn nil\n}", "func (m *Model97) ContextValidate(ctx context.Context, formats strfmt.Registry) error {\n\tvar res []error\n\n\tif err := m.contextValidateAddresses(ctx, formats); err != nil {\n\t\tres = append(res, err)\n\t}\n\n\tif len(res) > 0 {\n\t\treturn errors.CompositeValidationError(res...)\n\t}\n\treturn nil\n}", "func (m *RemoteProcessGroupStatusSnapshotDTO) ContextValidate(ctx context.Context, formats strfmt.Registry) error {\n\treturn nil\n}", "func (m *CreateTicketPayload) ContextValidate(ctx context.Context, formats strfmt.Registry) error {\n\treturn nil\n}", "func (m *SearchAvailabilityResponse) ContextValidate(ctx context.Context, formats strfmt.Registry) error {\n\tvar res []error\n\n\tif err := m.contextValidateAvailabilities(ctx, formats); err != nil {\n\t\tres = append(res, err)\n\t}\n\n\tif err := m.contextValidateErrors(ctx, formats); err != nil {\n\t\tres = append(res, err)\n\t}\n\n\tif len(res) > 0 {\n\t\treturn 
errors.CompositeValidationError(res...)\n\t}\n\treturn nil\n}", "func (m *SiteIdentification) ContextValidate(ctx context.Context, formats strfmt.Registry) error {\n\treturn nil\n}" ]
[ "0.7011386", "0.696509", "0.6914004", "0.67576563", "0.67160785", "0.6682537", "0.66755444", "0.66383415", "0.66378945", "0.660261", "0.65933675", "0.65734494", "0.65459037", "0.6532578", "0.6532352", "0.6525239", "0.6511179", "0.648409", "0.6481146", "0.6480592", "0.6474037", "0.64693826", "0.64668345", "0.64642406", "0.64574456", "0.6453987", "0.64441866", "0.6442065", "0.64399976", "0.6438675", "0.64267766", "0.642492", "0.64103", "0.6394871", "0.6393993", "0.6387878", "0.6387235", "0.6384674", "0.6381088", "0.6376141", "0.63719904", "0.6370454", "0.6367956", "0.63673896", "0.636456", "0.63612026", "0.6360702", "0.6360393", "0.6357875", "0.6355016", "0.6341182", "0.6340725", "0.63396615", "0.63377494", "0.63369215", "0.63310784", "0.63295114", "0.63233167", "0.63198245", "0.63153213", "0.6308923", "0.6307251", "0.6305799", "0.63024634", "0.6298471", "0.6297124", "0.62950397", "0.6292225", "0.6280996", "0.62803453", "0.62761104", "0.6273379", "0.6272669", "0.6271113", "0.62695146", "0.62617767", "0.6259049", "0.625853", "0.6255657", "0.6254859", "0.6253786", "0.62534004", "0.62497026", "0.62440264", "0.6241343", "0.62389755", "0.6238164", "0.6234092", "0.62307113", "0.6229501", "0.6225721", "0.6225586", "0.6220578", "0.6217329", "0.62135166", "0.6207572", "0.62071264", "0.6206663", "0.6205961", "0.6204491" ]
0.7222192
0
RunServer runs gRPC server and HTTP gateway
func RunServer() error {
	ctx := context.Background()

	// get configuration
	var cfg Config
	flag.StringVar(&cfg.GRPCPort, "grpc-port", "", "gRPC port to bind")
	flag.StringVar(&cfg.DatastoreDBIp, "db-ip", "", "Database ip")
	flag.IntVar(&cfg.DatastoreDBPort, "db-port", 3000, "Database port")
	flag.Parse()

	if len(cfg.GRPCPort) == 0 {
		return fmt.Errorf("invalid TCP port for gRPC server: '%s'", cfg.GRPCPort)
	}

	db, err := as.NewClient(cfg.DatastoreDBIp, cfg.DatastoreDBPort)
	if err != nil {
		return fmt.Errorf("failed to open database: %v", err)
	}
	defer db.Close()

	v1API := v1.NewToDoServiceServer(db)

	return grpc.RunServer(ctx, v1API, cfg.GRPCPort)
}
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "func (s *Server) Run() error {\n\tlogger, err := NewDefaultLogger()\n\tif err != nil {\n\t\treturn errors.Wrapf(err, \"failed to create logger\")\n\t}\n\n\tl, err := net.Listen(\"tcp\", s.address)\n\tif err != nil {\n\t\treturn errors.Wrapf(err, \"failed to listen on %s\", s.address)\n\t}\n\n\tgrpc_zap.ReplaceGrpcLogger(logger)\n\tgrpc_prometheus.EnableHandlingTimeHistogram()\n\n\ts.grpc = grpc.NewServer(\n\t\tgrpc.UnaryInterceptor(\n\t\t\tgrpc_middleware.ChainUnaryServer(\n\t\t\t\tgrpc_validator.UnaryServerInterceptor(),\n\t\t\t\tgrpc_prometheus.UnaryServerInterceptor,\n\t\t\t\tgrpc_zap.UnaryServerInterceptor(logger),\n\t\t\t\tgrpc_recovery.UnaryServerInterceptor(),\n\t\t\t),\n\t\t),\n\t)\n\n\tapi.RegisterDeployServiceServer(s.grpc, s)\n\n\t// not exactly sure what this is used for, but examples\n\t// always do it:\n\t// https://godoc.org/google.golang.org/grpc/reflection\n\treflection.Register(s.grpc)\n\n\tgwmux := runtime.NewServeMux()\n\n\t_, port, err := net.SplitHostPort(s.address)\n\tif err != nil {\n\t\treturn errors.Wrapf(err, \"invalid address %s\", s.address)\n\t}\n\n\t// TODO: need to determine if we can actually connect to localhost\n\tif err := api.RegisterDeployServiceHandlerFromEndpoint(s.ctx, gwmux, net.JoinHostPort(\"127.0.0.1\", port), []grpc.DialOption{grpc.WithInsecure()}); err != nil {\n\t\treturn errors.Wrap(err, \"failed to register grpc gateway\")\n\t}\n\n\tmux := http.NewServeMux()\n\tmux.Handle(\"/metrics\", promhttp.Handler())\n\tmux.Handle(\"/\", gwmux)\n\n\ts.server = &http.Server{\n\t\tHandler: h2c.Server{\n\t\t\tHandler: http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {\n\t\t\t\tif r.ProtoMajor == 2 &&\n\t\t\t\t\tstrings.Contains(r.Header.Get(\"Content-Type\"), \"application/grpc\") {\n\t\t\t\t\ts.grpc.ServeHTTP(w, r)\n\t\t\t\t} else {\n\t\t\t\t\tmux.ServeHTTP(w, r)\n\t\t\t\t}\n\t\t\t}),\n\t\t},\n\t}\n\n\tif err := s.server.Serve(l); err != nil {\n\t\tif err != http.ErrServerClosed {\n\t\t\treturn errors.Wrap(err, \"failed to start http server\")\n\t\t}\n\t}\n\n\treturn nil\n}", "func RunServer(ctx context.Context, grpcPort, httpPort, certFilePath string, keyFilePath string) error {\n\tctx, cancel := context.WithCancel(ctx)\n\tdefer cancel()\n\n\t// mux := runtime.NewServeMux()\n\tmux := runtime.NewServeMux(\n\t\truntime.WithMarshalerOption(runtime.MIMEWildcard, &runtime.JSONPb{}),\n\t\truntime.WithIncomingHeaderMatcher(CustomMatcher),\n\t\truntime.WithErrorHandler(DefaultHTTPProtoErrorHandler),\n\t\t// runtime.WithProtoErrorHandler(DefaultHTTPProtoErrorHandler),\n\t)\n\topts := []grpc.DialOption{}\n\tif certFilePath != \"\" && keyFilePath != \"\" {\n\t\t// creds, err := credentials.NewServerTLSFromFile(certFilePath, keyFilePath)\n\t\t// creds, err := credentials.NewClientTLSFromFile(certFilePath, \"CheeTest\")\n\t\t// if err != nil {\n\t\t// \tlog.Fatalf(\"Failed to generate credentials %v\", err)\n\t\t// }\n\n\t\tb, _ := ioutil.ReadFile(certFilePath)\n\t\tcp := x509.NewCertPool()\n\t\tif !cp.AppendCertsFromPEM(b) {\n\t\t\tlog.Fatalf(\"fail to dial: %v\", errors.New(\"credentials: failed to append certificates\"))\n\t\t}\n\t\tconfig := &tls.Config{\n\t\t\tInsecureSkipVerify: true,\n\t\t\tRootCAs: cp,\n\t\t}\n\t\tcreds := credentials.NewTLS(config)\n\n\t\topts = append(opts, grpc.WithTransportCredentials(creds))\n\t} else {\n\t\topts = append(opts, grpc.WithInsecure())\n\t}\n\tif err := pb.RegisterMailNotificationServiceHandlerFromEndpoint(ctx, mux, \"localhost:\"+grpcPort, opts); err != nil {\n\t\t// log.Fatalf(\"failed to start HTTP 
gateway: %v\", err)\n\t\tlogger.Log.Fatal(\"failed to start HTTP gateway: %v\", zap.String(\"reason\", err.Error()))\n\t}\n\tfmt.Println(\"REST : gRPC client up\")\n\n\tcert, err := tls.LoadX509KeyPair(certFilePath, keyFilePath)\n\tif err != nil {\n\t\tlog.Println(err)\n\t\treturn err\n\t}\n\n\ttlsConfig := &tls.Config{Certificates: []tls.Certificate{cert}}\n\tsrv := &http.Server{\n\t\tAddr: \":\" + httpPort,\n\t\t// Handler: mux,\n\t\tHandler: middleware.AddRequestID(middleware.AddLogger(logger.Log, mux)),\n\t\tTLSConfig: tlsConfig,\n\t}\n\n\t// graceful shutdown\n\tc := make(chan os.Signal, 1)\n\tsignal.Notify(c, os.Interrupt)\n\tgo func() {\n\t\tfor range c {\n\t\t\t// sig is a ^C, handle it\n\t\t}\n\n\t\t_, cancel := context.WithTimeout(ctx, 5*time.Second)\n\t\tdefer cancel()\n\n\t\t_ = srv.Shutdown(ctx)\n\t}()\n\n\tfmt.Println(\"starting HTTP/REST gateway...\")\n\tlogger.Log.Info(\"starting HTTP/REST gateway...\")\n\t// return srv.ListenAndServeTLS(certFilePath, keyFilePath)\n\treturn srv.ListenAndServe()\n}", "func RunServer(ctx context.Context, grpcPort, httpPort string) error {\n\tctx, cancel := context.WithCancel(ctx)\n\tdefer cancel()\n\n\trmux := runtime.NewServeMux()\n\topts := []grpc.DialOption{grpc.WithInsecure()}\n\tif err := patstoreV1.RegisterPetStoreServiceHandlerFromEndpoint(ctx, rmux, \"0.0.0.0:\"+grpcPort, opts); err != nil {\n\t\tlog.Fatalf(\"failed to start HTTP gateway: %v\", err)\n\t}\n\n\t// Serve the swagger,\n\tmux := http.NewServeMux()\n\tmux.Handle(\"/\", rmux)\n\n\tmux.HandleFunc(\"/swagger.json\", serveSwagger)\n\tfs := http.FileServer(http.Dir(\"www/swagger-ui\"))\n\tmux.Handle(\"/swagger-ui/\", http.StripPrefix(\"/swagger-ui\", fs))\n\n\tsrv := &http.Server{\n\t\tAddr: \":\" + httpPort,\n\t\tHandler: mux,\n\t}\n\n\t// graceful shutdown\n\tc := make(chan os.Signal, 1)\n\tsignal.Notify(c, os.Interrupt)\n\tgo func() {\n\t\tfor range c {\n\t\t\t// sig is a ^C, handle it\n\t\t}\n\n\t\t_, cancel := context.WithTimeout(ctx, 5*time.Second)\n\t\tdefer cancel()\n\n\t\t_ = srv.Shutdown(ctx)\n\t}()\n\n\tlog.Println(\"starting HTTP/REST gateway...\" + httpPort)\n\tlog.Println(\"Serving Swagger at: http://localhost:8080/swagger-ui/\")\n\treturn srv.ListenAndServe()\n}", "func Run(config *Config, storage *server.Storager) {\n\tnetwork, address := networkAddresFromString(config.Address)\n\n\tconn, err := net.Listen(network, address)\n\tif err != nil {\n\t\tlog.Fatalln(\"could not listen to address\", config.Address)\n\t}\n\n\tlog.Printf(\"starting grpc server on %s\", address)\n\n\tvar (\n\t\tsrv *http.Server\n\t\tgrpcServer *grpc.Server\n\t\trestMux *runtime.ServeMux\n\t\ttlsConfig *tls.Config\n\t\tctx = context.Background()\n\t\thttpMux = http.NewServeMux()\n\t)\n\n\ttlsConfig, err = tlsClientConfig(config.CAFile, config.CertFile, config.KeyFile, config.ServerName)\n\tif err != nil {\n\t\tlog.Fatal(\"Failed to create tls config\", err)\n\t}\n\n\tdialOptions := getDialOptions(tlsConfig)\n\tserverOptions := getServerOptions(tlsConfig)\n\n\tgrpcServer = newGrpcServer(storage, serverOptions...)\n\trestMux, _ = newRestMux(ctx, address, dialOptions...)\n\n\thttpMux.Handle(\"/\", restMux)\n\n\tmergeHandler := grpcHandlerFunc(grpcServer, httpMux)\n\n\t// Setup the CORS middleware. 
If `config.CORSAllowedOrigins` is empty, no CORS\n\t// Origins will be allowed through.\n\tcors := cors.New(cors.Options{\n\t\tAllowedOrigins: config.CORSAllowedOrigins,\n\t})\n\n\tsrv = &http.Server{\n\t\tHandler: cors.Handler(h2c.NewHandler(mergeHandler, &http2.Server{})),\n\t\tTLSConfig: tlsConfig,\n\t}\n\n\t// blocking call\n\tif tlsConfig != nil {\n\t\terr = srv.Serve(tls.NewListener(conn, srv.TLSConfig))\n\t} else {\n\t\terr = srv.Serve(conn)\n\t}\n\thandleShutdown(err)\n\tlog.Println(\"Grpc API stopped\")\n}", "func RunServer(ctx context.Context, v1Api apiv1.GreeterServer, port string) error {\n\tlisten, err := net.Listen(\"tcp\", \":\"+port)\n\tif err != nil {\n\t\treturn err\n\t}\n\n\t// register service\n\tserver := grpc.NewServer()\n\tapiv1.RegisterGreeterServer(server, v1Api)\n\n\t// start gRPC server\n\tlog.Println(\"starting gRPC server at localhost:\", port)\n\treturn server.Serve(listen)\n}", "func Run(options *Options) {\n\tvar s serverInterface\n\tctx := context.Background()\n\tctx, cancel := context.WithCancel(ctx)\n\tdefer cancel()\n\n\tif options.CustomServer != nil {\n\t\ts = options.CustomServer\n\t} else {\n\t\tlis, err := net.Listen(\"tcp\", options.GRPCPort)\n\t\tif err != nil {\n\t\t\tlog.Fatalf(\"failed to listen: %v\", err)\n\t\t}\n\n\t\ts = &server{\n\t\t\tListener: lis,\n\t\t\tOptions: options,\n\t\t\tContext: ctx,\n\t\t}\n\t}\n\n\t// If HTTP Options are defined then spin off gRPC server on a separate thread\n\tif options.HTTPPort != \"\" && options.RegisterHandlerFunc != nil {\n\t\tlog.Debug(\"HTTP_PORT and RegisterHandlerFunc are defined - starting gRPC with HTTP Proxy\")\n\t\tgo s.ServeGRPC()\n\n\t\terr := s.CheckGRPCConnectivity(options)\n\t\tif err != nil {\n\t\t\tlog.Fatal(err)\n\t\t}\n\n\t\tif err := s.ServeHTTP(); err != nil {\n\t\t\tlog.Fatal(err)\n\t\t}\n\t\treturn\n\t}\n\n\t// If HTTP server not needed - just serve GRPC\n\ts.ServeGRPC()\n}", "func RunServer() error {\n\tcreds, err := credentials.NewServerTLSFromFile(certFile, keyFile)\n\tif err != nil {\n\t\treturn err\n\t}\n\n\ts := grpc.NewServer(grpc.Creds(creds))\n\tpb.RegisterGreeterServer(s, &server{})\n\n\tlis, err := net.Listen(\"tcp\", address)\n\tif err != nil {\n\t\treturn err\n\t}\n\n\treturn s.Serve(lis)\n}", "func (s *GenericGrpcServer) Run(servingOptions *serverOptions.SecureServingOptions) {\n\tlis, err := net.Listen(\"tcp\", fmt.Sprintf(\":%d\", servingOptions.BindPort))\n\tif err != nil {\n\t\tfmt.Println(err)\n\t\tlog.Fatalf(\"failed to listening: %v\", err)\n\t}\n\tlog.Infof(\"starting service at %s\", lis.Addr())\n\tvar opts []grpc.ServerOption\n\tif servingOptions.IsTlsEnabled() {\n\t\tcreds, err := credentials.NewServerTLSFromFile(\n\t\t\tservingOptions.ServerCert.CertKey.CertFile,\n\t\t\tservingOptions.ServerCert.CertKey.KeyFile,\n\t\t)\n\t\tif err != nil {\n\t\t\tlog.Fatalf(\"failed to generate credentials %v\", err)\n\t\t}\n\t\topts = []grpc.ServerOption{grpc.Creds(creds)}\n\t}\n\n\ts.Listener = lis\n\ts.Server = grpc.NewServer(opts...)\n\n\ts.RegisterService()\n\tgrpc_health_v1.RegisterHealthServer(s.Server, health.NewServer())\n\tgo s.Server.Serve(lis)\n}", "func RunServer(\r\n\tctx context.Context, port string, emulatorPath string,\r\n) error {\r\n\tlis, err := net.Listen(\"tcp\", \":\"+port)\r\n\tif err != nil {\r\n\t\tlogger.Error.Fatalf(\"failed to listen: %v\", err)\r\n\t}\r\n\r\n\t// register service\r\n\tserver := grpc.NewServer()\r\n\tapi.RegisterEmailClientServiceApiServer(server, NewEmailClientServiceServer(\r\n\t\temulatorPath,\r\n\t))\r\n\r\n\t// graceful 
shutdown\r\n\tc := make(chan os.Signal, 1)\r\n\tsignal.Notify(c, os.Interrupt)\r\n\tgo func() {\r\n\t\tfor range c {\r\n\t\t\t// sig is a ^C, handle it\r\n\t\t\tlogger.Info.Println(\"shutting down gRPC server...\")\r\n\t\t\tserver.GracefulStop()\r\n\t\t\t<-ctx.Done()\r\n\t\t}\r\n\t}()\r\n\r\n\t// start gRPC server\r\n\tlogger.Info.Println(\"starting gRPC server...\")\r\n\tlogger.Info.Println(\"wait connections on port \" + port)\r\n\treturn server.Serve(lis)\r\n}", "func RunServer(ctx context.Context, opts ...ServerOption) error {\n\tconf, err := processArgs(opts)\n\tif err != nil {\n\t\treturn errors.Wrap(err, \"failed to parse arguments\")\n\t}\n\n\tvar grpcOpts []grpc.ServerOption\n\tif conf.TLS {\n\t\tcreds, err := credentials.NewServerTLSFromFile(conf.certFile, conf.keyFile)\n\t\tif err != nil {\n\t\t\tconf.log.Fatalf(\"Failed to generate credentials %v\", err)\n\t\t}\n\t\tgrpcOpts = []grpc.ServerOption{grpc.Creds(creds)}\n\t}\n\n\tconf.log.Infof(\"Starting to listen on port %d\", conf.port)\n\tlis, err := net.Listen(\"tcp\", fmt.Sprintf(\"0.0.0.0:%d\", conf.port))\n\tif err != nil {\n\t\treturn errors.Wrap(err, \"failed to start GRPC listener\")\n\t}\n\n\t// handle context cancellation, shut down the server\n\tgo func() {\n\t\t<-ctx.Done()\n\t\t_ = lis.Close()\n\t}()\n\n\tgrpcServer := grpc.NewServer(grpcOpts...)\n\tpb.RegisterTunnelServer(grpcServer, NewServer(conf))\n\treturn grpcServer.Serve(lis)\n}", "func RunServer(server *ophttp.Server) {\n\thttp.Handle(\"/greeting\", http.HandlerFunc(GreetingHandler))\n\tserver.Start()\n}", "func RunServer(ctx context.Context, server v1.AcreProtectionServer, port string) {\n\tlis, err := net.Listen(\"tcp\", fmt.Sprintf(\"localhost:%d\", 10000))\n\tif err != nil {\n\t\tlogger.Log.Fatal(\"failed to listen\", zap.String(\"reason\", err.Error()))\n\t}\n\tvar opts []grpc.ServerOption\n\n\t// add middleware\n\topts = middleware.AddLogging(logger.Log, opts)\n\n\tgrpcServer := grpc.NewServer(opts...)\n\tv1.RegisterAcreProtectionServer(grpcServer, server)\n\n\tlogger.Log.Info(\"starting gRPC server...\")\n\tgrpcServer.Serve(lis)\n}", "func (s Server) Run(ctx context.Context) error {\n\tctx, cancel := context.WithCancel(ctx)\n\terrWg := errgroup.Group{}\n\n\terrWg.Go(func() error {\n\t\tlis, err := net.Listen(\"tcp\", s.grpcAddr)\n\t\tif err != nil {\n\t\t\treturn err\n\t\t}\n\n\t\treturn s.grpcServer.Serve(lis)\n\t})\n\n\terrWg.Go(func() error {\n\t\tl, err := net.Listen(\"tcp\", s.httpAddr)\n\t\tif err != nil {\n\t\t\treturn err\n\t\t}\n\n\t\treturn s.httpServer.Serve(l)\n\t})\n\n\terrWg.Go(func() error {\n\t\tswaggerAddr := s.httpAddr + swaggerUIPrefix\n\t\tlog.Info().Msgf(\"App started. 
HTTP: %s, Swagger UI: %s, gRPC: %s\", s.httpAddr, swaggerAddr, s.grpcAddr)\n\t\treturn nil\n\t})\n\n\terrWg.Go(func() error {\n\t\tshutdownCh := make(chan os.Signal, 1)\n\t\tsignal.Notify(shutdownCh, os.Interrupt, syscall.SIGINT, syscall.SIGTERM)\n\t\tsig := <-shutdownCh\n\n\t\ts.stop(ctx)\n\t\tcancel()\n\n\t\tlog.Fatal().Msgf(\"exit reason: %s\", sig)\n\n\t\treturn nil\n\t})\n\n\treturn errWg.Wait()\n}", "func Run(ctx context.Context, options Options) error {\n\tbackendHostPort := fmt.Sprintf(\"localhost:%d\", options.BackendPort)\n\tgrpcServer, err := buildGrpcProxyServer(backendHostPort)\n\tif err != nil {\n\t\treturn err\n\t}\n\n\twrappedGrpc := grpcweb.WrapServer(grpcServer,\n\t\tgrpcweb.WithCorsForRegisteredEndpointsOnly(false),\n\t\tgrpcweb.WithOriginFunc(func(origin string) bool { return true }),\n\t\tgrpcweb.WithWebsockets(true),\n\t\tgrpcweb.WithWebsocketOriginFunc(func(req *http.Request) bool { return true }),\n\t)\n\n\thttpServer := &http.Server{\n\t\tWriteTimeout: httpMaxWriteTimeout,\n\t\tReadTimeout: httpMaxReadTimeout,\n\t\tHandler: http.HandlerFunc(func(resp http.ResponseWriter, req *http.Request) {\n\t\t\tisCorsPreflightRequest := wrappedGrpc.IsAcceptableGrpcCorsRequest(req)\n\t\t\tisGrpcWebRequest := wrappedGrpc.IsGrpcWebRequest(req)\n\t\t\tisGrpcWebSocketRequest := wrappedGrpc.IsGrpcWebSocketRequest(req)\n\t\t\tlog.WithFields(logrus.Fields{\n\t\t\t\t\"remote\": req.RemoteAddr,\n\t\t\t\t\"url\": req.URL,\n\t\t\t\t\"method\": req.Method,\n\t\t\t}).Trace(\"grpc-web endpoint HTTP request\")\n\n\t\t\tif isCorsPreflightRequest || isGrpcWebRequest || isGrpcWebSocketRequest {\n\t\t\t\tif isCorsPreflightRequest {\n\t\t\t\t\tlog.WithFields(logrus.Fields{\n\t\t\t\t\t\t\"remote\": req.RemoteAddr,\n\t\t\t\t\t\t\"url\": req.URL,\n\t\t\t\t\t}).Debug(\"CORS preflight request\")\n\t\t\t\t}\n\t\t\t\twrappedGrpc.ServeHTTP(resp, req)\n\t\t\t} else {\n\t\t\t\tresp.Header().Set(\"Content-Type\", \"application/json\")\n\t\t\t\t_, err := io.WriteString(resp, fmt.Sprintf(\"{\"+\n\t\t\t\t\t\"\\\"message\\\":\\\"This the grpc-web endpoint for Cogment Orchestrator\\\",\"+\n\t\t\t\t\t\"\\\"version\\\":\\\"%s\\\",\"+\n\t\t\t\t\t\"\\\"version_hash\\\":\\\"%s\\\"\"+\n\t\t\t\t\t\"}\", version.Version, version.Hash))\n\t\t\t\tif err != nil {\n\t\t\t\t\tlog.Fatal(\"Error while writing response\", err)\n\t\t\t\t}\n\t\t\t}\n\t\t}),\n\t}\n\twebAddr := fmt.Sprintf(\"0.0.0.0:%d\", options.WebPort)\n\tlistener, err := net.Listen(\"tcp\", webAddr)\n\tif err != nil {\n\t\treturn fmt.Errorf(\"unable to listen on port %d: %w\", options.WebPort, err)\n\t}\n\thttpListener := conntrack.NewListener(listener,\n\t\tconntrack.TrackWithName(\"http\"),\n\t\tconntrack.TrackWithTcpKeepAlive(20*time.Second),\n\t\tconntrack.TrackWithTracing(),\n\t)\n\n\tlog.WithField(\"address\", httpListener.Addr().String()).Info(\"server listening\")\n\n\thttpRes := make(chan error)\n\tgo func() {\n\t\tif err := httpServer.Serve(httpListener); err != nil {\n\t\t\thttpRes <- fmt.Errorf(\"http server error: %w\", err)\n\t\t\treturn\n\t\t}\n\t\thttpRes <- nil\n\t}()\n\n\tselect {\n\tcase <-ctx.Done():\n\t\tshutdownCtx, _ := context.WithTimeout(context.Background(), shutdownTimeout)\n\t\terr := httpServer.Shutdown(shutdownCtx)\n\t\tif err != nil {\n\t\t\treturn fmt.Errorf(\"unable to shutdown the grpcwebproxy: %w\", err)\n\t\t}\n\t\terr = <-httpRes\n\t\tif err != nil {\n\t\t\treturn fmt.Errorf(\"error while waiting for the grpcwebproxy to shutdown: %w\", err)\n\t\t}\n\t\treturn ctx.Err()\n\tcase err := <-httpRes:\n\t\treturn err\n\t}\n}", "func 
RunServer(port uint16) {\n\tportStr := fmt.Sprintf(\":%v\", port)\n\tlis, err := net.Listen(\"tcp\", portStr)\n\tif err != nil {\n\t\tlog.Fatalf(\"failed to listen: %v\", err)\n\t}\n\n\tgrpcServer := grpc.NewServer()\n\tpb.RegisterGreeterServer(grpcServer, &server{})\n\t// determine whether to use TLS\n\n\tlog.Printf(\"Running gRPC server on port %v...\", portStr)\n\tif err := grpcServer.Serve(lis); err != nil {\n\t\tlog.Fatalf(\"failed to serve: %v\", err)\n\t}\n}", "func RunRestServer(ctx context.Context, grpcPort int, httpPort int) error {\n\tctx, cancel := context.WithCancel(ctx)\n\tdefer cancel()\n\tmux := runtime.NewServeMux(\n\t\truntime.WithMarshalerOption(runtime.MIMEWildcard, &runtime.JSONPb{OrigName: true, EmitDefaults: true}),\n\t\truntime.WithIncomingHeaderMatcher(CustomHeaderMatcher),\n\t)\n\n\tlogger.Log.Info(fmt.Sprintf(\"HTTP Port: %d\", httpPort))\n\n\topts := []grpc.DialOption{\n\t\tgrpc.WithInsecure(),\n\t\tgrpc.WithUnaryInterceptor(\n\t\t\tgrpc_opentracing.UnaryClientInterceptor(\n\t\t\t\tgrpc_opentracing.WithTracer(opentracing.GlobalTracer()),\n\t\t\t),\n\t\t),\n\t}\n\n\terr := credentail_pb.RegisterCredentialServiceHandlerFromEndpoint(ctx, mux, fmt.Sprintf(\":%d\", grpcPort), opts)\n\tif err != nil {\n\t\tlogger.Log.Fatal(\"failed to start HTTP gateway\", zap.String(\"reason\", err.Error()))\n\t}\n\n\tsrv := &http.Server{\n\t\tAddr: fmt.Sprintf(\":%d\", httpPort),\n\t\t// add handler with middleware\n\t\tHandler: middleware.TracingWrapper(middleware.RequestID(middleware.AddLogger(logger.Log, mux))),\n\t}\n\n\t// graceful shutdown\n\tc := make(chan os.Signal, 1)\n\tsignal.Notify(c, os.Interrupt)\n\tgo func() {\n\t\tfor range c {\n\t\t\t// sig is a ^C, handle it\n\t\t\tlogger.Log.Warn(\"shutting down gRPC server...\")\n\t\t}\n\t\t_, cancel := context.WithTimeout(ctx, 5*time.Second)\n\t\tdefer cancel()\n\t\t_ = srv.Shutdown(ctx)\n\t}()\n\n\tlogger.Log.Info(\"starting HTTP/REST gateway...\")\n\treturn srv.ListenAndServe()\n}", "func (s *Server) Run() error {\n\tif s.Port == 0 {\n\t\treturn fmt.Errorf(\"server port must be set\")\n\t}\n\n\ts.uuid = uuid.New().String()\n\n\topts := []grpc.ServerOption{\n\t\tgrpc.KeepaliveParams(keepalive.ServerParameters{\n\t\t\tTimeout: 120 * time.Second,\n\t\t}),\n\t\tgrpc.KeepaliveEnforcementPolicy(keepalive.EnforcementPolicy{\n\t\t\tPermitWithoutStream: true,\n\t\t}),\n\t\tgrpc.UnaryInterceptor(\n\t\t\totgrpc.OpenTracingServerInterceptor(s.Tracer),\n\t\t),\n\t}\n\n\tif tlsopt := tls.GetServerOpt(); tlsopt != nil {\n\t\topts = append(opts, tlsopt)\n\t}\n\n\tsrv := grpc.NewServer(opts...)\n\tpb.RegisterSearchServer(srv, s)\n\n\t// init grpc clients\n\tif err := s.initGeoClient(\"srv-geo\"); err != nil {\n\t\treturn err\n\t}\n\tif err := s.initRateClient(\"srv-rate\"); err != nil {\n\t\treturn err\n\t}\n\n\tlis, err := net.Listen(\"tcp\", fmt.Sprintf(\":%d\", s.Port))\n\tif err != nil {\n\t\tlog.Fatal().Msgf(\"failed to listen: %v\", err)\n\t}\n\n\t// register with consul\n\t// jsonFile, err := os.Open(\"config.json\")\n\t// if err != nil {\n\t// \tfmt.Println(err)\n\t// }\n\n\t// defer jsonFile.Close()\n\n\t// byteValue, _ := ioutil.ReadAll(jsonFile)\n\n\t// var result map[string]string\n\t// json.Unmarshal([]byte(byteValue), &result)\n\n\terr = s.Registry.Register(name, s.uuid, s.IpAddr, s.Port)\n\tif err != nil {\n\t\treturn fmt.Errorf(\"failed register: %v\", err)\n\t}\n\tlog.Info().Msg(\"Successfully registered in consul\")\n\n\treturn srv.Serve(lis)\n}", "func (s *FrontendServer) Run(tls bool, certFile string,\n\tkeyFile string) 
error {\n\tlis, err := net.Listen(\"tcp\", fmt.Sprintf(\"%s:%d\", s.hostname, s.port))\n\tif err != nil {\n\t\tlog.Fatalf(\"failed to listen: %v\", err)\n\t}\n\tvar lis2 net.Listener\n\tif (s.hostnameGw != \"\") && (s.portGw != 0) {\n\t\tlis2, err = net.Listen(\"tcp\", fmt.Sprintf(\"%s:%d\", s.hostnameGw, s.portGw))\n\t\tif err != nil {\n\t\t\tlog.Fatalf(\"failed to listen on second address: %v\", err)\n\t\t}\n\t}\n\tvar opts []grpc.ServerOption\n\tif tls {\n\t\t// if caFile == \"\" {\n\t\t// \tset default caFile path\n\t\t// }\n\t\t// if keyFile == \"\" {\n\t\t// \tset default keyFile path\n\t\t// }\n\t\tcreds, err := credentials.NewServerTLSFromFile(certFile, keyFile)\n\t\tif err != nil {\n\t\t\tlog.Fatalf(\"Failed to generate credentials %v\", err)\n\t\t}\n\t\topts = []grpc.ServerOption{grpc.Creds(creds)}\n\t}\n\tgrpcServer := grpc.NewServer(opts...)\n\tpb.RegisterFrontendServer(grpcServer, s)\n\tgo grpcServer.Serve(lis)\n\tif lis2 != nil {\n\t\tgo grpcServer.Serve(lis2)\n\t}\n\treturn nil\n}", "func RunServer(ctx context.Context, cfg config.ServiceConfig, handler http.Handler) error {\n\tdone := make(chan error)\n\ts := &http.Server{\n\t\tAddr: fmt.Sprintf(\":%d\", cfg.Port),\n\t\tHandler: handler,\n\t\tReadTimeout: cfg.ReadTimeout,\n\t\tWriteTimeout: cfg.WriteTimeout,\n\t\tReadHeaderTimeout: cfg.ReadHeaderTimeout,\n\t\tIdleTimeout: cfg.IdleTimeout,\n\t}\n\n\tgo func() {\n\t\tdone <- s.ListenAndServe()\n\t}()\n\n\tselect {\n\tcase err := <-done:\n\t\treturn err\n\tcase <-ctx.Done():\n\t\treturn s.Shutdown(context.Background())\n\t}\n\n}", "func startServer() error {\n\ts := signal.NewSignal()\n\tctx := context.Background()\n\tctx, cancel := context.WithCancel(ctx)\n\tdefer cancel()\n\n\t// Initialize and register the implementation of gRPC interface\n\tvar grpcServer *grpc.Server\n\tvar proxyRegstr []func(context.Context, *proxy.ServeMux, string, []grpc.DialOption) error\n\toptions := []grpc.ServerOption{\n\t\tgrpc.UnaryInterceptor(loggerInterceptor()),\n\t}\n\n\tif cfg.CoreServiceMode {\n\t\trpcServer := rpccore.NewRPCServer(s, internal.NewProver, internal.NewVerifier, shared.NewHashFunc, shared.NewScryptHashFunc)\n\t\tgrpcServer = grpc.NewServer(options...)\n\n\t\tapicore.RegisterPoetCoreProverServer(grpcServer, rpcServer)\n\t\tapicore.RegisterPoetVerifierServer(grpcServer, rpcServer)\n\t\tproxyRegstr = append(proxyRegstr, apicore.RegisterPoetCoreProverHandlerFromEndpoint)\n\t\tproxyRegstr = append(proxyRegstr, apicore.RegisterPoetVerifierHandlerFromEndpoint)\n\t} else {\n\t\trpcServer := rpc.NewRPCServer()\n\t\tgrpcServer = grpc.NewServer(options...)\n\n\t\tapi.RegisterPoetServer(grpcServer, rpcServer)\n\t\tproxyRegstr = append(proxyRegstr, api.RegisterPoetHandlerFromEndpoint)\n\t}\n\n\t// Start the gRPC server listening for HTTP/2 connections.\n\tlis, err := net.Listen(cfg.RPCListener.Network(), cfg.RPCListener.String())\n\tif err != nil {\n\t\treturn fmt.Errorf(\"failed to listen: %v\\n\", err)\n\t}\n\tdefer lis.Close()\n\n\tgo func() {\n\t\trpcsLog.Infof(\"RPC server listening on %s\", lis.Addr())\n\t\tgrpcServer.Serve(lis)\n\t}()\n\n\t// Start the REST proxy for the gRPC server above.\n\tmux := proxy.NewServeMux()\n\tfor _, r := range proxyRegstr {\n\t\terr := r(ctx, mux, cfg.RPCListener.String(), []grpc.DialOption{grpc.WithInsecure()})\n\t\tif err != nil {\n\t\t\treturn err\n\t\t}\n\t}\n\n\tgo func() {\n\t\trpcsLog.Infof(\"REST proxy start listening on %s\", cfg.RESTListener.String())\n\t\terr := http.ListenAndServe(cfg.RESTListener.String(), mux)\n\t\trpcsLog.Errorf(\"REST 
proxy failed listening: %s\\n\", err)\n\t}()\n\n\t// Wait for shutdown signal from either a graceful server stop or from\n\t// the interrupt handler.\n\t<-s.ShutdownChannel()\n\treturn nil\n}", "func (a *App) Run() {\n\tlog := a.log\n\tlistener, err := net.Listen(\"tcp\", fmt.Sprintf(\"%s:%d\", a.host, a.port))\n\tif err != nil {\n\t\tlog.Panic(err.Error())\n\t}\n\tlog.Infof(\"events gateway listening on %s:%d\", a.host, a.port)\n\n\tvar opts []grpc.ServerOption\n\n\totelPropagator := otelgrpc.WithPropagators(otel.GetTextMapPropagator())\n\totelTracerProvider := otelgrpc.WithTracerProvider(otel.GetTracerProvider())\n\n\topts = append(opts, grpc.UnaryInterceptor(grpc_middleware.ChainUnaryServer(\n\t\ta.metricsReporterInterceptor,\n\t\totelgrpc.UnaryServerInterceptor(otelTracerProvider, otelPropagator),\n\t)))\n\topts = append(opts, grpc.KeepaliveParams(keepalive.ServerParameters{\n\t\tMaxConnectionIdle: a.config.GetDuration(\"server.maxConnectionIdle\"),\n\t\tMaxConnectionAge: a.config.GetDuration(\"server.maxConnectionAge\"),\n\t\tMaxConnectionAgeGrace: a.config.GetDuration(\"server.maxConnectionAgeGrace\"),\n\t\tTime: a.config.GetDuration(\"server.Time\"),\n\t\tTimeout: a.config.GetDuration(\"server.Timeout\"),\n\t}))\n\ta.grpcServer = grpc.NewServer(opts...)\n\n\tpb.RegisterGRPCForwarderServer(a.grpcServer, a.Server)\n\tif err := a.grpcServer.Serve(listener); err != nil {\n\t\tlog.Panic(err.Error())\n\t}\n}", "func RunHTTPServer(ctx context.Context) error {\n\tconf := config.MustLoad()\n\n\treturn gateway.ListenAndServe(\":\"+conf.HTTP.Port, setupRouter())\n}", "func RunServer(ctx context.Context, srv3 serverv3.Server, port uint) {\n\tlog := logger.New(\"runServer\")\n\tdefer log.LogDone()\n\tvar grpcOptions []grpc.ServerOption\n\tgrpcOptions = append(grpcOptions, grpc.MaxConcurrentStreams(grpcMaxConcurrentStreams))\n\tgrpcServer := grpc.NewServer(grpcOptions...)\n\n\tlis, err := net.Listen(\"tcp\", fmt.Sprintf(\":%d\", port))\n\tif err != nil {\n\t\tlog.Fail(err.Error())\n\t}\n\n\tregisterServer(grpcServer, srv3)\n\n\tlog.Infof(\"management server listening on %d\\n\", port)\n\tif err = grpcServer.Serve(lis); err != nil {\n\t\tlog.Fail(err.Error())\n\t}\n\n}", "func (service *Service) Run(ctx context.Context, insecure bool) error {\n\tif service.baseEndpoint == \"\" {\n\t\tservice.baseEndpoint = \"/\"\n\t}\n\t// Registration of service endpoint\n\tservice.httpMux.Handle(service.baseEndpoint, service.runtimeMux)\n\n\t// Apply middlewares\n\thandler := http_middleware.Apply(service.Handler(), service.httpMiddlewares...)\n\n\t// the grpcHandlerFunc takes an grpc server and a http muxer and will\n\t// route the request to the right place at runtime.\n\t// handler := grpcHandlerFunc(service.GRPCServer(), service.HTTPMux())\n\tghandler := grpcHandlerFunc(service.GRPCServer(), handler)\n\n\t// HTTP server configuration\n\thttpServer := &http.Server{\n\t\tAddr: fmt.Sprintf(\":%d\", service.cfg.ServicePort()),\n\t\tHandler: ghandler,\n\t\tReadTimeout: time.Duration(5 * time.Second),\n\t\tReadHeaderTimeout: time.Duration(5 * time.Second),\n\t\tWriteTimeout: time.Duration(5 * time.Second),\n\t}\n\n\t// Graceful shutdown of server\n\tc := make(chan os.Signal, 1)\n\tsignal.Notify(c, os.Interrupt)\n\tgo func() {\n\t\tfor range c {\n\t\t\tif service.cfg.Logging() {\n\t\t\t\tlogger.Log.Warn(\n\t\t\t\t\t\"shutting service...\", zap.String(\"service name\", service.cfg.ServiceName()),\n\t\t\t\t)\n\t\t\t}\n\t\t\thttpServer.Shutdown(ctx)\n\n\t\t\t<-ctx.Done()\n\t\t}\n\t}()\n\n\t// Create TCP 
listener\n\tlis, err := net.Listen(\"tcp\", fmt.Sprintf(\":%d\", service.cfg.ServicePort()))\n\tif err != nil {\n\t\treturn errors.Wrap(err, \"failed to create TCP listener\")\n\t}\n\n\tlogMsgFn := func() {\n\t\tif service.cfg.Logging() {\n\t\t\tlogger.Log.Info(\n\t\t\t\t\"<gRPC and REST> server for service running\",\n\t\t\t\tzap.String(\"service name\", service.cfg.ServiceName()),\n\t\t\t\tzap.Int(\"gRPC Port\", service.cfg.ServicePort()),\n\t\t\t)\n\t\t} else {\n\t\t\tlogrus.Infof(\n\t\t\t\t\"<gRPC and REST> server for service running service: %s port: %d\",\n\t\t\t\tservice.cfg.ServiceName(), service.cfg.ServicePort(),\n\t\t\t)\n\t\t}\n\t}\n\n\tlogMsgFn()\n\n\tif insecure {\n\t\treturn httpServer.Serve(lis)\n\t}\n\n\t// Parse HTTP server TLS config\n\tserverTLSsConfig, err := micro_tls.HTTPServerConfig()\n\tif err != nil {\n\t\treturn errors.Wrap(err, \"failed to create TLS config for HTTP server\")\n\t}\n\n\treturn httpServer.Serve(tls.NewListener(lis, serverTLSsConfig))\n}", "func RunServer() error {\n\tctx := context.Background()\n\t// initialize logger\n\tif err := logger.Init(*logLevel, *logTimeFormat); err != nil {\n\t\treturn fmt.Errorf(\"failed to initialize logger: %v\", err)\n\t}\n\n\tflag.Parse()\n\n\ts := &service.Server{}\n\ts.LoadUsers(*jsonDBFile)\n\t// Goroutines: A goroutine is a lightweight thread managed by the Go runtime.\n\tgo grpc.RunServer(ctx, s, *port)\n\n\treturn rest.RunServer(ctx, *httpPort, *port)\n}", "func runServer(blockchain *pkg.Blockchain) error {\n\tmux := api.MakeMuxRouter(blockchain)\n\thttpPort, found := os.LookupEnv(\"PORT\")\n\n\tif found == false {\n\t\thttpPort = \"8080\"\n\t\tfmt.Println(\"\\\"PORT\\\" Not Found in env using default 8080\")\n\t}\n\tlog.Println(\"HTTP Server Listening on port :\", httpPort)\n\ts := &http.Server{\n\t\tAddr: \":\" + httpPort,\n\t\tHandler: mux,\n\t\tReadTimeout: 10 * time.Second,\n\t\tWriteTimeout: 10 * time.Second,\n\t\tMaxHeaderBytes: 1 << 20,\n\t}\n\n\terr := s.ListenAndServe()\n\n\treturn err\n}", "func RunServer(core http.HandlerFunc) error {\n\tvar port = viper.GetInt(\"port\")\n\tvar tls = viper.GetBool(\"tls\")\n\tvar cert = viper.GetString(\"cert\")\n\tvar key = viper.GetString(\"key\")\n\n\t// Run the server\n\tsrv := &http.Server{\n\t\tAddr: fmt.Sprintf(\"0.0.0.0:%d\", port),\n\t\tHandler: core,\n\t\tReadTimeout: 10 * time.Second,\n\t\tWriteTimeout: 10 * time.Second,\n\t\tMaxHeaderBytes: 1 << 20,\n\t}\n\tif tls {\n\t\tlog.Printf(\"Start server, listen on https://%+v\", srv.Addr)\n\t\tlog.Fatal(srv.ListenAndServeTLS(cert, key))\n\t} else {\n\t\tlog.Printf(\"Start server, listen on http://%+v\", srv.Addr)\n\t\tlog.Fatal(srv.ListenAndServe())\n\t}\n\n\treturn nil\n}", "func RunServer(host string, port int) {\n\thandleWebsocket()\n\thandlePublicFiles()\n\n\tlogger.Println(fmt.Sprintf(\"server started at http://:%d/\", port))\n\tlog.Fatal(http.ListenAndServe(fmt.Sprintf(\":%d\", port), nil))\n}", "func (server *Server) runServer() {\n\tserver.G.Go(func() error {\n\t\tserver.API.log.Info(\"running server %v\", server.config.Server.ListenAddr)\n\t\treturn http.ListenAndServe(server.config.Server.ListenAddr, server.Server.Handler)\n\t})\n}", "func RunServer(server client.Service) {\n\tr := mux.NewRouter()\n\tr.HandleFunc(server.HealthCheck, func(w http.ResponseWriter, r *http.Request) {\n\t\t// an example API handler\n\t\tjson.NewEncoder(w).Encode(map[string]bool{\"ok\": true})\n\t}).Methods(\"GET\")\n\tsrv := &http.Server{\n\t\tHandler: r,\n\t\tAddr: fmt.Sprintf(\"%s:%d\", server.URL, 
server.Port),\n\t\tWriteTimeout: 15 * time.Second,\n\t\tReadTimeout: 15 * time.Second,\n\t}\n\tlog.Fatal(srv.ListenAndServe())\n}", "func Run() error {\n\tgo StartServer()\n\n\tlis, err := net.Listen(\"tcp\", \":50051\")\n\tif err != nil {\n\t\tlog.Fatalf(\"failed to listen: %v\", err)\n\t}\n\n\ts := grpc.NewServer()\n\n\tklessapi.RegisterKlessAPIServer(s, &apiserver.APIServer{})\n\tif err := s.Serve(lis); err != nil {\n\t\tlog.Fatalf(\"failed to serve: %v\", err)\n\t}\n\treturn nil\n}", "func Run(host string, port int) error {\n\tsrv := grpc.NewServer()\n\n\tlis, err := net.Listen(\"tcp\", fmt.Sprintf(\"%s:%d\", host, port))\n\tif err != nil {\n\t\treturn err\n\t}\n\n\tproto.RegisterBlockchainServer(srv, &server{\n\t\tblockchain: blockchain.NewBlockchain(),\n\t})\n\n\tlog.Println(fmt.Sprintf(\"Server listening on %s:%d\", host, port))\n\n\treturn srv.Serve(lis)\n}", "func runServer() {\n\t// listen and serve on 0.0.0.0:8080 (for windows \"localhost:8080\")\n\tlog.Fatalln(router.Run(fmt.Sprintf(\":%s\", env.AppPort)))\n}", "func (s *grpcServer) Run(ctx context.Context, ready func()) error {\n\tlogger := log.WithContext(ctx)\n\ts.server.Init(ctx, nil)\n\tlistener, err := net.Listen(\"tcp\", s.cfg.Address)\n\tif err != nil {\n\t\tlogger.WithError(err).WithField(\"address\", s.cfg.Address).Error(\"unable to listen tcp address\")\n\t\treturn err\n\t}\n\n\tlogger.Info(\"starting of grpc server...\")\n\ts.server.Init(ctx, nil)\n\tmaster.RegisterMasterServer(s.server.server, s.server)\n\tif err := s.server.server.Serve(listener); err != nil {\n\t\treturn err\n\t}\n\treturn nil\n}", "func (s *Server) Start(ctx context.Context, cfg config.Config) error {\n\ts.mtx.Lock()\n\n\tcmtCfg := tmrpcserver.DefaultConfig()\n\tcmtCfg.MaxOpenConnections = int(cfg.API.MaxOpenConnections)\n\tcmtCfg.ReadTimeout = time.Duration(cfg.API.RPCReadTimeout) * time.Second\n\tcmtCfg.WriteTimeout = time.Duration(cfg.API.RPCWriteTimeout) * time.Second\n\tcmtCfg.MaxBodyBytes = int64(cfg.API.RPCMaxBodyBytes)\n\n\tlistener, err := tmrpcserver.Listen(cfg.API.Address, cmtCfg.MaxOpenConnections)\n\tif err != nil {\n\t\ts.mtx.Unlock()\n\t\treturn err\n\t}\n\n\ts.listener = listener\n\ts.mtx.Unlock()\n\n\t// configure grpc-web server\n\tif cfg.GRPC.Enable && cfg.GRPCWeb.Enable {\n\t\tvar options []grpcweb.Option\n\t\tif cfg.API.EnableUnsafeCORS {\n\t\t\toptions = append(options,\n\t\t\t\tgrpcweb.WithOriginFunc(func(origin string) bool {\n\t\t\t\t\treturn true\n\t\t\t\t}),\n\t\t\t)\n\t\t}\n\n\t\twrappedGrpc := grpcweb.WrapServer(s.GRPCSrv, options...)\n\t\ts.Router.PathPrefix(\"/\").Handler(http.HandlerFunc(func(w http.ResponseWriter, req *http.Request) {\n\t\t\tif wrappedGrpc.IsGrpcWebRequest(req) {\n\t\t\t\twrappedGrpc.ServeHTTP(w, req)\n\t\t\t\treturn\n\t\t\t}\n\n\t\t\t// Fall back to grpc gateway server.\n\t\t\ts.GRPCGatewayRouter.ServeHTTP(w, req)\n\t\t}))\n\t}\n\n\t// register grpc-gateway routes (after grpc-web server as the first match is used)\n\ts.Router.PathPrefix(\"/\").Handler(s.GRPCGatewayRouter)\n\n\terrCh := make(chan error)\n\n\t// Start the API in an external goroutine as Serve is blocking and will return\n\t// an error upon failure, which we'll send on the error channel that will be\n\t// consumed by the for block below.\n\tgo func(enableUnsafeCORS bool) {\n\t\ts.logger.Info(\"starting API server...\", \"address\", cfg.API.Address)\n\n\t\tif enableUnsafeCORS {\n\t\t\tallowAllCORS := handlers.CORS(handlers.AllowedHeaders([]string{\"Content-Type\"}))\n\t\t\terrCh <- tmrpcserver.Serve(s.listener, 
allowAllCORS(s.Router), servercmtlog.CometLoggerWrapper{Logger: s.logger}, cmtCfg)\n\t\t} else {\n\t\t\terrCh <- tmrpcserver.Serve(s.listener, s.Router, servercmtlog.CometLoggerWrapper{Logger: s.logger}, cmtCfg)\n\t\t}\n\t}(cfg.API.EnableUnsafeCORS)\n\n\t// Start a blocking select to wait for an indication to stop the server or that\n\t// the server failed to start properly.\n\tselect {\n\tcase <-ctx.Done():\n\t\t// The calling process canceled or closed the provided context, so we must\n\t\t// gracefully stop the API server.\n\t\ts.logger.Info(\"stopping API server...\", \"address\", cfg.API.Address)\n\t\treturn s.Close()\n\n\tcase err := <-errCh:\n\t\ts.logger.Error(\"failed to start API server\", \"err\", err)\n\t\treturn err\n\t}\n}", "func (state *InMemoryState) runServer() {\n\n\tlog.WithFields(log.Fields{\n\t\t\"port\": state.opts.APIServerPort,\n\t}).Info(\"[runServer] starting HTTP server\")\n\tdefer log.Info(\"[runServer] stopping HTTP server\")\n\n\tgo func() {\n\t\t// for debugging:\n\t\t// http://stackoverflow.com/questions/19094099/how-to-dump-goroutine-stacktraces\n\t\tlog.Println(http.ListenAndServe(\":6060\", nil))\n\t}()\n\n\tr := rpc.NewServer()\n\tr.RegisterCodec(rpcjson.NewCodec(), \"application/json\")\n\tr.RegisterCodec(rpcjson.NewCodec(), \"application/json;charset=UTF-8\")\n\tr.RegisterInterceptFunc(rpcInterceptFunc)\n\tr.RegisterAfterFunc(rpcAfterFunc)\n\td := NewDotmeshRPC(state, state.userManager)\n\terr := r.RegisterService(d, \"\") // deduces name from type name\n\tif err != nil {\n\t\tlog.Printf(\"Error while registering services %s\", err)\n\t}\n\n\trouter := mux.NewRouter()\n\n\trouter.Handle(\"/rpc\", Instrument(state)(NewAuthHandler(r, state.userManager)))\n\n\trouter.Handle(\n\t\t\"/filesystems/{filesystem}/{fromSnap}/{toSnap}\",\n\t\tInstrument(state)(NewAuthHandler(state.NewZFSSendingServer(), state.userManager)),\n\t).Methods(\"GET\")\n\n\trouter.Handle(\n\t\t\"/filesystems/{filesystem}/{fromSnap}/{toSnap}\",\n\t\tInstrument(state)(NewAuthHandler(state.NewZFSReceivingServer(), state.userManager)),\n\t).Methods(\"POST\")\n\n\t// display diff since the last commit\n\trouter.Handle(\"/diff/{namespace}:{name}\", Instrument(state)(NewAuthHandler(NewDiffHandler(state), state.userManager))).Methods(\"GET\")\n\trouter.Handle(\"/diff/{namespace}:{name}/{snapshotID}\", Instrument(state)(NewAuthHandler(NewDiffHandler(state), state.userManager))).Methods(\"GET\")\n\n\t// list files in the latest snapshot\n\trouter.Handle(\"/s3/{namespace}:{name}\", Instrument(state)(NewAuthHandler(NewS3Handler(state), state.userManager))).Methods(\"GET\")\n\t// list files in a specific snapshot\n\trouter.Handle(\"/s3/{namespace}:{name}/snapshot/{snapshotId}\", Instrument(state)(NewAuthHandler(NewS3Handler(state), state.userManager))).Methods(\"GET\")\n\t// download a file from a specific snapshot, or just get its size\n\trouter.Handle(\"/s3/{namespace}:{name}/snapshot/{snapshotId}/{key:.*}\", Instrument(state)(NewAuthHandler(NewS3Handler(state), state.userManager))).Methods(\"GET\", \"HEAD\")\n\t// put file into master\n\trouter.Handle(\"/s3/{namespace}:{name}/{key:.*}\", Instrument(state)(NewAuthHandler(NewS3Handler(state), state.userManager))).Methods(\"PUT\")\n\t// put file into other branch\n\trouter.Handle(\"/s3/{namespace}:{name}@{branch}/{key:.*}\", Instrument(state)(NewAuthHandler(NewS3Handler(state), state.userManager))).Methods(\"PUT\")\n\n\t// delete file on master\n\trouter.Handle(\"/s3/{namespace}:{name}/{key:.*}\", 
Instrument(state)(NewAuthHandler(NewS3Handler(state), state.userManager))).Methods(\"DELETE\")\n\t// delete file on another branch\n\trouter.Handle(\"/s3/{namespace}:{name}@{branch}/{key:.*}\", Instrument(state)(NewAuthHandler(NewS3Handler(state), state.userManager))).Methods(\"DELETE\")\n\n\trouter.HandleFunc(\"/check\",\n\t\tfunc(w http.ResponseWriter, r *http.Request) {\n\t\t\tfmt.Fprintf(w, \"OK\")\n\t\t},\n\t)\n\n\trouter.Handle(\"/metrics\", promhttp.Handler())\n\n\tif os.Getenv(\"PRINT_HTTP_LOGS\") != \"\" {\n\t\tloggingRouter := handlers.LoggingHandler(getLogfile(\"requests\"), router)\n\t\t// TODO: take server port from the config\n\t\terr = http.ListenAndServe(fmt.Sprintf(\":%s\", state.opts.APIServerPort), loggingRouter)\n\t} else {\n\t\terr = http.ListenAndServe(fmt.Sprintf(\":%s\", state.opts.APIServerPort), router)\n\t}\n\n\tif err != nil {\n\t\tutils.Out(fmt.Sprintf(\"Unable to listen on port %s: '%s'\\n\", state.opts.APIServerPort, err))\n\t\tlog.Fatalf(\"Unable to listen on port %s: '%s'\", state.opts.APIServerPort, err)\n\t}\n}", "func RunServer(addr string) error {\n\tmtx.Lock()\n\tdefer mtx.Unlock()\n\tif finalizer == nil {\n\t\tpanic(fmt.Errorf(\"no finalizer registered\"))\n\t}\n\n\tl, err := net.Listen(\"tcp4\", addr)\n\tif err != nil {\n\t\treturn err\n\t}\n\n\trpc.Register(&rpcType.Server{request})\n\n\tgo func() {\n\t\trpc.Accept(l)\n\t}()\n\n\tc := make(chan os.Signal)\n\tsignal.Notify(c, os.Interrupt)\n\t<-c\n\n\t// It would be preferable to shut down the RPC\n\t// server first, but that is difficult, so instead\n\t// just take the lock, call the finalizer, and\n\t// return. No more RPC calls will be able to proceed.\n\tinvokeMtx.Lock()\n\tfinalizer()\n\treturn nil\n}", "func main() {\n\taddr := \"localhost:8080\"\n\tclientPublicFilepath := \"../data/client.public.pem\"\n\tprivateFilepath := \"../data/server.private.pem\"\n\tpublicFilepath := \"../data/server.public.pem\"\n\n\t// Generate a self-signed certificate\n\tkeyPair := cert.GenerateRSAKeyOrDie()\n\t_, certBytes := cert.GenerateRootCertOrDie(keyPair, []string{\"localhost\", \"127.0.0.1\"})\n\tcert.WritePrivateKeyAsPEM(keyPair, privateFilepath)\n\tcert.WriteCertAsPEM(certBytes, publicFilepath)\n\n\t// RPC server\n\trpcServer := grpc.NewServer(createGrpcOptions(publicFilepath, privateFilepath)...)\n\trafia.RegisterFarewellerServer(rpcServer, &Farewell{})\n\trafia.RegisterGreeterServer(rpcServer, &Greet{})\n\n\t// Http Server\n\thttpServer := http.NewServeMux()\n\thttpServer.Handle(\"/statusz\", &Statusz{})\n\n\ts := http.Server{\n\t\tAddr: addr,\n\t\tHandler: grpcHandlerFunc(rpcServer, httpServer),\n\t\tTLSConfig: createTLSConfig(clientPublicFilepath),\n\t}\n\n\tlis, err := net.Listen(\"tcp\", addr)\n\tif err != nil {\n\t\tlog.Fatal(err)\n\t}\n\tfmt.Printf(\"Serving on %s\\n\", lis.Addr().String())\n\tif err := s.ServeTLS(lis, publicFilepath, privateFilepath); err != nil {\n\t\tlog.Fatal(err)\n\t}\n}", "func (s *SamFSServer) Run() error {\n\tlis, err := net.Listen(\"tcp\", s.port)\n\tif err != nil {\n\t\tglog.Fatalf(\"falied to listen on port :: %s(err=%s)\", s.port, err.Error())\n\t\treturn err\n\t}\n\n\trand.Seed(time.Now().UnixNano())\n\ts.sessionID = rand.Int63()\n\tglog.Infof(\"starting new server with sessionID %d\", s.sessionID)\n\n\tgs := grpc.NewServer()\n\tpb.RegisterNFSServer(gs, s)\n\ts.grpcServer = gs\n\treturn gs.Serve(lis)\n}", "func RunGRPC() {\n\tApp.Init()\n\n\tLog.Info(fmt.Sprintf(\"listening gRPC on port :%d\", Config.Int(\"grpc_port\")))\n\tl, err := net.Listen(\"tcp\", 
fmt.Sprintf(\":%d\", Config.Int(\"grpc_port\")))\n\tif err != nil {\n\t\tLog.Fatalf(\"failed: %v\", err)\n\t}\n\n\tvar options []grpc.ServerOption\n\n\tif Config.String(\"grpc_cert\") != \"\" && Config.String(\"grpc_cert_key\") != \"\" {\n\t\tconfig := &tls.Config{}\n\t\tcert, err := tls.LoadX509KeyPair(Config.String(\"grpc_cert\"), Config.String(\"grpc_cert_key\"))\n\t\tif err != nil {\n\t\t\tLog.Fatal(err)\n\t\t}\n\t\tconfig.Certificates = []tls.Certificate{cert}\n\t\toptions = append(options, grpc.Creds(credentials.NewTLS(config)))\n\t}\n\n\tif len(App.grpcUnaryInterceptors) > 0 {\n\t\toptions = append(options, grpc.UnaryInterceptor(ServerUnaryInterceptor()))\n\t}\n\n\tif len(App.grpcStreamInterceptors) > 0 {\n\t\toptions = append(options, grpc.StreamInterceptor(ServerStreamInterceptor()))\n\t}\n\n\tApp.GRPCServer = NewGRPC(options...)\n\n\t// Initialize gRPC services\n\tLog.Info(\"Registering gRPC services\")\n\n\tvar args []reflect.Value\n\targs = append(args, reflect.ValueOf(App.GRPCServer.Server))\n\n\tfor _, c := range App.grpcServices {\n\t\tmethod := reflect.ValueOf(c).MethodByName(\"Init\")\n\t\tif method.IsValid() {\n\t\t\tmethod.Call(args)\n\t\t}\n\t}\n\n\tLog.Error(App.GRPCServer.Serve(l))\n}", "func RunServer() error {\n\tctx := context.Background()\n\n\t//get configuration\n\n\tvar cfg Config\n\tflag.StringVar(&cfg.GRPCPort, \"grpc-port\", \"\", \"gRPC port to bind\")\n\tflag.StringVar(&cfg.HTTPPort, \"http-port\", \"\", \"HTTP port to bind\")\n\tflag.StringVar(&cfg.DBHost, \"host\", \"\", \"Database host\")\n\tflag.StringVar(&cfg.DBUser, \"user\", \"\", \"Database user\")\n\tflag.StringVar(&cfg.DBPassword, \"password\", \"\", \"Database password\")\n\tflag.StringVar(&cfg.DBName, \"db\", \"\", \"Database name\")\n\tflag.IntVar(&cfg.LogLevel, \"log-level\", 0, \"Global log level\")\n\tflag.StringVar(&cfg.LogTimeFormat, \"log-time-format\", \"\", \"Print time format for logger e.g. 
2006-01-02T15:04:05Z07:00\")\n\tflag.StringVar(&cfg.DBMigrations, \"migrations\", \"\", \"Database schema migrations\")\n\tflag.Parse()\n\n\tif len(cfg.GRPCPort) == 0 {\n\t\treturn fmt.Errorf(\"invalid TCP port for gRPC server: '%s'\", cfg.GRPCPort)\n\t}\n\n\tif len(cfg.HTTPPort) == 0 {\n\t\treturn fmt.Errorf(\"invalid TCP port for HTTP gateway: '%s'\", cfg.HTTPPort)\n\t}\n\n\tif len(cfg.DBMigrations) == 0 {\n\t\treturn fmt.Errorf(\"invalid database migrations path: '%s'\", cfg.DBMigrations)\n\t}\n\n\tif err := logger.Init(cfg.LogLevel, cfg.LogTimeFormat); err != nil {\n\t\treturn fmt.Errorf(\"failed to initialize logger: %v\", err)\n\t}\n\t//Lets chek if migrations Directory path exists\n\tif _, err := os.Stat(cfg.DBMigrations); os.IsNotExist(err) {\n\t\treturn fmt.Errorf(\"you need to provide the path to directory where your migrations are stored: -migrations <migrations_path>\")\n\t}\n\t//add MySQL driver specific parameter to parse date/time\n\t//Drop it for another database\n\tparam := \"parseTime=true\"\n\n\tdsn := fmt.Sprintf(\"%s:%s@tcp(%s)/%s?%s\", cfg.DBUser, cfg.DBPassword, cfg.DBHost, cfg.DBName, param)\n\n\tdb, err := sql.Open(\"mysql\", dsn)\n\n\tif err != nil {\n\t\treturn fmt.Errorf(\"failed to open database: %v\", err)\n\t}\n\tdefer db.Close()\n\n\t//Lets run our migrations first\n\tif err := runMigrations(db, cfg.DBMigrations); err != nil {\n\t\treturn fmt.Errorf(\"failed to run migrations: %v\", err)\n\t}\n\n\tv1API := v1.NewToDoServiceServer(db)\n\n\t//run HTTP/REST gateway\n\tgo func() {\n\t\t_ = rest.RunServer(ctx, cfg.GRPCPort, cfg.HTTPPort)\n\t}()\n\n\treturn grpc.RunServer(ctx, v1API, cfg.GRPCPort)\n}", "func (g *GRPC) Run() error {\n\tvar err error\n\tg.listener, err = net.Listen(connProtocol, fmt.Sprintf(\":%s\", g.port))\n\tif err != nil {\n\t\treturn err\n\t}\n\n\tgo g.serve()\n\treturn nil\n}", "func (s *server) Run() error {\n\ts.logger.Info(\"starting http server\", logger.String(\"addr\", s.server.Addr))\n\ts.server.Handler = s.gin\n\t// Open listener.\n\ttrackedListener, err := conntrack.NewTrackedListener(\"tcp\", s.addr, s.r)\n\tif err != nil {\n\t\treturn err\n\t}\n\treturn s.server.Serve(trackedListener)\n}", "func (g *Server) Start() error {\n\tif len(g.registrars) == 0 {\n\t\treturn errors.New(\"No registration method added. 
impossible to boot\")\n\t}\n\tlisten, err := net.Listen(\"tcp\", \":\"+g.config.Port)\n\tif err != nil {\n\t\treturn err\n\t}\n\n\t// The code below is to bootstrap a multiplexed server\n\t// this is necessary to create healthcheck endpoint from regular LoadBalancers\n\t// as they generate only use HTTP or TCP\n\n\t// creating multiplexed server\n\tmux := cmux.New(listen)\n\n\t// Matching connections by priority order\n\tgrpcListener := mux.Match(cmux.HTTP2HeaderField(\"content-type\", \"application/grpc\"))\n\t// used for health checks\n\thttpListener := mux.Match(cmux.Any())\n\n\t// initiating grpc server\n\tgrpcServer := grpc.NewServer()\n\t// registering handlers\n\tfor _, r := range g.registrars {\n\t\tr(grpcServer)\n\t}\n\treflection.Register(grpcServer)\n\n\t// creating http server\n\thttpServer := http.NewServeMux()\n\thttpServer.HandleFunc(\"/\", func(w http.ResponseWriter, r *http.Request) {\n\t\tfmt.Fprintf(w, \"OK\")\n\t})\n\n\thttpS := &http.Server{\n\t\tHandler: httpServer,\n\t}\n\n\t// starting it all\n\tgo grpcServer.Serve(grpcListener)\n\tgo httpS.Serve(httpListener)\n\t// will periodically free memory if set\n\tif g.config.PeriodicMemory > 0 {\n\t\tgo g.PeriodicFree(g.config.PeriodicMemory)\n\t}\n\n\t// Start serving...\n\treturn mux.Serve()\n}", "func StartServer(cleanUpChan chan int){\n\tGrpcServer = &Server{\n CleanUpChan:cleanUpChan ,\n\t GrpcServer: grpc.NewServer(),\n\t}\n\tregisterGrpcServices(GrpcServer.GrpcServer)\n\tif err := GrpcServer.GrpcServer.Serve(getListner(port)); err != nil {\n\t\tpanic(err)\n\t}\n}", "func main() {\n\tctx := context.Background()\n\tctx, cancel := context.WithCancel(ctx)\n\tdefer cancel()\n\n\t// New server multiplexer\n\tmux := runtime.NewServeMux()\n\topts := []grpc.DialOption{grpc.WithInsecure()}\n\n\t// Our gRPC host address\n\tconn := os.Getenv(\"SERVICE_ADDRESS\")\n\tapiAddress := os.Getenv(\"API_ADDRESS\")\n\n\tlog.Printf(\"Connecting to gRPC server on: %s\\n\", conn)\n\tlog.Printf(\"Starting API on: %s\\n\", apiAddress)\n\n\t// Register the handler to an endpoint\n\terr := gw.RegisterUserServiceHandlerFromEndpoint(ctx, mux, conn, opts)\n\tif err != nil {\n\t\tlog.Fatal(err)\n\t}\n\n\t// Return a server instance\n\thttp.ListenAndServe(apiAddress, mux)\n}", "func (server *Server) Run(ctx context.Context) error {\n\tctx = log.ContextWithPrefix(ctx, logPrefix)\n\n\tgoamux := goahttp.NewMuxer()\n\n\tgroupServices, ctx := errgroup.WithContext(ctx)\n\tvar servers goahttp.Servers\n\tfor _, service := range server.services {\n\t\tgroupServices.Go(func() error {\n\t\t\treturn service.Run(ctx)\n\t\t})\n\t\tservers = append(servers, service.Mount(ctx, goamux))\n\t}\n\tservers.Use(goahttpmiddleware.Debug(goamux, os.Stdout))\n\n\tvar handler http.Handler = goamux\n\n\thandler = Recovery()(handler)\n\thandler = Log(ctx)(handler)\n\thandler = goahttpmiddleware.RequestID()(handler)\n\n\tmux := http.NewServeMux()\n\tmux.Handle(\"/\", handler)\n\tmux.Handle(\"/swagger/swagger.json\", handler)\n\n\tif server.config.Swagger {\n\t\tmux.Handle(\"/swagger/\", http.FileServer(http.FS(docs.SwaggerContent)))\n\t}\n\n\taddr := net.JoinHostPort(server.config.Hostname, strconv.Itoa(server.config.Port))\n\tsrv := &http.Server{Addr: addr, Handler: mux}\n\n\terrCh := make(chan error, 1)\n\tgo func() {\n\t\tdefer errors.Recover(log.Fatal)\n\n\t\t<-ctx.Done()\n\t\tlog.WithContext(ctx).Infof(\"Shutting down REST server on %q\", addr)\n\n\t\tctx, cancel := context.WithTimeout(ctx, server.shutdownTimeout)\n\t\tdefer 
cancel()\n\n\t\tsrv.SetKeepAlivesEnabled(false)\n\t\tif err := srv.Shutdown(ctx); err != nil {\n\t\t\terrCh <- errors.Errorf(\"gracefully shutdown the server: %w\", err)\n\t\t}\n\t\tclose(errCh)\n\t}()\n\n\tlog.WithContext(ctx).Infof(\"REST server is listening on %q\", addr)\n\tif err := srv.ListenAndServe(); err != nil && err != http.ErrServerClosed {\n\t\treturn errors.Errorf(\"error starting server: %w\", err)\n\t}\n\tdefer log.WithContext(ctx).Infof(\"Server stoped\")\n\n\tif err := groupServices.Wait(); err != nil {\n\t\treturn err\n\t}\n\treturn <-errCh\n}", "func (s *Server) Run() error {\n\t// start fetcher, reporter and doc generator in goroutines\n\tgo s.fetcher.Run()\n\tgo s.reporter.Run()\n\tgo s.docGenerator.Run()\n\n\t// start webserver\n\tlistenAddress := s.listenAddress\n\tif listenAddress == \"\" {\n\t\tlistenAddress = DefaultAddress\n\t}\n\n\tr := mux.NewRouter()\n\n\t// register ping api\n\tr.HandleFunc(\"/_ping\", pingHandler).Methods(\"GET\")\n\n\t// github webhook API\n\tr.HandleFunc(\"/events\", s.gitHubEventHandler).Methods(\"POST\")\n\n\t// travisCI webhook API\n\tr.HandleFunc(\"/ci_notifications\", s.ciNotificationHandler).Methods(\"POST\")\n\n\tlogrus.Infof(\"start http server on address %s\", listenAddress)\n\treturn http.ListenAndServe(listenAddress, r)\n}", "func (c *configuration) RunGrpcServers(grpcServer *grpc.Server) {\n\t// just return if grpc server is not enabled\n\tif !c.Properties.Server.Enabled || grpcServer == nil {\n\t\treturn\n\t}\n\n\taddress := c.Properties.Server.Host + \":\" + c.Properties.Server.Port\n\tlis, err := net.Listen(c.Properties.Server.Network, address)\n\tif err != nil {\n\t\tlog.Fatalf(\"failed to listen: %v, %v\", address, err)\n\t}\n\n\t// register server\n\t// Register reflection service on gRPC server.\n\tchn := make(chan bool)\n\tgo func() {\n\t\tfor _, srv := range grpcServers {\n\t\t\treflector.CallFunc(srv.cb, grpcServer, srv.svc)\n\t\t\tsvcName, err := reflector.GetName(srv.svc)\n\t\t\tif err == nil {\n\t\t\t\tlog.Infof(\"Registered %v on gRPC server\", svcName)\n\t\t\t}\n\t\t}\n\t\treflection.Register(grpcServer)\n\t\tchn <- true\n\t\tif err := grpcServer.Serve(lis); err != nil {\n\t\t\tfmt.Printf(\"failed to serve: %v\", err)\n\t\t}\n\t\tfmt.Printf(\"gRPC server exit\\n\")\n\t}()\n\t<-chn\n\n\tlog.Infof(\"gRPC server listening on: localhost%v\", address)\n}", "func run() error {\n\tlistenOn := \"127.0.0.1:8080\"\n\tlistener, err := net.Listen(\"tcp\", listenOn)\n\tif err != nil {\n\t\treturn fmt.Errorf(\"failed to listen on %s: %w\", listenOn, err)\n\t}\n\n\tserver := grpc.NewServer()\n\tuserv1.RegisterUserServiceServer(server, &userServiceServer{})\n\tlog.Println(\"Listening on\", listenOn)\n\n\tif err := server.Serve(listener); err != nil {\n\t\treturn fmt.Errorf(\"failed to serve gRPC server: %w\", err)\n\t}\n\n\treturn nil\n}", "func (s *Service) RunServe(ctxStartup, ctxShutdown Ctx, shutdown func()) (err error) {\n\tlog := structlog.FromContext(ctxShutdown, nil)\n\tif s.cfg == nil {\n\t\ts.cfg, err = config.GetServe()\n\t}\n\tif err == nil {\n\t\ts.ca, err = netx.LoadCACert(s.cfg.TLSCACert)\n\t}\n\tif err == nil {\n\t\ts.cert, err = tls.LoadX509KeyPair(s.cfg.TLSCert, s.cfg.TLSKey)\n\t}\n\tif err == nil {\n\t\ts.certInt, err = tls.LoadX509KeyPair(s.cfg.TLSCertInt, s.cfg.TLSKeyInt)\n\t}\n\tif err != nil {\n\t\treturn log.Err(\"failed to get config\", \"err\", err)\n\t}\n\n\terr = concurrent.Setup(ctxStartup, map[interface{}]concurrent.SetupFunc{\n\t\t&s.repo: s.connectRepo,\n\t})\n\tif err != nil {\n\t\treturn 
log.Err(\"failed to connect\", \"err\", err)\n\t}\n\n\tif s.appl == nil {\n\t\ts.appl = app.New(s.repo, app.Config{\n\t\t\tSecret: s.cfg.Secret,\n\t\t})\n\t}\n\n\ts.srv = grpc.NewServer(s.appl, grpc.Config{\n\t\tCtxShutdown: ctxShutdown,\n\t\tCert: &s.cert,\n\t})\n\ts.srvInt = grpc.NewServerInt(s.appl, grpc.Config{\n\t\tCtxShutdown: ctxShutdown,\n\t\tCert: &s.certInt,\n\t})\n\ts.mux, err = grpcgw.NewServer(grpcgw.Config{\n\t\tCtxShutdown: ctxShutdown,\n\t\tEndpoint: s.cfg.AuthAddr,\n\t\tCA: s.ca,\n\t\tGRPCGWPattern: \"/\",\n\t\tOpenAPIPattern: \"/openapi/\", // Also hardcoded in web/static/swagger-ui/index.html.\n\t\tSwaggerUIPattern: \"/swagger-ui/\",\n\t})\n\tif err != nil {\n\t\treturn log.Err(\"failed to setup grpc-gateway\", \"err\", err)\n\t}\n\n\terr = concurrent.Serve(ctxShutdown, shutdown,\n\t\ts.serveMetrics,\n\t\ts.serveGRPC,\n\t\ts.serveGRPCInt,\n\t\ts.serveGRPCGW,\n\t)\n\tif err != nil {\n\t\treturn log.Err(\"failed to serve\", \"err\", err)\n\t}\n\treturn nil\n}", "func startServer(grpcServer *grpc.Server, listener net.Listener) {\n\tif err := grpcServer.Serve(listener); err != nil {\n\t\tlog.Fatalf(\"Error running server: %server\", err)\n\t}\n}", "func Run(address string) (*grpc.Server, net.Listener, error) {\n\tfmt.Println(\"starting the grpc server\")\n\n\tgs := grpc.NewServer()\n\tsrv := &Server{}\n\tRegisterServerServer(gs, srv)\n\n\tl, err := net.Listen(\"tcp\", address)\n\tif err != nil {\n\t\treturn nil, nil, fmt.Errorf(\"listen %v\", err)\n\t}\n\n\tgo func() {\n\t\terr := gs.Serve(l)\n\t\tif err != nil {\n\t\t\tlog.Fatal(err)\n\t\t}\n\t}()\n\n\treturn gs, l, nil\n}", "func RunServer() {\n\tserverMutex.Lock()\n\tdefer serverMutex.Unlock()\n\tif serverStarted {\n\t\treturn\n\t}\n\n\terr := initialiseServer()\n\tif err != nil {\n\t\tlog.Panicln(\"server:\", err)\n\t}\n\tlog.Println(\"server: Server starting at\", config.Address)\n\tserverStarted = true\n\tgo func() {\n\t\terr = server.ListenAndServe()\n\t\tif err != http.ErrServerClosed {\n\t\t\tlog.Println(\"server:\", err)\n\t\t}\n\t}()\n}", "func (s *Server) Run() {\n\t// use default mux\n\thealth := healthcheck.NewHandler()\n\ts.router.Handle(\"/\", health)\n\ts.router.Handle(\"/metrics\", promhttp.Handler())\n\n\ts.router.HandleFunc(fmt.Sprintf(\"/v1/models/%s:predict\", s.options.ModelName), s.PredictHandler).Methods(\"POST\")\n\n\toperationName := nethttp.OperationNameFunc(func(r *http.Request) string {\n\t\treturn fmt.Sprintf(\"%s %s\", r.Method, r.URL.Path)\n\t})\n\n\taddr := fmt.Sprintf(\":%s\", s.options.Port)\n\tsrv := &http.Server{\n\t\tAddr: addr,\n\t\tHandler: nethttp.Middleware(opentracing.GlobalTracer(), s.router, operationName),\n\t\tWriteTimeout: s.options.HTTPServerTimeout,\n\t\tReadTimeout: s.options.HTTPServerTimeout,\n\t\tIdleTimeout: 2 * s.options.HTTPServerTimeout,\n\t}\n\n\tstopCh := setupSignalHandler()\n\terrCh := make(chan error, 1)\n\tgo func() {\n\t\ts.logger.Info(\"starting standard transformer at : \" + addr)\n\t\t// Don't forward ErrServerClosed as that indicates we're already shutting down.\n\t\tif err := srv.ListenAndServe(); err != nil && err != http.ErrServerClosed {\n\t\t\terrCh <- errors.Wrapf(err, \"server failed\")\n\t\t}\n\t\ts.logger.Info(\"server shut down successfully\")\n\t}()\n\n\t// Exit as soon as we see a shutdown signal or the server failed.\n\tselect {\n\tcase <-stopCh:\n\tcase err := <-errCh:\n\t\ts.logger.Error(fmt.Sprintf(\"failed to run HTTP server: %v\", err))\n\t}\n\n\ts.logger.Info(\"server shutting down...\")\n\n\tif err := srv.Shutdown(context.Background()); 
err != nil {\n\t\ts.logger.Error(fmt.Sprintf(\"failed to shutdown HTTP server: %v\", err))\n\t}\n}", "func (s *Server) Serve() (err error) {\n\t// Initialize the gRPC server\n\ts.srv = grpc.NewServer()\n\tpb.RegisterTRISADemoServer(s.srv, s)\n\tpb.RegisterTRISAIntegrationServer(s.srv, s)\n\n\t// Catch OS signals for graceful shutdowns\n\tquit := make(chan os.Signal, 1)\n\tsignal.Notify(quit, os.Interrupt)\n\tgo func() {\n\t\t<-quit\n\t\ts.echan <- s.Shutdown()\n\t}()\n\n\t// Run the TRISA service on the TRISABindAddr\n\tif err = s.trisa.Serve(); err != nil {\n\t\treturn err\n\t}\n\n\t// Listen for TCP requests on the specified address and port\n\tvar sock net.Listener\n\tif sock, err = net.Listen(\"tcp\", s.conf.BindAddr); err != nil {\n\t\treturn fmt.Errorf(\"could not listen on %q\", s.conf.BindAddr)\n\t}\n\tdefer sock.Close()\n\n\t// Run the server\n\tgo func() {\n\t\tlog.Info().\n\t\t\tStr(\"listen\", s.conf.BindAddr).\n\t\t\tStr(\"version\", pkg.Version()).\n\t\t\tStr(\"name\", s.vasp.Name).\n\t\t\tMsg(\"server started\")\n\n\t\tif err := s.srv.Serve(sock); err != nil {\n\t\t\ts.echan <- err\n\t\t}\n\t}()\n\n\t// Listen for any errors that might have occurred and wait for all go routines to finish\n\tif err = <-s.echan; err != nil {\n\t\treturn err\n\t}\n\treturn nil\n}", "func (s *Server) Run(mux *http.ServeMux, port string) error {\n\ts.callbackServer = &http.Server{\n\t\tAddr: \":\" + port,\n\t\tMaxHeaderBytes: 1 << 20,\n\t\tReadTimeout: 10 * time.Second,\n\t\tWriteTimeout: 10 * time.Second,\n\t\tHandler: mux,\n\t}\n\treturn s.callbackServer.ListenAndServeTLS(\"/cert/server.crt\", \"/cert/server.key\")\n}", "func Run() error {\n\tcloseLogger, err := setupLogger()\n\tif err != nil {\n\t\treturn fail.Wrap(err)\n\t}\n\tdefer closeLogger()\n\n\ts := grapiserver.New(\n\t\tgrapiserver.WithGrpcServerUnaryInterceptors(\n\t\t\tgrpc_ctxtags.UnaryServerInterceptor(grpc_ctxtags.WithFieldExtractor(grpc_ctxtags.CodeGenRequestFieldExtractor)),\n\t\t\tgrpc_zap.UnaryServerInterceptor(zap.L()),\n\t\t\tgrpc_zap.PayloadUnaryServerInterceptor(\n\t\t\t\tzap.L(),\n\t\t\t\tfunc(ctx context.Context, fullMethodName string, servingObject interface{}) bool { return true },\n\t\t\t),\n\t\t),\n\t\tgrapiserver.WithGatewayServerMiddlewares(\n\t\t\tgithubEventDispatcher,\n\t\t),\n\t\tgrapiserver.WithServers(\n\t\t\tgithub.NewInstallationEventServiceServer(),\n\t\t),\n\t)\n\treturn s.Serve()\n}", "func Run(http_handler http.Handler, https_handler http.Handler) {\n\n\tvar server Server\n\tvar port int\n\tvar error error\n\n\tserver.Hostname = os.Getenv(\"HOSTNAME\")\n\tserver.UseHTTP = true\n\tserver.UseHTTPS = false\n\t\n\tport, error = strconv.Atoi(os.Getenv(\"HTTP_PORT\"))\n\tif error != nil {\n\t\tlog.Println(\"Config file does not specify a listener to start\")\n\t}\n\n\tserver.HTTPPort = port\n\tif server.HTTPPort == 0 {\n\t\tserver.HTTPPort = 8000\n\t}\n\n\tport, error = strconv.Atoi(os.Getenv(\"HTTPS_PORT\"))\n\tif error != nil {\n\t\tlog.Println(\"Config file does not specify a listener to start\")\n\t}\n\n\tserver.HTTPSPort = port\n\tif server.HTTPSPort == 0 {\n\t\tserver.HTTPSPort = 8443\n\t}\n\tserver.CertFile = os.Getenv(\"SSL_CERTIFICATE_FILE\")\n\tserver.KeyFile = os.Getenv(\"SSL_KEY_FILE\")\n\n\tif server.UseHTTP && server.UseHTTPS {\n\t\tgo func() {\n\t\t\tstart_HTTPS(https_handler, server)\n\t\t}()\n\n\t\tstart_HTTP(http_handler, server)\n\t} else if server.UseHTTP {\n\t\tstart_HTTP(http_handler, server)\n\t} else if server.UseHTTPS {\n\t\tstart_HTTPS(https_handler, server)\n\t} else 
{\n\t\tlog.Println(\"Config file does not specify a listener to start\")\n\t}\n}", "func Run() error {\n\ts := grapiserver.New(\n\t\tgrapiserver.WithDefaultLogger(),\n\t\tgrapiserver.WithGrpcAddr(\"tcp\", \":3000\"),\n\t\tgrapiserver.WithGatewayAddr(\"tcp\", \":4000\"),\n\t\tgrapiserver.WithServers(\n\t\t\tserver.NewInvoiceServiceServer(),\n\t\t),\n\t)\n\treturn s.Serve()\n}", "func (s *Server) Run() error {\n\ts.setRoutes()\n\n\thttpUrl := \":\" + strconv.Itoa(s.Config.HttpPort)\n\thttpsUrl := \":\" + strconv.Itoa(s.Config.HttpsPort)\n\ts.newServer(httpsUrl, httpUrl)\n\n\tgo s.httpServer.ListenAndServe()\n\treturn s.httpsServer.ListenAndServeTLS(s.Config.CertFile, s.Config.KeyFile)\n}", "func (s *Server) Run() error {\n\topentracing.SetGlobalTracer(s.Tracer)\n\n\tif s.Port == 0 {\n\t\treturn fmt.Errorf(\"server port must be set\")\n\t}\n\n\ts.uuid = uuid.New().String()\n\n\topts := []grpc.ServerOption{\n\t\tgrpc.KeepaliveParams(keepalive.ServerParameters{\n\t\t\tTimeout: 120 * time.Second,\n\t\t}),\n\t\tgrpc.KeepaliveEnforcementPolicy(keepalive.EnforcementPolicy{\n\t\t\tPermitWithoutStream: true,\n\t\t}),\n\t\tgrpc.UnaryInterceptor(\n\t\t\totgrpc.OpenTracingServerInterceptor(s.Tracer),\n\t\t),\n\t}\n\n\tif tlsopt := tls.GetServerOpt(); tlsopt != nil {\n\t\topts = append(opts, tlsopt)\n\t}\n\n\tsrv := grpc.NewServer(opts...)\n\n\tpb.RegisterReservationServer(srv, s)\n\n\tlis, err := net.Listen(\"tcp\", fmt.Sprintf(\":%d\", s.Port))\n\tif err != nil {\n\t\tlog.Fatal().Msgf(\"failed to listen: %v\", err)\n\t}\n\n\t// register the service\n\t// jsonFile, err := os.Open(\"config.json\")\n\t// if err != nil {\n\t// \tfmt.Println(err)\n\t// }\n\n\t// defer jsonFile.Close()\n\n\t// byteValue, _ := ioutil.ReadAll(jsonFile)\n\n\t// var result map[string]string\n\t// json.Unmarshal([]byte(byteValue), &result)\n\n\tlog.Trace().Msgf(\"In reservation s.IpAddr = %s, port = %d\", s.IpAddr, s.Port)\n\n\terr = s.Registry.Register(name, s.uuid, s.IpAddr, s.Port)\n\tif err != nil {\n\t\treturn fmt.Errorf(\"failed register: %v\", err)\n\t}\n\tlog.Info().Msg(\"Successfully registered in consul\")\n\n\treturn srv.Serve(lis)\n}", "func RunHTTPServer() (err error) {\n\tif !Conf.API.Enabled {\n\t\tLogAccess.Debug(\"HTTPD server is disabled.\")\n\t\treturn nil\n\t}\n\n\tLogAccess.Infof(\"HTTPD server is running on %s:%d.\", Conf.API.Address, Conf.API.Port)\n\t/* if Conf.Core.AutoTLS.Enabled {\n\t\ts := autoTLSServer()\n\t\terr = s.ListenAndServeTLS(\"\", \"\")\n\t} else if Conf.Core.SSL && Conf.Core.CertPath != \"\" && Conf.Core.KeyPath != \"\" {\n\t\terr = http.ListenAndServeTLS(Conf.Core.Address+\":\"+Conf.Core.Port, Conf.Core.CertPath, Conf.Core.KeyPath, routerEngine())\n\t} else { */\n\thttpSrv = &http.Server{\n\t\tAddr: fmt.Sprintf(\"%s:%d\", Conf.API.Address, Conf.API.Port),\n\t\tHandler: routerEngine(),\n\t}\n\terr = httpSrv.ListenAndServe()\n\t// }\n\n\tif err != http.ErrServerClosed {\n\t\tLogError.Errorf(\"HTTP server ListenAndServe returned error: %v\", err)\n\t\treturn err\n\t}\n\tLogAccess.Warn(\"RunHTTPServer canceled.\")\n\treturn nil\n}", "func (grpca Adapter) Run() {\n\tvar err error\n\tlisten, err := net.Listen(\"tcp\", \":9000\")\n\tif err != nil {\n\t\tlog.Fatalf(\"Failed to listen on port 9000 %v\", err)\n\t}\n\n\tarithmeticServiceServer := grpca\n\tgrpcServer := grpc.NewServer()\n\tpb.RegisterArithmeticServiceServer(grpcServer, arithmeticServiceServer)\n\tif err := grpcServer.Serve(listen); err != nil {\n\t\tlog.Fatalf(\"Failed to server gRPC over port 9000 %v\", err)\n\t}\n}", "func main() 
{\n\t// create a listener on TCP port 7777\n\tlis, err := net.Listen(\"tcp\", \":7777\")\n\tif err != nil {\n\t\tlog.Fatalf(\"failed to listen: %v\", err)\n\t}\n\n\t// create a server instance\n\ts := api.Server{}\n\n\t// create the TLS creds\n\tcreds, err := credentials.NewServerTLSFromFile(\"cert/server.crt\", \"cert/server.key\")\n\tif err != nil {\n\t\tlog.Fatalf(\"could not load TLS keys: %s\", err)\n\t}\n\n\t// add credentials to the gRPC options\n\topts := []grpc.ServerOption{grpc.Creds(creds)}\n\n\t// create a gRPC server object\n\tgrpcServer := grpc.NewServer(opts...)\n\n\t// attach the Ping service to the server\n\tapi.RegisterPingServer(grpcServer, &s)\n\n\t// start the server\n\tif err := grpcServer.Serve(lis); err != nil {\n\t\tlog.Fatalf(\"failed to serve: %v\", err)\n\t}\n}", "func (s *Server) Run() <-chan error {\n\tvar chErr chan error\n\tlog.Infoln(fmt.Sprintf(\"gRPC server has started at port %d\", s.port))\n\tgo func() {\n\t\tif err := s.server.Serve(s.listener); err != nil {\n\t\t\tchErr <- err\n\t\t}\n\t}()\n\treturn chErr\n}", "func (p *Service) Run(port string, certChain *dapr_credentials.CertChain) {\n\tvar err error\n\tp.serverListener, err = net.Listen(\"tcp\", fmt.Sprintf(\":%s\", port))\n\tif err != nil {\n\t\tlog.Fatalf(\"failed to listen: %v\", err)\n\t}\n\n\topts, err := dapr_credentials.GetServerOptions(certChain)\n\tif err != nil {\n\t\tlog.Fatalf(\"error creating gRPC options: %s\", err)\n\t}\n\tp.grpcServer = grpc.NewServer(opts...)\n\tplacementv1pb.RegisterPlacementServer(p.grpcServer, p)\n\n\tif err := p.grpcServer.Serve(p.serverListener); err != nil {\n\t\tlog.Fatalf(\"failed to serve: %v\", err)\n\t}\n}", "func (guistats *serverStats) runServer(server string) {\n\thttp.HandleFunc(\"/guiserver\", guistats.sendGraphStats)\n\thttp.ListenAndServe(fmt.Sprint(\":\",server), nil)\n\tlog.Printf(\"GUI Server running from port %s\", server)\n}", "func (s *Servers) initiateGRPCServer(endpoint string, config RuntimeConfig) error {\n\taddr := fmt.Sprintf(\"%s:%d\", endpoint, config.port)\n\tln, err := net.Listen(\"tcp\", addr)\n\tif err != nil {\n\t\tlog.Fatalf(\"gRPC server: failed to listen: %v\", err)\n\t\tos.Exit(2)\n\t}\n\ts.gRPCListener = ln\n\n\topts := []grpc.ServerOption{\n\t\ts.getUnaryInterceptors(),\n\t\t// grpc.ChainUnaryInterceptor(\n\t\t// \ts.Backend.AuthInterceptor.Unary(),\n\t\t// \ts.Backend.ObserverRegistry.UnaryInterceptor,\n\t\t// ),\n\t\t// MaxConnectionAge is just to avoid long connection, to facilitate load balancing\n\t\t// MaxConnectionAgeGrace will torn them, default to infinity\n\t\tgrpc.KeepaliveParams(keepalive.ServerParameters{MaxConnectionAge: 2 * time.Minute}),\n\t}\n\n\t// load mutual TLS cert/key and root CA cert\n\tif config.tlsCaCert != \"\" && config.tlsCert != \"\" && config.tlsKey != \"\" {\n\t\tkeyPair, err := tls.LoadX509KeyPair(config.tlsCert, config.tlsKey)\n\t\tif err != nil {\n\t\t\tlog.Fatalf(\"Failed to load server TLS cert/key with error:%v\", err)\n\t\t}\n\n\t\tcert, err := ioutil.ReadFile(config.tlsCaCert)\n\t\tif err != nil {\n\t\t\tlog.Fatalf(\"Failed to load root CA cert file with error:%v\", err)\n\t\t}\n\n\t\tpool := x509.NewCertPool()\n\t\tpool.AppendCertsFromPEM(cert)\n\n\t\tta := credentials.NewTLS(&tls.Config{\n\t\t\tCertificates: []tls.Certificate{keyPair},\n\t\t\tClientCAs: pool,\n\t\t\tClientAuth: tls.RequireAndVerifyClientCert,\n\t\t})\n\n\t\topts = append(opts, grpc.Creds(ta))\n\t}\n\n\ts.gRPCServer = grpc.NewServer(opts...)\n\n\ts.registerGRPCService()\n\tlog.Printf(\"gRPC server serving at %s\", 
addr)\n\n\tfb := fallback.NewServer(fmt.Sprintf(\"%s:%d\", endpoint, config.fallbackPort), fmt.Sprintf(\"%s:%d\", endpoint, config.port))\n\ts.fallbackServer = fb\n\n\t// Register reflection service on gRPC server.\n\treflection.Register(s.gRPCServer)\n\n\treturn s.gRPCServer.Serve(ln)\n}", "func (p *Proxy) gRPCServe() {\n\tentry := log.WithField(\"address\", p.grpcListenAddress)\n\tentry.Info(\"Starting gRPC server\")\n\tif err := p.grpcServer.Serve(p.grpcListenAddress); err != nil {\n\t\tentry.WithError(err).Error(\"gRPC server was not shut down cleanly\")\n\t}\n\n\tentry.Info(\"Stopped gRPC server\")\n}", "func RunHTTPServer(ctx context.Context, createHandler func(router chi.Router) http.Handler) {\n\tapiRouter := chi.NewRouter()\n\tsetMiddlewares(apiRouter)\n\n\trootRouter := chi.NewRouter()\n\trootRouter.Mount(\"/api\", createHandler(apiRouter))\n\n\tsrv := &http.Server{\n\t\tAddr: \":\" + os.Getenv(\"PORT\"),\n\t\tHandler: rootRouter,\n\t}\n\n\tvar wg sync.WaitGroup\n\n\twg.Add(1)\n\tgo func() {\n\t\tdefer wg.Done()\n\n\t\t_ = srv.ListenAndServe()\n\t}()\n\n\tsigs := make(chan os.Signal, 1)\n\tsignal.Notify(sigs, syscall.SIGINT, syscall.SIGTERM)\n\n\tgo func() {\n\t\t_ = <-sigs\n\t\t_ = srv.Shutdown(ctx)\n\t}()\n\n\twg.Wait()\n}", "func main() {\n\tvar (\n\t\trepository = storage.NewInMemory()\n\t\tportServer = service.NewPortServer(repository)\n\t)\n\n\tlistener, err := net.Listen(\"tcp\", fmt.Sprintf(\":%d\", port))\n\tif err != nil {\n\t\tlog.Fatalf(\"cannot start the server: %s\", err)\n\t}\n\n\tgrpcServer := grpc.NewServer()\n\n\tproto.RegisterStorageServer(grpcServer, portServer)\n\n\tlog.Printf(\"starting GRPC server on %s\", listener.Addr().String())\n\tif err := grpcServer.Serve(listener); err != nil {\n\t\tlog.Fatalf(\"failed to start the server: %s\", err)\n\t}\n}", "func (s *Server) Run(port string) (err error) {\n\tlis, err := net.Listen(\"tcp\", port)\n\tif err != nil {\n\t\treturn err\n\t}\n\n\tsrv := grpc.NewServer()\n\tproto.RegisterNeuroServer(srv, s)\n\tfmt.Println(\"running\")\n\terr = srv.Serve(lis) // this will block\n\treturn err\n}", "func RunServer(c *cli.Context) error {\n\treqm, err := nats.NewRequest(\n\t\tc.String(\"messaging-host\"),\n\t\tc.String(\"messaging-port\"),\n\t\tgnats.MaxReconnects(-1),\n\t\tgnats.ReconnectWait(2*time.Second),\n\t)\n\tif err != nil {\n\t\treturn cli.NewExitError(\n\t\t\tfmt.Sprintf(\"cannot connect to messaging server %s\", err.Error()),\n\t\t\t2,\n\t\t)\n\t}\n\tconfig, err := readSecretConfig(c)\n\tif err != nil {\n\t\treturn cli.NewExitError(fmt.Sprintf(\"Unable to secret config file %q\\n\", err), 2)\n\t}\n\tjt, err := parseJwtKeys(c)\n\tif err != nil {\n\t\treturn cli.NewExitError(fmt.Sprintf(\"Unable to parse keys %q\\n\", err), 2)\n\t}\n\t// sets the reply messaging connection\n\tjt.Request = reqm\n\tjt.Topics = map[string]string{\n\t\t\"userExists\": \"UserService.Exist\",\n\t\t\"userGet\": \"UserService.Get\",\n\t\t\"identityExists\": \"IdentityService.Exist\",\n\t\t\"identityGet\": \"IdentityService.GetIdentity\",\n\t}\n\tloggerMw, err := getLoggerMiddleware(c)\n\tif err != nil {\n\t\treturn cli.NewExitError(fmt.Sprintf(\"unable to get logger middlware %s\", err), 2)\n\t}\n\tcors := cors.New(cors.Options{\n\t\tAllowedOrigins: []string{\"*\"},\n\t\tAllowCredentials: true,\n\t\tOptionsPassthrough: true,\n\t})\n\n\tr := chi.NewRouter()\n\tr.Use(middleware.RequestID)\n\tr.Use(middleware.RealIP)\n\tr.Use(loggerMw.Middleware)\n\tr.Use(middleware.Recoverer)\n\tr.Use(cors.Handler)\n\t// Default health 
check\n\tr.Get(\"/healthz\", func(w http.ResponseWriter, r *http.Request) {\n\t\tif !reqm.IsActive() {\n\t\t\thttp.Error(w, \"messaging server is disconnected\", http.StatusInternalServerError)\n\t\t\treturn\n\t\t}\n\t\tw.Write([]byte(\"okay\"))\n\t})\n\tgoogleMw := middlewares.GetGoogleMiddleware(config)\n\tfbookMw := middlewares.GetFacebookMiddleware(config)\n\tlinkedInMw := middlewares.GetLinkedinMiddleware(config)\n\tOrcidMw := middlewares.GetOrcidMiddleware(config)\n\tr.Route(\"/tokens\", func(r chi.Router) {\n\t\tr.With(googleMw.ParamsMiddleware).\n\t\t\tWith(googleMw.GoogleMiddleware).Post(\"/google\", jt.JwtHandler)\n\t\tr.With(fbookMw.ParamsMiddleware).\n\t\t\tWith(fbookMw.FacebookMiddleware).Post(\"/facebook\", jt.JwtHandler)\n\t\tr.With(linkedInMw.ParamsMiddleware).\n\t\t\tWith(linkedInMw.LinkedInMiddleware).Post(\"/linkedin\", jt.JwtHandler)\n\t\tr.With(OrcidMw.ParamsMiddleware).\n\t\t\tWith(OrcidMw.OrcidMiddleware).Post(\"/orcid\", jt.JwtHandler)\n\t})\n\tr.Route(\"/authorize\", func(r chi.Router) {\n\t\ttokenAuth := jwtauth.New(\"RS512\", jt.SignKey, jt.VerifyKey)\n\t\tr.With(middlewares.AuthorizeMiddleware).\n\t\t\tWith(jwtauth.Verifier(tokenAuth)).\n\t\t\tPost(\"/\", jt.JwtFinalHandler)\n\t})\n\tif err := chi.Walk(r, walkFunc); err != nil {\n\t\tlog.Printf(\"error in printing routes %s\\n\", err)\n\t}\n\tlog.Printf(\"Starting web server on port %d\\n\", c.Int(\"port\"))\n\tlog.Fatal(http.ListenAndServe(fmt.Sprintf(\":%d\", c.Int(\"port\")), r))\n\treturn nil\n}", "func (s *Server) Run(ctx context.Context) error {\n\tvar httpsServer *http.Server\n\tvar httpHandler http.Handler\n\tif s.conf.SecurePort > 0 {\n\t\tif s.conf.SecureHostname == \"\" {\n\t\t\treturn errors.New(\"SECUREHOSTNAME not set\")\n\t\t}\n\t\tvar certDir string\n\t\tif s.conf.CertDir != \"\" {\n\t\t\tcertDir = s.conf.CertDir\n\t\t} else {\n\t\t\tcacheDir, err := os.UserCacheDir()\n\t\t\tif err != nil {\n\t\t\t\treturn err\n\t\t\t}\n\t\t\tcertDir = filepath.Join(cacheDir, \"wuerfler-certs\")\n\t\t}\n\t\tif !strings.Contains(strings.Trim(s.conf.SecureHostname, \".\"), \".\") {\n\t\t\treturn errors.New(\"acme/autocert aha: server name component count invalid\")\n\t\t}\n\t\tm := &autocert.Manager{\n\t\t\tCache: autocert.DirCache(certDir),\n\t\t\tPrompt: autocert.AcceptTOS,\n\t\t\tHostPolicy: autocert.HostWhitelist(s.conf.SecureHostname),\n\t\t}\n\t\thttpsServer = &http.Server{\n\t\t\tAddr: fmt.Sprintf(\":%d\", s.conf.SecurePort),\n\t\t\tTLSConfig: m.TLSConfig(),\n\t\t\tHandler: s.router,\n\t\t}\n\t\thttpHandler = m.HTTPHandler(http.HandlerFunc(redirect))\n\t} else {\n\t\thttpHandler = s.router\n\t}\n\n\thttpServer := &http.Server{\n\t\tAddr: fmt.Sprintf(\":%d\", s.conf.Port),\n\t\tHandler: httpHandler,\n\t}\n\n\terrCh := make(chan error)\n\tif httpsServer != nil {\n\t\tgo func() {\n\t\t\ts.log.Println(\"Starting HTTPS server.\")\n\t\t\terr := httpsServer.ListenAndServeTLS(\"\", \"\")\n\t\t\ts.log.Println(\"Ended HTTPS server.\")\n\t\t\terrCh <- fmt.Errorf(\"httpsServer.ListenAndServeTLS: %v\", err)\n\t\t}()\n\t}\n\tgo func() {\n\t\ts.log.Println(\"Starting HTTP server.\")\n\t\terr := httpServer.ListenAndServe()\n\t\ts.log.Println(\"Ended HTTP server.\")\n\t\terrCh <- fmt.Errorf(\"httpServer.ListenAndServe: %v\", err)\n\t}()\n\n\tprometheus.MustRegister(rooms.RoomsGauge)\n\tprometheus.MustRegister(ConnectionsGauge)\n\n\tselect {\n\tcase <-ctx.Done():\n\t\tif httpsServer != nil {\n\t\t\terr := httpsServer.Close()\n\t\t\tif err != nil {\n\t\t\t\tlog.Println(\"httpsServer.Close:\", err)\n\t\t\t}\n\t\t}\n\t\terr := 
httpServer.Close()\n\t\tif err != nil {\n\t\t\tlog.Println(\"httpServer.Close:\", err)\n\t\t}\n\t\treturn nil\n\tcase err := <-errCh:\n\t\treturn err\n\t}\n}", "func RunServer(configFile string) {\n\tc := make(chan os.Signal, 1)\n\tsignal.Notify(c, syscall.SIGTERM)\n\tserver, err := NewServer(configFile)\n\tif err != nil {\n\t\tlog.Fatal(err)\n\t}\n\tlog.Info(\"Gohan no jikan desuyo (It's time for dinner!)\")\n\tlog.Info(\"Build version: %s\", version.Build.Version)\n\tlog.Info(\"Build timestamp: %s\", version.Build.Timestamp)\n\tlog.Info(\"Build host: %s\", version.Build.Host)\n\tlog.Info(\"Starting Gohan Server...\")\n\taddress := server.address\n\tif strings.HasPrefix(address, \":\") {\n\t\taddress = \"localhost\" + address\n\t}\n\tprotocol := \"http\"\n\tif server.tls != nil {\n\t\tprotocol = \"https\"\n\t}\n\tlog.Info(\" API Server %s://%s/\", protocol, address)\n\tlog.Info(\" Web UI %s://%s/webui/\", protocol, address)\n\tgo func() {\n\t\tfor range c {\n\t\t\tlog.Info(\"Stopping the server...\")\n\t\t\tlog.Info(\"Tearing down...\")\n\t\t\tlog.Info(\"Stopping server...\")\n\t\t\tserver.Stop()\n\t\t}\n\t}()\n\tserver.running = true\n\tserver.masterCtx, server.masterCtxCancel = context.WithCancel(context.Background())\n\n\tserver.startSyncProcesses()\n\n\tstartCRONProcess(server)\n\tmetrics.StartMetricsProcess()\n\terr = server.Start()\n\tif err != nil {\n\t\tlog.Fatal(err)\n\t}\n}", "func (s *AppServer) Run() {\n\tvar (\n\t\tconfig = s.config.Section()\n\t\tnetwork = \"tcp\"\n\t\taddr = config.Server.Addr\n\t\tport = config.Server.Port\n\t\trtimeout = DefaultHttpRequestTimeout\n\t\twtimeout = DefaultHttpResponseTimeout\n\t\tmaxheaderbytes = 0\n\n\t\tlocalAddr string\n\t)\n\n\t// throttle of rate limit\n\tif config.Server.Throttle > 0 {\n\t\ts.throttle = time.NewTicker(time.Second / time.Duration(config.Server.Throttle))\n\t}\n\n\t// adjust app server slowdown ms\n\ts.slowdown = time.Duration(config.Server.SlowdownMs) * time.Millisecond\n\n\t// adjust app server request id\n\tif config.Server.RequestId != \"\" {\n\t\ts.requestId = config.Server.RequestId\n\t}\n\n\t// adjust app logger filter parameters\n\ts.filterParams = config.Logger.FilterParams\n\n\t// If the port is zero, treat the address as a fully qualified local address.\n\t// This address must be prefixed with the network type followed by a colon,\n\t// e.g. 
unix:/tmp/app.socket or tcp6:::1 (equivalent to tcp6:0:0:0:0:0:0:0:1)\n\tif port == 0 {\n\t\tpieces := strings.SplitN(addr, \":\", 2)\n\n\t\tnetwork = pieces[0]\n\t\tlocalAddr = pieces[1]\n\t} else {\n\t\tlocalAddr = addr + \":\" + strconv.Itoa(port)\n\t}\n\n\tif config.Server.RTimeout > 0 {\n\t\trtimeout = config.Server.RTimeout\n\t}\n\tif config.Server.WTimeout > 0 {\n\t\twtimeout = config.Server.WTimeout\n\t}\n\tif config.Server.MaxHeaderBytes > 0 {\n\t\tmaxheaderbytes = config.Server.MaxHeaderBytes\n\t}\n\n\tserver := &http.Server{\n\t\tAddr: localAddr,\n\t\tHandler: s,\n\t\tReadTimeout: time.Duration(rtimeout) * time.Second,\n\t\tWriteTimeout: time.Duration(wtimeout) * time.Second,\n\t\tMaxHeaderBytes: maxheaderbytes,\n\t}\n\n\ts.logger.Infof(\"Listening on %s\", localAddr)\n\tif config.Server.Ssl {\n\t\tif network != \"tcp\" {\n\t\t\t// This limitation is just to reduce complexity, since it is standard\n\t\t\t// to terminate SSL upstream when using unix domain sockets.\n\t\t\ts.logger.Fatal(\"[GOGO]=> SSL is only supported for TCP sockets.\")\n\t\t}\n\n\t\ts.logger.Fatal(\"[GOGO]=> Failed to listen:\", server.ListenAndServeTLS(config.Server.SslCert, config.Server.SslKey))\n\t} else {\n\t\tlistener, err := net.Listen(network, localAddr)\n\t\tif err != nil {\n\t\t\ts.logger.Fatal(\"[GOGO]=> Failed to listen:\", err)\n\t\t}\n\n\t\ts.logger.Fatal(\"[GOGO]=> Failed to serve:\", server.Serve(listener))\n\t}\n}", "func (srv *Server) Run() error {\n\tlog.Printf(\"Server listening on port %v\", srv.cfg.Port)\n\treturn http.ListenAndServeTLS(fmt.Sprintf(\":%v\", srv.cfg.Port), srv.cfg.CertFilePath, srv.cfg.KeyFilePath, srv.router)\n}", "func (s *Server) Run(\n\t// Common\n\tctx context.Context,\n\tlog logger.Logger,\n\ttracer *trace.TracerProvider,\n) (*Server, error) {\n\t// API port\n\tviper.SetDefault(\"API_PORT\", 7070) // nolint:gomnd\n\t// Request Timeout (seconds)\n\tviper.SetDefault(\"API_TIMEOUT\", \"60s\")\n\n\tconfig := http_server.Config{\n\t\tPort: viper.GetInt(\"API_PORT\"),\n\t\tTimeout: viper.GetDuration(\"API_TIMEOUT\"),\n\t}\n\n\tg := errgroup.Group{}\n\n\tg.Go(func() error {\n\t\treturn s.run(\n\t\t\tctx,\n\t\t\tconfig,\n\t\t\tlog,\n\t\t\ttracer,\n\t\t)\n\t})\n\n\treturn s, nil\n}", "func Serve(ctx context.Context, serviceName string, server RegistersServices, swagger http.FileSystem) error {\n\tcfg := server.GetConfig()\n\ttlsCfg := cfg.GetServer().GetTLS()\n\n\tl := log.WithField(\"service\", serviceName)\n\n\tvar serverOptions []grpc.ServerOption\n\n\t// If certificate file and key file have been specified then setup a TLS server\n\tif tlsCfg.GetEnabled() {\n\t\tl.Trace(\"tls enabled\")\n\n\t\tcreds, err := NewServerTLSCredentials(cfg.GetServer().GetTLS(), cfg.GetVault())\n\t\tif err != nil {\n\t\t\treturn err\n\t\t}\n\n\t\tserverOptions = append(serverOptions, grpc.Creds(creds))\n\t} else {\n\t\tl.Trace(\"tls disabled\")\n\t}\n\n\t// Intercept all request to provide authentication\n\tif a, ok := server.(HasAuthenticate); ok {\n\t\tserverOptions = append(serverOptions, grpc.UnaryInterceptor(a.Authenticate))\n\t}\n\n\t// Setup the gRPC server\n\tgrpcServer := grpc.NewServer(serverOptions...)\n\n\t// Listen on the configured port\n\taddr := fmt.Sprintf(\"%s:%d\", cfg.GetServer().GetBind(), cfg.GetServer().GetListen())\n\n\t// Setup the gateway mux\n\tgwmux := 
runtime.NewServeMux(\n\t\truntime.WithIncomingHeaderMatcher(incomingHeaderMatcher),\n\t\truntime.WithForwardResponseOption(redirectFilter),\n\t\truntime.WithOutgoingHeaderMatcher(outgoingHeaderMatcher),\n\t\truntime.WithMarshalerOption(\"application/octet-stream\", &BinaryMarshaler{}),\n\t\truntime.WithMarshalerOption(\"application/json\", &runtime.JSONPb{\n\t\t\tEnumsAsInts: true,\n\t\t\tEmitDefaults: false,\n\t\t\tOrigName: false,\n\t\t}),\n\t\truntime.WithMarshalerOption(\"application/javascript\", &runtime.JSONPb{\n\t\t\tEnumsAsInts: true,\n\t\t\tEmitDefaults: false,\n\t\t\tOrigName: false,\n\t\t}),\n\t\truntime.WithMarshalerOption(runtime.MIMEWildcard, &runtime.HTTPBodyMarshaler{\n\t\t\tMarshaler: &runtime.JSONPb{\n\t\t\t\tEnumsAsInts: true,\n\t\t\t\tEmitDefaults: false,\n\t\t\t\tOrigName: true,\n\t\t\t},\n\t\t}),\n\t)\n\n\t// Dial the server\n\tl.Trace(\"loading client credentials for loopback\")\n\tcreds, err := NewClientTLSCredentials(tlsCfg, cfg.GetVault())\n\tif err != nil {\n\t\treturn err\n\t}\n\n\tdialOptions := []grpc.DialOption{\n\t\tgrpc.WithTransportCredentials(creds),\n\t}\n\n\tl.Trace(\"dialling grpc\")\n\tconn, err := grpc.Dial(addr, dialOptions...)\n\tif err != nil {\n\t\treturn err\n\t}\n\n\t// Create the HTTP server\n\tmux := http.NewServeMux()\n\n\t// Add the Health check endpoint\n\tvar checker HealthChecker\n\tif c, ok := server.(HealthChecker); ok {\n\t\tchecker = c\n\t}\n\n\tmux.Handle(\"/healthz\", &HealthHandler{\n\t\tchecker: checker,\n\t})\n\n\t// Add the Metrics endpoint\n\tmux.Handle(\"/metrics\", promhttp.Handler())\n\n\t// If swagger is enabled, add the swagger endpoint\n\tif swagger != nil && cfg.GetServer().GetSwagger().GetEnabled() {\n\t\tl.Trace(\"swagger enabled\")\n\n\t\terr = mime.AddExtensionType(\".svg\", \"image/svg+xml\")\n\t\tif err != nil {\n\t\t\treturn err\n\t\t}\n\n\t\tswmux := http.StripPrefix(fmt.Sprintf(\"/%s/swagger\", serviceName), http.FileServer(swagger))\n\n\t\tmux.Handle(fmt.Sprintf(\"/%s/swagger/\", serviceName), swmux)\n\t} else {\n\t\tl.Trace(\"swagger disabled\")\n\t}\n\n\t// Add the JSON gateway\n\tjsonGateway, err := NewInstrumentedMetricHandlerFunc(func(w http.ResponseWriter, r *http.Request) {\n\t\tif r.TLS != nil {\n\t\t\t// Only on TLS per https://developer.mozilla.org/en-US/docs/Web/HTTP/Headers/Strict-Transport-Security\n\t\t\tw.Header().Add(\"Strict-Transport-Security\", \"max-age=63072000; includeSubDomains; preload\")\n\t\t}\n\n\t\tw.Header().Set(\"Vary\", \"Origin\")\n\t\tw.Header().Set(\"Access-Control-Allow-Credentials\", \"true\")\n\n\t\tif origin := r.Header.Get(\"Origin\"); origin != \"\" {\n\t\t\tw.Header().Set(\"Access-Control-Allow-Origin\", origin)\n\t\t\tif r.Method == \"OPTIONS\" && r.Header.Get(\"Access-Control-Request-Method\") != \"\" {\n\t\t\t\tw.Header().Set(\"Access-Control-Allow-Headers\", headers)\n\t\t\t\tw.Header().Set(\"Access-Control-Allow-Methods\", methods)\n\t\t\t\treturn\n\t\t\t}\n\t\t}\n\n\t\tgwmux.ServeHTTP(w, r)\n\t})\n\tif err != nil {\n\t\treturn err\n\t}\n\n\tmux.Handle(fmt.Sprintf(\"/%s/\", serviceName), jsonGateway)\n\n\t// Finally, add the GRPC handler at the root\n\tgrpcHandler, err := NewInstrumentedMetricHandlerFunc(func(w http.ResponseWriter, r *http.Request) {\n\t\tif r.ProtoMajor == 2 && strings.Contains(r.Header.Get(\"Content-Type\"), \"application/grpc\") {\n\t\t\tgrpcServer.ServeHTTP(w, r)\n\t\t} else if h, ok := server.(http.Handler); ok {\n\t\t\th.ServeHTTP(w, r)\n\t\t} else {\n\t\t\thttp.NotFound(w, r)\n\t\t}\n\t})\n\tif err != nil {\n\t\treturn 
err\n\t}\n\n\tmux.Handle(\"/\", grpcHandler)\n\n\tw := log.Writer()\n\tdefer w.Close()\n\n\tsrv := &http.Server{\n\t\tAddr: addr,\n\t\tHandler: mux,\n\t\tErrorLog: syslog.New(w, \"[http]\", 0),\n\t}\n\n\t// Register all the services\n\tl.Trace(\"registering services\")\n\terr = server.RegisterServices(ctx, grpcServer, gwmux, conn)\n\tif err != nil {\n\t\treturn err\n\t}\n\n\tl.WithField(\"addr\", addr).Info(\"listening\")\n\tln, err := net.Listen(\"tcp\", addr)\n\tif err != nil {\n\t\treturn err\n\t}\n\n\tdefer ln.Close()\n\n\t// Server requests\n\tif tlsCfg.GetEnabled() {\n\t\tl.Trace(\"loading server tls certs\")\n\t\tconfig, err := NewServerTLSConfig(cfg.GetServer().GetTLS(), cfg.GetVault())\n\t\tif err != nil {\n\t\t\treturn err\n\t\t}\n\n\t\tl.Info(\"serving\")\n\t\treturn srv.Serve(tls.NewListener(ln, config))\n\t}\n\n\tl.Info(\"serving\")\n\treturn srv.Serve(ln)\n}", "func main() {\n\tfmt.Println(\"Hello server\")\n\n\tlis, err := net.Listen(\"tcp\", \"0.0.0.0:50051\")\n\tif err != nil {\n\t\tlog.Fatalf(\"Faild to listen: %v\", err)\n\t}\n\n\ts := grpc.NewServer()\n\thellopb.RegisterHelloServiceServer(s, &server{})\n\n\tif err := s.Serve(lis); err != nil {\n\t\tlog.Fatalf(\"Failed to serve: %v\", err)\n\t}\n\n}", "func RunHTTPServer(addr string) error {\n\tr := mux.NewRouter()\n\tr.HandleFunc(\"/method/{method}\", methodsHandler)\n\tr.HandleFunc(\"/\", staticHandler)\n\treturn http.ListenAndServe(addr, r)\n}", "func Run(ctx context.Context, port string) struct{} {\n\n\t//The server to get up\n\tli, err := net.Listen(\"tcp\", port)\n\tif err != nil {\n\t\tlog.Fatalf(\"Server stopped: %v\", err)\n\t}\n\n\t//Passing the server to grpc\n\ts := &Server{}\n\tgrpcServer := grpc.NewServer()\n\tsubscribe.RegisterSubscribeServiceServer(grpcServer, s)\n\tgrpcServer.Serve(li)\n\n\tfmt.Printf(\"Server up on port: %v\\n\", err)\n\treturn <-ctx.Done()\n}", "func (params *serverParams) Run(context.Context) error {\n\tif params.err != nil && len(params.err) > 0 {\n\t\treturn utilerrors.NewAggregate(params.err)\n\t}\n\n\tvar g errgroup.Group\n\tif params.secureServer != nil {\n\t\tparams.options.Logger.Info(fmt.Sprintf(\"starting %s %s server on %s:%d\", params.options.Name, \"HTTPS\", params.options.SecureAddr, params.options.SecurePort))\n\t\tg.Go(func() error {\n\t\t\treturn params.secureServer.ListenAndServeTLS(params.options.TLSCert, params.options.TLSKey)\n\t\t})\n\t}\n\tif params.insecureServer != nil {\n\t\tparams.options.Logger.Info(fmt.Sprintf(\"starting %s %s server on %s:%d\", params.options.Name, \"HTTP\", params.options.InsecureAddr, params.options.InsecurePort))\n\t\tg.Go(func() error {\n\t\t\treturn params.insecureServer.ListenAndServe()\n\t\t})\n\t}\n\terr := g.Wait()\n\tif err != nil && err != http.ErrServerClosed {\n\t\tparams.options.Logger.Error(err.Error())\n\t\treturn err\n\t}\n\treturn nil\n}", "func RunServer() {\n\tengine := initialize()\n\tSERVER_PORT := \"18020\"\n\tengine.Run(\"0.0.0.0:\" + SERVER_PORT)\n}", "func (s *HttpServer) Run() {\n\n\tgo s.httpServer()\n\t<-s.quitChan\n}", "func (s *Server) Run() error {\n\n\tgo func() {\n\t\tizap.Logger.Info(\"Starting http server\", zap.String(\"address\", s.srv.Addr))\n\t\terr := s.srv.ListenAndServe()\n\t\tif err != nil && err != http.ErrServerClosed {\n\t\t\ts.Stop()\n\t\t}\n\t}()\n\n\treturn nil\n}", "func main() {\n\tgwMux := runtime.NewServeMux()\n\tendPoint := \"localhost:8081\"\n\topt := []grpc.DialOption{grpc.WithTransportCredentials(helper.GetClientCreds())}\n\t// prod\n\tif err := 
pbfiles.RegisterProdServiceHandlerFromEndpoint(context.Background(), gwMux, endPoint, opt); err != nil {\n\t\tlog.Fatal(err)\n\t}\n\t// order\n\tif err := pbfiles.RegisterOrderServiceHandlerFromEndpoint(context.Background(), gwMux, endPoint, opt); err != nil {\n\t\tlog.Fatal(err)\n\t}\n\n\thttpServer := &http.Server{\n\t\tAddr: \":8080\",\n\t\tHandler: gwMux,\n\t}\n\n\tif err := httpServer.ListenAndServe(); err != nil {\n\t\tlog.Fatal(err)\n\t}\n}", "func Run(ctx context.Context) error {\n\tctx, cancel := context.WithCancel(ctx)\n\tlogger := log.WithContext(nil)\n\n\ttester := server.Prepare(&server.Options{\n\t\tName: \"tester\",\n\t\tInsecureAddr: viper.GetString(options.FlagServerInsecureBindAddress.GetLong()),\n\t\tSecureAddr: viper.GetString(options.FlagServerSecureBindAddress.GetLong()),\n\t\tInsecurePort: viper.GetInt(options.FlagServerInsecurePort.GetLong()),\n\t\tSecurePort: viper.GetInt(options.FlagServerSecurePort.GetLong()),\n\t\tTLSKey: viper.GetString(options.FlagServerTLSKeyFile.GetLong()),\n\t\tTLSCert: viper.GetString(options.FlagServerTLSCertFile.GetLong()),\n\t\tTLSCa: viper.GetString(options.FlagServerTLSCaFile.GetLong()),\n\t\tHandler: chain.New().Add(\n\t\t\tmiddleware.Correlation,\n\t\t\tmiddleware.Metrics,\n\t\t).Link(middleware.Dump),\n\t\tLogger: logger.Sugar(),\n\t})\n\n\tmetrics := server.Prepare(&server.Options{\n\t\tName: \"prometheus\",\n\t\tInsecureAddr: viper.GetString(options.FlagPrometheusInsecureBindAddress.GetLong()),\n\t\tInsecurePort: viper.GetInt(options.FlagPrometheusInsecurePort.GetLong()),\n\t\tHandler: promhttp.Handler(),\n\t\tLogger: logger.Sugar(),\n\t})\n\n\tvar g run.Group\n\n\tg.Add(func() error {\n\t\t<-ctx.Done()\n\t\treturn nil\n\t}, func(error) {\n\t\tcancel()\n\t})\n\n\tg.Add(func() error {\n\t\treturn logError(metrics.Run())\n\t}, func(error) {\n\t\tlogError(metrics.Close())\n\t})\n\n\tg.Add(func() error {\n\t\treturn logError(tester.Run())\n\t}, func(error) {\n\t\tlogError(tester.Close())\n\t})\n\n\treturn g.Run()\n}", "func (s *Server) Run() error {\n\t// configure service routes\n\ts.configureRoutes()\n\n\tlog.Infof(\"Serving '%s - %s' on address %s\", s.Title, s.Version, s.server.Addr)\n\t// server is set to healthy when started.\n\ts.healthy = true\n\tif s.config.InsecureHTTP {\n\t\treturn s.server.ListenAndServe()\n\t}\n\treturn s.server.ListenAndServeTLS(s.config.TLSCertFile, s.config.TLSKeyFile)\n}", "func RunServer(parentCtx context.Context, eaaCtx *eaaContext) error {\n\tvar err error\n\n\teaaCtx.serviceInfo = services{}\n\teaaCtx.consumerConnections = consumerConns{}\n\teaaCtx.subscriptionInfo = NotificationSubscriptions{}\n\n\tif eaaCtx.certsEaaCa.rca, err = InitRootCA(eaaCtx.cfg.Certs); err != nil {\n\t\tlog.Errf(\"CA cert creation error: %#v\", err)\n\t\treturn err\n\t}\n\n\tif eaaCtx.certsEaaCa.eaa, err = InitEaaCert(eaaCtx.cfg.Certs); err != nil {\n\t\tlog.Errf(\"EAA cert creation error: %#v\", err)\n\t\treturn err\n\t}\n\n\tcertPool, err := CreateAndSetCACertPool(eaaCtx.cfg.Certs.CaRootPath)\n\tif err != nil {\n\t\tlog.Errf(\"Cert Pool error: %#v\", err)\n\t}\n\n\trouter := NewEaaRouter(eaaCtx)\n\tserver := &http.Server{\n\t\tAddr: eaaCtx.cfg.TLSEndpoint,\n\t\tTLSConfig: &tls.Config{\n\t\t\tClientAuth: tls.RequireAndVerifyClientCert,\n\t\t\tClientCAs: certPool,\n\t\t\tMinVersion: tls.VersionTLS12,\n\t\t\tCipherSuites: []uint16{tls.TLS_ECDHE_ECDSA_WITH_AES_128_GCM_SHA256},\n\t\t},\n\t\tHandler: router,\n\t}\n\n\tauthRouter := NewAuthRouter(eaaCtx)\n\tserverAuth := &http.Server{Addr: 
eaaCtx.cfg.OpenEndpoint,\n\t\tHandler: authRouter}\n\n\tlis, err := net.Listen(\"tcp\", eaaCtx.cfg.TLSEndpoint)\n\tif err != nil {\n\n\t\tlog.Errf(\"net.Listen error: %+v\", err)\n\n\t\te, ok := err.(*os.SyscallError)\n\t\tif ok {\n\t\t\tlog.Errf(\"net.Listen error: %+v\", e.Error())\n\t\t}\n\t\treturn err\n\t}\n\n\tstopServerCh := make(chan bool, 2)\n\n\tgo func(stopServerCh chan bool) {\n\t\t<-parentCtx.Done()\n\t\tlog.Info(\"Executing graceful stop\")\n\t\tif err = server.Close(); err != nil {\n\t\t\tlog.Errf(\"Could not close EAA server: %#v\", err)\n\t\t}\n\t\tif err = serverAuth.Close(); err != nil {\n\t\t\tlog.Errf(\"Could not close Auth server: %#v\", err)\n\t\t}\n\t\tlog.Info(\"EAA server stopped\")\n\t\tlog.Info(\"Auth server stopped\")\n\t\tstopServerCh <- true\n\t}(stopServerCh)\n\n\tdefer log.Info(\"Stopped EAA serving\")\n\n\tgo func(stopServerCh chan bool) {\n\t\tlog.Infof(\"Serving Auth on: %s\", eaaCtx.cfg.OpenEndpoint)\n\t\tif err = serverAuth.ListenAndServe(); err != nil {\n\t\t\tlog.Info(\"Auth server error: \" + err.Error())\n\t\t}\n\t\tlog.Errf(\"Stopped Auth serving\")\n\t\tstopServerCh <- true\n\t}(stopServerCh)\n\n\tlog.Infof(\"Serving EAA on: %s\", eaaCtx.cfg.TLSEndpoint)\n\tutil.Heartbeat(parentCtx, eaaCtx.cfg.HeartbeatInterval, func() {\n\t\t// TODO: implementation of modules checking\n\t\tlog.Info(\"Heartbeat\")\n\t})\n\tif err = server.ServeTLS(lis, eaaCtx.cfg.Certs.ServerCertPath,\n\t\teaaCtx.cfg.Certs.ServerKeyPath); err != http.ErrServerClosed {\n\t\tlog.Errf(\"server.Serve error: %#v\", err)\n\t\treturn err\n\t}\n\t<-stopServerCh\n\t<-stopServerCh\n\treturn nil\n}", "func (s *sdkGrpcServer) Start() error {\n\n\t// Setup https if certs have been provided\n\topts := make([]grpc.ServerOption, 0)\n\tif s.config.Net != \"unix\" && s.config.Security.Tls != nil {\n\t\tcreds, err := credentials.NewServerTLSFromFile(\n\t\t\ts.config.Security.Tls.CertFile,\n\t\t\ts.config.Security.Tls.KeyFile)\n\t\tif err != nil {\n\t\t\treturn fmt.Errorf(\"Failed to create credentials from cert files: %v\", err)\n\t\t}\n\t\topts = append(opts, grpc.Creds(creds))\n\t\ts.log.Info(\"SDK TLS enabled\")\n\t} else {\n\t\ts.log.Info(\"SDK TLS disabled\")\n\t}\n\n\t// Add correlation interceptor\n\tcorrelationInterceptor := correlation.ContextInterceptor{\n\t\tOrigin: correlation.ComponentSDK,\n\t}\n\n\t// Setup authentication and authorization using interceptors if auth is enabled\n\tif len(s.config.Security.Authenticators) != 0 {\n\t\topts = append(opts, grpc.UnaryInterceptor(\n\t\t\tgrpc_middleware.ChainUnaryServer(\n\t\t\t\ts.rwlockUnaryIntercepter,\n\t\t\t\tcorrelationInterceptor.ContextUnaryServerInterceptor,\n\t\t\t\tgrpc_auth.UnaryServerInterceptor(s.auth),\n\t\t\t\ts.authorizationServerUnaryInterceptor,\n\t\t\t\ts.loggerServerUnaryInterceptor,\n\t\t\t\tgrpc_prometheus.UnaryServerInterceptor,\n\t\t\t)))\n\t\topts = append(opts, grpc.StreamInterceptor(\n\t\t\tgrpc_middleware.ChainStreamServer(\n\t\t\t\ts.rwlockStreamIntercepter,\n\t\t\t\tgrpc_auth.StreamServerInterceptor(s.auth),\n\t\t\t\ts.authorizationServerStreamInterceptor,\n\t\t\t\ts.loggerServerStreamInterceptor,\n\t\t\t\tgrpc_prometheus.StreamServerInterceptor,\n\t\t\t)))\n\t} else {\n\t\topts = append(opts, grpc.UnaryInterceptor(\n\t\t\tgrpc_middleware.ChainUnaryServer(\n\t\t\t\ts.rwlockUnaryIntercepter,\n\t\t\t\tcorrelationInterceptor.ContextUnaryServerInterceptor,\n\t\t\t\ts.loggerServerUnaryInterceptor,\n\t\t\t\tgrpc_prometheus.UnaryServerInterceptor,\n\t\t\t)))\n\t\topts = append(opts, 
grpc.StreamInterceptor(\n\t\t\tgrpc_middleware.ChainStreamServer(\n\t\t\t\ts.rwlockStreamIntercepter,\n\t\t\t\ts.loggerServerStreamInterceptor,\n\t\t\t\tgrpc_prometheus.StreamServerInterceptor,\n\t\t\t)))\n\t}\n\n\t// Start the gRPC Server\n\terr := s.GrpcServer.StartWithServer(func() *grpc.Server {\n\t\tgrpcServer := grpc.NewServer(opts...)\n\n\t\tapi.RegisterOpenStorageClusterServer(grpcServer, s.clusterServer)\n\t\tapi.RegisterOpenStorageNodeServer(grpcServer, s.nodeServer)\n\t\tapi.RegisterOpenStorageObjectstoreServer(grpcServer, s.objectstoreServer)\n\t\tapi.RegisterOpenStorageSchedulePolicyServer(grpcServer, s.schedulePolicyServer)\n\t\tapi.RegisterOpenStorageIdentityServer(grpcServer, s.identityServer)\n\t\tapi.RegisterOpenStorageVolumeServer(grpcServer, s.volumeServer)\n\t\tapi.RegisterOpenStorageMigrateServer(grpcServer, s.volumeServer)\n\t\tapi.RegisterOpenStorageCredentialsServer(grpcServer, s.credentialServer)\n\t\tapi.RegisterOpenStorageCloudBackupServer(grpcServer, s.cloudBackupServer)\n\t\tapi.RegisterOpenStorageMountAttachServer(grpcServer, s.volumeServer)\n\t\tapi.RegisterOpenStorageAlertsServer(grpcServer, s.alertsServer)\n\t\tapi.RegisterOpenStorageClusterPairServer(grpcServer, s.clusterPairServer)\n\t\tapi.RegisterOpenStoragePolicyServer(grpcServer, s.policyServer)\n\t\tapi.RegisterOpenStorageClusterDomainsServer(grpcServer, s.clusterDomainsServer)\n\t\tapi.RegisterOpenStorageFilesystemTrimServer(grpcServer, s.filesystemTrimServer)\n\t\tapi.RegisterOpenStorageFilesystemCheckServer(grpcServer, s.filesystemCheckServer)\n\t\tapi.RegisterOpenStorageWatchServer(grpcServer, s.watcherServer)\n\t\tif s.diagsServer != nil {\n\t\t\tapi.RegisterOpenStorageDiagsServer(grpcServer, s.diagsServer)\n\t\t}\n\n\t\tif s.bucketServer != nil {\n\t\t\tapi.RegisterOpenStorageBucketServer(grpcServer, s.bucketServer)\n\t\t}\n\n\t\tif s.storagePoolServer != nil {\n\t\t\tapi.RegisterOpenStoragePoolServer(grpcServer, s.storagePoolServer)\n\t\t}\n\n\t\tif s.config.Security.Role != nil {\n\t\t\tapi.RegisterOpenStorageRoleServer(grpcServer, s.roleServer)\n\t\t}\n\t\tif s.jobServer != nil {\n\t\t\tapi.RegisterOpenStorageJobServer(grpcServer, s.jobServer)\n\t\t}\n\n\t\t// Register stats for all the services\n\t\ts.registerPrometheusMetrics(grpcServer)\n\n\t\ts.registerServerExtensions(grpcServer)\n\n\t\treturn grpcServer\n\t})\n\tif err != nil {\n\t\treturn err\n\t}\n\treturn nil\n}", "func StartServer(cfg *Configuration) {\n\n\tbanner(versions.GetVersions())\n\n\tif err := setLogs(cfg.LogFormat, cfg.LogLevel); err != nil {\n\t\tlog.Fatalf(\"Error setting up logs: %s\", err)\n\t}\n\n\tzap.L().Info(\"Configuration\", zap.Reflect(\"config\", cfg))\n\n\tr := mux.NewRouter()\n\n\tregisterRoutes(r, cfg.ServerIP, cfg.ServerPort, cfg.PublicKeyPath, cfg.PrivateKeyPath, cfg.DevelopmentMode)\n\n\tserver := &http.Server{\n\t\tAddr: cfg.ServerPort,\n\t\tHandler: r,\n\t}\n\n\tgo func() {\n\t\tvar err error\n\t\tif cfg.TLS {\n\t\t\tserver.TLSConfig = &tls.Config{\n\t\t\t\tPreferServerCipherSuites: true,\n\t\t\t\tCipherSuites: 
[]uint16{\n\t\t\t\t\ttls.TLS_ECDHE_RSA_WITH_AES_128_GCM_SHA256,\n\t\t\t\t\ttls.TLS_ECDHE_ECDSA_WITH_AES_128_GCM_SHA256,\n\t\t\t\t\ttls.TLS_ECDHE_RSA_WITH_AES_256_GCM_SHA384,\n\t\t\t\t\ttls.TLS_ECDHE_ECDSA_WITH_AES_256_GCM_SHA384,\n\t\t\t\t\ttls.TLS_ECDHE_RSA_WITH_CHACHA20_POLY1305,\n\t\t\t\t\ttls.TLS_ECDHE_ECDSA_WITH_CHACHA20_POLY1305,\n\t\t\t\t\ttls.TLS_FALLBACK_SCSV,\n\t\t\t\t\ttls.TLS_RSA_WITH_RC4_128_SHA,\n\t\t\t\t\ttls.TLS_RSA_WITH_3DES_EDE_CBC_SHA,\n\t\t\t\t\ttls.TLS_RSA_WITH_AES_128_CBC_SHA,\n\t\t\t\t\ttls.TLS_RSA_WITH_AES_256_CBC_SHA,\n\t\t\t\t\ttls.TLS_RSA_WITH_AES_128_CBC_SHA256,\n\t\t\t\t\ttls.TLS_RSA_WITH_AES_128_GCM_SHA256,\n\t\t\t\t\ttls.TLS_RSA_WITH_AES_256_GCM_SHA384,\n\t\t\t\t\ttls.TLS_ECDHE_ECDSA_WITH_RC4_128_SHA,\n\t\t\t\t\ttls.TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA,\n\t\t\t\t\ttls.TLS_ECDHE_ECDSA_WITH_AES_256_CBC_SHA,\n\t\t\t\t\ttls.TLS_ECDHE_RSA_WITH_RC4_128_SHA,\n\t\t\t\t\ttls.TLS_ECDHE_RSA_WITH_3DES_EDE_CBC_SHA,\n\t\t\t\t\ttls.TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA,\n\t\t\t\t\ttls.TLS_ECDHE_RSA_WITH_AES_256_CBC_SHA,\n\t\t\t\t\ttls.TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256,\n\t\t\t\t\ttls.TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256,\n\t\t\t\t},\n\t\t\t\tMinVersion: tls.VersionTLS10,\n\t\t\t\tMaxVersion: tls.VersionTLS13,\n\t\t\t}\n\t\t\terr = server.ListenAndServeTLS(\".data/server.crt\", \".data/server.key\")\n\n\t\t} else {\n\t\t\terr = server.ListenAndServe()\n\t\t}\n\n\t\tif err != nil {\n\t\t\tzap.L().Fatal(\"Unable to start server\",\n\t\t\t\tzap.String(\"port\", cfg.ServerPort),\n\t\t\t\tzap.Error(err))\n\t\t}\n\t}()\n\n\tc := make(chan os.Signal, 1)\n\tsignal.Notify(c, syscall.SIGTERM, syscall.SIGINT, syscall.SIGQUIT)\n\tzap.L().Info(\"Everything started. Waiting for Stop signal\")\n\t// Waiting for a Sig\n\t<-c\n\n\tzap.L().Info(\"Server stopped\")\n}", "func (s *Server) Run(ctx context.Context) error {\n\tvar server *genericapiserver.GenericAPIServer\n\n\tservingInfo := configv1.HTTPServingInfo{}\n\tconfigdefaults.SetRecommendedHTTPServingInfoDefaults(&servingInfo)\n\tservingInfo.ServingInfo.CertInfo.CertFile = s.certFile\n\tservingInfo.ServingInfo.CertInfo.KeyFile = s.keyFile\n\t// Don't set a CA file for client certificates because the CA is read from\n\t// the kube-system/extension-apiserver-authentication ConfigMap.\n\tservingInfo.ServingInfo.ClientCA = \"\"\n\n\tserverConfig, err := serving.ToServerConfig(\n\t\tctx,\n\t\tservingInfo,\n\t\toperatorv1alpha1.DelegatedAuthentication{},\n\t\toperatorv1alpha1.DelegatedAuthorization{},\n\t\ts.kubeConfig,\n\t\ts.kubeClient,\n\t\tnil, // disable leader election\n\t)\n\tif err != nil {\n\t\treturn err\n\t}\n\n\tserverConfig.Authorization.Authorizer = union.New(\n\t\t// prefix the authorizer with the permissions for metrics scraping which are well known.\n\t\t// openshift RBAC policy will always allow this user to read metrics.\n\t\thardcodedauthorizer.NewHardCodedMetricsAuthorizer(),\n\t\tserverConfig.Authorization.Authorizer,\n\t)\n\n\tserver, err = serverConfig.Complete(nil).New(s.name, genericapiserver.NewEmptyDelegate())\n\tif err != nil {\n\t\treturn err\n\t}\n\n\tgo func() {\n\t\tif err := server.PrepareRun().Run(ctx.Done()); err != nil {\n\t\t\tklog.Fatal(err)\n\t\t}\n\t\tklog.Info(\"server exited\")\n\t}()\n\n\t<-ctx.Done()\n\n\treturn nil\n}", "func (s *GrpcServer) StartHTTPServer(sw switcher, connpool ConnPool) {\n\ts.Initializer.InitService(s)\n\ts.httpServer = s.startGrpcHTTPServerInternal(sw)\n\ts.connpool = connpool\n\twatchConfigReload(s)\n}", "func (server *Server) Run() error {\n\t// run http 
server\n\tserver.runServer()\n\n\tif server.config.Server.EnableHTTPS {\n\t\tif server.config.TLS.CertFile == \"\" || server.config.TLS.KeyFile == \"\" {\n\t\t\treturn errors.New(\"use https should config the cert and key files\")\n\t\t}\n\t\tserver.runServerTLS()\n\t}\n\tif err := server.G.Wait(); err != nil {\n\t\tserver.API.log.Fatalln(err)\n\t\treturn err\n\t}\n\treturn nil\n}", "func Run() {\n\trouter := getRouter()\n\ts := &http.Server{\n\t\tAddr: \"0.0.0.0:8080\",\n\t\tHandler: router,\n\t\tReadTimeout: 10 * time.Second,\n\t\tWriteTimeout: 10 * time.Second,\n\t\tMaxHeaderBytes: 1 << 20,\n\t}\n\ts.ListenAndServe()\n}", "func Run(addr, uri string) {\n\tlog.Printf(\"Server is listening %s\\n\", addr)\n\n\trouter := mux.NewRouter()\n\tvehicle := router.PathPrefix(\"/vehicle/\").Methods(\"GET\", \"OPTIONS\").Subrouter()\n\t// GET /vehicle/registrations.\n\tvehicle.Handle(\"/registrations\", newRegsHandler(uri))\n\t// GET /vehicle/operations.\n\tvehicle.HandleFunc(\"/operations\", operations)\n\n\t// GET /health.\n\trouter.HandleFunc(\"/health\", health)\n\t// GET /version.\n\trouter.Handle(\"/version\", version.Handler{})\n\n\torigins := handlers.AllowedOrigins([]string{\"*\"})\n\tmethods := handlers.AllowedMethods([]string{\"GET\", \"POST\", \"OPTIONS\"})\n\theaders := handlers.AllowedHeaders([]string{\"Api-Key\"})\n\n\tcors := handlers.CORS(origins, methods, headers)(Server(router))\n\tserver := &http.Server{\n\t\tAddr: addr,\n\t\tHandler: cors,\n\t\tReadTimeout: 5 * time.Second,\n\t\tWriteTimeout: 10 * time.Second,\n\t\tIdleTimeout: 15 * time.Second,\n\t}\n\n\tserver.SetKeepAlivesEnabled(true)\n\n\tif err := server.ListenAndServe(); err != nil {\n\t\tlog.Fatalf(\"Could not listen on %s. Error: %v\\n\", addr, err)\n\t}\n}", "func (s httpServer) Run(h http.Handler) {\n\ts.srv.Handler = h\n\tgo s.srv.ListenAndServe()\n}", "func (s *server) Run(addr string) error {\n\treturn http.ListenAndServe(addr, s.handler)\n}" ]
[ "0.8256224", "0.80604255", "0.7960991", "0.77290666", "0.7707044", "0.7632992", "0.7599118", "0.7592759", "0.7582168", "0.75483614", "0.754435", "0.7479054", "0.7416336", "0.7406969", "0.7386519", "0.732194", "0.7321806", "0.7298529", "0.729594", "0.7268351", "0.721203", "0.7193644", "0.7185657", "0.714807", "0.71349555", "0.7131188", "0.7129319", "0.7113365", "0.7099362", "0.7092084", "0.70804954", "0.7061909", "0.70524395", "0.70337415", "0.7018676", "0.70142883", "0.7008454", "0.6978917", "0.6977197", "0.69691783", "0.69589835", "0.6954178", "0.6936271", "0.69287467", "0.69196725", "0.6906932", "0.68927795", "0.6888462", "0.6887726", "0.68802786", "0.6878955", "0.6871274", "0.68594354", "0.6852489", "0.68341005", "0.6830744", "0.68200237", "0.681957", "0.68173945", "0.6814584", "0.681387", "0.68116105", "0.6806413", "0.6803335", "0.6803003", "0.6800965", "0.6800882", "0.67900795", "0.6776767", "0.67733705", "0.67694855", "0.67645246", "0.675741", "0.67380154", "0.6737207", "0.6734656", "0.6727197", "0.6717252", "0.6703084", "0.6700726", "0.66965264", "0.6692442", "0.6686874", "0.66808033", "0.66791207", "0.66751724", "0.66745996", "0.66637874", "0.66517097", "0.66500443", "0.6642583", "0.66358095", "0.6626951", "0.66251385", "0.6624813", "0.66220814", "0.6615907", "0.6603834", "0.65966", "0.6591222" ]
0.66951156
81
Converts roman strings to int
func ToInteger(value string) (int, error) {
	value = strings.ToUpper(value)
	value = strings.TrimSpace(value)
	max := len(value)
	if max == 0 {
		return 0, errors.New("empty value")
	}
	roman := map[string]int{
		"I": 1,
		"V": 5,
		"X": 10,
		"L": 50,
		"C": 100,
		"D": 500,
		"M": 1000,
	}
	output := 0
	rpkey := 0
	last := max - 1
	lastkey := string(value[last])
	lnumber, _ := roman[lastkey]
	for i := last; i >= 0; i-- {
		key := string(value[i])
		number, found := roman[key]
		if !found {
			return 0, errors.New("invalid value")
		}
		if number >= lnumber {
			output += number
		} else {
			output += -number
		}
		if i < last {
			if lastkey == key {
				rpkey++
			} else {
				rpkey = 0
			}
			if rpkey >= 3 {
				return 0, errors.New("invalid value")
			}
			lastkey = string(value[i])
			lnumber, _ = roman[lastkey]
		}
	}
	return output, nil
}
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "func romanToInt(s string) int {\n\tright := 0\n\tans := 0\n\tfor i := len(s) - 1; i >= 0; i-- {\n\t\tcur := charToInt[s[i]]\n\t\tif cur >= right {\n\t\t\tans += cur\n\t\t} else {\n\t\t\tans -= cur\n\t\t}\n\t\tright = cur\n\t}\n\treturn ans\n}", "func romanToInt(s string) int {\n\n\tm := make(map[string]int)\n\tm[\"I\"] = 1\n\tm[\"V\"] = 5\n\tm[\"IV\"] = 4\n\tm[\"X\"] = 10\n\tm[\"IX\"] = 9\n\tm[\"L\"] = 50\n\tm[\"XL\"] = 40\n\tm[\"C\"] = 100\n\tm[\"XC\"] = 90\n\tm[\"D\"] = 500\n\tm[\"CD\"] = 400\n\tm[\"M\"] = 1000\n\tm[\"CM\"] = 900\n\n\tsum := 0\n\n\t// I 可以放在 V (5) 和 X (10) 的左边,来表示 4 和 9。\n\t//X 可以放在 L (50) 和 C (100) 的左边,来表示 40 和 90。\n\t//C 可以放在 D (500) 和 M (1000) 的左边,来表示 400 和 900。\n\t// 考虑好边界情况\n\tfor i := 0; i < len(s); i++ {\n\t\tif i == len(s)-1 {\n\t\t\tsum += m[s[i:]]\n\t\t} else {\n\t\t\tif value, ok := m[s[i:i+2]]; ok {\n\t\t\t\tsum += value\n\t\t\t\ti++\n\t\t\t} else {\n\t\t\t\tsum += m[s[i:i+1]]\n\t\t\t}\n\t\t}\n\n\t}\n\n\treturn sum\n}", "func romanToInt(s string) int {\n\tif len(s) <= 0 {\n\t\treturn 0\n\t}\n\n\tr2Int := map[byte]int{\n\t\t'I': 1,\n\t\t'V': 5,\n\t\t'X': 10,\n\t\t'L': 50,\n\t\t'C': 100,\n\t\t'D': 500,\n\t\t'M': 1000,\n\t}\n\n\tsum := 0\n\tfor i := 0; i < len(s); i++ {\n\t\tcur := r2Int[s[i]]\n\t\tif i < len(s)-1 && cur < r2Int[s[i+1]] {\n\t\t\tsum -= cur\n\t\t} else {\n\t\t\tsum += cur\n\t\t}\n\t}\n\treturn sum\n}", "func romanToInt(s string) int {\n\t// 20200806\n\t// 1、从左往右遍历\n\t// 执行耗时:0 ms,击败了100.00% 的Go用户\n\t// 内存消耗:3.1 MB,击败了100.00% 的Go用户\n\t/*ans := 0\n\tl := len(s)\n\tfor i := 0; i < l; i++ {\n\t\tif s[i] == 'I' && i+1 < l && s[i+1] == 'V' {\n\t\t\tans += 4\n\t\t\ti ++\n\t\t} else if s[i] == 'I' && i+1 < l && s[i+1] == 'X' {\n\t\t\tans += 9\n\t\t\ti ++\n\t\t} else if s[i] == 'X' && i+1 < l && s[i+1] == 'L' {\n\t\t\tans += 40\n\t\t\ti ++\n\t\t} else if s[i] == 'X' && i+1 < l && s[i+1] == 'C' {\n\t\t\tans += 90\n\t\t\ti ++\n\t\t} else if s[i] == 'C' && i+1 < l && s[i+1] == 'D' {\n\t\t\tans += 400\n\t\t\ti ++\n\t\t} else if s[i] == 'C' && i+1 < l && s[i+1] == 'M' {\n\t\t\tans += 900\n\t\t\ti ++\n\t\t} else {\n\t\t\tswitch s[i] {\n\t\t\tcase 'I':\n\t\t\t\tans += 1\n\t\t\tcase 'V':\n\t\t\t\tans += 5\n\t\t\tcase 'X':\n\t\t\t\tans += 10\n\t\t\tcase 'L':\n\t\t\t\tans += 50\n\t\t\tcase 'C':\n\t\t\t\tans += 100\n\t\t\tcase 'D':\n\t\t\t\tans += 500\n\t\t\tcase 'M':\n\t\t\t\tans += 1000\n\t\t\t}\n\t\t}\n\t}\n\treturn ans*/\n\n\t// 20200806\n\t// 2、从右往左遍历,可以使代码更简洁 - 结合map\n\t// 执行耗时:4 ms,击败了94.70% 的Go用户\n\t// 内存消耗:3.1 MB,击败了60.00% 的Go用户\n\tm := map[byte]int{\n\t\t'I': 1,\n\t\t'V': 5,\n\t\t'X': 10,\n\t\t'L': 50,\n\t\t'C': 100,\n\t\t'D': 500,\n\t\t'M': 1000,\n\t}\n\tans := 0\n\tpre := 0\n\tfor i := len(s) - 1; i >= 0; i-- {\n\t\tcur := m[s[i]]\n\t\tif cur >= pre {\n\t\t\tans += cur\n\t\t} else {\n\t\t\tans -= cur\n\t\t}\n\t\tpre = cur\n\t}\n\treturn ans\n}", "func romanToInt(s string) int {\n\tnum := 0\n\tfor i := 0; i < len(s); {\n\t\tn := 1\n\t\tns := s[i : i+n]\n\t\t_, ok := symbol[ns]\n\t\tfor ok {\n\t\t\tn++\n\t\t\tif n+i > len(s) {\n\t\t\t\tbreak\n\t\t\t}\n\n\t\t\tns = s[i : i+n]\n\t\t\t_, ok = symbol[ns]\n\t\t}\n\n\t\tn = n - 1\n\t\tnum += symbol[s[i:i+n]]\n\t\ti = i + n\n\t}\n\n\treturn num\n}", "func RomanToInt(s string) int {\n\tvar dict = map[uint8]int{\n\t\t'I': 1,\n\t\t'V': 5,\n\t\t'X': 10,\n\t\t'L': 50,\n\t\t'C': 100,\n\t\t'D': 500,\n\t\t'M': 1000,\n\t}\n\tvar value = 0\n\tfor i:=0; i<len(s); i++ {\n\t\tif i == len(s) - 1{\n\t\t\tvalue = value + dict[s[i]]\n\t\t\tcontinue\n\t\t}\n\t\tvar curr = s[i]\n\t\tvar next = s[i+1]\n\t\tif (curr == 'I' && 
(next == 'V' || next=='X')) || (curr == 'X' && (next == 'L' || next == 'C')) || (curr == 'C' && (next == 'D' || next == 'M')){\n\t\t\tvalue = value + dict[next] - dict[curr]\n\t\t\ti++\n\t\t} else {\n\t\t\tvalue = value + dict[curr]\n\t\t}\n\t}\n\treturn value\n}", "func romanToInt(s string) int {\n\tif len(s) == 0 {\n\t\treturn 0\n\t}\n\n\tres := 0\n\tfor i := range s[:len(s)-1] {\n\t\tif numerals[s[i]] < numerals[s[i+1]] {\n\t\t\tres -= numerals[s[i]]\n\t\t} else {\n\t\t\tres += numerals[s[i]]\n\t\t}\n\t}\n\n\t//last one is always added\n\tres += numerals[s[len(s)-1]]\n\n\treturn res\n}", "func romanToInt1(s string) int {\n\tans := 0\n\ti := 0\n\tfor i < len(s)-1 {\n\t\tif d := strToInt[s[i:i+2]]; d > 0 {\n\t\t\tans += d\n\t\t\ti += 2\n\t\t} else {\n\t\t\tans += strToInt[s[i:i+1]]\n\t\t\ti++\n\t\t}\n\t}\n\tif i < len(s) {\n\t\tans += strToInt[s[i:i+1]]\n\t}\n\treturn ans\n}", "func ConvertToInteger(rn string) int {\n\tresult := 0\n\tlengthOfRomanNumeral := len(rn)\n\tfor i := 0; i < lengthOfRomanNumeral; i++ {\n\t\trnCharacter := string(rn[i])\n\t\trnValue := romanNumeralToNumberMap[rnCharacter]\n\t\tif i < lengthOfRomanNumeral-1 {\n\t\t\trnNextCharacter := string(rn[i+1])\n\t\t\trnNextValue := romanNumeralToNumberMap[rnNextCharacter]\n\t\t\tif rnValue < rnNextValue {\n\t\t\t\tresult += rnNextValue - rnValue\n\t\t\t\ti++\n\t\t\t} else {\n\t\t\t\tresult += rnValue\n\t\t\t}\n\t\t} else {\n\t\t\tresult += rnValue\n\t\t}\n\t}\n\treturn result\n}", "func rnToI(rn string) (total int) {\n\tvar m = map[rune]int{\n\t\t'I': 1,\n\t\t'V': 5,\n\t\t'X': 10,\n\t\t'L': 50,\n\t\t'C': 100,\n\t\t'D': 500,\n\t\t'M': 1000,\n\t}\n\n\tfor i, v := range rn {\n\n\t\tif i == 0 {\n\t\t\ttotal += m[v]\n\t\t\tcontinue\n\t\t}\n\n\t\tif v == 'V' || v == 'X' {\n\t\t\tif rn[i-1] == 'I' {\n\t\t\t\ttotal -= m['I'] + m[v]\n\t\t\t}\n\t\t} else if v == 'L' || v == 'C' {\n\t\t\tif rn[i-1] == 'X' {\n\t\t\t\ttotal -= m['X'] + m[v]\n\t\t\t}\n\t\t} else if v == 'D' || v == 'M' {\n\t\t\tif rn[i-1] == 'C' {\n\t\t\t\ttotal -= m['C'] + m[v]\n\t\t\t}\n\t\t} else {\n\t\t\ttotal += m[v]\n\t\t}\n\t\tfmt.Println(total)\n\n\t}\n\treturn\n}", "func ToRomanNumeral(arabic int) (roman string, err error) {\n\tif (arabic > 3000) || (arabic < 1) {\n\t\terr = errors.New(\"No legitimate corresponding roman numeral.\")\n\t\treturn\n\t}\n\tdigits := []rune(strconv.Itoa(arabic))\n\tif len(digits) > 3 {\n\t\tstr := string(digits[0])\n\t\tswitch str {\n\t\tcase \"1\":\n\t\t\troman += \"M\"\n\t\tcase \"2\":\n\t\t\troman += \"MM\"\n\t\tcase \"3\":\n\t\t\troman += \"MMM\"\n\t\t}\n\t\tdigits = digits[1:]\n\t}\n\tif len(digits) > 2 {\n\t\tstr := string(digits[0])\n\t\tswitch str {\n\t\tcase \"9\":\n\t\t\troman += \"CM\"\n\t\tcase \"8\":\n\t\t\troman += \"DCCC\"\n\t\tcase \"7\":\n\t\t\troman += \"DCC\"\n\t\tcase \"6\":\n\t\t\troman += \"DC\"\n\t\tcase \"5\":\n\t\t\troman += \"D\"\n\t\tcase \"4\":\n\t\t\troman += \"CD\"\n\t\tcase \"3\":\n\t\t\troman += \"CCC\"\n\t\tcase \"2\":\n\t\t\troman += \"CC\"\n\t\tcase \"1\":\n\t\t\troman += \"C\"\n\t\t}\n\t\tdigits = digits[1:]\n\t}\n\tif len(digits) > 1 {\n\t\tstr := string(digits[0])\n\t\tswitch str {\n\t\tcase \"9\":\n\t\t\troman += \"XC\"\n\t\tcase \"8\":\n\t\t\troman += \"LXXX\"\n\t\tcase \"7\":\n\t\t\troman += \"LXX\"\n\t\tcase \"6\":\n\t\t\troman += \"LX\"\n\t\tcase \"5\":\n\t\t\troman += \"L\"\n\t\tcase \"4\":\n\t\t\troman += \"XL\"\n\t\tcase \"3\":\n\t\t\troman += \"XXX\"\n\t\tcase \"2\":\n\t\t\troman += \"XX\"\n\t\tcase \"1\":\n\t\t\troman += \"X\"\n\t\t}\n\t\tdigits = digits[1:]\n\t}\n\tstr := 
string(digits)\n\tswitch str {\n\tcase \"9\":\n\t\troman += \"IX\"\n\tcase \"8\":\n\t\troman += \"VIII\"\n\tcase \"7\":\n\t\troman += \"VII\"\n\tcase \"6\":\n\t\troman += \"VI\"\n\tcase \"5\":\n\t\troman += \"V\"\n\tcase \"4\":\n\t\troman += \"IV\"\n\tcase \"3\":\n\t\troman += \"III\"\n\tcase \"2\":\n\t\troman += \"II\"\n\tcase \"1\":\n\t\troman += \"I\"\n\t}\n\treturn\n}", "func Test_romanToInt(t *testing.T) {\n\tConvey(\"Test_romanToInt\", t, func() {\n\n\t\tConvey(\"III\", func() {\n\t\t\tSo(IntShouldEqual(romanToInt(\"III\"), 3), ShouldBeTrue)\n\t\t})\n\t\tConvey(\"IV\", func() {\n\t\t\tSo(IntShouldEqual(romanToInt(\"IV\"), 4), ShouldBeTrue)\n\t\t})\n\n\t\tConvey(\"IX\", func() {\n\t\t\tSo(IntShouldEqual(romanToInt(\"IX\"), 9), ShouldBeTrue)\n\t\t})\n\n\t\tConvey(\"LVIII\", func() {\n\t\t\tSo(IntShouldEqual(romanToInt(\"LVIII\"), 58), ShouldBeTrue)\n\t\t})\n\t\tConvey(\"MCMXCIV\", func() {\n\t\t\tSo(IntShouldEqual(romanToInt(\"MCMXCIV\"), 1994), ShouldBeTrue)\n\t\t})\n\n\t})\n}", "func ConvertingToArabic(roman string) int {\n\ttotal := 0\n\tfor range roman {\n\t\ttotal++\n\t}\n\treturn total\n}", "func intToRoman(num int) string {\n\treturn _0012(num)\n}", "func IntToRoman(num int) string {\n\tresult := \"\"\n\tdict10 := map[int]string{\n\t\t3: \"M\",\n\t\t2: \"C\",\n\t\t1: \"X\",\n\t\t0: \"I\",\n\t}\n\tdict5 := map[int]string{\n\t\t2: \"D\",\n\t\t1: \"L\",\n\t\t0: \"V\",\n\t}\n\tfor ; num > 0; {\n\t\t// how many digits in the number\n\t\tn := int(math.Trunc(math.Log10(float64(num))))\n\t\tp := int(math.Pow10(n))\n\t\t// first digit of the number\n\t\tdigit := num / p\n\t\tswitch {\n\t\tcase digit < 4:\n\t\t\tresult = result + strings.Repeat(dict10[n], digit)\n\t\tcase digit == 4:\n\t\t\tresult = result + dict10[n] + dict5[n]\n\t\tcase digit >= 5 && digit < 9:\n\t\t\tresult = result + dict5[n] + strings.Repeat(dict10[n], digit-5)\n\t\tcase digit == 9:\n\t\t\tresult = result + dict10[n] + dict10[n+1]\n\t\t}\n\t\tnum -= digit * p\n\t}\n\treturn result\n}", "func ConvertToRomanNumeral(anInt int) string {\n\tresult := \"\"\n\tfor anInt > 0 {\n\t\tintReductionAmount := largestInteger(anInt)\n\t\tresult += numberToRomanNumeralMap[intReductionAmount]\n\t\tanInt -= intReductionAmount\n\t}\n\treturn result\n}", "func ToRomanNumeral(arabic int) (string, error) {\n\n\tif arabic <= 0 || arabic > 3000 {\n\t\treturn \"\", errors.New(\"true\")\n\t}\n\n\tvar romannumeral string\n\tresidual := arabic\n\n\tfor a := 0; a < (arabic / 1000); a++ {\n\t\tromannumeral = romannumeral + \"M\"\n\t\tresidual = residual - 1000\n\t}\n\tarabic = residual\n\n\tif arabic >= 900 {\n\t\tromannumeral = romannumeral + \"CM\"\n\t\tresidual = residual - 900\n\t} else {\n\t\tfor a := 0; a < (arabic / 500); a++ {\n\t\t\tromannumeral = romannumeral + \"D\"\n\t\t\tresidual = residual - 500\n\t\t}\n\t}\n\tarabic = residual\n\n\tif arabic >= 400 {\n\t\tromannumeral = romannumeral + \"CD\"\n\t\tresidual = residual - 400\n\t} else {\n\t\tfor a := 0; a < (arabic / 100); a++ {\n\t\t\tromannumeral = romannumeral + \"C\"\n\t\t\tresidual = residual - 100\n\t\t}\n\t}\n\tarabic = residual\n\n\tif arabic >= 90 {\n\t\tromannumeral = romannumeral + \"XC\"\n\t\tresidual = residual - 90\n\t} else {\n\t\tfor a := 0; a < (arabic / 50); a++ {\n\t\t\tromannumeral = romannumeral + \"L\"\n\t\t\tresidual = residual - 50\n\t\t}\n\t}\n\tarabic = residual\n\n\tif arabic >= 40 {\n\t\tromannumeral = romannumeral + \"XL\"\n\t\tresidual = residual - 40\n\t} else {\n\t\tfor a := 0; a < (arabic / 10); a++ {\n\t\t\tromannumeral = romannumeral + 
\"X\"\n\t\t\tresidual = residual - 10\n\t\t}\n\t}\n\tarabic = residual\n\n\tif arabic == 9 {\n\t\tromannumeral = romannumeral + \"IX\"\n\t\tresidual = residual - 9\n\t} else {\n\t\tfor a := 0; a < (arabic / 5); a++ {\n\t\t\tromannumeral = romannumeral + \"V\"\n\t\t\tresidual = residual - 5\n\t\t}\n\t}\n\tarabic = residual\n\n\tif arabic == 4 {\n\t\tromannumeral = romannumeral + \"IV\"\n\t} else {\n\t\tfor a := 0; a < (arabic / 1); a++ {\n\t\t\tromannumeral = romannumeral + \"I\"\n\t\t\tresidual = residual - 1\n\t\t}\n\t}\n\treturn romannumeral, nil\n}", "func aToi(str string) int {\n\tatoi, err := strconv.Atoi(str)\n\terrFatal(err)\n\n\treturn atoi\n}", "func main() {\n\tfmt.Println(intToRoman2(1994))\n}", "func ToRomanNumeral(arabic int) (string, error) {\n\n\tif arabic > 3000 {\n\t\treturn \"\", errors.New(\"arabic number greater than 3000\")\n\t}\n\n\tif arabic <= 0 {\n\t\treturn \"\", errors.New(\"cannot convert negative numbers\")\n\t}\n\n\tvar romanNumber string\n\tvar quotient = arabic\n\tvar rest int\n\tfor i := 1; quotient > 0; i = i * 10 {\n\t\tquotient = arabic / (i * 10)\n\t\trest = arabic % (i * 10)\n\t\tromanNumber = translate(rest, i) + romanNumber\n\t}\n\n\treturn romanNumber, nil\n}", "func Str2int(Num string)int{\n s, _ := strconv.Atoi(Num)\n return s\n}", "func ToRomanNumeral(a int) (string, error) {\n\tif a <= 0 || a > 3000 {\n\t\treturn \"\", errors.New(\"there is no need to be able to convert numbers larger than about 3000\")\n\t}\n\n\tr := hirelo(a/1000, \"\", \"\", \"M\")\n\ta %= 1000\n\tr += hirelo(a/100, \"M\", \"D\", \"C\")\n\ta %= 100\n\tr += hirelo(a/10, \"C\", \"L\", \"X\")\n\ta %= 10\n\tr += hirelo(a, \"X\", \"V\", \"I\")\n\n\treturn r, nil\n}", "func intToRoman(num int) string {\n\tif num == 0 {\n\t\treturn \"\"\n\t}\n\n\troman := \"\"\n\tfor num > 0 {\n\t\tif num >= 900 {\n\t\t\troman, num = largerNumeral(roman, num, 1000, \"M\", 100, \"C\")\n\t\t\tcontinue\n\t\t}\n\n\t\tif num >= 400 {\n\t\t\troman, num = largerNumeral(roman, num, 500, \"D\", 100, \"C\")\n\t\t\tcontinue\n\t\t}\n\n\t\tif num >= 90 {\n\t\t\troman, num = largerNumeral(roman, num, 100, \"C\", 10, \"X\")\n\t\t\tcontinue\n\t\t}\n\n\t\tif num >= 40 {\n\t\t\troman, num = largerNumeral(roman, num, 50, \"L\", 10, \"X\")\n\t\t\tcontinue\n\t\t}\n\n\t\tif num >= 9 {\n\t\t\troman, num = largerNumeral(roman, num, 10, \"X\", 1, \"I\")\n\t\t\tcontinue\n\t\t}\n\n\t\tif num >= 4 {\n\t\t\troman, num = largerNumeral(roman, num, 5, \"V\", 1, \"I\")\n\t\t\tcontinue\n\t\t}\n\n\t\troman += \"I\"\n\t\tnum--\n\t}\n\n\treturn roman\n}", "func ToRomanNumeral(a int) (string, error) {\n\tvar s strings.Builder\n\n\tif a > 3000 || a <= 0 {\n\t\treturn \"\", errors.New(\" Invalid input\")\n\t}\n\t// Convert the int to String.\n\t// E.g. 123 => \"[1,2,3]\"\n\tfvs := strings.Split(strconv.Itoa(a), \"\")\n\n\tlenFVS := len(fvs)\n\tindex := 0\n\tfor pos := lenFVS; pos >= 2; pos-- {\n\n\t\tplaceValue := int(math.Pow10(pos - 1))\n\n\t\t// How many times to repeat e.g. 
100 * 2\n\t\trepeatTimes, _ := strconv.Atoi(fvs[index])\n\t\tif arabicMap[(placeValue*repeatTimes)] != \"\" {\n\t\t\tfmt.Fprintf(&s, \"%s\", arabicMap[(placeValue*repeatTimes)])\n\t\t} else {\n\t\t\tfmt.Fprintf(&s, \"%s\", strings.Repeat(arabicMap[placeValue], repeatTimes))\n\t\t}\n\n\t\tindex++\n\t}\n\n\tval, _ := strconv.Atoi(fvs[index])\n\tfmt.Fprintf(&s, \"%s\", onesConversion(val))\n\treturn s.String(), nil\n}", "func intToRoman(num int) string {\n\tvar int2Str = make(map[int]string, 0)\n\tint2Str[1] = \"I\"\n\tint2Str[4] = \"IV\"\n\tint2Str[5] = \"V\"\n\tint2Str[9] = \"IX\"\n\tint2Str[10] = \"X\"\n\tint2Str[40] = \"XL\"\n\tint2Str[50] = \"L\"\n\tint2Str[90] = \"XC\"\n\tint2Str[100] = \"C\"\n\tint2Str[400] = \"CD\"\n\tint2Str[500] = \"D\"\n\tint2Str[900] = \"CM\"\n\tint2Str[1000] = \"M\"\n\tresult := \"\"\n\tnums := []int{1000, 900, 500, 400, 100, 90, 50, 40, 10, 9, 5, 4, 1}\n\tfor _, n := range nums {\n\t\tfor num >= n {\n\t\t\tresult += int2Str[n]\n\t\t\tnum -= n\n\t\t}\n\t}\n\treturn result\n}", "func ToRomanNumeral(in int) (out string, err error) {\n\tif in <= 0 || in > 3000 {\n\t\treturn \"\", errors.New(\"Invalid input. Must be between 1 - 3000 inclusive\")\n\t}\n\tones := in % 10\n\ttens := (in % 100) / 10\n\thundreds := (in % 1000) / 100\n\tthousands := (in % 10000) / 1000\n\tout += convertDigit(thousands, \"M\", \"_\", \"_\")\n\tout += convertDigit(hundreds, \"C\", \"D\", \"M\")\n\tout += convertDigit(tens, \"X\", \"L\", \"C\")\n\tout += convertDigit(ones, \"I\", \"V\", \"X\")\n\treturn out, nil\n}", "func intToRoman(num int) string {\n\troman := \"\"\n\tfor num > 0 {\n\t\tif num >= 1000 {\n\t\t\troman += \"M\"\n\t\t\tnum -= 1000\n\t\t} else if num >= 900 {\n\t\t\troman += \"CM\"\n\t\t\tnum -= 900\n\t\t} else if num >= 500 {\n\t\t\troman += \"D\"\n\t\t\tnum -= 500\n\t\t} else if num >= 400 {\n\t\t\troman += \"CD\"\n\t\t\tnum -= 400\n\t\t} else if num >= 100 {\n\t\t\troman += \"C\"\n\t\t\tnum -= 100\n\t\t} else if num >= 90 {\n\t\t\troman += \"XC\"\n\t\t\tnum -= 90\n\t\t} else if num >= 50 {\n\t\t\troman += \"L\"\n\t\t\tnum -= 50\n\t\t} else if num >= 40 {\n\t\t\troman += \"XL\"\n\t\t\tnum -= 40\n\t\t} else if num >= 10 {\n\t\t\troman += \"X\"\n\t\t\tnum -= 10\n\t\t} else if num >= 9 {\n\t\t\troman += \"IX\"\n\t\t\tnum -= 9\n\t\t} else if num >= 5 {\n\t\t\troman += \"V\"\n\t\t\tnum -= 5\n\t\t} else if num >= 4 {\n\t\t\troman += \"IV\"\n\t\t\tnum -= 4\n\t\t} else if num >= 1 {\n\t\t\troman += \"I\"\n\t\t\tnum -= 1\n\t\t}\n\t}\n\treturn roman\n}", "func String2Int(s string) int {\n\ti, _ := strconv.Atoi(s)\n\treturn i\n}", "func ToRomanNumeral(arabic int) (roman string) {\n\tfor arabic > 0 {\n\t\tfor _, numeral := range correlations {\n\t\t\tif arabic-numeral.k >= 0 {\n\t\t\t\tarabic -= numeral.k\n\t\t\t\troman += numeral.v\n\t\t\t\tbreak\n\t\t\t}\n\t\t}\n\t}\n\treturn\n}", "func TestParseRomani(t *testing.T) {\n\tromaniTests := []struct {\n\t\tinput string\n\t\twant int\n\t}{\n\t\t{\"I\", 1},\n\t\t{\"II\", 2},\n\t\t{\"III\", 3},\n\t\t{\"IV\", 4},\n\t\t{\"V\", 5},\n\t\t{\"VI\", 6},\n\t\t{\"VII\", 7},\n\t\t{\"VIII\", 8},\n\t\t{\"IX\", 9},\n\t\t{\"X\", 10},\n\t\t{\"XX\", 20},\n\t\t{\"XXX\", 30},\n\t\t{\"XXXIX\", 39},\n\t}\n\n\tfor _, tc := range romaniTests {\n\t\tgot, _ := ParseRomani(tc.input)\n\t\tif got != tc.want {\n\t\t\tt.Errorf(\"got %v want %v\", got, tc.want)\n\t\t}\n\t}\n}", "func Str2Int(v interface{}) (i int) {\n\tswitch v.(type) {\n\tcase string:\n\t\ti, _ = strconv.Atoi(v.(string))\n\n\tcase int:\n\t\ti = v.(int)\n\t}\n\n\treturn\n}", "func Atoi(s string) (int, 
error) {\n\treturn strconv.Atoi(s)\n}", "func ToRomanNumeral(n int) (string, error) {\n\tif n <= 0 || n > 3000 {\n\t\treturn \"\", errors.New(\"invalid input\")\n\t}\n\n\tvar res string\n\tfor i, val := range values {\n\t\tfor n >= val {\n\t\t\tn -= val\n\t\t\tres += symbols[i]\n\t\t}\n\t}\n\treturn res, nil\n}", "func Atoi(s string) int {\n\ti, err := strconv.Atoi(s)\n\tPanicOnError(err, \"bad integer\")\n\n\treturn i\n}", "func convInt(n string) int {\r\n\tpnt, err := strconv.Atoi(n)\r\n\tif err != nil {\r\n\t\treturn -1\r\n\t}\r\n\treturn pnt\r\n}", "func ToRomanNumeral(number int) (string, error) {\n\tif number <= 0 || number >= 4000 {\n\t\treturn \"\", ErrOutOfBounds\n\t}\n\n\tvar buf bytes.Buffer\n\tfor _, numericInfo := range intToNumeral {\n\t\tlimit := number / numericInfo.value\n\t\tfor i := 0; i < limit; i++ {\n\t\t\tbuf.WriteString(numericInfo.numeral)\n\t\t\tnumber -= numericInfo.value\n\t\t}\n\t}\n\n\treturn buf.String(), nil\n}", "func intToRoman(num int) string {\n\thash := map[int]string{\n\t\t1: \"I\",\n\t\t4: \"IV\",\n\t\t5: \"V\",\n\t\t9: \"IX\",\n\t\t10: \"X\",\n\t\t40: \"XL\",\n\t\t50: \"L\",\n\t\t90: \"XC\",\n\t\t100: \"C\",\n\t\t400: \"CD\",\n\t\t500: \"D\",\n\t\t900: \"CM\",\n\t\t1000: \"M\",\n\t}\n\tarr := []int{1, 4, 5, 9, 10, 40, 50, 90, 100, 400, 500, 900, 1000}\n\tres := \"\"\n\n\tfor i := len(arr) - 1; i >= 0; i-- {\n\t\tif num >= arr[i] {\n\t\t\ttmp := num / arr[i]\n\t\t\tfor j := 0; j < tmp; j++ {\n\t\t\t\tres += hash[arr[i]]\n\t\t\t}\n\t\t\tnum = num % arr[i]\n\t\t}\n\t}\n\n\treturn res\n}", "func (s *Str) Int() int {\n\tval, err := strconv.Atoi(s.val)\n\tif err != nil {\n\t\ts.err = err\n\t}\n\treturn val\n}", "func convertStr(location string, s string) int {\n\ti, err := strconv.Atoi(s)\n\tif err != nil {\n\t\tlog.Fatalf(\"Invalid %s: %s\\n\", location, s)\n\t}\n\treturn i\n}", "func Atoi(str string) int {\n\t// Validate +/- sign\n\t// Validate empty string\n\tif str == \"\" {\n\t\treturn 0\n\t}\n\t// Assumption: int32\n\tMAX_INT := 1<<31 - 1\n\tS_MAX_INT := ^MAX_INT\n\tnum := 0\n\tmax := MAX_INT / 10\n\tunsigned := true\n\tif string(str[0]) == \"-\" {\n\t\tmax = S_MAX_INT / 10\n\t\tunsigned = false\n\t\tstr = str[1:]\n\t}\n\tfor _, v := range str {\n\t\t// Validate non-integer\n\t\tif notDigit(v) {\n\t\t\treturn 0\n\t\t}\n\t\tdigit := runeToDigit(v)\n\t\tif unsigned {\n\t\t\tif num > max ||\n\t\t\t\t// When num = MAX_INT/10\n\t\t\t\t(num == max && MAX_INT%10 <= digit) {\n\t\t\t\treturn MAX_INT\n\t\t\t}\n\t\t\tnum = num*10 + digit\n\t\t} else {\n\t\t\t// When num < S_MAX_INT/10\n\t\t\tif num < max ||\n\t\t\t\t// When num = S_MAX_INT/10\n\t\t\t\t(num == max && -1*S_MAX_INT%10 <= digit) {\n\t\t\t\treturn S_MAX_INT\n\t\t\t}\n\t\t\tnum = num*10 - digit\n\t\t}\n\t}\n\treturn num\n}", "func Atoi(s string) int {\n\tresult, err := strconv.Atoi(s)\n\tMustNotError(err)\n\treturn result\n}", "func ToRomanNumeral(n int) (string, error) {\n\tif n <= 0 || n >= 4000 {\n\t\treturn \"\", errors.New(\"n must be within range 1-3999\")\n\t}\n\tvar buf bytes.Buffer\n\tfor {\n\t\tif n == 0 {\n\t\t\tbreak\n\t\t}\n\t\tfor i := len(translateTable) - 1; i >= 0; i-- {\n\t\t\tcnt := n / translateTable[i].arabic\n\t\t\tn = n % translateTable[i].arabic\n\t\t\tbuf.WriteString(strings.Repeat(translateTable[i].roman, cnt))\n\t\t}\n\n\t}\n\treturn buf.String(), nil\n}", "func intToRoman(num int) string {\n\tstr := \"\"\n\n\tfor ; num >= 1000; num -= 1000 {\n\t\tstr += \"M\"\n\t}\n\n\tstr += _intToRoman(num, 100, \"C\", \"D\", \"M\") +\n\t\t_intToRoman(num, 10, \"X\", \"L\", \"C\") 
+\n\t\t_intToRoman(num, 1, \"I\", \"V\", \"X\")\n\treturn str\n}", "func myAtoi(str string) int {\n\treturn convert(clean(str))\n}", "func titleToNumber(s string) int {\n\tn := 0\n\tfor _, r := range s {\n\t\tc := int(r - 64)\n\t\tn = n*26 + c\n\t}\n\treturn n\n}", "func String2Int(v string) int {\n\treturn int(String2Int64(v))\n}", "func Atoi(s string) (int, bool) {\n\tif s == \"\" {\n\t\treturn 0, false\n\t}\n\n\tneg := false\n\tif s[0] == '-' {\n\t\tneg = true\n\t\ts = s[1:]\n\t}\n\n\tun := uint(0)\n\tfor i := 0; i < len(s); i++ {\n\t\tc := s[i]\n\t\tif c < '0' || c > '9' {\n\t\t\treturn 0, false\n\t\t}\n\t\tif un > maxUint/10 {\n\t\t\t// overflow\n\t\t\treturn 0, false\n\t\t}\n\t\tun *= 10\n\t\tun1 := un + uint(c) - '0'\n\t\tif un1 < un {\n\t\t\t// overflow\n\t\t\treturn 0, false\n\t\t}\n\t\tun = un1\n\t}\n\n\tif !neg && un > uint(maxInt) {\n\t\treturn 0, false\n\t}\n\tif neg && un > uint(maxInt)+1 {\n\t\treturn 0, false\n\t}\n\n\tn := int(un)\n\tif neg {\n\t\tn = -n\n\t}\n\n\treturn n, true\n}", "func (n SStr) ToInt() int {\n\tv, _ := strconv.ParseInt(string(n), 10, strconv.IntSize)\n\treturn int(v)\n}", "func integerToRoman(number int) string {\n\tmaxRomanNumber := 3999\n\tif number > maxRomanNumber {\n\t\treturn strconv.Itoa(number)\n\t}\n\n\tconversions := []struct {\n\t\tvalue int\n\t\tdigit string\n\t}{\n\t\t{1000, \"M\"},\n\t\t{900, \"CM\"},\n\t\t{500, \"D\"},\n\t\t{400, \"CD\"},\n\t\t{100, \"C\"},\n\t\t{90, \"XC\"},\n\t\t{50, \"L\"},\n\t\t{40, \"XL\"},\n\t\t{10, \"X\"},\n\t\t{9, \"IX\"},\n\t\t{5, \"V\"},\n\t\t{4, \"IV\"},\n\t\t{1, \"I\"},\n\t}\n\n\tvar roman strings.Builder\n\tfor _, conversion := range conversions {\n\t\tfor number >= conversion.value {\n\t\t\troman.WriteString(conversion.digit)\n\t\t\tnumber -= conversion.value\n\t\t}\n\t}\n\n\treturn roman.String()\n}", "func atoi(a string) int {\n\ti, err := strconv.Atoi(a)\n\tif err != nil {\n\t\tnumErrors++\n\t\ti = 0\n\t}\n\n\treturn i\n}", "func getASCIISum(s string) (sum int) {\n\tfor _, c := range s {\n\t\tc = (c & 0xdf) - 'A'\n\t\tsum += int(c)\n\t}\n\treturn\n}", "func atoi(s string) int {\n\td, _ := strconv.Atoi(s)\n\treturn d\n}", "func atoi(s string) int {\n\td, _ := strconv.Atoi(s)\n\treturn d\n}", "func Strtoi(s string) int {\n\tif i, err := strconv.Atoi(s); err != nil {\n\t\tpanic(errors.New(\"[argument error]: Strtoi only accepts integer string\"))\n\t} else {\n\t\treturn i\n\t}\n}", "func atoI(numStr string) (num int, err error) {\n\tvar num64 int64\n\tnum64, err = strconv.ParseInt(numStr, 10, 64)\n\tif err != nil {\n\t\treturn\n\t}\n\tnum = int(num64)\n\treturn\n}", "func atoi(s string) int64 {\n\tsCopy := s\n\tif sCopy[0] == '-' {\n\t\tsCopy = s[1:]\n\t}\n\n\tres := 0\n\tfor _, myrune := range sCopy {\n\t\tres = res*10 + (int(myrune) - 48)\n\t}\n\n\tif s[0] == '-' {\n\t\tres = res * -1\n\t}\n\n\tvar result int64\n\tresult = int64(res)\n\n\treturn result\n}", "func (n StringNumber) Int() int {\n\treturn int(n)\n}", "func toInt(rawInt string) int {\n\tparsed, _ := strconv.Atoi(strings.Replace(strings.Replace(rawInt, \"$\", \"\", -1), \",\", \"\", -1))\n\treturn parsed\n}", "func toInt(digit rune) int {\n\treturn int(digit) - '0'\n}", "func atom2int(atom gaesearch.Atom) int {\n\ti, _ := strconv.Atoi(string(atom))\n\t// In case of an invalid argument, the return value is 0\n\treturn i\n}", "func Atoi(str string) uint64 {\n\tstr = strings.Trim(str, \" \")\n\tif len(str) > 0 {\n\t\ti, err := strconv.ParseUint(str, 10, 0)\n\t\tif err != nil {\n\t\t\treturn 0\n\t\t}\n\t\treturn i\n\t}\n\treturn 0\n}", "func Int(a 
string) int {\n\tnum, err := strconv.Atoi(a)\n\tif err != nil {\n\t\tpanic(err)\n\t}\n\treturn num\n}", "func Int(name string) (int, error) {\n\ti, err := strconv.Atoi(String(name))\n\n\tif err != nil {\n\t\treturn 0, fmt.Errorf(\"failed to decode input %q as int: %w\", name, err)\n\t}\n\n\treturn i, nil\n}", "func Atoi(str string) int {\n\tres, err := strconv.Atoi(str)\n\tCheck(err, \"Invalid number\")\n\treturn res\n}", "func Str2Int(str string) (int, error) {\n\treturn strconv.Atoi(str)\n}", "func myAtoi(s string) int {\n\tsign, ret, i := 1, 0, 0\n\tfor ; i < len(s) && s[i] == ' '; i++ {\n\t}\n\tif i < len(s) && (s[i] == '+' || s[i] == '-') {\n\t\tif s[i] == '-' {\n\t\t\tsign = -1\n\t\t}\n\t\ti++\n\t}\n\tfor ; i < len(s) && '0' <= s[i] && s[i] <= '9'; i++ {\n\t\tret = ret*10 + int(s[i]-'0')\n\t\tif sign*ret < math.MinInt32 {\n\t\t\treturn math.MinInt32\n\t\t}\n\t\tif sign*ret > math.MaxInt32 {\n\t\t\treturn math.MaxInt32\n\t\t}\n\t}\n\treturn sign * ret\n}", "func stateToInt(s string) int {\n\tvar i int\n\tswitch {\n\tcase s == \"UNKNOWN\":\n\t\ti = uUNKNOWN\n\tcase s == \"INIT\":\n\t\ti = uINIT\n\tcase s == \"READY\":\n\t\ti = uREADY\n\tcase s == \"TEST\":\n\t\ti = uTEST\n\tcase s == \"DONE\":\n\t\ti = uDONE\n\tcase s == \"TERM\":\n\t\ti = uTERM\n\tdefault:\n\t\ti = -1\n\t}\n\treturn i\n}", "func ToInt(in string) int {\n\tif len(in) < 1 {\n\t\treturn -1\n\t}\n\n\tres, err := strconv.Atoi(in)\n\tif err != nil {\n\t\treturn -1\n\t}\n\treturn res\n}", "func atoi(in string) int {\n\tn, err := strconv.Atoi(in)\n\tmust(err)\n\treturn n\n}", "func atoi(in string) int {\n\tn, err := strconv.Atoi(in)\n\tmust(err)\n\treturn n\n}", "func atoi(in string) int {\n\tn, err := strconv.Atoi(in)\n\tmust(err)\n\treturn n\n}", "func atoi(in string) int {\n\tn, err := strconv.Atoi(in)\n\tmust(err)\n\treturn n\n}", "func atoi(in string) int {\n\tn, err := strconv.Atoi(in)\n\tmust(err)\n\treturn n\n}", "func atoi(in string) int {\n\tn, err := strconv.Atoi(in)\n\tmust(err)\n\treturn n\n}", "func atoi(in string) int {\n\tn, err := strconv.Atoi(in)\n\tmust(err)\n\treturn n\n}", "func atoi(in string) int {\n\tn, err := strconv.Atoi(in)\n\tmust(err)\n\treturn n\n}", "func atoi(in string) int {\n\tn, err := strconv.Atoi(in)\n\tmust(err)\n\treturn n\n}", "func atoi(in string) int {\n\tn, err := strconv.Atoi(in)\n\tmust(err)\n\treturn n\n}", "func atoi(in string) int {\n\tn, err := strconv.Atoi(in)\n\tmust(err)\n\treturn n\n}", "func wordToInt(s string) int {\n\tsum := 0\n\tfor _, v := range s {\n\t\tsum += int(v) - 'A' + 1\n\t}\n\treturn sum\n\n}", "func atoi(s string) int {\n\ti, _ := strconv.Atoi(strings.TrimSpace(s))\n\treturn i\n}", "func Txt2int(s string) uint64 {\n\tx := uint64(len(s)) * Prime\n\tfor i := len(s) - 1; i >= 0; i-- {\n\t\tx ^= uint64(s[i])\n\t\tx *= Prime\n\t}\n\treturn x\n}", "func myAtoi(str string) int {\n\tnum, sign, metSign, metNumber := 0, 1, false, false\n\tfor _, r := range str {\n\t\tswitch {\n\t\tcase r == '-' && !metSign:\n\t\t\tsign = -1\n\t\t\tmetSign = true\n\t\t\tcontinue\n\t\tcase r == '+' && !metSign:\n\t\t\tmetSign = true\n\t\tcase r >= '0' && r <= '9':\n\t\t\tnum *= 10\n\t\t\tnum += int(r) - 48\n\t\t\tmetNumber = true\n\t\t\tcontinue\n\t\tcase r == ' ' && !metNumber:\n\t\t\tcontinue\n\t\tdefault:\n\t\t\treturn limitInt(num * sign)\n\t\t}\n\t}\n\treturn limitInt(num * sign)\n}", "func Int(s string) int {\n\tif s == \"\" {\n\t\treturn 0\n\t}\n\n\tresult, err := strconv.ParseInt(s, 10, 64)\n\n\tif err != nil {\n\t\treturn 0\n\t}\n\n\treturn int(result)\n}", "func AtoiDef(s 
string, def int) int {\n\tif i, err := strconv.Atoi(s); err == nil {\n\t\treturn i\n\t}\n\treturn def\n}", "func ToI(str string) int64 {\n\tval, err := strconv.ParseInt(str, 10, 64)\n\tL.IsError(err, str)\n\treturn val\n}", "func myAtoi(str string) int {\n\t// Trim white space\n\ti := 0\n\tfor i < len(str) && str[i] == ' ' {\n\t\ti++\n\t}\n\n\tvar negative bool\n\t// Check for negative/positive sign\n\tif i < len(str) && (str[i] == '-' || str[i] == '+') {\n\t\tnegative = (str[i] == '-')\n\t\ti++\n\t}\n\n\tvar result int\n\tfor ; i < len(str); i++ {\n\t\tif str[i] < '0' || str[i] > '9' {\n\t\t\tbreak\n\t\t}\n\n\t\t// Use MaxInt64 on a 64-bit OS\n\t\tif result > math.MaxInt32/10 || (result == math.MaxInt32/10 && str[i]-'0' > math.MaxInt32%10) {\n\t\t\tif negative {\n\t\t\t\treturn math.MinInt32\n\t\t\t}\n\t\t\treturn math.MaxInt32\n\t\t}\n\n\t\tresult = (result * 10) + int(str[i]-'0')\n\t}\n\n\tif negative {\n\t\treturn 0 - result\n\t}\n\n\treturn result\n}", "func (p Parser) Int(ctx context.Context) (*int, error) {\n\tvalue, err := p.Source.String(ctx)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\tif value == nil {\n\t\treturn nil, nil\n\t}\n\tparsed, err := strconv.Atoi(*value)\n\tif err != nil {\n\t\treturn nil, ex.New(err)\n\t}\n\treturn &parsed, nil\n}", "func atoi(b []byte) (int, error) {\n\tif len(b) > len(powers) {\n\t\treturn 0, fmt.Errorf(\"sam: integer overflow: %q\", b)\n\t}\n\tvar n int64\n\tk := len(b) - 1\n\tfor i, v := range b {\n\t\tn += int64(v-'0') * powers[k-i]\n\t\tif int64(int(n)) != n {\n\t\t\treturn 0, fmt.Errorf(\"sam: integer overflow: %q at %d\", b, i)\n\t\t}\n\t}\n\treturn int(n), nil\n}", "func strToInt(input string) int {\n\toutput, _ := strconv.Atoi(input)\n\treturn output\n}", "func Atoi(buf []byte) int {\n\ti, err := strconv.Atoi(string(buf))\n\tAbortIf(err)\n\treturn i\n}", "func (v Season_Ic_Ta) Int() int {\n\treturn int(v)\n}", "func AtoiNew(word string) (int, bool, bool) {\n\tisPlus := false\n\tinvalid := false\n\ttempWord := word\n\n\tif word[0] == '+' {\n\t\tisPlus = true\n\t\ttempWord = word[1:]\n\t}\n\tif isPlus && tempWord == \"\" {\n\t\tinvalid = true\n\t}\n\tif word[0] == '-' {\n\t\ttempWord = word[1:]\n\t}\n\tconverted := 0\n\tfor _, val := range tempWord {\n\t\tval = val - '0'\n\t\tif val > 9 {\n\t\t\tinvalid = true\n\t\t\treturn 0, isPlus, invalid\n\t\t}\n\t\tconverted = converted*10 + int(val)\n\t}\n\treturn converted, isPlus, invalid\n}", "func (k *Key) Int() (int, error) {\n\treturn strconv.Atoi(k.String())\n}", "func (l *lexer) Integer() string {\n\tvar result []string\n\tfor IsDigit(l.currentChar) {\n\t\tresult = append(result, l.currentChar)\n\t\tl.Advance()\n\t}\n\treturn strings.Join(result, \"\")\n}", "func (r *Response) Int() (int, error) {\n\treturn strconv.Atoi(r.String())\n}", "func atoi(s string) int {\n\tif s == \"*\" {\n\t\treturn 0\n\t}\n\n\ti, err := strconv.Atoi(strings.TrimSpace(s))\n\tif err != nil {\n\t\tpanic(err)\n\t}\n\treturn i\n}", "func StringToInteger(param string) int {\n\tval, _ := strconv.Atoi(param)\n\treturn val\n}", "func toInt(card z.Card) int {\n\tresult := MapRankToInt[card.Rank]*10 + MapSuitToInt[card.Suit]\n\treturn result\n}", "func toInt(card z.Card) int {\n\tresult := MapRankToInt[card.Rank]*10 + MapSuitToInt[card.Suit]\n\treturn result\n}" ]
[ "0.8233444", "0.816875", "0.80990547", "0.79816216", "0.7963729", "0.7833738", "0.78313416", "0.7806245", "0.7234038", "0.6875017", "0.67909557", "0.67863166", "0.6753818", "0.6709547", "0.6693833", "0.6692171", "0.66672873", "0.66672343", "0.6628576", "0.6586043", "0.6570665", "0.6551442", "0.6537191", "0.653546", "0.6465345", "0.6427816", "0.6405867", "0.6401111", "0.63638663", "0.63091886", "0.6288281", "0.62772477", "0.62206227", "0.6214931", "0.61930954", "0.61698246", "0.61638683", "0.61542535", "0.61537147", "0.6148494", "0.6147696", "0.61278117", "0.61197805", "0.6119383", "0.6101842", "0.6099874", "0.6099822", "0.6094632", "0.6091214", "0.6069764", "0.60638267", "0.6062005", "0.6062005", "0.6056493", "0.60518", "0.6051339", "0.60457855", "0.6042436", "0.6017622", "0.6007859", "0.60030746", "0.5991525", "0.5990857", "0.5990138", "0.59882474", "0.5959412", "0.59524065", "0.59390956", "0.59232885", "0.59232885", "0.59232885", "0.59232885", "0.59232885", "0.59232885", "0.59232885", "0.59232885", "0.59232885", "0.59232885", "0.59232885", "0.5894646", "0.5889287", "0.5886193", "0.5876891", "0.5857667", "0.58563644", "0.5822008", "0.5819814", "0.58046377", "0.5803305", "0.5788054", "0.5767795", "0.57666737", "0.57601446", "0.5753806", "0.5747859", "0.572077", "0.57170373", "0.57144624", "0.56955594", "0.56955594" ]
0.7710532
8
Init initializes a fabric-ca server
func (s *Server) Init(renew bool) (err error) {
	err = s.init(renew)
	err2 := s.closeDB()
	if err2 != nil {
		log.Errorf("Close DB failed: %s", err2)
	}
	return err
}
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "func Init(c *conf.Config) {\n\t// service\n\tinitService(c)\n\t// init grpc\n\tgrpcSvr = grpc.New(nil, arcSvc, newcomerSvc)\n\tengineOuter := bm.DefaultServer(c.BM.Outer)\n\t// init outer router\n\touterRouter(engineOuter)\n\tif err := engineOuter.Start(); err != nil {\n\t\tlog.Error(\"engineOuter.Start() error(%v) | config(%v)\", err, c)\n\t\tpanic(err)\n\t}\n}", "func (as *ArgocdServer) Init() error {\n\tfor _, f := range []func() error{\n\t\tas.initClientSet,\n\t\tas.initTLSConfig,\n\t\tas.initRegistry,\n\t\tas.initDiscovery,\n\t\tas.initMicro,\n\t\tas.initHTTPService,\n\t\tas.initProxyAgent,\n\t\t//as.initMetric,\n\t} {\n\t\tif err := f(); err != nil {\n\t\t\treturn err\n\t\t}\n\t}\n\n\treturn nil\n}", "func Init(c *conf.Config) {\n\tinitService(c)\n\t// init inner router\n\tengine := bm.DefaultServer(c.HTTPServer)\n\tinnerRouter(engine)\n\t// init inner server\n\tif err := engine.Start(); err != nil {\n\t\tlog.Error(\"engine.Start error(%v)\", err)\n\t\tpanic(err)\n\t}\n}", "func Init(c *conf.Config, s *service.Service) {\n\tsrv = s\n\t// init inner router\n\teng := bm.DefaultServer(c.BM)\n\tinitRouter(eng)\n\t// init inner server\n\tif err := eng.Start(); err != nil {\n\t\tlog.Error(\"bm.DefaultServer error(%v)\", err)\n\t\tpanic(err)\n\t}\n}", "func (hfc *FabricSetup) Init() {\n\thfc.Sdk = hfc.setupSDK(hfc.ConfigFileName)\n\thfc.cleanupUserData()\n\thfc.setupCA()\n}", "func (ks *KerbServer) Init(srv string) error {\n\tservice := C.CString(srv)\n\tdefer C.free(unsafe.Pointer(service))\n\n\tresult := 0\n\n\tks.state = C.new_gss_server_state()\n\tif ks.state == nil {\n\t\treturn errors.New(\"Failed to allocate memory for gss_server_state\")\n\t}\n\n\tresult = int(C.authenticate_gss_server_init(service, ks.state))\n\n\tif result == C.AUTH_GSS_ERROR {\n\t\treturn ks.GssError()\n\t}\n\n\treturn nil\n}", "func Init(c *conf.Config, s *service.Service) {\n\tpushSrv = s\n\tauthSrv = permit.New(c.Auth)\n\tengine := bm.DefaultServer(c.HTTPServer)\n\troute(engine)\n\tif err := engine.Start(); err != nil {\n\t\tlog.Error(\"engine.Start error(%v)\", err)\n\t\tpanic(err)\n\t}\n}", "func ServerInit(cfg *Config) {\n\tGenNewCreds(cfg)\n}", "func Init() {\n\tlog.Debug().Caller().Msg(\"initialize server\")\n\tr := router()\n\tr.Run()\n}", "func (c *Client) Init() error {\n\tc.ac.tlsConfig = c.TLSConfig\n\tif err := c.ac.init(); err != nil {\n\t\treturn err\n\t}\n\treturn c.setup()\n}", "func Init(ac *atmi.ATMICtx) int {\n\n\tac.TpLogWarn(\"Doing server init...\")\n\tif err := ac.TpInit(); err != nil {\n\t\treturn FAIL\n\t}\n\n\t//Get the configuration\n\n\t//Allocate configuration buffer\n\tbuf, err := ac.NewUBF(16 * 1024)\n\tif nil != err {\n\t\tac.TpLogError(\"Failed to allocate buffer: [%s]\", err.Error())\n\t\treturn FAIL\n\t}\n\n\tbuf.BChg(u.EX_CC_CMD, 0, \"g\")\n\tbuf.BChg(u.EX_CC_LOOKUPSECTION, 0, fmt.Sprintf(\"%s/%s\", PROGSECTION, os.Getenv(\"NDRX_CCTAG\")))\n\n\tif _, err := ac.TpCall(\"@CCONF\", buf, 0); nil != err {\n\t\tac.TpLogError(\"ATMI Error %d:[%s]\\n\", err.Code(), err.Message())\n\t\treturn FAIL\n\t}\n\n\t//Dump to log the config read\n\tbuf.TpLogPrintUBF(atmi.LOG_DEBUG, \"Got configuration.\")\n\n\toccs, _ := buf.BOccur(u.EX_CC_KEY)\n\n\t// Load in the config...\n\tfor occ := 0; occ < occs; occ++ {\n\t\tac.TpLogDebug(\"occ %d\", occ)\n\t\tfldName, err := buf.BGetString(u.EX_CC_KEY, occ)\n\n\t\tif nil != err {\n\t\t\tac.TpLogError(\"Failed to get field \"+\n\t\t\t\t\"%d occ %d\", u.EX_CC_KEY, occ)\n\t\t\treturn FAIL\n\t\t}\n\n\t\tac.TpLogDebug(\"Got config field [%s]\", 
fldName)\n\n\t\tswitch fldName {\n\n\t\tcase \"mykey1\":\n\t\t\tmyval, _ := buf.BGetString(u.EX_CC_VALUE, occ)\n\t\t\tac.TpLogDebug(\"Got [%s] = [%s] \", fldName, myval)\n\t\t\tbreak\n\n\t\tdefault:\n\n\t\t\tbreak\n\t\t}\n\t}\n\t//Advertize TESTSVC\n\tif err := ac.TpAdvertise(\"TESTSVC\", \"TESTSVC\", TESTSVC); err != nil {\n\t\tac.TpLogError(\"Failed to Advertise: ATMI Error %d:[%s]\\n\",\n\t\t\terr.Code(), err.Message())\n\t\treturn atmi.FAIL\n\t}\n\n\tif err := ac.TpAdvertise(\"CORSVC\", \"CORSVC\", CORSVC); err != nil {\n\t\tac.TpLogError(\"Failed to Advertise: ATMI Error %d:[%s]\\n\",\n\t\t\terr.Code(), err.Message())\n\t\treturn atmi.FAIL\n\t}\n\n\tif err := ac.TpAdvertise(\"CONSTAT\", \"CONSTAT\", CONSTAT); err != nil {\n\t\tac.TpLogError(\"Failed to Advertise: ATMI Error %d:[%s]\\n\",\n\t\t\terr.Code(), err.Message())\n\t\treturn atmi.FAIL\n\t}\n\n\treturn SUCCEED\n}", "func Init(c *conf.Config) {\n\t// service\n\tinitService(c)\n\tSvc.InitCron()\n\t// init inner router\n\teng := bm.DefaultServer(c.BM.Outer)\n\tinnerRouter(eng)\n\tif err := eng.Start(); err != nil {\n\t\tlog.Error(\"bm.DefaultServer error(%v)\", err)\n\t\tpanic(err)\n\t}\n}", "func Init(c *conf.Config) {\n\tinitService(c)\n\tengine := bm.DefaultServer(c.HTTPServer)\n\tinnerRouter(engine)\n\t// init internal server\n\tif err := engine.Start(); err != nil {\n\t\tlog.Error(\"httpx.Serve error(%v)\", err)\n\t\tpanic(err)\n\t}\n}", "func (r *Raft) Init(config *ClusterConfig, thisServerId int) {\n\tgo r.AcceptConnection(r.ClusterConfigV.Servers[thisServerId].ClientPort) // done\n\tgo r.AcceptRPC(r.ClusterConfigV.Servers[thisServerId].LogPort)\t//\n\tgo SendHeartbeat() // NOT Done TODO\n\tgo Evaluator()\t//\n\tgo AppendCaller()\n\tgo CommitCaller()\n\tgo DataWriter()\n\tr.SetElectionTimer()\n\tgo Loop() //if he is not leader TODO\n}", "func (gs *GRPCClient) Init() error {\n\treturn nil\n}", "func (kc *KerbClient) Init(srv, princ string) error {\n\tservice := C.CString(srv)\n\tdefer C.free(unsafe.Pointer(service))\n\tprincipal := C.CString(princ)\n\tdefer C.free(unsafe.Pointer(principal))\n\n\tvar delegatestate *C.gss_server_state\n\tgss_flags := C.long(C.GSS_C_MUTUAL_FLAG | C.GSS_C_SEQUENCE_FLAG)\n\tresult := 0\n\n\tkc.state = C.new_gss_client_state()\n\tif kc.state == nil {\n\t\treturn errors.New(\"Failed to allocate memory for gss_client_state\")\n\t}\n\n\tresult = int(C.authenticate_gss_client_init(service, principal, gss_flags, delegatestate, kc.state))\n\n\tif result == C.AUTH_GSS_ERROR {\n\t\treturn kc.GssError()\n\t}\n\n\treturn nil\n}", "func Init() error {\n\tconfigServerURL, err := GetConfigServerEndpoint()\n\tif err != nil {\n\t\topenlog.Warn(\"can not get config server endpoint: \" + err.Error())\n\t\treturn err\n\t}\n\n\tvar enableSSL bool\n\ttlsConfig, tlsError := getTLSForClient(configServerURL)\n\tif tlsError != nil {\n\t\topenlog.Error(fmt.Sprintf(\"Get %s.%s TLS config failed, err:[%s]\",\n\t\t\tconfigServerName, common.Consumer, tlsError.Error()))\n\t\treturn tlsError\n\t}\n\n\t/*This condition added because member discovery can have multiple ip's with IsHTTPS\n\thaving both true and false value.*/\n\tif tlsConfig != nil {\n\t\tenableSSL = true\n\t}\n\n\tinterval := config.GetConfigServerConf().RefreshInterval\n\tif interval == 0 {\n\t\tinterval = 30\n\t}\n\n\terr = initConfigServer(configServerURL, enableSSL, tlsConfig, interval)\n\tif err != nil {\n\t\topenlog.Error(\"failed to init config server: \" + err.Error())\n\t\treturn err\n\t}\n\n\topenlog.Warn(\"config server init success\")\n\treturn 
nil\n}", "func (s *Server) Init(c Configuration) (o *Server, err error) {\n\to = s\n\n\t// Init UDP server\n\tif err = o.serverUDP.Init(c); err != nil {\n\t\treturn\n\t}\n\n\t// Init HTTP server\n\tif err = o.serverHTTP.Init(c); err != nil {\n\t\treturn\n\t}\n\treturn\n}", "func (cs Cluster) Init(ctx context.Context, out, errOut io.Writer) {\n\t// this retries until it succeeds, it won't return unless it does\n\te2e.Run(ctx, out, errOut, \"./cockroach\",\n\t\t\"init\",\n\t\t\"--insecure\",\n\t\t\"--host=\"+cs[0].Addr,\n\t)\n}", "func (c *GrpcClient) Init() error {\n\tconn, err := grpc.Dial(c.serverURL, grpc.WithInsecure(), grpc.WithBlock())\n\tif err != nil {\n\t\tlog.Fatalf(\"did not connect %v\", err)\n\t\treturn err\n\t} else {\n\t\tlog.Printf(\"connnect to grpc server at %s\", c.serverURL)\n\t}\n\tc.client = protos.NewClaculatorClient(conn)\n\treturn nil\n}", "func Init(ac *atmi.ATMICtx) int {\n\n\tac.TpLogWarn(\"Doing server init...\")\n\n\t//Advertize service\n\tif err := ac.TpAdvertise(\"CREDIT\", \"CREDIT\", CREDIT); err != nil {\n\t\tac.TpLogError(\"Failed to Advertise: ATMI Error %d:[%s]\\n\", err.Code(), err.Message())\n\t\treturn atmi.FAIL\n\t}\n\n\treturn SUCCEED\n}", "func (svr *Server) Init(maxConns int) (err error) {\n\tsvr.LineMgr, err = NewMLLineMgr(maxConns)\n\tif err != nil {\n\t\tutils.Logger.Error(\"New Magline Connection Pool Error!\")\n\t\treturn\n\t}\n\treturn\n}", "func InitClient(host string, port string, prefix string, globalTags []string) error {\n\tvar err error\n\t// WithMaxBytesPerPayload optimal value is 1432, stacktrace is bigger than that, so remove it for keeping safe\n\tc, err = statsd.New(host+\":\"+port, statsd.WithMaxBytesPerPayload(maxBytesPerPayload))\n\tif err != nil {\n\t\treturn err\n\t}\n\tc.Tags = append(c.Tags, globalTags...)\n\tc.Namespace = prefix + \".\"\n\tc.Incr(\"server_start\", []string(nil), 1)\n\treturn nil\n}", "func (client *Client) Init() (err error) {\n\tconfig := consul.DefaultConfig()\n\tconfig.Address = client.ConsulAddress\n\tconfig.Datacenter = client.ConsulDatacenter\n\n\tif client.Client, err = consul.NewClient(config); err != nil {\n\t\treturn errors.Wrap(err, \"connecting to consul\")\n\t}\n\tif err = client.Agent().ServiceRegister(&consul.AgentServiceRegistration{\n\t\tID: client.ID,\n\t\tName: client.Name,\n\t\tTags: client.Tags,\n\t\tPort: client.Port,\n\t\tAddress: client.Address,\n\t}); err != nil {\n\t\treturn errors.Wrap(err, \"registering service\")\n\t}\n\tclient.connected = true\n\treturn\n}", "func init() {\n\t// This function will be executed before everything else.\n\t// Do some initialization here.\n\tSBC = data.NewBlockChain()\n\t// When server works\n\t// Peers = data.NewPeerList(Register(), 32)\n\t// While server doesn't work -> use port as id\n\tid, _ := strconv.ParseInt(os.Args[1], 10, 64)\n\tPeers = data.NewPeerList(int32(id), 32)\n\tifStarted = true\n}", "func (c *Client) Init() error {\n\tif !c.initialized {\n\t\tcfg := c.Config\n\t\tlog.Debugf(\"Initializing client with config %+v\", cfg)\n\n\t\tcspDir, err := util.MakeFileAbs(\"csp\", c.HomeDir)\n\t\tif err != nil {\n\t\t\treturn err\n\t\t}\n\t\tc.cspDir = cspDir\n\n\t\tkeyDir := filepath.Join(cspDir, \"keystore\")\n\t\terr = os.MkdirAll(keyDir, 0700)\n\t\tif err != nil {\n\t\t\treturn errors.Wrap(err, \"Failed to create keystore directory\")\n\t\t}\n\t\tc.keyFile = filepath.Join(keyDir, \"key.pem\")\n\n\t\tcertDir := filepath.Join(cspDir, \"signcerts\")\n\t\terr = os.MkdirAll(certDir, 0755)\n\t\tif err != nil {\n\t\t\treturn errors.Wrap(err, 
\"Failed to create signcerts directory\")\n\t\t}\n\t\tc.certFile = filepath.Join(certDir, \"cert.pem\")\n\n\t\tc.caCertsDir = filepath.Join(cspDir, \"cacerts\")\n\t\terr = os.MkdirAll(c.caCertsDir, 0755)\n\t\tif err != nil {\n\t\t\treturn errors.Wrap(err, \"Failed to create cacerts directory\")\n\t\t}\n\n\t\tc.csp, err = util.InitCCCSP(keyDir)\n\t\tif err != nil {\n\t\t\treturn err\n\t\t}\n\n\t\terr = c.initHTTPClient()\n\t\tif err != nil {\n\t\t\treturn err\n\t\t}\n\n\t\tc.initialized = true\n\t}\n\treturn nil\n}", "func InitServer() *Server {\n\ts := &Server{\n\t\t&goserver.GoServer{},\n\t\t\"logs\",\n\t\tmake(map[string]*logHolder),\n\t\t&sync.Mutex{},\n\t\ttime.Now(),\n\t\t&sync.Mutex{},\n\t\t0,\n\t\t0,\n\t\tmake(map[string]int),\n\t\t&sync.Mutex{},\n\t\t&pb.Config{},\n\t}\n\ts.Register = s\n\treturn s\n}", "func Init() {\n\tonceRest.Do(func() {\n\t\tconf := config.GetConfig()\n\t\tlogger.Log.Info(\"Initializing Rest server\")\n\t\tr := NewRouter()\n\t\tif err := r.Start(conf.GetString(\"general.rest_server_port\")); err != nil {\n\t\t\tlogger.Log.Fatal(\"Unable to bring service up: \" + err.Error())\n\t\t}\n\n\t})\n\n}", "func (c *Client) Init() error {\n\treturn nil\n}", "func (c *Client) Init() error {\n\treturn nil\n}", "func Init(opt ...Option) {\n\tif DefaultServer == nil {\n\t\tDefaultServer = newRpcServer(opt...)\n\t}\n\tDefaultServer.Init(opt...)\n}", "func InitServer(svc *service.Service) *echo.Echo {\n\tmapper := mapper.New()\n\tsvm := service.NewServiceManager()\n\tsrv := echo.New()\n\n\tuserService := svm.UserService(svc)\n\thealthCheckService := svm.HealthCheckService(svc)\n\tcloudVisionService := svm.CloudVisionService(svc)\n\tuploadService := svm.UploadService(svc, cloudVisionService)\n\n\t//CORS\n\tsrv.Use(middleware.CORSWithConfig(middleware.CORSConfig{\n\t\tAllowOrigins: []string{\"*\"},\n\t\tAllowMethods: []string{echo.GET, echo.HEAD, echo.PUT, echo.PATCH, echo.POST, echo.DELETE},\n\t}))\n\n\treturn setupRoutes(srv, &controller{\n\t\tpingController: pingroute.NewController(),\n\t\tuserController: userroute.NewController(userService, mapper),\n\t\tuploadController: uploadroute.NewController(uploadService),\n\t\thealthcheckController: healthcheckroute.NewController(healthCheckService, mapper),\n\t})\n}", "func TcpManagerInit() *TcpManager {\n\tm := &TcpManager{\n\t\tbind: make(chan *TcpClient, 10),\n\t\tunbind: make(chan *TcpClient, 10),\n\t\tclose: make(chan *TcpClient, 10),\n\t\tbindClients: util.NewConcMap(),\n\t\tunbindClients: util.NewConcMap(),\n\t}\n\tants.Submit(m.Run)\n\treturn m\n}", "func Init(opt ...Option) {\r\n\tif DefaultServer == nil {\r\n\t\tDefaultServer = newRpcServer(opt...)\r\n\t}\r\n\tDefaultServer.Init(opt...)\r\n}", "func init() {\n\t// Configure and start the API\n\tgo func() {\n\t\tapp := igcinfo.App{\n\t\t\tListenPort: listenPort}\n\t\tapp.StartServer()\n\t}()\n\n\t// Ensure server is started before continuing\n\ttime.Sleep(1000 * time.Millisecond)\n}", "func Init() *Server {\n\ts := &Server{\n\t\t&goserver.GoServer{},\n\t\t0,\n\t}\n\treturn s\n}", "func (s *srv) Initialize() (err error) {\n\ts.listener, err = net.Listen(\"tcp\", fmt.Sprintf(\"%s:%d\", s.Address, s.Port))\n\tif err != nil {\n\t\treturn err\n\t}\n\treturn nil\n}", "func Init(config oc.Config, logger *zap.Logger) {\n\t// Create metrics server\n\tcensus, err := config.Build(oc.WithLogger(logger))\n\tif err != nil {\n\t\tlogger.Error(\"Failed building census\", zap.Error(err))\n\t\treturn\n\t}\n\thttp.Handle(\"/metrics\", census.StatsHandler)\n\tgo func() {\n\t\tdefer 
census.Close()\n\t\thttp.ListenAndServe(\":9100\", nil)\n\t}()\n}", "func (s *Server) init(renew bool) (err error) {\n\ts.Config.Operations.Metrics = s.Config.Metrics\n\ts.Operations = operations.NewSystem(s.Config.Operations)\n\ts.initMetrics()\n\n\tserverVersion := metadata.GetVersion()\n\terr = calog.SetLogLevel(s.Config.LogLevel, s.Config.Debug)\n\tif err != nil {\n\t\treturn err\n\t}\n\tlog.Infof(\"Server Version: %s\", serverVersion)\n\ts.levels, err = metadata.GetLevels(serverVersion)\n\tif err != nil {\n\t\treturn err\n\t}\n\tlog.Infof(\"Server Levels: %+v\", s.levels)\n\n\ts.mux = gmux.NewRouter()\n\t// Initialize the config\n\terr = s.initConfig()\n\tif err != nil {\n\t\treturn err\n\t}\n\t// Initialize the default CA last\n\terr = s.initDefaultCA(renew)\n\tif err != nil {\n\t\treturn err\n\t}\n\t// Successful initialization\n\treturn nil\n}", "func (plugin *Plugin) Init() error {\n\tplugin.cniServer = newRemoteCNIServer(plugin.Log, plugin.Proxy)\n\tcni.RegisterRemoteCNIServer(plugin.GRPC.Server(), plugin.cniServer)\n\treturn nil\n}", "func InitServer(port int) {\n\tlis, err := net.Listen(\"tcp\", fmt.Sprintf(\":%d\", port))\n\tif err != nil {\n\t\tlog.Fatalf(\"failed to listen: %v\", err)\n\t}\n\t// var opts []grpc.ServerOption\n\tgrpcServer := grpc.NewServer()\n\tproto.RegisterSynchronizationServer(grpcServer, &syncServer{})\n\tgrpcServer.Serve(lis)\n}", "func InitCDServerClientWithRemoteMode() {\n\tCDServer = NewRemoteCDServerClient(\"http://127.0.0.1:8080\")\n}", "func (rtspService *RTSPService) Init(msg *wssapi.Msg) (err error) {\n\tif nil == msg || nil == msg.Param1 {\n\t\tlogger.LOGE(\"init rtsp server failed\")\n\t\treturn errors.New(\"invalid param init rtsp server\")\n\t}\n\tfileName, ok := msg.Param1.(string)\n\tif false == ok {\n\t\tlogger.LOGE(\"bad param init rtsp server\")\n\t\treturn errors.New(\"invalid param init rtsp server\")\n\t}\n\terr = rtspService.loadConfigFile(fileName)\n\tif err != nil {\n\t\tlogger.LOGE(\"load rtsp config failed:\" + err.Error())\n\t\treturn\n\t}\n\treturn\n}", "func InitServer(ip string) Server {\n\tserver := Server{IP: ip, Port: 8090}\n\treturn server\n}", "func (hfc *FabricSetup) Init() {\n\t//adding logger for outut\n\tbackend := logging.NewLogBackend(os.Stderr, \"\", 0)\n\tbackendFormatter := logging.NewBackendFormatter(backend, format)\n\tbackendLeveled := logging.AddModuleLevel(backend)\n\tbackendLeveled.SetLevel(logging.DEBUG, \"\")\n\tlogging.SetBackend(backendLeveled, backendFormatter)\n\tlogger.Info(\"================ Creating New SDK Instance ================\")\n\n\t//initializing SDK\n\tvar config = config.FromFile(hfc.ConfigFileName)\n\tvar err error\n\thfc.Sdk, err = fabsdk.New(config)\n\tif err != nil {\n\t\tlogger.Infof(\"Unable to create new instance of SDk: %s\\n\", err)\n\t}\n\n\t//clean up user data from previous runs\n\tconfigBackend, err := hfc.Sdk.Config()\n\tif err != nil {\n\t\tlogger.Fatal(err)\n\t}\n\n\tcryptoSuiteConfig := cryptosuite.ConfigFromBackend(configBackend)\n\tidentityConfig, err := mspIdentity.ConfigFromBackend(configBackend)\n\tif err != nil {\n\t\tlogger.Fatal(err)\n\t}\n\n\tkeyStorePath := cryptoSuiteConfig.KeyStorePath()\n\tcredentialStorePath := identityConfig.CredentialStorePath()\n\thfc.cleanupPath(keyStorePath)\n\thfc.cleanupPath(credentialStorePath)\n}", "func (m *Mosn) Init(c *v2.MOSNConfig) error {\n\tif c.CloseGraceful {\n\t\tc.DisableUpgrade = true\n\t}\n\tif err := m.inheritConfig(c); err != nil {\n\t\treturn err\n\t}\n\n\tlog.StartLogger.Infof(\"[mosn start] init the members of 
the mosn\")\n\n\t// after inherit config,\n\t// since metrics need the isFromUpgrade flag in Mosn\n\tm.initializeMetrics()\n\tm.initClusterManager()\n\tm.initServer()\n\n\t// set the mosn config finally\n\tconfigmanager.SetMosnConfig(m.Config)\n\treturn nil\n}", "func (dc *Client) Init() error {\n\tdc.GenericServicePool = make(map[string]*dg.GenericService, 4)\n\n\tcls := config.GetBootstrap().StaticResources.Clusters\n\n\t// dubbogo comsumer config\n\tdgCfg = dg.ConsumerConfig{\n\t\tCheck: new(bool),\n\t\tRegistries: make(map[string]*dg.RegistryConfig, 4),\n\t}\n\tdgCfg.ApplicationConfig = defaultApplication\n\tfor i := range cls {\n\t\tc := cls[i]\n\t\tdgCfg.Request_Timeout = c.RequestTimeoutStr\n\t\tdgCfg.Connect_Timeout = c.ConnectTimeoutStr\n\t\tfor k, v := range c.Registries {\n\t\t\tif len(v.Protocol) == 0 {\n\t\t\t\tlogger.Warnf(\"can not find registry protocol config, use default type 'zookeeper'\")\n\t\t\t\tv.Protocol = defaultDubboProtocol\n\t\t\t}\n\t\t\tdgCfg.Registries[k] = &dg.RegistryConfig{\n\t\t\t\tProtocol: v.Protocol,\n\t\t\t\tAddress: v.Address,\n\t\t\t\tTimeoutStr: v.Timeout,\n\t\t\t\tUsername: v.Username,\n\t\t\t\tPassword: v.Password,\n\t\t\t}\n\t\t}\n\t}\n\n\tinitDubbogo()\n\n\treturn nil\n}", "func (ch *CFHosting) Init() error {\n\n\t// Determine if we are running CF by presence of env var \"VCAP_APPLICATION\" and configure appropriately\n\tif ch.portalProxy.Env().IsSet(VCapApplication) {\n\t\tlog.Info(\"Detected that Console is deployed as a Cloud Foundry Application\")\n\n\t\t// Record that we are deployed in Cloud Foundry\n\t\tch.portalProxy.GetConfig().IsCloudFoundry = true\n\n\t\tch.portalProxy.GetConfig().ConsoleConfig = new(interfaces.ConsoleConfig)\n\n\t\t// We are using the CF UAA - so the Console must use the same Client and Secret as CF\n\t\tch.portalProxy.GetConfig().ConsoleConfig.ConsoleClient = ch.portalProxy.GetConfig().CFClient\n\t\tch.portalProxy.GetConfig().ConsoleConfig.ConsoleClientSecret = ch.portalProxy.GetConfig().CFClientSecret\n\n\t\t//Set the auth endpoint type for the console\n\t\tch.portalProxy.GetConfig().ConsoleConfig.AuthEndpointType = ch.portalProxy.GetConfig().AuthEndpointType\n\n\t\t// Ensure that the identifier for an admin is the standard Cloud Foundry one\n\t\tch.portalProxy.GetConfig().ConsoleConfig.ConsoleAdminScope = ch.portalProxy.GetConfig().CFAdminIdentifier\n\n\t\t// Allow Console Application manifest to override the Admin Scope if really desired\n\t\tstratosAdminScope, ok := ch.portalProxy.Env().Lookup(\"STRATOS_ADMIN_SCOPE\")\n\t\tif ok {\n\t\t\tch.portalProxy.GetConfig().ConsoleConfig.ConsoleAdminScope = stratosAdminScope\n\t\t\tlog.Infof(\"Overriden Console Admin Scope to: %s\", stratosAdminScope)\n\t\t}\n\n\t\t// Need to run as HTTP on the port we were told to use\n\t\tch.portalProxy.GetConfig().HTTPS = false\n\n\t\tport, ok := ch.portalProxy.Env().Lookup(\"PORT\")\n\t\tif ok {\n\t\t\tch.portalProxy.GetConfig().TLSAddress = \":\" + port\n\t\t\tlog.Infof(\"Updated Console address to: %s\", ch.portalProxy.GetConfig().TLSAddress)\n\t\t}\n\n\t\t// Get the cf_api value from the JSON\n\t\tvar appData interfaces.VCapApplicationData\n\t\tvCapApp, _ := ch.portalProxy.Env().Lookup(VCapApplication)\n\t\tdata := []byte(vCapApp)\n\t\terr := json.Unmarshal(data, &appData)\n\t\tif err != nil {\n\t\t\tlog.Fatalf(\"Could not get the Cloud Foundry API URL: %+v\", err)\n\t\t\treturn nil\n\t\t}\n\n\t\tlog.Infof(\"CF API URL: %s\", appData.API)\n\n\t\t// Allow the URL to be overridden by an application environment variable\n\t\tif 
ch.portalProxy.Env().IsSet(CFApiURLOverride) {\n\t\t\tapiUrl, _ := ch.portalProxy.Env().Lookup(CFApiURLOverride)\n\t\t\tappData.API = apiUrl\n\t\t\tlog.Infof(\"Overriden CF API URL from environment variable %s\", apiUrl)\n\t\t}\n\n\t\tif ch.portalProxy.Env().IsSet(CFApiForceSecure) {\n\t\t\t// Force the API URL protocol to be https\n\t\t\tappData.API = strings.Replace(appData.API, \"http://\", \"https://\", 1)\n\t\t\tlog.Infof(\"Ensuring that CF API URL is accessed over HTTPS\")\n\t\t} else {\n\t\t\tlog.Info(\"No forced override to HTTPS\")\n\t\t}\n\n\t\t// Ephemeral Database indicates if we are running with a DB like SQLite, which is Ephemeral\n\t\t// Only need to do this if the Database we are using is SQLite\n\t\tisSQLite := ch.portalProxy.GetConfig().DatabaseProviderName == SQLiteProviderName\n\t\tdisablePersistenceFeatures := isSQLite\n\t\tif ch.portalProxy.Env().IsSet(ForceEnablePersistenceFeatures) {\n\t\t\t// Force the Endpoint Dashboard to be visible?\n\t\t\tdisablePersistenceFeatures = !ch.portalProxy.Env().MustBool(ForceEnablePersistenceFeatures)\n\t\t\tif disablePersistenceFeatures {\n\t\t\t\tlog.Info(\"Features requiring persistence have been DISABLED\")\n\t\t\t} else {\n\t\t\t\tlog.Info(\"Features requiring persistence have been ENABLED\")\n\t\t\t}\n\t\t}\n\t\tch.portalProxy.GetConfig().PluginConfig[\"disablePersistenceFeatures\"] = strconv.FormatBool(disablePersistenceFeatures)\n\t\tlog.Infof(\"Features requiring persistence: enabled: %s\", strconv.FormatBool(!disablePersistenceFeatures))\n\n\t\tlog.Infof(\"Using Cloud Foundry API URL: %s\", appData.API)\n\t\tcfEndpointSpec, _ := ch.portalProxy.GetEndpointTypeSpec(\"cf\")\n\t\tnewCNSI, _, err := cfEndpointSpec.Info(appData.API, true)\n\t\tif err != nil {\n\t\t\tlog.Fatalf(\"Could not get the info for Cloud Foundry: %+v\", err)\n\t\t\treturn nil\n\t\t}\n\n\t\t// Override the configuration to set the authorization endpoint\n\t\turl, err := url.Parse(newCNSI.AuthorizationEndpoint)\n\t\tif err != nil {\n\t\t\treturn fmt.Errorf(\"Invalid authorization endpoint URL %s %s\", newCNSI.AuthorizationEndpoint, err)\n\t\t}\n\n\t\tch.portalProxy.GetConfig().ConsoleConfig.AuthorizationEndpoint = url\n\n\t\t// Override the configuration to set the authorization endpoint\n\t\turl, err = url.Parse(newCNSI.TokenEndpoint)\n\t\tif err != nil {\n\t\t\treturn fmt.Errorf(\"Invalid token endpoint URL %s %s\", newCNSI.TokenEndpoint, err)\n\t\t}\n\n\t\tch.portalProxy.GetConfig().ConsoleConfig.UAAEndpoint = url\n\n\t\tlog.Infof(\"Cloud Foundry UAA is: %s\", ch.portalProxy.GetConfig().ConsoleConfig.UAAEndpoint)\n\n\t\t// Not set in the environment and failed to read from the Secrets file\n\t\t// CHECK is this necessary to set here?\n\t\tch.portalProxy.GetConfig().ConsoleConfig.SkipSSLValidation = ch.portalProxy.Env().MustBool(\"SKIP_SSL_VALIDATION\")\n\n\t\tif !ch.portalProxy.Env().IsSet(SkipAutoRegister) {\n\t\t\tlog.Info(\"Setting AUTO_REG_CF_URL config to \", appData.API)\n\t\t\tch.portalProxy.GetConfig().AutoRegisterCFUrl = appData.API\n\t\t} else {\n\t\t\tlog.Infof(\"Skipping auto-register of CF Endpoint - %s is set\", SkipAutoRegister)\n\t\t}\n\n\t\t// Store the space and id of the Console application - we can use these to prevent stop/delete in the front-end\n\t\tif ch.portalProxy.GetConfig().CloudFoundryInfo == nil {\n\t\t\tch.portalProxy.GetConfig().CloudFoundryInfo = &interfaces.CFInfo{}\n\t\t}\n\t\tch.portalProxy.GetConfig().CloudFoundryInfo.SpaceGUID = appData.SpaceID\n\t\tch.portalProxy.GetConfig().CloudFoundryInfo.AppGUID = 
appData.ApplicationID\n\n\t\tlog.Info(\"All done for Cloud Foundry deployment\")\n\t}\n\treturn nil\n}", "func Init(config *cfg.Config, logger log.Logger, conns proxy.AppConns) *RelayController {\n\tinitOnce.Do(func() {\n\t\ttemp := strings.Split(config.RPC.ListenAddress, \":\")\n\t\tlocalURL := \"http://127.0.0.1:\" + temp[len(temp)-1]\n\n\t\tgRelay = &RelayController{\n\t\t\tLocalURL: localURL,\n\t\t\tcurrentNodeAddress: getNodeAddress(config, \"\", \"\", 0),\n\t\t\tconfig: config,\n\t\t\tabciClient: conns,\n\t\t\tlogger: logger,\n\t\t}\n\n\t\tgRelay.init()\n\n\t\tlogger.Info(\"RELAY init\", \"gRelay\", gRelay)\n\t})\n\n\treturn gRelay\n}", "func (ca *CA) Init(cfg *config.Config) (*CA, error) {\n\t// Set password, it's ok to set nil password, the ca will prompt for them if\n\t// they are required.\n\topts := []authority.Option{\n\t\tauthority.WithPassword(ca.opts.password),\n\t\tauthority.WithSSHHostPassword(ca.opts.sshHostPassword),\n\t\tauthority.WithSSHUserPassword(ca.opts.sshUserPassword),\n\t\tauthority.WithIssuerPassword(ca.opts.issuerPassword),\n\t}\n\tif ca.opts.linkedCAToken != \"\" {\n\t\topts = append(opts, authority.WithLinkedCAToken(ca.opts.linkedCAToken))\n\t}\n\n\tif ca.opts.database != nil {\n\t\topts = append(opts, authority.WithDatabase(ca.opts.database))\n\t}\n\n\tif ca.opts.quiet {\n\t\topts = append(opts, authority.WithQuietInit())\n\t}\n\n\twebhookTransport := http.DefaultTransport.(*http.Transport).Clone()\n\topts = append(opts, authority.WithWebhookClient(&http.Client{Transport: webhookTransport}))\n\n\tauth, err := authority.New(cfg, opts...)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\tca.auth = auth\n\n\ttlsConfig, clientTLSConfig, err := ca.getTLSConfig(auth)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\n\twebhookTransport.TLSClientConfig = clientTLSConfig\n\n\t// Using chi as the main router\n\tmux := chi.NewRouter()\n\thandler := http.Handler(mux)\n\n\tinsecureMux := chi.NewRouter()\n\tinsecureHandler := http.Handler(insecureMux)\n\n\t// Add HEAD middleware\n\tmux.Use(middleware.GetHead)\n\tinsecureMux.Use(middleware.GetHead)\n\n\t// Add regular CA api endpoints in / and /1.0\n\tapi.Route(mux)\n\tmux.Route(\"/1.0\", func(r chi.Router) {\n\t\tapi.Route(r)\n\t})\n\n\t// Mount the CRL to the insecure mux\n\tinsecureMux.Get(\"/crl\", api.CRL)\n\tinsecureMux.Get(\"/1.0/crl\", api.CRL)\n\n\t// Add ACME api endpoints in /acme and /1.0/acme\n\tdns := cfg.DNSNames[0]\n\tu, err := url.Parse(\"https://\" + cfg.Address)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\tport := u.Port()\n\tif port != \"\" && port != \"443\" {\n\t\tdns = fmt.Sprintf(\"%s:%s\", dns, port)\n\t}\n\n\t// ACME Router is only available if we have a database.\n\tvar acmeDB acme.DB\n\tvar acmeLinker acme.Linker\n\tif cfg.DB != nil {\n\t\tacmeDB, err = acmeNoSQL.New(auth.GetDatabase().(nosql.DB))\n\t\tif err != nil {\n\t\t\treturn nil, errors.Wrap(err, \"error configuring ACME DB interface\")\n\t\t}\n\t\tacmeLinker = acme.NewLinker(dns, \"acme\")\n\t\tmux.Route(\"/acme\", func(r chi.Router) {\n\t\t\tacmeAPI.Route(r)\n\t\t})\n\t\t// Use 2.0 because, at the moment, our ACME api is only compatible with v2.0\n\t\t// of the ACME spec.\n\t\tmux.Route(\"/2.0/acme\", func(r chi.Router) {\n\t\t\tacmeAPI.Route(r)\n\t\t})\n\t}\n\n\t// Admin API Router\n\tif cfg.AuthorityConfig.EnableAdmin {\n\t\tadminDB := auth.GetAdminDatabase()\n\t\tif adminDB != nil {\n\t\t\tacmeAdminResponder := adminAPI.NewACMEAdminResponder()\n\t\t\tpolicyAdminResponder := 
adminAPI.NewPolicyAdminResponder()\n\t\t\twebhookAdminResponder := adminAPI.NewWebhookAdminResponder()\n\t\t\tmux.Route(\"/admin\", func(r chi.Router) {\n\t\t\t\tadminAPI.Route(\n\t\t\t\t\tr,\n\t\t\t\t\tadminAPI.WithACMEResponder(acmeAdminResponder),\n\t\t\t\t\tadminAPI.WithPolicyResponder(policyAdminResponder),\n\t\t\t\t\tadminAPI.WithWebhookResponder(webhookAdminResponder),\n\t\t\t\t)\n\t\t\t})\n\t\t}\n\t}\n\n\tvar scepAuthority *scep.Authority\n\tif ca.shouldServeSCEPEndpoints() {\n\t\tscepPrefix := \"scep\"\n\t\tscepAuthority, err = scep.New(auth, scep.AuthorityOptions{\n\t\t\tService: auth.GetSCEPService(),\n\t\t\tDNS: dns,\n\t\t\tPrefix: scepPrefix,\n\t\t})\n\t\tif err != nil {\n\t\t\treturn nil, errors.Wrap(err, \"error creating SCEP authority\")\n\t\t}\n\n\t\t// According to the RFC (https://tools.ietf.org/html/rfc8894#section-7.10),\n\t\t// SCEP operations are performed using HTTP, so that's why the API is mounted\n\t\t// to the insecure mux.\n\t\tinsecureMux.Route(\"/\"+scepPrefix, func(r chi.Router) {\n\t\t\tscepAPI.Route(r)\n\t\t})\n\n\t\t// The RFC also mentions usage of HTTPS, but seems to advise\n\t\t// against it, because of potential interoperability issues.\n\t\t// Currently I think it's not bad to use HTTPS also, so that's\n\t\t// why I've kept the API endpoints in both muxes and both HTTP\n\t\t// as well as HTTPS can be used to request certificates\n\t\t// using SCEP.\n\t\tmux.Route(\"/\"+scepPrefix, func(r chi.Router) {\n\t\t\tscepAPI.Route(r)\n\t\t})\n\t}\n\n\t// helpful routine for logging all routes\n\t//dumpRoutes(mux)\n\t//dumpRoutes(insecureMux)\n\n\t// Add monitoring if configured\n\tif len(cfg.Monitoring) > 0 {\n\t\tm, err := monitoring.New(cfg.Monitoring)\n\t\tif err != nil {\n\t\t\treturn nil, err\n\t\t}\n\t\thandler = m.Middleware(handler)\n\t\tinsecureHandler = m.Middleware(insecureHandler)\n\t}\n\n\t// Add logger if configured\n\tif len(cfg.Logger) > 0 {\n\t\tlogger, err := logging.New(\"ca\", cfg.Logger)\n\t\tif err != nil {\n\t\t\treturn nil, err\n\t\t}\n\t\thandler = logger.Middleware(handler)\n\t\tinsecureHandler = logger.Middleware(insecureHandler)\n\t}\n\n\t// Create context with all the necessary values.\n\tbaseContext := buildContext(auth, scepAuthority, acmeDB, acmeLinker)\n\n\tca.srv = server.New(cfg.Address, handler, tlsConfig)\n\tca.srv.BaseContext = func(net.Listener) context.Context {\n\t\treturn baseContext\n\t}\n\n\t// only start the insecure server if the insecure address is configured\n\t// and, currently, also only when it should serve SCEP endpoints.\n\tif ca.shouldServeInsecureServer() {\n\t\t// TODO: instead opt for having a single server.Server but two\n\t\t// http.Servers handling the HTTP and HTTPS handler? 
The latter\n\t\t// will probably introduce more complexity in terms of graceful\n\t\t// reload.\n\t\tca.insecureSrv = server.New(cfg.InsecureAddress, insecureHandler, nil)\n\t\tca.insecureSrv.BaseContext = func(net.Listener) context.Context {\n\t\t\treturn baseContext\n\t\t}\n\t}\n\n\treturn ca, nil\n}", "func (c *ClusterManager) Init(zl instances.ZoneLister, pp backends.ProbeProvider) {\n\tc.instancePool.Init(zl)\n\tc.backendPool.Init(pp)\n\t// TODO: Initialize other members as needed.\n}", "func Initialize() error {\n\tenv, err := Env()\n\tif err != nil {\n\t\treturn err\n\t}\n\n\tfmt.Println(\"Enviroment: \", env)\n\n\tconf, err := config.NewConfig(env)\n\tif err != nil {\n\t\treturn err\n\t}\n\n\tdbInstances, err := db.NewInitializedInstances(conf)\n\tif err != nil {\n\t\treturn err\n\t}\n\n\tgrpcCons, err := grpcPkg.NewInitializeConnections(conf)\n\tif err != nil {\n\t\treturn err\n\t}\n\n\terr = StartServers(conf, dbInstances, grpcCons)\n\tif err != nil {\n\t\treturn err\n\t}\n\n\treturn nil\n}", "func Init() {\n\tr := router()\n\tr.Run(\":3001\")\n}", "func Init(opt ...Option) {\n\tif DefaultServer == nil {\n\t\tDefaultServer = newGinServer(opt...)\n\t}\n\tDefaultServer.Init(opt...)\n}", "func InitServer() Server {\n\tserver := Server{&goserver.GoServer{}, ssdp.MakeManager()}\n\tserver.Register = server\n\n\treturn server\n}", "func Init(port int, in <-chan messages.LocalMsg, out chan<- messages.LocalMsg) {\n\t//golog.SetAllLoggers(golog.LevelDebug)\n\tgob.Register(blockchain.Block{})\n\tgob.Register(helloData{})\n\tgob.Register(peerData{})\n\tserver = newTCPServer(port, in, out)\n\tgossipNdxs = make([]int, gossipSize)\n\tserver.start()\n}", "func (h *csiService) init() error {\n\tvar err error\n\th.ops = h\n\tos.Remove(h.Socket)\n\tl, err := net.Listen(\"unix\", h.Socket)\n\tif err != nil {\n\t\treturn err\n\t}\n\th.listener = l\n\th.server = grpc.NewServer()\n\treturn nil\n}", "func InitializeServer(wg *sync.WaitGroup) {\n\n\tServerLoggerInitialize(\"seelog.xml\")\n\n\tConfigInitialize(\"config.ini\")\n\n\tHandlerInitialize()\n\n\tWorkerInitialize()\n\n\truntime.GOMAXPROCS(runtime.NumCPU())\n\n\t//초기화 완료 처리\n\twg.Done()\n}", "func (c *Client) InitTLS(certFile string) error {\n\tserverCert, err := ioutil.ReadFile(certFile)\n\tif err != nil {\n\t\treturn err\n\t}\n\tCA_Pool := x509.NewCertPool()\n\tCA_Pool.AppendCertsFromPEM(serverCert)\n\tc.mutex.Lock()\n\tc.rootCAs = CA_Pool\n\tc.mutex.Unlock()\n\tif c.agentProvider != nil {\n\t\treturn c.agentProvider.Refresh()\n\t}\n\treturn nil\n}", "func (s *Server) Init(identityProvider identity.Provider) error {\n\ts.backoffHandler = NewBackoffHandler(s)\n\n\tif identityProvider == nil {\n\t\treturn fmt.Errorf(\"Error initializing identity provider\")\n\t}\n\ts.identityProvider = identityProvider\n\n\tif err := identityProvider.Init(); err != nil {\n\t\tlog.Println(\"Error initializing jwt store:\", err)\n\t\treturn err\n\t}\n\ts.initialized = true\n\treturn nil\n}", "func initServer() {\n\trouter := mux.NewRouter()\n\trouter.HandleFunc(\"/consumables\", consumablesListHandler).Methods(\"GET\")\n\trouter.HandleFunc(\"/consumables\", optionsHandler).Methods(\"OPTIONS\")\n\trouter.HandleFunc(\"/consumables\", consumablesCreateHandler).Methods(\"POST\")\n\trouter.HandleFunc(\"/ingest\", optionsHandler).Methods(\"OPTIONS\")\n\trouter.HandleFunc(\"/ingest\", ingestHandler).Methods(\"POST\")\n\trouter.HandleFunc(\"/status/now\", statusHandler).Methods(\"GET\")\n\trouter.HandleFunc(\"/status/time\", 
statusTimeHandler).Methods(\"GET\")\n\thttp.Handle(\"/\", router)\n\thttp.ListenAndServe(fmt.Sprintf(\":%s\", os.Getenv(\"CAFFEINE_PORT\")), nil)\n}", "func (s *Server) Init() {\n\tif s.isInitialized {\n\t\tpanic(ErrAlreadyInitialized)\n\t}\n\n\t// If the repos are still missing, use the default implementation: AWS\n\tif s.options.documentRepo == nil || s.options.projectRepo == nil {\n\t\ts.With(AWS(\"eu-west-1\"))\n\t}\n\n\ts.projectHandler = project.Handler{\n\t\tProjectRepository: s.options.projectRepo,\n\t\tDocumentRepository: s.options.documentRepo,\n\t}\n\n\ts.documentHandler = document.Handler{\n\t\tDocumentRepository: s.options.documentRepo,\n\t}\n\n\t// Create router.\n\ts.router = chi.NewRouter()\n\n\t// Add middlewares.\n\ts.router.Use(cors.Handler(cors.Options{\n\t\tAllowedOrigins: []string{\"*\"},\n\t\tAllowedMethods: []string{\"GET\", \"POST\", \"PUT\", \"DELETE\", \"OPTIONS\"},\n\t\tAllowedHeaders: []string{\"*\"},\n\t\tExposedHeaders: []string{\"Link\"},\n\t\tAllowCredentials: false,\n\t\tMaxAge: 300, // Maximum value not ignored by any of major browsers\n\t}))\n\ts.router.Use(middleware.Logger)\n\n\t// Add routes.\n\ts.setupRoutes()\n\ts.isInitialized = true\n}", "func init() {\r\n// get the arguments to run the server\r\n\tflag.StringVar(&Host,\"httpserver\",DEFAULT_HOST,\"name of HTTP server\")\r\n\tflag.IntVar(&Port,\"port\",DEFAULT_PORT,\"port number\")\r\n\tflag.StringVar(&UrlPath,\"urlpath\",DEFAULT_URLPATH,\"relative url path\")\r\n\tflag.StringVar(&SecretKey,\"key\",DEFAULT_KEY,\"secret key to terminate program via TCP/UDP port\")\r\n\tflag.BoolVar(&isVerbose,\"verbose\",false,\"enable verbose logging output\")\r\n\tflag.Parse()\r\n\tlogger.Print(\"Starting servers on Port:\"+strconv.Itoa(Port)+\" HTTP-server:\"+Host+\" urlpath:\"+UrlPath+\" Key:\"+SecretKey)\r\n\tinitConf()\r\n}", "func Init(c *conf.Config, s *service.Service) {\n\trelationSvc = s\n\tverify = v.New(c.Verify)\n\tanti = antispam.New(c.Antispam)\n\taddFollowingRate = rate.New(c.AddFollowingRate)\n\t// init inner router\n\tengine := bm.DefaultServer(c.BM)\n\tsetupInnerEngine(engine)\n\tif err := engine.Start(); err != nil {\n\t\tlog.Error(\"engine.Start() error(%v)\", err)\n\t\tpanic(err)\n\t}\n}", "func (c *CentralCacheTestImpl) Init(conf Config) {\n\tc.baseUrl = conf.Host\n\tc.keyPrefix = conf.KeyPrefix\n\tc.dumpFilePath = conf.DumpFilePath\n\tc.expirySec = conf.ExpirySec\n\tc.file = nil\n}", "func Init(localCluster, remoteCluster discoveryv1alpha1.ClusterIdentity, nodeName, nodeIP string) {\n\tLocalCluster = localCluster\n\tRemoteCluster = remoteCluster\n\n\tLiqoNodeName = nodeName\n\tLiqoNodeIP = nodeIP\n\tStartTime = time.Now().Truncate(time.Second)\n\n\t// The kubernetes service port is directly retrieved from the corresponding environment variable,\n\t// since it is the one used locally. 
In case it is not found, it is defaulted to 443.\n\tKubernetesServicePort = os.Getenv(\"KUBERNETES_SERVICE_PORT\")\n\tif KubernetesServicePort == \"\" {\n\t\tKubernetesServicePort = \"443\"\n\t}\n}", "func AgentInit(version, build string) error {\n\tagent := Agent{}\n\tagent.trapSignal()\n\tconf.init(version, build)\n\n\t// containers dir creation\n\tif err := os.MkdirAll(containersDataDir, 0666); err != nil {\n\t\treturn fmt.Errorf(\"Unable to create container data directory: %s\", err)\n\t}\n\n\t// NATS Connect\n\thostname, err := os.Hostname()\n\tif err != nil {\n\t\treturn fmt.Errorf(\"Unable to get hostname: %s\", err)\n\t}\n\tagent.natsStreaming = ns.NewClient(ns.DefaultURL, ns.ClusterID, os.Args[0]+\"-\"+hostname, time.Minute)\n\tif err = agent.natsStreaming.Connect(); err != nil {\n\t\treturn err\n\t}\n\n\t// Connection to Docker\n\tagent.dock = docker.NewClient(conf.dockerEngine, docker.DefaultVersion)\n\tif err = agent.dock.Connect(); err != nil {\n\t\t_ = agent.natsStreaming.Close()\n\t\treturn err\n\t}\n\tlog.Println(\"Connected to Docker-engine\")\n\n\tlog.Println(\"Extracting containers list...\")\n\tagent.containers = make(map[string]*ContainerData)\n\tContainerListOptions := types.ContainerListOptions{All: true}\n\tcontainers, err := agent.dock.GetClient().ContainerList(context.Background(), ContainerListOptions)\n\tif err != nil {\n\t\t_ = agent.natsStreaming.Close()\n\t\treturn err\n\t}\n\tfor _, cont := range containers {\n\t\tagent.addContainer(cont.ID)\n\t}\n\tlog.Println(\"done\")\n\tagent.start()\n\treturn nil\n}", "func Init() error {\n\tif confPath == \"\" {\n\t\treturn errors.New(\"server conf not found\")\n\t}\n\n\t_, err := toml.DecodeFile(confPath, Conf)\n\treturn err\n}", "func Init(store *store.Store) *Server {\n\ts := &Server{\n\t\tstore: store,\n\t}\n\n\ts.setupEchoServer()\n\ts.setupEchoMiddleware()\n\n\tInitUI(s)\n\tInitApis(s)\n\n\treturn s\n}", "func CloudantInit() {\n\n\tuser := os.Getenv(\"CLOUDANT_USER_NAME\")\n\tpassword := os.Getenv(\"CLOUDANT_PASSWORD\")\n\t\n\tvar err error\n\n\tlog.Println(\"connecting to cloudant\")\n\n\t// create the client\n\tClient, err = cloudant.NewClient(user, password)\n\tif err != nil {\n\t\tlog.Fatal(err)\n\t}\n\n\tlog.Println(\"successfully connected\")\n\n}", "func (c *MainChannelCC) Init(stub shim.ChaincodeStubInterface) pb.Response {\n\treturn shim.Success(nil)\n}", "func Init(name string) {\n\tonce.Do(func() {\n\t\ti, err := New(WithServerName(name))\n\t\tif err != nil {\n\t\t\tlog.Init()\n\t\t\t// skipcq: RVV-A0003\n\t\t\tlog.Fatal(errors.ErrFailedToInitInfo(err))\n\t\t}\n\t\tinfoProvider = i\n\t})\n}", "func (a *ApiServer) Initialize(config map[string]interface{}) *ApiServer{\n//func (a *ApiServer) Initialize() *ApiServer{\n\ta.Ssl.InitializeCertificateAuthority()\n\ta.Router = mux.NewRouter()\n\treturn a\n}", "func (p *P2PServerV1) Init(ctx *nctx.NetCtx) error {\n\tpool, err := NewConnPool(ctx)\n\tif err != nil {\n\t\tp.log.Error(\"Init P2PServerV1 NewConnPool error\", \"error\", err)\n\t\treturn err\n\t}\n\n\tp.ctx = ctx\n\tp.log = ctx.GetLog()\n\tp.config = ctx.P2PConf\n\tp.pool = pool\n\tp.dispatcher = p2p.NewDispatcher(ctx)\n\n\t// address\n\tp.address, err = multiaddr.NewMultiaddr(ctx.P2PConf.Address)\n\tif err != nil {\n\t\tlog.Printf(\"network address error: %v\", err)\n\t\treturn ErrAddressIllegal\n\t}\n\n\t_, _, err = manet.DialArgs(p.address)\n\tif err != nil {\n\t\tlog.Printf(\"network address error: %v\", err)\n\t\treturn ErrAddressIllegal\n\t}\n\n\t// account\n\tkeyPath := 
ctx.EnvCfg.GenDataAbsPath(ctx.EnvCfg.KeyDir)\n\tp.account, err = xaddress.LoadAddress(keyPath)\n\tif err != nil {\n\t\tp.log.Error(\"load account error\", \"path\", keyPath)\n\t\treturn ErrLoadAccount\n\t}\n\tp.accounts = cache.New(cache.NoExpiration, cache.NoExpiration)\n\n\tp.bootNodes = make([]string, 0)\n\tp.staticNodes = make(map[string][]string, 0)\n\tp.dynamicNodes = make([]string, 0)\n\n\treturn nil\n}", "func InitCNIServer(netplugin *plugin.NetPlugin) error {\n\n\tnetPlugin = netplugin\n\thostname, err := os.Hostname()\n\tif err != nil {\n\t\tlog.Fatalf(\"Could not retrieve hostname: %v\", err)\n\t}\n\n\tpluginHost = hostname\n\n\t// Set up the api client instance\n\tkubeAPIClient = setUpAPIClient()\n\tif kubeAPIClient == nil {\n\t\tlog.Fatalf(\"Could not init kubernetes API client\")\n\t}\n\n\tlog.Debugf(\"Configuring router\")\n\n\trouter := mux.NewRouter()\n\n\t// register handlers for cni\n\tt := router.Headers(\"Content-Type\", \"application/json\").Methods(\"POST\").Subrouter()\n\tt.HandleFunc(cniapi.EPAddURL, makeHTTPHandler(addPod))\n\tt.HandleFunc(cniapi.EPDelURL, makeHTTPHandler(deletePod))\n\tt.HandleFunc(\"/ContivCNI.{*}\", unknownAction)\n\n\tdriverPath := cniapi.ContivCniSocket\n\tos.Remove(driverPath)\n\tos.MkdirAll(cniapi.PluginPath, 0700)\n\n\tgo func() {\n\t\tl, err := net.ListenUnix(\"unix\", &net.UnixAddr{Name: driverPath, Net: \"unix\"})\n\t\tif err != nil {\n\t\t\tpanic(err)\n\t\t}\n\n\t\tlog.Infof(\"k8s plugin listening on %s\", driverPath)\n\t\thttp.Serve(l, router)\n\t\tl.Close()\n\t\tlog.Infof(\"k8s plugin closing %s\", driverPath)\n\t}()\n\n\t//InitKubServiceWatch(netplugin)\n\treturn nil\n}", "func (client *Client) Init() {\n\tclient.coAckChannel = make(chan CoAck)\n\n\tclient.Tag = true\n\n\tif client.Id == \"\" {\n\t\tclient.Id = uuid.New()\n\t}\n\tclient.RWMutex.Init(\"client_\" + client.Id)\n\n\tif client.RegTime.IsZero() {\n\t\tclient.RegTime = time.Now()\n\t}\n\tif client.Apps == nil {\n\t\tclient.Apps = []string{}\n\t}\n\tif client.Skip_work == nil {\n\t\tclient.Skip_work = []string{}\n\t}\n\n\tclient.Assigned_work.Init(\"Assigned_work\")\n\n\tclient.Current_work.Init(\"Current_work\")\n\n}", "func (s *Server) Initialize(log logging.Logger, factory logging.Factory, host string, port uint16) {\n\ts.log = log\n\ts.factory = factory\n\ts.listenAddress = fmt.Sprintf(\"%s:%d\", host, port)\n\ts.router = newRouter()\n}", "func (d *Config) Init() {\n\td.Type = \"SRV\"\n\td.RefreshInterval = toml.Duration(30 * time.Second)\n}", "func Init() {\n\tgo start(\"80\")\n}", "func (zk *ZookeeperMaster) Init() error {\n\tzk.isMaster = false\n\t//create connection to zookeeper\n\tzk.client = zkclient.NewZkClient(zk.zkHosts)\n\tif err := zk.client.ConnectEx(time.Second * 10); err != nil {\n\t\treturn fmt.Errorf(\"Init failed when connect zk, %s\", err.Error())\n\t}\n\treturn nil\n}", "func Init() {\n\tif initialized {\n\t\treturn\n\t}\n\tinitialized = true\n\tpopulatecnamechain()\n\tensureresourcefinder()\n\tloadphantomjs()\n}", "func Init(s *grpc.Server) {\n\tpb.RegisterMessageServer(s, &srv.Message{})\n}", "func (r *Ricochet) Init() {\n\tr.newconns = make(chan *OpenConnection)\n\tr.networkResolver = utils.NetworkResolver{}\n\tr.rni = new(utils.RicochetNetwork)\n}", "func Init() {\n\tl, err := net.ListenPacket(\"udp\", \":\"+util.LISTEN_PORT)\n\tif err != nil {\n\t\tlog.Exit(err)\n\t}\n\tLocalServer = &Server{\n\t\tServer: l,\n\t\tFriends: Settings.Friends}\n\tLocalServer.Sender, err = netchan.NewExporter(\"tcp\", \":0\")\n\tif err != nil 
{\n\t\tlog.Exit(err)\n\t}\n\tsAdd := LocalServer.Sender.Addr().String()\n\tsPort, err := strconv.Atoi(sAdd[strings.LastIndex(sAdd, \":\"):])\n\tif strings.LastIndex(sAdd, \":\") == -1 || err != nil {\n\t\tlog.Exit(\"address binding failure\")\n\t}\n\tMe = &database.Friend{\n\t\tName: Settings.ProfileName,\n\t\tTempHashKey: crypt.Md5(crypt.GenerateKey(util.SECRET_KEY_SIZE)),\n\t\tSenderPort: sPort}\n}", "func (c *Config) Init() {\n\tif c.DNSNames == nil {\n\t\tc.DNSNames = []string{\"localhost\", \"127.0.0.1\", \"::1\"}\n\t}\n\tif c.TLS == nil {\n\t\tc.TLS = &DefaultTLSOptions\n\t}\n\tif c.AuthorityConfig == nil {\n\t\tc.AuthorityConfig = &AuthConfig{}\n\t}\n\tif c.CommonName == \"\" {\n\t\tc.CommonName = \"Step Online CA\"\n\t}\n\tif c.CRL != nil && c.CRL.Enabled && c.CRL.CacheDuration == nil {\n\t\tc.CRL.CacheDuration = DefaultCRLCacheDuration\n\t}\n\tc.AuthorityConfig.init()\n}", "func (s *Server) initializeCA() error {\n\t// Bail if connect isn't enabled.\n\tif !s.config.ConnectEnabled {\n\t\treturn nil\n\t}\n\n\tconf, err := s.initializeCAConfig()\n\tif err != nil {\n\t\treturn err\n\t}\n\n\t// Initialize the provider based on the current config.\n\tprovider, err := s.createCAProvider(conf)\n\tif err != nil {\n\t\treturn err\n\t}\n\n\treturn s.initializeRootCA(provider, conf)\n}", "func (c *Client) Init() (err error) {\n\tc.initOnce.Do(func() {\n\t\terr = c.init()\n\t})\n\n\treturn\n}", "func (b *BeaconClient) Init(config interface{}) error {\n\tif runtime.GOOS == \"windows\" {\n\t\tlog.Fatal(\"Beacon is not compatible with windows OS\")\n\t}\n\n\tb.beacon = beacon.New()\n\tb.beacon.NoEcho()\n\tb.beacon.SetPort(b.Port).SetInterval(time.Duration(b.Interval) * time.Second)\n\tb.SubscribeChan = make(chan string)\n\n\tb.payloadHandlers = payloadMap{\n\t\tpayloadHandlers: make(map[string]PayloadHandler),\n\t}\n\n\treturn nil\n}", "func (s *Server) Init(addr string, db Store) error {\n\tvar err error\n\n\ts.l, err = net.Listen(\"tcp\", addr)\n\tif err != nil {\n\t\treturn fmt.Errorf(\"Error start server: %v\\n\", err)\n\t}\n\tlog.Println(\"Server start \", addr)\n\n\tif db == nil {\n\t\treturn fmt.Errorf(\"Store not initializate\\n\")\n\t}\n\ts.db = db\n\n\treturn err\n}", "func (s *SVFS) Init() (err error) {\n\t// Copy storage URL option\n\toverloadStorageURL := SwiftConnection.StorageUrl\n\n\t// Hubic special authentication\n\tif HubicAuthorization != \"\" && HubicRefreshToken != \"\" {\n\t\tSwiftConnection.Auth = new(HubicAuth)\n\t}\n\n\t// Start directory lister\n\tDirectoryLister.Start()\n\n\t// Authenticate if we don't have a token and storage URL\n\tif !SwiftConnection.Authenticated() {\n\t\terr = SwiftConnection.Authenticate()\n\t}\n\n\t// Swift ACL special authentication\n\tif overloadStorageURL != \"\" {\n\t\tSwiftConnection.StorageUrl = overloadStorageURL\n\t\tSwiftConnection.Auth = newSwiftACLAuth(SwiftConnection.Auth, overloadStorageURL)\n\t}\n\n\t// Data encryption\n\tif Encryption {\n\t\tCipher, err = newCipher(Key)\n\t}\n\n\treturn err\n}", "func initServer(ctx context.Context, n *Node) error {\n\n\tif n.index < int32(len(n.config.Nodes)) {\n\n\t\tle := n.config.Nodes[n.index]\n\t\tvar listener net.Listener\n\n\t\terr := backoff.Retry(\n\t\t\tfunc() error {\n\t\t\t\tvar err error\n\t\t\t\tlistener, err = net.Listen(\"tcp\", le)\n\t\t\t\treturn err\n\t\t\t},\n\t\t\tbackoff.WithMaxRetries(backoff.NewExponentialBackOff(), 3),\n\t\t)\n\n\t\t// listener, err := net.Listen(\"tcp\", le)\n\t\tif err != nil {\n\t\t\terr = raftErrorf(err, \"failed to acquire local TCP socket for 
gRPC\")\n\t\t\tn.logger.Errorw(\"initServer failed (some other application or previous instance still using socket?)\",\n\t\t\t\traftErrKeyword, err)\n\t\t\treturn err\n\t\t}\n\n\t\ts := &raftServer{\n\t\t\tnode: n,\n\t\t\tlocalListener: listener,\n\t\t\tlocalAddr: le,\n\t\t}\n\n\t\tn.messaging.server = s\n\t\tn.logger.Debugw(\"listener acquired local node address\", s.logKV()...)\n\n\t} else {\n\n\t\terr := raftErrorf(\n\t\t\tRaftErrorServerNotSetup, \"LocalNodeIndex in out of bounds of cluster Nodes configuration\")\n\t\tn.logger.Errorw(\"initServer failed\", raftErrKeyword, err)\n\t\treturn err\n\n\t}\n\n\treturn nil\n}", "func (s *Server) Init(opts ...ServerOptions) {\n\tif s.options == nil {\n\t\ts.options = &serverOption{}\n\t}\n\tfor _, opt := range opts {\n\t\topt(s.options)\n\t}\n\t\n}", "func (c *Client) InitTLS(certFile string) error {\n\tserverCert, err := ioutil.ReadFile(certFile)\n\tif err != nil {\n\t\treturn err\n\t}\n\tCA_Pool := x509.NewCertPool()\n\tCA_Pool.AppendCertsFromPEM(serverCert)\n\tc.tlsConfig = &tls.Config{RootCAs: CA_Pool}\n\treturn nil\n}", "func Init() {\n\tdocker.Init()\n\thost.Init()\n\tlabel.Init()\n\tospackages.Init()\n\tdiff.Init()\n\tcontainer.Init()\n}", "func (c *Configurations) Init() error {\n\tc.Version = Version\n\tc.Location = \"Local\"\n\tc.Debug = Debug\n\n\t// server\n\tc.Server = &Server{}\n\tc.Server.Init()\n\n\t// redis init\n\tc.RedisConf = &RedisConf{}\n\tc.RedisConf.Init()\n\n\treturn nil\n}", "func Init() (err error) {\n\tvar isAdminEnable *bool = config.GetConfig().Admin.Enable\n\n\tif isAdminEnable != nil && *isAdminEnable == false {\n\t\tlager.Logger.Infof(\"admin api are not enable\")\n\t\treturn nil\n\t}\n\n\terrCh := make(chan error)\n\tmetrics.Init()\n\n\tadminServerURI := config.GetConfig().Admin.ServerURI\n\n\tif adminServerURI == \"\" {\n\t\tadminServerURI = \"0.0.0.0:30102\"\n\t}\n\tln, err := net.Listen(\"tcp\", adminServerURI)\n\tif err != nil {\n\t\treturn\n\t}\n\ttlsConfig, err := getTLSConfig()\n\tif err != nil {\n\t\treturn\n\t}\n\tif tlsConfig != nil {\n\t\tlager.Logger.Infof(\"admin server is using ssl\")\n\t\tln = tls.NewListener(ln, tlsConfig)\n\t} else {\n\t\tlager.Logger.Infof(\"admin server is not using ssl\")\n\t}\n\n\tgo func() {\n\t\tlager.Logger.Infof(\"admin server listening on %s\", ln.Addr().String())\n\t\trestfulWebService := GetWebService()\n\t\tgorestful.Add(&restfulWebService)\n\t\tif err := http.Serve(ln, nil); err != nil {\n\t\t\terrCh <- err\n\t\t\treturn\n\t\t}\n\t}()\n\n\tselect {\n\tcase err = <-errCh:\n\t\tlager.Logger.Warnf(\"got Admin Server Error, err: %v\", err)\n\tcase <-time.After(time.Second):\n\t\tlager.Logger.Infof(\"admin server start success\")\n\t\terr = nil\n\t}\n\treturn\n}", "func Init(addr string) error {\n\tCleanup()\n\tconn, err := grpc.Dial(addr, grpc.WithInsecure())\n\tif err != nil {\n\t\treturn fmt.Errorf(\"Cannot connect to Calcite parser gRPC server: %v\", err)\n\t}\n\tclient = NewCalciteParserClient(conn)\n\treturn nil\n}", "func InitTECDSA(network network.Server) *ProtocolTECDSA {\n\tp := &ProtocolTECDSA{network: network}\n\treturn p\n}", "func (c *Client) Init(\n\tctx context.Context,\n) error {\n\tc.httpClient = client.Default(ctx)\n\treturn nil\n}", "func Init(d *db.Database, c config.Config) *Server {\n\tr := mux.NewRouter()\n\th := &http.Server{\n\t\tHandler: r,\n\t\tAddr: c.Web.Addr,\n\t\tWriteTimeout: 15 * time.Second,\n\t\tReadTimeout: 15 * time.Second,\n\t}\n\tsrv := &Server{\n\t\tDB: d,\n\t\trouter: r,\n\t\tHS: h,\n\t\tConfig: c,\n\t}\n\treturn srv\n}", "func 
Init() (err error) {\n\tif confPath != \"\" {\n\t\treturn local()\n\t}\n\treturn remote()\n}" ]
[ "0.6843309", "0.6839311", "0.6803325", "0.6737106", "0.6717232", "0.6700139", "0.66713303", "0.6644828", "0.66238827", "0.66130185", "0.6543047", "0.65343547", "0.6528489", "0.64552575", "0.6450744", "0.64250165", "0.6416218", "0.63975966", "0.6397067", "0.6367777", "0.6365296", "0.6348536", "0.6342358", "0.6336888", "0.63021106", "0.63000566", "0.6224047", "0.6222552", "0.6208513", "0.6208513", "0.62033486", "0.6199279", "0.6189688", "0.6168138", "0.616671", "0.6144373", "0.6133665", "0.61318", "0.61302567", "0.61222816", "0.61222047", "0.6111115", "0.6102868", "0.609219", "0.6090363", "0.6087099", "0.60839", "0.60831356", "0.6077795", "0.6072284", "0.6033858", "0.6021338", "0.60088456", "0.60074425", "0.59914136", "0.59890807", "0.5984615", "0.59766376", "0.5975215", "0.59608746", "0.5943174", "0.5922588", "0.59151787", "0.59140867", "0.5906646", "0.5906247", "0.5903423", "0.58970153", "0.58902055", "0.58893424", "0.588923", "0.5888739", "0.58880544", "0.5887428", "0.58793855", "0.58696425", "0.5860388", "0.58516264", "0.58473927", "0.58445543", "0.58424556", "0.5836269", "0.58289945", "0.58209574", "0.58187276", "0.58180934", "0.58166593", "0.58129025", "0.5811002", "0.58075434", "0.5804761", "0.57976073", "0.5793294", "0.5787978", "0.5779733", "0.57776374", "0.5775954", "0.57721704", "0.57511353", "0.57479143", "0.574711" ]
0.0
-1
init initializes the server leaving the DB open
func (s *Server) init(renew bool) (err error) {
	s.Config.Operations.Metrics = s.Config.Metrics
	s.Operations = operations.NewSystem(s.Config.Operations)
	s.initMetrics()

	serverVersion := metadata.GetVersion()
	err = calog.SetLogLevel(s.Config.LogLevel, s.Config.Debug)
	if err != nil {
		return err
	}
	log.Infof("Server Version: %s", serverVersion)
	s.levels, err = metadata.GetLevels(serverVersion)
	if err != nil {
		return err
	}
	log.Infof("Server Levels: %+v", s.levels)

	s.mux = gmux.NewRouter()
	// Initialize the config
	err = s.initConfig()
	if err != nil {
		return err
	}
	// Initialize the default CA last
	err = s.initDefaultCA(renew)
	if err != nil {
		return err
	}
	// Successful initialization
	return nil
}
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "func init() {\n\tconfig.Read()\n\n\tdao.Server = config.Server\n\tdao.Database = config.Database\n\tdao.Connect()\n}", "func init() {\n\tconfig.Read()\n\n\tdao.Server = config.Server\n\tdao.Database = config.Database\n\tdao.Connect()\n}", "func (p *DatabaseHandler) init(s *Server) error {\n\tdb, err := sql.Open(\"sqlite3\", s.srcDir+\"/database.db\")\n\tif err != nil {\n\t\tfmt.Println(err.Error())\n\t\treturn StringError{\"ERROR: Some of databases weren't opened!\"}\n\t}\n\tp.db = db\n\n\tp.createTable()\n\treturn nil\n}", "func init() {\n\tos.RemoveAll(DataPath)\n\n\tdc := DatabaseConfig{\n\t\tDataPath: DataPath,\n\t\tIndexDepth: 4,\n\t\tPayloadSize: 16,\n\t\tBucketDuration: 3600000000000,\n\t\tResolution: 60000000000,\n\t\tSegmentSize: 100000,\n\t}\n\n\tcfg := &ServerConfig{\n\t\tVerboseLogs: true,\n\t\tRemoteDebug: true,\n\t\tListenAddress: Address,\n\t\tDatabases: map[string]DatabaseConfig{\n\t\t\tDatabase: dc,\n\t\t},\n\t}\n\n\tdbs := map[string]kdb.Database{}\n\tdb, err := dbase.New(dbase.Options{\n\t\tDatabaseName: Database,\n\t\tDataPath: dc.DataPath,\n\t\tIndexDepth: dc.IndexDepth,\n\t\tPayloadSize: dc.PayloadSize,\n\t\tBucketDuration: dc.BucketDuration,\n\t\tResolution: dc.Resolution,\n\t\tSegmentSize: dc.SegmentSize,\n\t})\n\n\tif err != nil {\n\t\tpanic(err)\n\t}\n\n\tdbs[\"test\"] = db\n\td = db\n\to = dc\n\n\ts = NewServer(dbs, cfg)\n\tgo s.Listen()\n\n\t// wait for the server to start\n\ttime.Sleep(time.Second * 2)\n\n\tc = NewClient(Address)\n\tif err := c.Connect(); err != nil {\n\t\tpanic(err)\n\t}\n}", "func (s *Server) Init(renew bool) (err error) {\n\terr = s.init(renew)\n\terr2 := s.closeDB()\n\tif err2 != nil {\n\t\tlog.Errorf(\"Close DB failed: %s\", err2)\n\t}\n\treturn err\n}", "func init() {\n\tlog.Info(\"Initializing database\")\n\tpsqlInfo := fmt.Sprintf(\"host=%s port=%s user=%s \"+\n\t\t\"password=%s dbname=%s sslmode=disable\",\n\t\tconfig.Config().GetString(\"database.host\"),\n\t\tconfig.Config().GetString(\"database.port\"),\n\t\tconfig.Config().GetString(\"database.user\"),\n\t\tconfig.Config().GetString(\"database.password\"),\n\t\tconfig.Config().GetString(\"database.name\"))\n\tdb, err := sql.Open(\"postgres\", psqlInfo)\n\tif err != nil {\n\t\tpanic(err)\n\t}\n\n\terr = db.Ping()\n\tif err != nil {\n\t\tpanic(err)\n\t}\n\n\tlog.Info(\"Successfully connected to database!\")\n}", "func init() {\n\tconfig.Read()\n\tdao.DialInfo = &mgo.DialInfo{\n\t\tAddrs: []string{config.Server},\n\t\tDatabase: config.Database,\n\t\tUsername: config.Username,\n\t\tPassword: config.Password,\n\t}\n\n\tdao.Server = config.Server\n\tdao.Database = config.Database\n\tdao.Connect()\n\n}", "func init() {\n\tctx, cancel := context.WithTimeout(context.Background(), 10*time.Second)\n\tdefer cancel()\n\tclient, err := mongo.Connect(ctx, options.Client().ApplyURI(os.Getenv(\"BRAIN_DB\")))\n\tif err != nil {\n\t\tlog.Fatal(err)\n\t}\n\tdb = client.Database(dbName)\n}", "func init() {\n\tvar err error\n\tDB, err = gorm.Open(config.MysqlConf.DriverName, config.MysqlConf.Conn)\n\tif err != nil {\n\t\tlog.Fatal(err)\n\t}\n\tDB.DB().SetMaxOpenConns(config.MysqlConf.MaxOpenConns)\n\tDB.DB().SetMaxIdleConns(config.MysqlConf.MaxIdleConns)\n\tDB.DB().SetConnMaxLifetime(time.Duration(config.MysqlConf.ConnMaxLifetime))\n\t//DB.LogMode(true)\n}", "func init() {\n\n\tvar err error\n db, err = sql.Open(\"postgres\",\"user=snake dbname=snake_game sslmode=disable port=26257\")\n\t\n\tif err != nil {\n\t\tlog.Fatal(\"error connecting to the database: \", err, nil)\n\t}\n\n if err != nil 
{\n\t\tlog.Fatal(\"error connecting to the database: \", err, nil)\n\t}\n}", "func InitDatabase() *Server {\n\tvar err error\n\n\tconnString := getConnString()\n\n\tlog.Printf(\"Setting connection to db with configuration: %s \\n\", connString)\n\n\tserver := &Server{}\n\tserver.db, err = sql.Open(\"sqlserver\", connString)\n\tif err != nil {\n\t\tlog.Fatal(\"Error opening connection: \", err.Error())\n\t}\n\n\tserver.db.SetConnMaxLifetime(time.Minute * 4)\n\n\treturn server\n}", "func init() {\n\tctx, _ := context.WithTimeout(context.Background(), 10*time.Second)\n\tclient, err := mongo.Connect(ctx, options.Client().ApplyURI(CONNECTIONSTRING))\n\n\tif err != nil {\n\t\tlog.Fatal(\"[init]: %s\\n\", err)\n\t}\n\t// Collection types can be used to access the database\n\tdb = client.Database(DBNAME)\n\n}", "func init() {\n\n\tvar err error\n\tdatabase, err = sql.Open(\"mysql\", config.MySQLToFormatDNS())\n\tif err != nil {\n\t\tlog.Fatal(\"==> Error in library/mysql: \" + err.Error())\n\t}\n\n\tdatabase.SetMaxOpenConns(20)\n\tdatabase.SetMaxIdleConns(20)\n\n}", "func init() {\n\tdbconn, err := database.NewPostgreDB(dbType, connStr)\n\tif err != nil {\n\t\tpanic(err)\n\t}\n\n\tsesconn, err := database.NewRedisCache(addr, pass)\n\tif err != nil {\n\t\tpanic(err)\n\t}\n\n\tconfig.dbconn = dbconn\n\tconfig.sessionconn = sesconn\n}", "func init() {\n\t// db, err := sql.Open(\"mysql\", mysql_connect)\n\t// if err != nil {\n\t// \tlog.Fatal(err)\n\t// } else {\n\t// \tlog.Println(\"Successfully connected to mysql database\")\n\t// }\n\t// defer db.Close()\n\n}", "func init(){\n fmt.Printf(\"init Mongo START\\n\")\n session, err := mgo.Dial(\"localhost\")\n if err != nil {\n panic(err)\n }\n /* defer session.Close() */\n // Optional. Switch the session to a monotonic behavior.\n base_config = &Config{\n session: session,\n }\n fmt.Printf(\"init Mongo DONE\\n\")\n}", "func init() {\n\tresetConnection()\n}", "func init() {\n // parser config\n var confFile string\n flag.StringVar(&confFile, \"c\", \"conf/tcpserver.yaml\", \"config file\")\n flag.Parse()\n\n err := utils.ConfParser(confFile, &config)\n if err != nil {\n fmt.Println(\"parser config failed:\", err.Error())\n os.Exit(-1)\n }\n\n // init db\n conninfo := fmt.Sprintf(\"%s:%s@tcp(%s)/%s?charset=utf8\", config.Db.User, config.Db.Passwd, config.Db.Host, config.Db.Db)\n db, err = gorm.Open(\"mysql\", conninfo)\n if err != nil {\n fmt.Println(\"connect to db failed:\", err.Error())\n os.Exit(-1)\n }\n db.DB().SetMaxIdleConns(config.Db.Conn.Maxidle)\n db.DB().SetMaxOpenConns(config.Db.Conn.Maxopen)\n db.LogMode(true)\n}", "func (s *Server) Init(addr string, db Store) error {\n\tvar err error\n\n\ts.l, err = net.Listen(\"tcp\", addr)\n\tif err != nil {\n\t\treturn fmt.Errorf(\"Error start server: %v\\n\", err)\n\t}\n\tlog.Println(\"Server start \", addr)\n\n\tif db == nil {\n\t\treturn fmt.Errorf(\"Store not initializate\\n\")\n\t}\n\ts.db = db\n\n\treturn err\n}", "func init() {\n\tcfg = pkg.InitializeConfig()\n\t_, err := pkg.InitializeDb()\n\tif err != nil {\n\t\tpanic(\"failed to initialize db connection : \" + err.Error())\n\t}\n}", "func init() {\n\tdbinfo := fmt.Sprintf(\"user=%s password=%s dbname=%s sslmode=disable\",\n\t\tos.Getenv(\"POSTGRES_USER\"), os.Getenv(\"POSTGRES_PASSWORD\"), DATABASE_NAME)\n\tdb, err := sql.Open(\"postgres\", dbinfo)\n\tif err != nil {\n\t\tlog.Fatal(err)\n\t}\n\tDB = db\n}", "func (s *Server) init() (err error) {\n\tif err = s.Close(); err != nil {\n\t\treturn\n\t}\n\n\tif s.Hostname == \"\" {\n\t\tif 
s.Hostname, err = os.Hostname(); err != nil {\n\t\t\treturn\n\t\t}\n\t}\n\n\treturn\n}", "func init() {\n\tuser := \"root\"\n\tpass := \"pwpw\"\n\tname := \"itemsDB\"\n\n\tdbconf := user + \":\" + pass + \"@/\" + name\n\tconn, err := sql.Open(\"mysql\", dbconf)\n\tif err != nil {\n\t\tpanic(err.Error)\n\t}\n\tConn = conn\n}", "func init() {\n\tlis = bufconn.Listen(bufSize)\n\tlog.Println(\"server is started\")\n\tserver := grpc.NewServer()\n\tapi.RegisterUserServiceServer(server, &GRPCServer{})\n\n\tdbConnection := postgres.OpenDataBaseConnection()\n\tpostgres.StorageInstance = postgres.NewStorage(dbConnection)\n\tgo func() {\n\t\tif err := server.Serve(lis); err != nil {\n\t\t\tlog.Fatalf(\"Server exited with error: %v\", err)\n\t\t}\n\t}()\n}", "func init() {\n\tconnectionString := os.Getenv(\"CLOUDSQL_CONNECTION\")\n\tdb, err := NewDb(\"mymysql\", connectionString)\n\n\tif err != nil {\n\t\tlog.Fatal(err)\n\t}\n\n\t// running in google app engine complains that the db gets closed\n\t// by the time we get requests. removing that here but that can't be\n\t// right...\n\n\trouter := NewRouter(db)\n\n\thttp.Handle(\"/\", router)\n}", "func init() {\n\tdao.Server = \"mongodb://shivam:[email protected]:25294/shayona-store\"\n\tdao.Database = \"shayona-store\"\n\tdao.Connect()\n}", "func (db *Database) init() (*gorm.DB, error) {\n\tvar connection, err = gorm.Open(db.Driver, db.getURI())\n\tif err != nil {\n\t\tfmt.Printf(\"✖ Cannot connect to %s database\\n\", db.Driver)\n\t\tlog.Fatal(\"This is the error:\", err)\n\t} else {\n\t\tfmt.Printf(\"⚡ We are connected to the %s database\\n\", db.Driver)\n\t}\n\treturn connection, err\n}", "func DBInit(conStr string) {\n\tif db == nil {\n\t\tvar err error\n\t\tdbConnection, err := gorm.Open(\"mysql\", conStr+\"?charset=utf8&parseTime=True&loc=Local\")\n\t\t// db connection will be closed if there's no request for a while\n\t\t// which would cause 500 error when a new request comes.\n\t\t// diable pool here to avoid this problem.\n\t\tdbConnection.DB().SetMaxIdleConns(0)\n\t\tif err != nil {\n\t\t\tlog.WithFields(log.Fields{\n\t\t\t\t\"error\": err.Error(),\n\t\t\t}).Fatal(\"Faile to create db connection pool\")\n\t\t} else {\n\t\t\tlog.WithFields(log.Fields{\n\t\t\t\t\"message\": dbConnection.GetErrors(),\n\t\t\t\t\"db\": conStr,\n\t\t\t}).Info(\"connected to mysql\")\n\t\t}\n\t\tdb = &DB{dbConnection}\n\t}\n\tdb.dbConnect.SetLogger(log.StandardLogger())\n\t// db.Debug message will be logged be logrug with Info level\n\tdb.dbConnect.Debug().AutoMigrate(&Todo{})\n}", "func init() {\n\tlog.Info(\"mysql 链接中。。。\")\n\tvar v dbInfo\n\tv.UserName = \"root\"\n\tv.Port = 3306\n\tv.UserPassword = \"gogocuri\"\n\tv.DbName = \"wanqu2\"\n\tv.Address = \"192.168.0.162\"\n\n\tvar err error\n\tdb, err = gorm.Open(\"mysql\", fmt.Sprintf(\"%s:%s@tcp(%s:%d)/%s?charset=utf8&parseTime=True&loc=Local\",\n\t\tv.UserName, v.UserPassword, v.Address, v.Port, v.DbName))\n\tif err != nil {\n\t\tlog.Fatalf(\"failed to connect database:\", err)\n\t}\n\t// 关闭复数表名,如果设置为true,`User`表的表名就会是`user`,而不是`users`\n\tdb.SingularTable(true)\n\t// 启用Logger,显示详细日志\n\tdb.LogMode(true)\n\t//自定义日志\n\tdb.SetLogger(log.NewGormLogger())\n\t//连接池\n\tdb.DB().SetMaxIdleConns(50)\n\tdb.DB().SetMaxOpenConns(200)\n\tlog.Info(\"mysql 链接成功\")\n}", "func initAppDB() {\n\n\t// Init config data\n\tdbConf := GetDBConfig()\n\tdbConf.IsAppDB = true\n\n\tdbPoolApp, err := initSocketConnectionPool(dbConf)\n\tif err != nil {\n\t\tlog.Println(\"initial dbConnApp fail : \", err.Error())\n\t} else 
{\n\t\tlog.Println(\"initial dbConnApp successful\")\n\t\tdbConf.Conn = dbPoolApp\n\t\tdbConf.InitSuccess = true\n\t}\n\n\tdbConf.Err = err\n\n\t// Keep instance\n\tdbAppConf = dbConf\n}", "func InitDB(setting *domain.GlobalConfig) {\n\tsource := \"\"\n\tswitch setting.DBType {\n\tcase domain.SQLITE3:\n\t\tlogrus.Info(\"InitDB has done when new client, skip.\")\n\t\treturn\n\tcase domain.MYSQL:\n\t\tsource = fmt.Sprintf(\"%s:%s@tcp(%s:%s)/\",\n\t\t\tsetting.DBUser, setting.DBPassword, setting.DBHost, setting.DBPort)\n\tdefault:\n\t\tsource = fmt.Sprintf(\"%s:%s@tcp(%s:%s)/\",\n\t\t\tsetting.DBUser, setting.DBPassword, setting.DBHost, setting.DBPort)\n\t}\n\n\tdb, err := sql.Open(setting.DBType, source)\n\tif err != nil {\n\t\tlogrus.Fatalf(\"connection to db error: %s\", err)\n\t}\n\tdefer db.Close()\n\n\tsql := \"CREATE DATABASE IF NOT EXISTS \" + setting.DBName + \";\"\n\t_, err = db.Exec(sql)\n\tif err != nil {\n\t\tlogrus.Fatalf(\"create db %s error: %v\", setting.DBName, err)\n\t}\n}", "func Init() {\n\t// First it has to connect to my \"server\", that is actually just localhost.\n\tconn, err := sql.Open(\"mysql\", \"root:Naranjo7854@/Economia\")\n\t// Check for errors\n\tif err != nil {\n\n\t\tfmt.Println(\"Error establishing connection with database\", err)\n\t}\n\tdb = conn\n\tfmt.Println(\"Connection created with database\")\n}", "func initLogDB() {\n\n\t// Init config data\n\tdbConf := GetDBConfig()\n\tdbConf.IsAppDB = false\n\n\tdbPoolApp, err := initSocketConnectionPool(dbConf)\n\tif err != nil {\n\t\tlog.Println(\"initial dbConnLog fail : \", err.Error())\n\t} else {\n\t\tlog.Println(\"initial dbConnLog successful\")\n\t\tdbConf.Conn = dbPoolApp\n\t\tdbConf.InitSuccess = true\n\t}\n\n\tdbConf.Err = err\n\n\t// Keep instance\n\tdbLogConf = dbConf\n}", "func init() {\n\n\t//load in environment variables from .env\n\t//will print error message when running from docker image\n\t//because env file is passed into docker run command\n\tenvErr := godotenv.Load(\"/home/ubuntu/go/src/github.com/200106-uta-go/BAM-P2/.env\")\n\tif envErr != nil {\n\t\tif !strings.Contains(envErr.Error(), \"no such file or directory\") {\n\t\t\tlog.Println(\"Error loading .env: \", envErr)\n\t\t}\n\t}\n\n\tvar server = os.Getenv(\"DB_SERVER\")\n\tvar dbPort = os.Getenv(\"DB_PORT\")\n\tvar dbUser = os.Getenv(\"DB_USER\")\n\tvar dbPass = os.Getenv(\"DB_PASS\")\n\tvar db = os.Getenv(\"DB_NAME\")\n\n\t// Build connection string\n\tconnString := fmt.Sprintf(\"server=%s;user id=%s;password=%s;port=%s;database=%s;\", server, dbUser, dbPass, dbPort, db)\n\n\t// Create connection pool\n\tvar err error\n\tdatabase, err = sql.Open(\"sqlserver\", connString)\n\tif err != nil {\n\t\tlog.Fatal(\"Error creating connection pool: \", err.Error())\n\t}\n\tctx := context.Background()\n\terr = database.PingContext(ctx)\n\thttputil.GenericErrHandler(\"error\", err)\n\n\t//create user table if it doesn't exist\n\tstatement, err := database.Prepare(`IF NOT EXISTS (SELECT * FROM sysobjects WHERE name='user_table' and xtype='U') \n\t\tCREATE TABLE user_table (id INT NOT NULL IDENTITY(1,1) PRIMARY KEY, username VARCHAR(255), password VARCHAR(255))`)\n\tif err != nil {\n\t\tlog.Fatal(err)\n\t}\n\t_, err = statement.Exec()\n\tif err != nil {\n\t\tlog.Fatalln(err)\n\t}\n}", "func init() {\n\t_ = godotenv.Load()\n\n\thostname := os.Getenv(\"HOST\")\n\tdbname := os.Getenv(\"DBNAME\")\n\tusername := os.Getenv(\"DBUSER\")\n\tpassword := os.Getenv(\"PASSWORD\")\n\n\tdbString := \"host=\" + hostname + \" user=\" + username + \" dbname=\" + 
dbname + \" sslmode=disable password=\" + password\n\n\tvar err error\n\tdb, err = gorm.Open(\"postgres\", dbString)\n\tif err != nil {\n\t\tfmt.Println(err.Error())\n\t\tpanic(\"Unable to connect to DB\")\n\t}\n\n\tdb.AutoMigrate(&QuestionModel{})\n\tdb.AutoMigrate(&AnswerModel{})\n\tdb.AutoMigrate(&UserModel{})\n\tdb.AutoMigrate(&Cohort{})\n}", "func Init() {\n config := common.ConfigObject.DB\n var (\n dbName = config.Name\n dbUser = config.User\n dbPass = config.Password\n )\n\tvar err error\n\tdb, err = sql.Open(\"mysql\", fmt.Sprintf(\"%v:%v@/%v\", dbUser, dbPass, dbName))\n\tpe(err)\n}", "func init() {\n\tvar err error\n\n\tsignKey, err = ioutil.ReadFile(privKeyPath)\n\tif err != nil {\n\t\tlog.Fatal(\"Error reading private key\")\n\t\treturn\n\t}\n\n\tverifyKey, err = ioutil.ReadFile(pubKeyPath)\n\tif err != nil {\n\t\tlog.Fatal(\"Error reading private key\")\n\t\treturn\n\t}\n\n\t// set up DB\n\tsession, err = mgo.Dial(\"localhost\")\n\tif err != nil {\n\t\tpanic(err)\n\t}\n\tsession.SetSafe(&mgo.Safe{})\n\n\tuserDB = session.DB(\"Theseus\").C(\"users\")\n\tfileDB = session.DB(\"Theseus\").C(\"files\")\n\tfs = http.FileServer(http.Dir(\"client\"))\n\thttp.HandleFunc(\"/\", routeHandler)\n}", "func Init() {\n\tvar once sync.Once\n\tinitDB := func() {\n\t\tdb, err := gorm.Open(\"mysql\",\n\t\t\tviper.GetString(\"mysql.master.user\")+\":\"+\n\t\t\t\tviper.GetString(\"mysql.master.password\")+\"@tcp(\"+\n\t\t\t\tviper.GetString(\"mysql.master.host\")+\":\"+\n\t\t\t\tviper.GetString(\"mysql.master.port\")+\")/\"+\n\t\t\t\tviper.GetString(\"mysql.master.db\")+\"?charset\"+\n\t\t\t\tviper.GetString(\"mysql.master.charset\")+\"&parseTime=True&loc=Local\")\n\t\tif err != nil {\n\t\t\tpanic(err)\n\t\t}\n\t\tMasterDB = db\n\t\tMasterDB.DB().SetMaxOpenConns(viper.GetInt(\"mysql.master.max_open_conns\"))\n\t\tMasterDB.DB().SetMaxIdleConns(viper.GetInt(\"mysql.master.max_idle_conns\"))\n\t\tMasterDB.DB().SetConnMaxLifetime(time.Minute * time.Duration(viper.GetInt(\"mysql.mas)ter.conn_max_lifetime\")))\n\t\tMasterDB.LogMode(viper.GetBool(\"mysql.master.log_mode\"))\n\t}\n\tif MasterDB == nil {\n\t\t//TODO: add log\n\t\tonce.Do(initDB)\n\t} else if err := MasterDB.DB().Ping(); err != nil {\n\t\t//TODO: add log\n\t\tinitDB()\n\t} else if err := MasterDB.Error; err != nil {\n\t\t//TODO: add log\n\t\tpanic(err)\n\t}\n}", "func Init(d *db.Database, c config.Config) *Server {\n\tr := mux.NewRouter()\n\th := &http.Server{\n\t\tHandler: r,\n\t\tAddr: c.Web.Addr,\n\t\tWriteTimeout: 15 * time.Second,\n\t\tReadTimeout: 15 * time.Second,\n\t}\n\tsrv := &Server{\n\t\tDB: d,\n\t\trouter: r,\n\t\tHS: h,\n\t\tConfig: c,\n\t}\n\treturn srv\n}", "func Initialize() {\n\tconnection := os.Getenv(\"DB_HOST\")\n\tusername := os.Getenv(\"MYSQLUSER\")\n\tpassword := os.Getenv(\"MYSQLPASS\")\n\n\tfor {\n\t\tconn, err := net.DialTimeout(\"tcp\", connection, 6*time.Second)\n\t\tif conn != nil {\n\t\t\tconn.Close()\n\t\t\tbreak\n\t\t}\n\n\t\tlogrus.Info(\"Sleeping till mysql be available... 
\", err)\n\t\ttime.Sleep(5 * time.Second)\n\t}\n\n\tdb, err := sql.Open(\"mysql\", \"\"+username+\":\"+password+\"@tcp(\"+connection+\")/zinnion?charset=utf8\")\n\tif err != nil {\n\t\tlogrus.Info(\"No connection with mysql, \", err)\n\t}\n\tMysqlClient = db\n}", "func Init() {\n\tdbType := config.LoadEnv(\"DB_CONNECTION\")\n\tvar connectionString string\n\n\tif dbType == \"mysql\" {\n\t\tconnectionString = fmt.Sprintf(\n\t\t\t\"%s:%s@(%s)/%s?charset=utf8mb4&parseTime=True&loc=Local\",\n\t\t\tconfig.LoadEnv(\"DB_USERNAME\"), config.LoadEnv(\"DB_PASSWORD\"), config.LoadEnv(\"DB_HOST\"), config.LoadEnv(\"DB_NAME\"))\n\t} else if dbType == \"postgres\" {\n\t\tconnectionString = fmt.Sprintf(\n\t\t\t\"host=%s port=%s user=%s dbname=%s sslmode=disable password=%s\",\n\t\t\tconfig.LoadEnv(\"DB_HOST\"), config.LoadEnv(\"DB_PORT\"), config.LoadEnv(\"DB_USERNAME\"), config.LoadEnv(\"DB_NAME\"), config.LoadEnv(\"DB_PASSWORD\"))\n\t}\n\n\tdb, err = gorm.Open(dbType, connectionString)\n\tif err != nil {\n\t\tfmt.Println(err.Error())\n\t}\n}", "func initDB(options Options) (*mgo.Session, error) {\n\tdialInfo := &mgo.DialInfo{\n\t\tAddrs: strings.Split(options.DBHost, \",\"),\n\t\tDatabase: options.DBName,\n\t\tUsername: options.DBUser,\n\t\tPassword: options.DBPassword,\n\t\tDialServer: func(addr *mgo.ServerAddr) (net.Conn, error) {\n\t\t\treturn tls.Dial(\"tcp\", addr.String(), &tls.Config{InsecureSkipVerify: true})\n\t\t},\n\t\tReplicaSetName: \"rs0\",\n\t\tTimeout: time.Second * 10,\n\t}\n\n\tif !options.SSL {\n\t\tdialInfo.ReplicaSetName = \"\"\n\t\tdialInfo.DialServer = nil\n\t}\n\t// connect to the database\n\tsession, err := mgo.DialWithInfo(dialInfo)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\treturn session, err\n}", "func Init() error {\n\tvar err error\n\tif connection == nil {\n\t\tlog.Info(\"[Server-Repository] Init DB connection.\")\n\t\targs := fmt.Sprintf(\"host=postgres port=5432 user=postgres dbname=server sslmode=disable password=iforgot\")\n\t\tconnection, err = gorm.Open(\"postgres\", args)\n\t\tif err != nil {\n\t\t\tlog.WithFields(log.Fields{\"error\": err}).Error(\"[Server-Repository] DB open failed.\")\n\t\t\treturn err\n\t\t}\n\t\t// connection.LogMode(true)\n\t\tconnection.SingularTable(true)\n\t} else {\n\t\tlog.Info(\"[Server-Repository] DB connection exist.\")\n\t}\n\n\tv, found := os.LookupEnv(\"build_mock_servers\")\n\tif found && v == \"yes\" {\n\t\tDropTablesIfExist()\n\t\tCreateTables()\n\t\tlog.Info(\"[Server-Repository] Prepare mock servers start.\")\n\t\tprepareMockServers()\n\t\tlog.Info(\"[Server-Repository] Prepare mock servers done.\")\n\t}\n\treturn nil\n}", "func (sql *SqlConnection) InitDB() error {\n\n\tvar err error\n\n\t// open a db connection //\n\tsql.Db, err = gorm.Open(\"sqlite3\", \"/var/tmp/tennis.db\")\n\tif err != nil {\n\t\tfmt.Println(\"Failed to connect database : \", err.Error())\n\t}\n\tsql.Db.LogMode(true)\n\n\treturn err\n}", "func (p *MongodbProvider) Init(expire int64, connStr string) (err error) {\n\t//Fixed:if connStr is \"mongodb://username:password@localhost/myDataBase\",call mgo.Dial() would panic.\n\t//so delete myDataBase from connStr,then call mgo.Dial();next call session.DB().\n\tvar db string\n\ti := strings.LastIndex(connStr, \"?\")\n\tif i > 0 {\n\t\tconnStr = connStr[:i-1]\n\t}\n\ti = strings.LastIndex(connStr, \"/\")\n\tif i > 0 {\n\t\tif strings.HasPrefix(connStr, \"mongodb://\") {\n\t\t\tif i > len(\"mongodb://\") {\n\t\t\t\tdb = connStr[i+1:]\n\t\t\t\tconnStr = connStr[:i]\n\t\t\t}\n\t\t}\n\t}\n\t//\n\tp.expire = 
expire\n\tp.session, err = mgo.Dial(connStr)\n\tif err != nil {\n\t\treturn err\n\t}\n\tif db == \"\" {\n\t\tvar dbname []string\n\t\tdbname, err = p.session.DatabaseNames()\n\t\tif (len(dbname) == 0) && err != nil {\n\t\t\tpanic(\"Need database name\")\n\t\t}\n\t\tdb = dbname[0]\n\t}\n\tp.c = p.session.DB(db).C(\"session\")\n\treturn p.session.Ping()\n}", "func init() {\r\n\tCandidates = ConnectDB(\"Candidates\")\r\n}", "func (sdb *SolarDb) Init() error {\n\tlogger.Infof(\"Open db connection (db: %q)\", sdb.dbConfig.Name)\n\tdb, err := sql.Open(\"mysql\", fmt.Sprintf(\"%s:%s@tcp(127.0.0.1:3306)/%s\", sdb.dbConfig.User, sdb.dbConfig.Password, sdb.dbConfig.Name))\n\tif err != nil {\n\t\treturn err\n\t}\n\n\t// construct a gorp DbMap\n\tsdb.dbMap = &gorp.DbMap{Db: db, Dialect: gorp.MySQLDialect{Engine: \"InnoDB\", Encoding: \"UTF8\"}}\n\n\tsdb.dbMap.AddTableWithName(models.ChargeControllerMeasurement{}, config.ChargeControllerMeasurementsTable).SetKeys(false, \"Id\")\n\tsdb.dbMap.AddTableWithName(models.ShuntMeasurement{}, config.ShuntMeasurementsTable).SetKeys(false, \"Id\")\n\tsdb.dbMap.AddTableWithName(models.TemperatureMeasurement{}, config.TemperatureMeasurementsTable).SetKeys(false, \"Id\")\n\tsdb.dbMap.AddTableWithName(models.HumidityMeasurement{}, config.HumidityMeasurementsTable).SetKeys(false, \"Id\")\n\n\treturn nil\n}", "func (instance *DBSyncSlave) init() {\n\tif nil != instance {\n\t\tif len(instance.Config.Uuid) > 0 {\n\t\t\tinstance.UID = instance.Config.Uuid\n\t\t} else {\n\t\t\tinstance.UID, _ = lygo_sys.ID()\n\t\t}\n\t\tif nil == instance.client {\n\t\t\tinstance.client = lygo_nio.NewNioClient(instance.Config.Host(), instance.Config.Port())\n\t\t\tinstance.client.OnConnect(instance.doConnect)\n\t\t\tinstance.client.OnDisconnect(instance.doDisconnect)\n\t\t}\n\t}\n}", "func Init() *gorm.DB {\n\tdb, err := gorm.Open(\"sqlite3\", \"test.db\")\n\tif err != nil {\n\t\tfmt.Println(\"db err: \", err)\n\t}\n\tdb.DB().SetMaxIdleConns(10)\n\tdb.LogMode(true)\n\tDB = db\n\treturn DB\n}", "func InitDB() error {\n\tvar clientPath string = path.Join(dbPath, \"clients\")\n\tvar serverPath string = path.Join(dbPath, \"server\")\n\tvar serverInterfacePath string = path.Join(serverPath, \"interfaces.json\")\n\tvar serverKeyPairPath string = path.Join(serverPath, \"keypair.json\")\n\tvar globalSettingPath string = path.Join(serverPath, \"global_settings.json\")\n\tvar userPath string = path.Join(serverPath, \"users.json\")\n\n\t// create directories if they do not exist\n\tif _, err := os.Stat(clientPath); os.IsNotExist(err) {\n\t\tos.MkdirAll(clientPath, os.ModePerm)\n\t}\n\tif _, err := os.Stat(serverPath); os.IsNotExist(err) {\n\t\tos.MkdirAll(serverPath, os.ModePerm)\n\t}\n\n\t// server's interface\n\tif _, err := os.Stat(serverInterfacePath); os.IsNotExist(err) {\n\t\tdb, err := DBConn()\n\t\tif err != nil {\n\t\t\treturn err\n\t\t}\n\n\t\tserverInterface := new(model.ServerInterface)\n\t\tserverInterface.Addresses = []string{defaultServerAddress}\n\t\tserverInterface.ListenPort = defaultServerPort\n\t\tserverInterface.UpdatedAt = time.Now().UTC()\n\t\tdb.Write(\"server\", \"interfaces\", serverInterface)\n\t}\n\n\t// server's key pair\n\tif _, err := os.Stat(serverKeyPairPath); os.IsNotExist(err) {\n\t\tdb, err := DBConn()\n\t\tif err != nil {\n\t\t\treturn err\n\t\t}\n\n\t\tkey, err := wgtypes.GeneratePrivateKey()\n\t\tif err != nil {\n\t\t\treturn scribble.ErrMissingCollection\n\t\t}\n\t\tserverKeyPair := new(model.ServerKeypair)\n\t\tserverKeyPair.PrivateKey = 
key.String()\n\t\tserverKeyPair.PublicKey = key.PublicKey().String()\n\t\tserverKeyPair.UpdatedAt = time.Now().UTC()\n\t\tdb.Write(\"server\", \"keypair\", serverKeyPair)\n\t}\n\n\t// global settings\n\tif _, err := os.Stat(globalSettingPath); os.IsNotExist(err) {\n\t\tdb, err := DBConn()\n\t\tif err != nil {\n\t\t\treturn err\n\t\t}\n\n\t\tpublicInterface, err := GetPublicIP()\n\t\tif err != nil {\n\t\t\treturn err\n\t\t}\n\n\t\tglobalSetting := new(model.GlobalSetting)\n\t\tglobalSetting.EndpointAddress = publicInterface.IPAddress\n\t\tglobalSetting.DNSServers = []string{defaultDNS}\n\t\tglobalSetting.MTU = defaultMTU\n\t\tglobalSetting.PersistentKeepalive = defaultPersistentKeepalive\n\t\tglobalSetting.ConfigFilePath = defaultConfigFilePath\n\t\tglobalSetting.UpdatedAt = time.Now().UTC()\n\t\tdb.Write(\"server\", \"global_settings\", globalSetting)\n\t}\n\n\t// user info\n\tif _, err := os.Stat(userPath); os.IsNotExist(err) {\n\t\tdb, err := DBConn()\n\t\tif err != nil {\n\t\t\treturn err\n\t\t}\n\n\t\tuser := new(model.User)\n\t\tuser.Username = getCredVar(username_env_var, defaultUsername)\n\t\tuser.Password = getCredVar(password_env_var, defaultPassword)\n\t\tdb.Write(\"server\", \"users\", user)\n\t}\n\n\treturn nil\n}", "func Init(popconn *pop.Connection) {\n\tif popconn == nil {\n\t\tpanic(\"Please, provide a valid database connection <nil>\")\n\t}\n\n\tif db == nil {\n\t\tdb = popconn\n\t}\n\n}", "func initMain() {\n\t// init db connection\n\tsalesDB = openMySql(\n\t\tappConfig.GetString(KEY_SALES_DB_HOST),\n\t\tappConfig.GetString(KEY_SALES_DB_USER),\n\t\tappConfig.GetString(KEY_SALES_DB_PWD),\n\t\tappConfig.GetInt(KEY_SALES_DB_MAX_CONNS),\n\t\t\"SALSES\")\n\n\tcoreService = core.New(\n\t\tsalesDB,\n\t)\n\n\tcoreService.Start()\n}", "func dbInit(dbc co.DbConnectionRequest) {\n\tdb, err := sql.Open(\"mysql\", dbc.User+\":\"+dbc.Pwd+\"@tcp(\"+dbc.Server+\":\"+dbc.Port+\")/\")\n\tif err != nil {\n\t\tpanic(err.Error()) // proper error handling instead of panic in your app\n\t}\n\tfor _, stmt := range organizationsSchema {\n\t\tfmt.Println(stmt)\n\t\t_, err := db.Exec(stmt)\n\t\tif err != nil {\n\t\t\tpanic(err.Error()) // proper error handling instead of panic in your app\n\t\t}\n\t}\n\tdb.Close()\n\treturn\n}", "func init() {\n\tRepoCreateDatabaseConnection(DatabaseConnection{Name: \"Write presentation\"})\n\tRepoCreateDatabaseConnection(DatabaseConnection{Name: \"Host meetup\"})\n}", "func Init(CDB config.DatabaseConfig) (db *DataBase, err error) {\n\n\t// for local launch\n\tif os.Getenv(CDB.URL) == \"\" {\n\t\tos.Setenv(CDB.URL, \"user=db_forum_user password=db_forum_password dbname=db_forum sslmode=disable\")\n\t}\n\n\tvar database *sql.DB\n\tif database, err = sql.Open(CDB.DriverName, os.Getenv(CDB.URL)); err != nil {\n\t\tutils.PrintDebug(\"database/Init cant open:\" + err.Error())\n\t\treturn\n\t}\n\n\tdb = &DataBase{\n\t\tDb: database,\n\t}\n\tdb.Db.SetMaxOpenConns(CDB.MaxOpenConns)\n\n\tif err = db.Db.Ping(); err != nil {\n\t\tutils.PrintDebug(\"database/Init cant access:\" + err.Error())\n\t\treturn\n\t}\n\tutils.PrintDebug(\"database/Init open\")\n\tif err = db.CreateTables(); err != nil {\n\t\treturn\n\t}\n\treturn\n}", "func DBInit(file string) *PSQLService {\n\tcfg := config.NewConfig(file)\n\ts := cfg.Service\n\tpsql := fmt.Sprintf(psqlInfo, s.Host, s.Port, s.User, s.Password, s.Name)\n\tdb, err := sql.Open(driverName, psql)\n\tif err != nil {\n\t\tlog.Printf(\"Error opening SQL db: %s\", err.Error())\n\t}\n\terr = db.Ping()\n\tif err != nil 
{\n\t\tlog.Printf(\"Error pingng SQL db: %s\", err.Error())\n\t}\n\treturn &PSQLService{\n\t\tDB: db,\n\t}\n}", "func Init(debug bool) {\n\t// Connect to the database\n\tInitDB(debug)\n}", "func initDatabase() {\n\n\tpsqlInfo := fmt.Sprintf(\"host=%s port=%d user=%s password=%s dbname=%s sslmode=disable\", host, port, user, password, dbname)\n\n\tvar err error\n\tdbClient, err = sqlx.Open(\"postgres\", psqlInfo)\n\n\tif err != nil {\n\t\tpanic(err)\n\t}\n\tdefer dbClient.Close()\n\n\terr = dbClient.Ping()\n\tif err != nil {\n\t\tpanic(err)\n\t}\n\n\tfmt.Println(\"Successfully connected!\")\n}", "func (db *Database) Init() {\n\tdata, dbErr := tiedot.OpenDB(db.Location)\n\tif dbErr != nil {\n\t\tlog.Error(dbConnectionError{\n\t\t\tmsg: \"Failed to connect to the tiedot database\",\n\t\t\terr: dbErr,\n\t\t})\n\t}\n\n\t// Set up the collections - throw away the error for now.\n\tfor _, c := range db.Collections {\n\t\tdata.Create(c.Name)\n\t\tdata.Use(c.Name).Index(c.Index)\n\t}\n\n\tdb.Data = data\n}", "func Init() error {\n\tvar err error\n\tif DB, err = sqlx.Open(\n\t\t\"postgres\",\n\t\t\"postgres://bokwoon@localhost:5433/orbitaldb_dev?sslmode=disable\",\n\t); err != nil {\n\t\treturn erro.Wrap(err)\n\t}\n\treturn nil\n}", "func (m *MongoDB) Init() (err error) {\n\n\tif m.client, err = m.connect(); err != nil {\n\t\tfmt.Printf(\"[ERROR]: Can't connect to the host: [%s]. [Err Detail: %s]\\n \", m.Host, err)\n\t}\n\n\treturn\n}", "func init() {\n\tlog.DebugMode = cuxs.IsDebug()\n\tlog.Log = log.New()\n\n\tif e := cuxs.DbSetup(); e != nil {\n\t\tpanic(e)\n\t}\n}", "func initDb(username, password, endpoint, port, database string) (*sql.DB, error) {\n\t// Create url for connection\n\turl := fmt.Sprintf(\"%s:%s@tcp(%s:%s)/%s?parseTime=true\", username, password, endpoint, port, database)\n\n\t// Open connection to SQL DB\n\tdb, err := sql.Open(\"mysql\", url)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\n\t// Test database connection\n\terr = db.Ping()\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\n\treturn db, err\n}", "func Init(conn *gorm.DB) {\n\tdb = conn\n}", "func (a *acceptor) init() error {\n\tinstanceID, err := a.state.load()\n\tif err != nil {\n\t\tlNLErr(\"Load State fail, error: %v\", err)\n\t\treturn err\n\t}\n\n\tif instanceID == 0 {\n\t\tlPLGImp(a.conf.groupIdx, \"Empty database\")\n\t}\n\n\ta.setInstanceID(instanceID)\n\n\tlPLGImp(a.conf.groupIdx, \"OK\")\n\n\treturn nil\n}", "func (r *RavelDatabase) Init(path string) error {\n\tvar err error\n\n\toptions := badger.DefaultOptions(path)\n\toptions.Logger = nil\n\toptions.SyncWrites = true\n\n\tr.Conn, err = badger.Open(options)\n\tif err != nil {\n\t\treturn err\n\t}\n\treturn nil\n}", "func Init() *gorm.DB {\n\tdb, err := gorm.Open(\"mysql\", DbURL(BuildDBConfig())) //gorm.Open(mysql) from mysql //(postgres) for postgres\n\tif err != nil {\n\t\tfmt.Println(\"db err: \", err)\n\t}\n\tdb.DB().SetMaxIdleConns(10)\n\t//db.LogMode(true)\n\tDB = db\n\treturn DB\n}", "func (g *Gateway) init(bootstrap bool) error {\n\tlogger.Debugf(\"Initializing database gateway\")\n\tg.stopCh = make(chan struct{})\n\n\tinfo, err := loadInfo(g.db, g.networkCert)\n\tif err != nil {\n\t\treturn fmt.Errorf(\"Failed to create raft factory: %w\", err)\n\t}\n\n\tdir := filepath.Join(g.db.Dir(), \"global\")\n\tif shared.PathExists(filepath.Join(dir, \"logs.db\")) {\n\t\treturn fmt.Errorf(\"Unsupported upgrade path, please first upgrade to LXD 4.0\")\n\t}\n\n\t// If the resulting raft instance is not nil, it means that this node\n\t// should serve as 
database node, so create a dqlite driver possibly\n\t// exposing it over the network.\n\tif info != nil {\n\t\t// Use the autobind feature of abstract unix sockets to get a\n\t\t// random unused address.\n\t\tlistener, err := net.Listen(\"unix\", \"\")\n\t\tif err != nil {\n\t\t\treturn fmt.Errorf(\"Failed to autobind unix socket: %w\", err)\n\t\t}\n\n\t\tg.bindAddress = listener.Addr().String()\n\t\t_ = listener.Close()\n\n\t\toptions := []dqlite.Option{\n\t\t\tdqlite.WithBindAddress(g.bindAddress),\n\t\t}\n\n\t\tif info.Address == \"1\" {\n\t\t\tif info.ID != 1 {\n\t\t\t\tpanic(\"unexpected server ID\")\n\t\t\t}\n\n\t\t\tg.memoryDial = dqliteMemoryDial(g.bindAddress)\n\t\t\tg.store.inMemory = client.NewInmemNodeStore()\n\t\t\terr = g.store.Set(context.Background(), []client.NodeInfo{info.NodeInfo})\n\t\t\tif err != nil {\n\t\t\t\treturn fmt.Errorf(\"Failed setting node info in store: %w\", err)\n\t\t\t}\n\t\t} else {\n\t\t\tgo runDqliteProxy(g.stopCh, g.bindAddress, g.acceptCh)\n\t\t\tg.store.inMemory = nil\n\t\t\toptions = append(options, dqlite.WithDialFunc(g.raftDial()))\n\t\t}\n\n\t\tserver, err := dqlite.New(\n\t\t\tinfo.ID,\n\t\t\tinfo.Address,\n\t\t\tdir,\n\t\t\toptions...,\n\t\t)\n\t\tif err != nil {\n\t\t\treturn fmt.Errorf(\"Failed to create dqlite server: %w\", err)\n\t\t}\n\n\t\t// Force the correct configuration into the bootstrap node, this is needed\n\t\t// when the raft node already has log entries, in which case a regular\n\t\t// bootstrap fails, resulting in the node containing outdated configuration.\n\t\tif bootstrap {\n\t\t\tlogger.Debugf(\"Bootstrap database gateway ID:%v Address:%v\",\n\t\t\t\tinfo.ID, info.Address)\n\t\t\tcluster := []dqlite.NodeInfo{\n\t\t\t\t{ID: uint64(info.ID), Address: info.Address},\n\t\t\t}\n\n\t\t\terr = server.Recover(cluster)\n\t\t\tif err != nil {\n\t\t\t\treturn fmt.Errorf(\"Failed to recover database state: %w\", err)\n\t\t\t}\n\t\t}\n\n\t\terr = server.Start()\n\t\tif err != nil {\n\t\t\treturn fmt.Errorf(\"Failed to start dqlite server: %w\", err)\n\t\t}\n\n\t\tg.lock.Lock()\n\t\tg.server = server\n\t\tg.info = info\n\t\tg.lock.Unlock()\n\t} else {\n\t\tg.lock.Lock()\n\t\tg.server = nil\n\t\tg.info = nil\n\t\tg.store.inMemory = nil\n\t\tg.lock.Unlock()\n\t}\n\n\tg.lock.Lock()\n\tg.store.onDisk = client.NewNodeStore(\n\t\tg.db.DB(), \"main\", \"raft_nodes\", \"address\")\n\tg.lock.Unlock()\n\n\treturn nil\n}", "func Init(dbpath string) {\n\tdatabase.db, err = sql.Open(\"sqlite3\", dbpath+\"?loc=auto&parseTime=true\")\n\t// database.db, err = sql.Open(\"mysql\", \"Username:Password@tcp(Host:Port)/standardnotes?parseTime=true\")\n\n\tif err != nil {\n\t\tlog.Fatal(err)\n\t}\n\tif database.db == nil {\n\t\tlog.Fatal(\"db nil\")\n\t}\n\tdatabase.createTables()\n}", "func (*ConnectDB) InitDB() framework.Mssql {\n\n\tvar user = framework.SA{\n\t\tUsername: \"sdxonestop\",\n\t\tPassword: \"baiyun+=1992\",\n\t}\n\n\tdb := framework.Mssql{\n\t\tDataSource: \"BAIYUN-MOBL1\",\n\t\tDatabase: \"SdxOneStopDB\",\n\t\t// windwos: true 为windows身份验证,false 必须设置sa账号和密码\n\t\tWindows: true,\n\t\tSa: user,\n\t}\n\t// 连接数据库\n\terr := db.Open()\n\tif err != nil {\n\t\tfmt.Println(\"sql open:\", err)\n\t\treturn db\n\t} else {\n\t\tfmt.Println(\"Succeed to open DB...\")\n\t}\n\t//defer db.Close()\n\n\treturn db\n}", "func InitDB() {\n\tDBOnce.Do(func() {\n\t\tvar err error\n\t\tDBReader, err = gorm.Open(\"mysql\", Config.DB.MysqlServerRead)\n\t\tif err != nil {\n\t\t\tfmt.Printf(\"Init DB error : %v\", 
err)\n\t\t\treturn\n\t\t}\n\t\tDBReader.LogMode(Config.DB.LogFlag)\n\t\tDBWriter, err = gorm.Open(\"mysql\", Config.DB.MysqlServerWrite)\n\t\tif err != nil {\n\t\t\tfmt.Printf(\"Init DB error : %v\", err)\n\t\t\treturn\n\t\t}\n\t\tDBReader.LogMode(Config.DB.LogFlag)\n\t\tDBWriter.LogMode(Config.DB.LogFlag)\n\t})\n\n}", "func init() {\n\trouter = chi.NewRouter()\n\trouter.Use(middleware.Recoverer)\n\n\tdbSource := fmt.Sprintf(\"root:%s@tcp(%s:%s)/%s?charset=utf8\", dbPass, dbHost, dbPort, dbName)\n\n\tvar err error\n\tdb, err = sql.Open(\"mysql\", dbSource)\n\n\tcatch(err)\n}", "func InitDatabase() *Database {\n\t// eg. \"postgres://postgres:postgres@localhost/postgres?sslmode=disable\"\n\t// TODO: enable SSL on DB\n\tconn, err := sql.Open(\"postgres\", os.Getenv(\"PG_CONNECTION_STRING\"))\n\tif err != nil {\n\t\tlog.Fatal(err) // kill server if we can't use DB on startup\n\t}\n\treturn &Database{\n\t\tconn: conn,\n\t}\n}", "func init() {\n\tloadTheEnv()\n\tcreateDBInstance()\n\tloadRepDB()\n\tstartKafka()\n}", "func dbInit() {\n\t//User Input\n\tusernm := creds.UserName\n\tpass := creds.Password\n\tDBName := creds.DBName\n\tlit.Debug(\"Hit dbInit \" + DBName)\n\tlog.Println(usernm + \":\" + pass + \"@tcp(127.0.0.1:3306)/\")\n\n\tdb, err := sql.Open(\"mysql\", usernm+\":\"+pass+\"@tcp(127.0.0.1:3306)/\")\n\terr = db.Ping() //Need to ping to generate connection and trigger err\n\tif err != nil {\n\t\tlit.Error(\"Error in Init Log-in\")\n\t\tcreds = getCreds()\n\t\tfile, _ := json.MarshalIndent(creds, \"\", \"\\t\")\n\t\t_ = ioutil.WriteFile(\"configs/creds.json\", file, 0644)\n\t} else {\n\t\tlit.Debug(\"Attempt DB Creation\")\n\t\t_, err = db.Exec(\"CREATE DATABASE \" + DBName)\n\t\tif err != nil {\n\t\t\tlog.Println(err.Error())\n\t\t} else {\n\t\t\tlog.Println(\"Database Created:\", \"\\\"\"+DBName+\"\\\"\")\n\t\t}\n\t\tdb.Exec(\"USE \" + DBName)\n\t\tstmt, err := db.Prepare(\"CREATE TABLE `employee` (`id` int(6) unsigned NOT NULL AUTO_INCREMENT,`name` varchar(30) NOT NULL,`city` varchar(30) NOT NULL,PRIMARY KEY (`id`));\")\n\t\tif err != nil {\n\t\t\tlog.Println(err.Error())\n\t\t} else {\n\t\t\t_, err = stmt.Exec()\n\t\t\tif err != nil {\n\t\t\t\tlog.Println(err.Error())\n\t\t\t} else {\n\t\t\t\tlog.Println(\"Table Created\", \"\\\"\"+\"employees\"+\"\\\"\")\n\t\t\t}\n\t\t}\n\t}\n}", "func (Server *Server) Initialize(Dbdriver, DbUser, DbPassword, DbPort, DbHost, DbName string) {\n\n\tvar err error\n\tDBURL := fmt.Sprintf(\"%s:%s@tcp(%s:%s)/%s?charset=utf8&parseTime=True&loc=Local\", DbUser, DbPassword, DbHost, DbPort, DbName)\n\tServer.DBServer, err = gorm.Open(Dbdriver, DBURL)\n\tif err != nil {\n\t\tfmt.Printf(\"Cannot connect to %s database\", Dbdriver)\n\t\tlog.Fatal(\"This is the error:\", err)\n\t} else {\n\t\tfmt.Printf(\"We are connected to the %s database\", Dbdriver)\n\t}\n\n\tServer.DBServer.Debug().AutoMigrate(&Data{})\n}", "func Initialize() {\n\tdatabaseHost := os.Getenv(\"DB_HOST\")\n\tdatabasePort := os.Getenv(\"DB_PORT\")\n\tdatabaseUser := os.Getenv(\"DB_USER\")\n\tdatabasePass := os.Getenv(\"DB_PASS\")\n\tdatabaseName := os.Getenv(\"DB_NAME\")\n\n\tpostgresConnectionURL := fmt.Sprintf(\"postgres://%s:%s@%s:%s/%s?sslmode=disable\", databaseUser, databasePass, databaseHost, databasePort, databaseName)\n\n\tvar err error\n\tdb, err = sql.Open(\"postgres\", postgresConnectionURL)\n\tif err != nil {\n\t\tpanic(err)\n\t}\n\t//defer db.Close()\n\n\terr = db.Ping()\n\tif err != nil {\n\t\tpanic(err)\n\t}\n\n\tmaxOpenConn, err := 
strconv.Atoi(os.Getenv(\"DB_MAX_OPEN_CONN\"))\n\tif err != nil {\n\t\tpanic(err)\n\t}\n\tmaxIdleConn, err := strconv.Atoi(os.Getenv(\"DB_MAX_IDLE_CONN\"))\n\tif err != nil {\n\t\tpanic(err)\n\t}\n\n\tdb.SetMaxOpenConns(maxOpenConn)\n\tdb.SetMaxIdleConns(maxIdleConn)\n\n\tfmt.Println(\"Database connected!\")\n\n}", "func init() {\n\t// Open a connection to GORM\n\tdb, err := gorm.Open(\"sqlite3\", \"shop.db\")\n\tif err != nil {\n\t\tpanic(\"Failed to connect database\")\n\t}\n\n\tDB = db\n\n\tDB.AutoMigrate(models.Supply{})\n}", "func DBInit() {\n\t// Mode = \"PRODUCTION\"\n\t// if Mode == \"PRODUCTION\" {\n\t// \tDatabaseURL = \"test.sqlite3\"\n\t// \tDatabaseName = \"sqlite3\"\n\t// } else if Mode == \"DEPLOY\" {\n\tDatabaseURL = os.Getenv(\"DATABASE_URL\")\n\tDatabaseName = \"postgres\"\n\t// }\n\n\tdb, err := gorm.Open(DatabaseName, DatabaseURL)\n\tif err != nil {\n\t\tpanic(\"We can't open database!(dbInit)\")\n\t}\n\t//残りのモデルはまだ入れてない。\n\tdb.AutoMigrate(&model.Post{})\n\tdb.AutoMigrate(&model.User{})\n\tdb.AutoMigrate(&model.Room{})\n\tdefer db.Close()\n}", "func (meta *Meta) init() {\n\tmeta.client = utils.CreateMongoDB(dbConfig.Str(\"address\"), log)\n\tmeta.database = meta.client.Database(dbConfig.Str(\"db\"))\n\tmeta.collection = meta.database.Collection(metaCollection)\n}", "func DBInit() *gorm.DB {\n\te := godotenv.Load() //Load .env file\n\tif e != nil {\n\t\tfmt.Print(e)\n\t}\n\n\thost := os.Getenv(\"DB_HOST\")\n\tport := os.Getenv(\"DB_PORT\")\n\tpassword := os.Getenv(\"DB_PASSWORD\")\n\tdbUser := os.Getenv(\"DB_USER\")\n\tdbName := os.Getenv(\"DB_NAME\")\n\tdbURI := fmt.Sprintf(\"%s:%s@tcp(%s:%s)/%s?charset=utf8&parseTime=True&loc=%s\", dbUser, password, host, port, dbName, \"Asia%2FJakarta\")\n\n\tdb, err := gorm.Open(\"mysql\", dbURI)\n\tif err != nil {\n\t\tlog.Panicf(\"failed to connect to database with err : %s \", err)\n\t}\n\tdb.DB().SetConnMaxLifetime(time.Minute * 5)\n\tdb.DB().SetMaxIdleConns(0)\n\tdb.DB().SetMaxOpenConns(5)\n\n\tdb.LogMode(true)\n\n\tdB = db\n\tdb.AutoMigrate(\n\t\t&domain.Transaction{},\n\t\t&domain.TransactionDetail{},\n\t\t&domain.Cart{},\n\t\t&domain.CartDetail{},\n\t\t&domain.Product{},\n\t\t&domain.StatusCode{},\n\t)\n\treturn dB\n}", "func InitDb(host string, user string, port int, sslmode string, dbName string, password string) (interfaces.Database, error) {\n\tconnStr := fmt.Sprintf(\n\t\t\"host=%s user=%s port=%d sslmode=%s dbname=%s\",\n\t\thost, user, port, sslmode, dbName,\n\t)\n\tif password != \"\" {\n\t\tconnStr += fmt.Sprintf(\" password=%s\", password)\n\t}\n\tdb, err := sql.Open(\"postgres\", connStr)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\n\tdb.SetMaxIdleConns(5)\n\tdb.SetMaxOpenConns(10)\n\n\tdbmap := &gorp.DbMap{\n\t\tDb: db,\n\t\tDialect: gorp.PostgresDialect{},\n\t\tTypeConverter: util.TypeConverter{},\n\t}\n\n\tdbmap.AddTableWithName(Game{}, \"games\").SetKeys(true, \"ID\")\n\tdbmap.AddTableWithName(Player{}, \"players\").SetKeys(true, \"ID\")\n\tdbmap.AddTableWithName(EncryptedPlayer{}, \"encrypted_players\")\n\tdbmap.AddTableWithName(Clan{}, \"clans\").SetKeys(true, \"ID\")\n\tdbmap.AddTableWithName(Membership{}, \"memberships\").SetKeys(true, \"ID\")\n\tdbmap.AddTableWithName(Hook{}, \"hooks\").SetKeys(true, \"ID\")\n\n\t// dbmap.TraceOn(\"[gorp]\", log.New(os.Stdout, \"KHAN:\", log.Lmicroseconds))\n\treturn egorp.New(dbmap, dbName), nil\n}", "func initDB() {\n\tvar err error\n\tpsqlInfo := fmt.Sprintf(\"host=%s port=%d user=%s \"+\n\t\t\"password=%s dbname=%s sslmode=disable\",\n\t\thost, port, user, password, 
dbname)\n\tdb, err = sql.Open(\"postgres\", psqlInfo)\n\tif err != nil {\n\t\tpanic(err)\n\t}\n\n}", "func InitDB(driver, connectionstring string) error {\n\tdb, err := gorm.Open(driver, connectionstring)\n\tif err != nil {\n\t\treturn err\n\t}\n\tsetDB(db)\n\treturn nil\n}", "func InitDatabase(dsn string) error {\n\tfmt.Println(\"Init db connection\")\n\t// config := mysql.NewConfig()\n\t// config.User = username\n\t// config.Passwd = password\n\t// config.Net = protocol\n\t// config.Addr = host\n\t// config.DBName = database\n\t// config.Params = map[string]string{\n\t// \t\"charset\": charset,\n\t// \t\"parseTime\": \"True\",\n\t// }\n\tdb, err := gorm.Open(\"sqlite3\", dsn)\n\tif err != nil {\n\t\tfmt.Println(err)\n\t\treturn err\n\t}\n\tDbConn = db\n\treturn nil\n}", "func init() {\n\t// This function will be executed before everything else.\n\t// Do some initialization here.\n\tSBC = data.NewBlockChain()\n\t// When server works\n\t// Peers = data.NewPeerList(Register(), 32)\n\t// While server doesn't work -> use port as id\n\tid, _ := strconv.ParseInt(os.Args[1], 10, 64)\n\tPeers = data.NewPeerList(int32(id), 32)\n\tifStarted = true\n}", "func DBInit() *gorm.DB {\n\t//db, err := gorm.Open(\"mysql\", \"root:@tcp(128.199.211.144:3306)/godb?charset=utf8&parseTime=True&loc=Local\")\n\tdb, err := gorm.Open(\"mysql\",\"root:orion2402@tcp(localhost:3306)/popfren?charset=utf8&parseTime=True&loc=Local\")\n\tif err != nil {\n\t\tpanic(\"failed to connect to database\")\n\t}\n\n\tdb.AutoMigrate(structs.Person{})\n\treturn db\n}", "func Init() {\n\tvar err error\n\tdsn := \"host=localhost user=postgres password=postgres dbname=goMux_db port=5430 sslmode=disable\"\n\tDB, err = gorm.Open(postgres.Open(dsn), &gorm.Config{})\n\tif err != nil {\n\t\tfmt.Println(err)\n\t\tlog.Fatal(err)\n\t}\n\tlog.Println(\"Connect to the DB\")\n\t// DB.AutoMigrate(&models.User{})\n}", "func Init(path string) *DB {\n\tonce.Do(func() {\n\t\tsql = (&DB{address: path}).init()\n\t})\n\treturn sql\n}", "func Init() {\n\tdbConn, err := mysql.OpenConnection()\n\tif err != nil {\n\t\tpanic(\"Failed to open connection to remote database.\")\n\t}\n\tmysql.InitDB(dbConn)\n\n\tgo reaper.StartReapingScheduler(1 * time.Minute)\n}", "func InitDB() {\n\tvar err error\n\tvar connectionString = fmt.Sprintf(\"%v:%v@%v/%v\", dbUser, dbPassword, dbHost, dbName)\n\tlog.Println(\"Connection String: \" + connectionString)\n\tdb, err = sql.Open(\"mysql\", connectionString)\n\tdbmap = &gorp.DbMap{Db: db, Dialect: gorp.MySQLDialect{Engine: \"InnoDB\", Encoding: \"UTF8\"}}\n\tdbmap.TraceOn(\"[gorp]\", log.New(os.Stdout, \"myapp:\", log.Lmicroseconds))\n\tif err != nil {\n\t\tlog.Println(\"Failed to connect to database: \")\n\t\tlog.Panic(err)\n\t} else {\n\t\terr = db.Ping()\n\n\t\tif err != nil {\n\t\t\tlog.Println(\"Failed to ping database: \")\n\t\t\tlog.Panic(err)\n\t\t} else {\n\t\t\tlog.Println(\"Database connected.\")\n\t\t}\n\t}\n\n\t_ = dbmap.AddTableWithName(Article{}, \"flat_articles\").SetKeys(false, \"ID\")\n\tdbmap.CreateTablesIfNotExists()\n}", "func Init(st int64) {\n\tStartTime =st\n\t//连接数据库做初始化\n\tdbhost := beego.AppConfig.String(\"db.host\")\n\tdbuser := beego.AppConfig.String(\"db.user\")\n\tdbpassword := beego.AppConfig.String(\"db.password\")\n\tdbname := beego.AppConfig.String(\"db.name\")\n\tdbport := beego.AppConfig.String(\"db.port\")\n\ttimezone := beego.AppConfig.String(\"db.timezone\")\n\tif dbport == \"\"{\n\t\tdbport = \"3306\"\n\t}\n\tds := 
dbuser+\":\"+dbpassword+\"@tcp(\"+dbhost+\":\"+dbport+\")/\"+dbname+\"?charset=utf8\"\n\t//Asia/Shanghai 编码后Asia%2FShanghai\n\tif timezone !=\"\"{\n\t\t//URL编码处理处理后“Asia%2FShanghai”,可以被识别\n\t\tds = ds+\"&loc=\"+url.QueryEscape(timezone)\n\t\t//ds = ds + \"&loc=\" + timezone\n\t\tfmt.Println(\"dssssss\",ds)\n\n\t}\n\t//连接数据库\n\terr := orm.RegisterDataBase(\"default\", \"mysql\", ds, 50, 30)\n\tif err != nil{\n\t\tfmt.Println(err)\n\t\treturn\n\t}\n\t//注册model\n\torm.RegisterModel(\n\t\tnew(Admin),\n\t\tnew(Auth),\n\t\tnew(Ban),\n\t\tnew(Role),\n\t\tnew(RoleAuth),\n\t\tnew(TaskServer),\n\t\tnew(ServerGroup),\n\t\tnew(Task),\n\t\tnew(Group),\n\t\tnew(TaskLog),\n\t\t)\n\t//调试阶段 ,可以设置执行sql语句\n\torm.Debug = true\n\n}", "func (i *API) InitDB(purge bool) error {\n\tif purge {\n\t\ti.purgeDB()\n\t}\n\treturn i.openDB()\n}", "func (ms *MySQL) Init() {\n\t//db.Initialize(ms.Config.DbDriver(), ms.Config.DbUser(), ms.Config.DbPass(), ms.Config.DbName())\n\tdb.Initialize(ms.Config.DbDriver(), ms.Config.Conn())\n\tlog.Info(\"db connected\")\n}", "func InitDB() {\n\tconnStr := \"user=osama dbname=hackernews password=ibnjunaid \"\n\t// Use root:dbpass@tcp(172.17.0.2)/hackernews, if you're using Windows.\n\tdb, err := sql.Open(\"postgres\", connStr)\n\tif err != nil {\n\t\tlog.Panic(err)\n\t}\n\tDb = db\n\n}", "func Init(cfg config.Database) {\n\tdsn := fmt.Sprintf(\"%s:%s@%s(%s:%d)/%s?charset=%s&loc=%s\",\n\t\tcfg.Username, cfg.Password, cfg.Protocol, cfg.Host, cfg.Port, cfg.Dbname, cfg.Charset, cfg.Loc)\n\n\tvar err error\n\tdb, err = sqlx.Open(\"mysql\", dsn)\n\tif err != nil {\n\t\tpanic(\"Error occurred in opening database.\")\n\t}\n}", "func init() {\n\tvar (\n\t\terr error\n\t)\n\tif RDB, err = GetRDB(); err != nil {\n\t\tlogrus.Fatal(err)\n\t}\n\tlogrus.Infoln(\"redis init success !\")\n}", "func (server *Server) Initialize(Dbdriver, DbUser, DbPassword, DbPort, DbHost, DbName string) *gorm.DB {\n\tvar err error\n\n\tif Dbdriver == \"postgres\" {\n\t\tDBURL := fmt.Sprintf(\"host=%s port=%s user=%s dbname=%s sslmode=disable password=%s\", DbHost, DbPort, DbUser, DbName, DbPassword)\n\t\tserver.DB, err = gorm.Open(Dbdriver, DBURL)\n\t\tif err != nil {\n\t\t\tlog.Fatalf(\"Cannot connect to %s database\", err)\n\n\t\t} else {\n\t\t\tlog.Printf(\"We are connected to the %s database\", Dbdriver)\n\t\t}\n\t}\n\n\treturn server.DB\n}", "func InitSelfDB() *sql.DB {\n\treturn openDB(viper.GetString(\"db.username\"),\n\t\tviper.GetString(\"db.password\"),\n\t\tviper.GetString(\"db.addr\"),\n\t\tviper.GetString(\"db.name\"))\n}", "func (d *DBManager) Init() (err error) {\n\tif d.db, err = initDatabase(); err != nil {\n\t\treturn err\n\t}\n\n\treturn prepareSQL(d)\n}" ]
[ "0.78455776", "0.78455776", "0.77536654", "0.7595937", "0.75200224", "0.7362178", "0.730094", "0.7293126", "0.7170423", "0.7073334", "0.70680016", "0.7013162", "0.70112926", "0.70082915", "0.6979019", "0.69747376", "0.69691765", "0.69362575", "0.6927551", "0.6926103", "0.69203436", "0.6915067", "0.68954045", "0.68925864", "0.68879664", "0.68758315", "0.6853676", "0.68472284", "0.681913", "0.6801632", "0.67991513", "0.6798766", "0.6797038", "0.6788145", "0.6787508", "0.67278206", "0.66804224", "0.6667953", "0.6663081", "0.66596866", "0.66481084", "0.66366297", "0.66319364", "0.6631211", "0.6625238", "0.6585267", "0.657935", "0.6577066", "0.6556754", "0.6550788", "0.65505165", "0.65493226", "0.6548096", "0.65462184", "0.6545117", "0.65356076", "0.6531901", "0.65288323", "0.6526511", "0.65224546", "0.65213907", "0.6506902", "0.64999914", "0.64845103", "0.64673764", "0.6466006", "0.64644545", "0.64643687", "0.64598376", "0.6438295", "0.64321256", "0.64263654", "0.64260685", "0.64241236", "0.6421673", "0.6419761", "0.640605", "0.640051", "0.63964874", "0.6376184", "0.63706404", "0.6370055", "0.63695294", "0.63608223", "0.6355863", "0.63535714", "0.635007", "0.63377833", "0.63363445", "0.6331033", "0.63145506", "0.631002", "0.6305666", "0.6299447", "0.62900114", "0.62889504", "0.6271703", "0.6271038", "0.6270542", "0.62675107" ]
0.6438357
69
Start the fabric-ca server
func (s *Server) Start() (err error) {
	log.Infof("Starting server in home directory: %s", s.HomeDir)

	s.serveError = nil

	if s.listener != nil {
		return errors.New("server is already started")
	}

	// Initialize the server
	err = s.init(false)
	if err != nil {
		err2 := s.closeDB()
		if err2 != nil {
			log.Errorf("Close DB failed: %s", err2)
		}
		return err
	}

	// Register http handlers
	s.registerHandlers()

	log.Debugf("%d CA instance(s) running on server", len(s.caMap))

	// Start operations server
	err = s.startOperationsServer()
	if err != nil {
		return err
	}

	err = s.Operations.RegisterChecker("server", s)
	if err != nil {
		return nil
	}

	for _, ca := range s.caMap {
		startNonceSweeper(ca)
	}

	// Start listening and serving
	err = s.listenAndServe()
	if err != nil {
		err2 := s.closeDB()
		if err2 != nil {
			log.Errorf("Close DB failed: %s", err2)
		}
		return err
	}

	return nil
}
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "func startServer(config types.Config, v *Vibranium) *grpc.Server {\n\ts, err := net.Listen(\"tcp\", config.Bind)\n\tif err != nil {\n\t\tlog.Fatal(err)\n\t}\n\n\topts := []grpc.ServerOption{grpc.MaxConcurrentStreams(100)}\n\tgrpcServer := grpc.NewServer(opts...)\n\tpb.RegisterCoreRPCServer(grpcServer, v)\n\tgo grpcServer.Serve(s)\n\tlog.Info(\"Cluster started successfully.\")\n\treturn grpcServer\n}", "func RunServer(parentCtx context.Context, eaaCtx *eaaContext) error {\n\tvar err error\n\n\teaaCtx.serviceInfo = services{}\n\teaaCtx.consumerConnections = consumerConns{}\n\teaaCtx.subscriptionInfo = NotificationSubscriptions{}\n\n\tif eaaCtx.certsEaaCa.rca, err = InitRootCA(eaaCtx.cfg.Certs); err != nil {\n\t\tlog.Errf(\"CA cert creation error: %#v\", err)\n\t\treturn err\n\t}\n\n\tif eaaCtx.certsEaaCa.eaa, err = InitEaaCert(eaaCtx.cfg.Certs); err != nil {\n\t\tlog.Errf(\"EAA cert creation error: %#v\", err)\n\t\treturn err\n\t}\n\n\tcertPool, err := CreateAndSetCACertPool(eaaCtx.cfg.Certs.CaRootPath)\n\tif err != nil {\n\t\tlog.Errf(\"Cert Pool error: %#v\", err)\n\t}\n\n\trouter := NewEaaRouter(eaaCtx)\n\tserver := &http.Server{\n\t\tAddr: eaaCtx.cfg.TLSEndpoint,\n\t\tTLSConfig: &tls.Config{\n\t\t\tClientAuth: tls.RequireAndVerifyClientCert,\n\t\t\tClientCAs: certPool,\n\t\t\tMinVersion: tls.VersionTLS12,\n\t\t\tCipherSuites: []uint16{tls.TLS_ECDHE_ECDSA_WITH_AES_128_GCM_SHA256},\n\t\t},\n\t\tHandler: router,\n\t}\n\n\tauthRouter := NewAuthRouter(eaaCtx)\n\tserverAuth := &http.Server{Addr: eaaCtx.cfg.OpenEndpoint,\n\t\tHandler: authRouter}\n\n\tlis, err := net.Listen(\"tcp\", eaaCtx.cfg.TLSEndpoint)\n\tif err != nil {\n\n\t\tlog.Errf(\"net.Listen error: %+v\", err)\n\n\t\te, ok := err.(*os.SyscallError)\n\t\tif ok {\n\t\t\tlog.Errf(\"net.Listen error: %+v\", e.Error())\n\t\t}\n\t\treturn err\n\t}\n\n\tstopServerCh := make(chan bool, 2)\n\n\tgo func(stopServerCh chan bool) {\n\t\t<-parentCtx.Done()\n\t\tlog.Info(\"Executing graceful stop\")\n\t\tif err = server.Close(); err != nil {\n\t\t\tlog.Errf(\"Could not close EAA server: %#v\", err)\n\t\t}\n\t\tif err = serverAuth.Close(); err != nil {\n\t\t\tlog.Errf(\"Could not close Auth server: %#v\", err)\n\t\t}\n\t\tlog.Info(\"EAA server stopped\")\n\t\tlog.Info(\"Auth server stopped\")\n\t\tstopServerCh <- true\n\t}(stopServerCh)\n\n\tdefer log.Info(\"Stopped EAA serving\")\n\n\tgo func(stopServerCh chan bool) {\n\t\tlog.Infof(\"Serving Auth on: %s\", eaaCtx.cfg.OpenEndpoint)\n\t\tif err = serverAuth.ListenAndServe(); err != nil {\n\t\t\tlog.Info(\"Auth server error: \" + err.Error())\n\t\t}\n\t\tlog.Errf(\"Stopped Auth serving\")\n\t\tstopServerCh <- true\n\t}(stopServerCh)\n\n\tlog.Infof(\"Serving EAA on: %s\", eaaCtx.cfg.TLSEndpoint)\n\tutil.Heartbeat(parentCtx, eaaCtx.cfg.HeartbeatInterval, func() {\n\t\t// TODO: implementation of modules checking\n\t\tlog.Info(\"Heartbeat\")\n\t})\n\tif err = server.ServeTLS(lis, eaaCtx.cfg.Certs.ServerCertPath,\n\t\teaaCtx.cfg.Certs.ServerKeyPath); err != http.ErrServerClosed {\n\t\tlog.Errf(\"server.Serve error: %#v\", err)\n\t\treturn err\n\t}\n\t<-stopServerCh\n\t<-stopServerCh\n\treturn nil\n}", "func (ca *CA) Run() error {\n\tvar wg sync.WaitGroup\n\terrs := make(chan error, 1)\n\n\tif !ca.opts.quiet {\n\t\tauthorityInfo := ca.auth.GetInfo()\n\t\tlog.Printf(\"Starting %s\", step.Version())\n\t\tlog.Printf(\"Documentation: https://u.step.sm/docs/ca\")\n\t\tlog.Printf(\"Community Discord: https://u.step.sm/discord\")\n\t\tif step.Contexts().GetCurrent() != nil {\n\t\t\tlog.Printf(\"Current 
context: %s\", step.Contexts().GetCurrent().Name)\n\t\t}\n\t\tlog.Printf(\"Config file: %s\", ca.getConfigFileOutput())\n\t\tbaseURL := fmt.Sprintf(\"https://%s%s\",\n\t\t\tauthorityInfo.DNSNames[0],\n\t\t\tca.config.Address[strings.LastIndex(ca.config.Address, \":\"):])\n\t\tlog.Printf(\"The primary server URL is %s\", baseURL)\n\t\tlog.Printf(\"Root certificates are available at %s/roots.pem\", baseURL)\n\t\tif len(authorityInfo.DNSNames) > 1 {\n\t\t\tlog.Printf(\"Additional configured hostnames: %s\",\n\t\t\t\tstrings.Join(authorityInfo.DNSNames[1:], \", \"))\n\t\t}\n\t\tfor _, crt := range authorityInfo.RootX509Certs {\n\t\t\tlog.Printf(\"X.509 Root Fingerprint: %s\", x509util.Fingerprint(crt))\n\t\t}\n\t\tif authorityInfo.SSHCAHostPublicKey != nil {\n\t\t\tlog.Printf(\"SSH Host CA Key: %s\\n\", bytes.TrimSpace(authorityInfo.SSHCAHostPublicKey))\n\t\t}\n\t\tif authorityInfo.SSHCAUserPublicKey != nil {\n\t\t\tlog.Printf(\"SSH User CA Key: %s\\n\", bytes.TrimSpace(authorityInfo.SSHCAUserPublicKey))\n\t\t}\n\t}\n\n\twg.Add(1)\n\tgo func() {\n\t\tdefer wg.Done()\n\t\tca.runCompactJob()\n\t}()\n\n\tif ca.insecureSrv != nil {\n\t\twg.Add(1)\n\t\tgo func() {\n\t\t\tdefer wg.Done()\n\t\t\terrs <- ca.insecureSrv.ListenAndServe()\n\t\t}()\n\t}\n\n\twg.Add(1)\n\tgo func() {\n\t\tdefer wg.Done()\n\t\terrs <- ca.srv.ListenAndServe()\n\t}()\n\n\t// wait till error occurs; ensures the servers keep listening\n\terr := <-errs\n\n\twg.Wait()\n\n\treturn err\n}", "func start_cs() *server.Server {\n\treturn Start(Site_CBS, Auth_param+\"@\"+Pool_CBS, Namespace_CBS)\n}", "func start_cs() *gsi.MockServer {\n\treturn gsi.Start_cs(true)\n}", "func (s *Server) Start() (err error) {\n\tdefer func() {\n\t\tif err := recover(); err != nil {\n\t\t\terr = errors.New(fmt.Sprintf(\"%v\", err))\n\t\t}\n\t}()\n\tseedServer, err := service.NewCdnSeedServer(s.Config, s.TaskMgr)\n\tif err != nil {\n\t\treturn errors.Wrap(err, \"create seedServer fail\")\n\t}\n\t// start gc\n\ts.GCMgr.StartGC(context.Background())\n\terr = rpc.StartTcpServer(s.Config.ListenPort, s.Config.ListenPort, seedServer)\n\tif err != nil {\n\t\treturn errors.Wrap(err, \"failed to start tcp server\")\n\t}\n\treturn nil\n}", "func (runner *McRunner) Start() error {\n\tif runner.State != NotRunning {\n\t\treturn nil\n\t}\n\n\tif !runner.Installed() {\n\t\tfmt.Println(\"Installing server\")\n\t\terr := runner.Install()\n\t\tif err != nil {\n\t\t\tfmt.Println(err)\n\t\t\treturn err\n\t\t}\n\t}\n\tfmt.Println(\"Server installed\")\n\n\trunner.applySettings()\n\trunner.cmd = exec.Command(\"java\", \"-jar\", \"forge-universal.jar\", \"-Xms512M\", fmt.Sprintf(\"-Xmx%dM\", runner.Settings.MaxRAM), \"-XX:+UseG1GC\", \"-XX:+UseCompressedOops\", \"-XX:MaxGCPauseMillis=50\", \"-XX:UseSSE=4\", \"-XX:+UseNUMA\", \"nogui\")\n\trunner.cmd.Dir = McServerPath()\n\trunner.inPipe, _ = runner.cmd.StdinPipe()\n\trunner.outPipe, _ = runner.cmd.StdoutPipe()\n\tif runner.Settings.PassthroughStdErr {\n\t\trunner.cmd.Stderr = os.Stderr\n\t}\n\terr := runner.cmd.Start()\n\tif err != nil {\n\t\tfmt.Print(err)\n\t\treturn err\n\t}\n\trunner.State = Starting\n\trunner.startTime = time.Now()\n\n\tif runner.FirstStart {\n\t\trunner.FirstStart = false\n\n\t\t// Initialize McRunner members that aren't initialized yet.\n\t\trunner.killChannel = make(chan bool, 3)\n\t\trunner.tpsChannel = make(chan map[int]float32, 8)\n\t\trunner.playerChannel = make(chan int, 1)\n\n\t\tgo runner.keepAlive()\n\t\tgo runner.updateStatus()\n\t\tgo runner.processCommands()\n\t\tgo 
runner.processOutput()\n\t}\n\n\treturn err\n}", "func StartServer(config ServerConfig) {\n\tgo control.Run(config.ControlBindAddr)\n\tgo data.Run(config.DataBindAddr)\n\tgo user.Run(config.UserBindAddr, config.CertFile, config.KeyFile, config.ClientCaFile, config.ClientAuth)\n\n\t// Wait for ever\n\tselect {}\n}", "func (s ServerCmd) Run() error {\n\tcaRPCServer, err := ca.NewServer(s.PrivateKeyPath, s.PublicKeyPath, s.SkipConfirmation)\n\tif err != nil {\n\t\treturn fmt.Errorf(\"failed to initialize SSH CA RPC server: %w\", err)\n\t}\n\n\tserver := rpc.NewServer()\n\tserver.RegisterName(ca.ServerName, &caRPCServer)\n\n\tlistener, err := net.Listen(\"tcp\", s.Addr)\n\tif err != nil {\n\t\treturn fmt.Errorf(\"failed to listen on %s: %w\", s.Addr, err)\n\t}\n\tserver.Accept(listener)\n\treturn nil\n}", "func main() {\n\tcountlog.Info(\"Start raft demo\")\n\ts1 := server.NewRaftServerWithEnv(core.NewServerConf(clusters[1], leader, clusters), env)\n\ts2 := server.NewRaftServerWithEnv(core.NewServerConf(clusters[2], leader, clusters), env)\n\ts3 := server.NewRaftServerWithEnv(core.NewServerConf(clusters[3], leader, clusters), env)\n\ts4 := server.NewRaftServerWithEnv(core.NewServerConf(clusters[4], leader, clusters), env)\n\tl1 := server.NewRaftServerWithEnv(core.NewServerConf(leader, leader, clusters), env)\n\tgo s1.Start()\n\tgo s2.Start()\n\tgo s3.Start()\n\tgo s4.Start()\n\tl1.Start()\n}", "func startServer() error {\n\n\tc, err := config()\n\tif err != nil {\n\t\treturn err\n\t}\n\n\t// checking if a router is defined\n\tif cfgRouter == nil {\n\t\treturn ErrNoRouterConfig\n\t}\n\n\t// HTTPS Server\n\tcfgServer := http.Server{}\n\tcfgServer.Addr = fmt.Sprint(\":\", c.Server.HTTPPort)\n\n\t//TODO write own cors middleware\n\tcorsManager := cors.New(cors.Options{\n\t\tAllowCredentials: true,\n\t\tAllowedOrigins: []string{\"http://localhost:8080\"},\n\t\tAllowedMethods: []string{\"GET\", \"POST\", \"PUT\", \"DELETE\"},\n\t\tAllowedHeaders: []string{\"Authorization\", \"Origin\", \"Cache-Control\", \"Accept\", \"Content-Type\", \"X-Requested-With\"},\n\t\tDebug: true,\n\t})\n\n\t//\tcfgRouter.Handler()\n\tcfgServer.Handler = corsManager.Handler(cfgRouter.Handler())\n\t//cfgServer.Handler = cfgRouter.Handler()\n\n\terr = cfgServer.ListenAndServe()\n\tif err != nil {\n\t\treturn err\n\t}\n\n\tdefer cfgServer.Close()\n\n\treturn nil\n}", "func runServer(c *cli.Context) error {\n\tLog.Debug(\"Starting server...\")\n\tpassword := c.String(\"password\")\n\tif password == \"\" {\n\t\tLog.Error(\"Crypto key not defined.\")\n\t\tos.Exit(1)\n\t}\n\tcfg := config.New()\n\tcfg.Log = Log\n\tcfg.McastInterval = time.Duration(c.Uint(\"discovery-interval\"))\n\tcfg.Identity = identity.New(Log)\n\tcfg.McastDestination = c.String(\"mcast-discovery\")\n\tcfg.ListenPort, _ = strconv.Atoi(strings.Split(c.String(\"mcast-discovery\"), \":\")[1])\n\tcfg.Identity.Run()\n\tLog.Info(\"Starting server (hostname=%s)\", cfg.Identity.GetHostname())\n\tinterfaces := strings.Split(c.String(\"interfaces\"), \",\")\n\tLog.Debug(\"Interfaces for autodiscovering: %s\", interfaces)\n\tdiscovery.NewDiscovery(cfg, &interfaces).Run()\n\ttime.Sleep(5 * cfg.McastInterval * time.Second)\n\tLog.Debug(\"Server stopped.\")\n\treturn nil\n}", "func start (args *Args) (bool) {\n \n // initialization\n config := config.Initialize(args.Get(\"-f\"))\n \n // Set log Level and log file path\n log.SetLevel(log.Level(config.Log.Level))\n log.SetOutput(config.Log.Path)\n \n // create a server instance\n server := New(config)\n \n log.Infof(\"Socks5 
server is starting....\\n\")\n\n // Start the server \n if (server.Start() != true) {\n log.Errorf(\"Statring socks failed\\n\")\n return false\n }\n \n return true\n}", "func run(cmd *cobra.Command, args []string) {\n\tl := &logrusLogger{l: logger}\n\tsrv, err := tcpause.New(cfg, l)\n\tif err != nil {\n\t\tlogger.WithError(err).Fatal(\"Could not create server\")\n\t}\n\n\t// run it\n\tlogger.Info(\"Starting server\")\n\terr = srv.Start()\n\tif err != nil {\n\t\tlogger.WithError(err).Fatal(\"Failed starting the server\")\n\t}\n\n\tgo func() {\n\t\tfor err := range srv.Errors() {\n\t\t\tlogger.WithError(err).Error(\"Encountered an unexpected error\")\n\t\t}\n\t}()\n\n\t// wait for graceful shutdown\n\twg := new(sync.WaitGroup)\n\twg.Add(1)\n\tgo handleInterrupt(srv, wg)\n\twg.Wait()\n\tos.Exit(0)\n}", "func main() {\n\tservice.StartWebServer(\"8081\")\n}", "func main() {\n\tcalculix := serverCalculix.NewCalculix()\n\terr := rpc.Register(calculix)\n\tif err != nil {\n\t\tfmt.Println(\"Cannot register the calculix\")\n\t\treturn\n\t}\n\trpc.HandleHTTP()\n\tl, e := net.Listen(\"tcp\", \":1234\")\n\tif e != nil {\n\t\tlog.Fatal(\"listen error:\", e)\n\t}\n\terr = http.Serve(l, nil)\n\tif err != nil {\n\t\tfmt.Println(\"Cannot serve the calculix\")\n\t\treturn\n\t}\n}", "func (c *Carnegie) Start() error {\n\tif c.Started {\n\t\treturn nil\n\t}\n\tc.Started = true\n\tgo c.updateCacheLoop()\n\tif certFile, keyFile := c.Config.GetString(\"cert\"), c.Config.GetString(\"key\"); certFile != \"\" && keyFile != \"\" {\n\t\tgo c.Server.ListenAndServeTLS(certFile, keyFile)\n\t}\n\treturn c.Server.ListenAndServe()\n}", "func doSetup(port int, clusterConfig *kiln.ClusterConfig) (*server.Server, string, error) {\n\timageCreator, err := kiln.NewImageCreatorFromEnv()\n\n\tif err != nil {\n\t\treturn nil, \"\", err\n\t}\n\n\tbaseHost := fmt.Sprintf(\"http://localhost:%d\", port)\n\n\ttestServer := server.NewServer(imageCreator, clusterConfig)\n\n\t//start server in the background\n\tgo func() {\n\t\t//start the server and produce it to the start channel\n\t\ttestServer.Start(port, 10*time.Second)\n\t}()\n\n\t//wait for it to start\n\n\thostBase := fmt.Sprintf(\"%s/organizations\", baseHost)\n\n\tstarted := false\n\n\t//wait for host to start for 10 seconds\n\tfor i := 0; i < 20; i++ {\n\n\t\thost := fmt.Sprintf(\"localhost:%d\", port)\n\n\t\tconn, err := net.Dial(\"tcp\", host)\n\n\t\t//done waiting, continue\n\t\tif err == nil {\n\t\t\tconn.Close()\n\t\t\tstarted = true\n\t\t\tbreak\n\t\t}\n\n\t\ttime.Sleep(500 * time.Millisecond)\n\t}\n\n\tif !started {\n\t\treturn nil, \"\", errors.New(\"Server did not start\")\n\t}\n\n\treturn testServer, hostBase, nil\n}", "func main() {\n\tportNo := os.Args[1]\n\tstartServerMode(portNo)\n}", "func (as *AgentServer) Start(port string) {\n\tas.server.Addr = \":\" + port\n\terr := as.server.ListenAndServeTLS(as.certPath, as.keyPath)\n\tif err != nil {\n\t\tas.log.Error(err)\n\t\tos.Exit(1)\n\t}\n}", "func (s *UAA) Start() {\n\ts.server.Start()\n}", "func (s *server) Run(ctx context.Context) {\n\tif s.banner {\n\t\tfmt.Printf(\"%s\\n\\n\", config.Banner)\n\t}\n\n\tet, err := NewEchoTCP(s.address, s.verbose)\n\tif err != nil {\n\t\tlog.Fatal(err) // exit if creating EchoTCP is failed.\n\t}\n\tdefer et.listener.Close()\n\n\tfmt.Printf(\"server is started at %s\\n\", s.address)\n\tet.Run(ctx)\n}", "func (s *Server) Run() error {\n\tbg := s.logger.Bg()\n\tlis, err := net.Listen(\"tcp\", s.hostPort)\n\n\tif err != nil {\n\t\tbg.Fatal(\"Unable to start server\", 
zap.Error(err))\n\t\treturn err\n\t}\n\n\tbg.Info(\"Starting\", zap.String(\"address\", \"tcp://\"+s.hostPort))\n\treturn s.Gs.Serve(lis)\n}", "func (sf *Client) Start() error {\n\tif sf.option.server == nil {\n\t\treturn errors.New(\"empty remote server\")\n\t}\n\n\tgo sf.running()\n\treturn nil\n}", "func server(details *RunDetails) {\n\tdetails.Command = CommandServer\n\tdetails.MainClass = ServerMain\n\n\t// If the main class environment variable is set then use that\n\t// otherwise run Coherence DCS.\n\tmc, found := details.LookupEnv(v1.EnvVarAppMainClass)\n\tswitch {\n\tcase found && details.AppType != AppTypeSpring:\n\t\t// we have a main class specified and we're not a Spring Boot app\n\t\tdetails.MainArgs = []string{mc}\n\tcase found && details.AppType == AppTypeSpring:\n\t\t// we have a main class and the app is Spring Boot\n\t\t// the main is PropertiesLauncher,\n\t\tdetails.MainClass = SpringBootMain\n\t\t// the specified main class is set as a Spring loader property\n\t\tdetails.AddArg(\"-Dloader.main=\" + mc)\n\tcase !found && details.AppType == AppTypeSpring:\n\t\t// the app type is Spring Boot so main is PropertiesLauncher\n\t\tdetails.MainClass = SpringBootMain\n\tcase !found && details.AppType == AppTypeCoherence:\n\t\t// the app type is Coherence so main is DCS\n\t\tdetails.MainArgs = []string{DCS}\n\tcase !found && details.AppType == AppTypeHelidon:\n\t\t// the app type is Helidon so main is the Helidon CDI starter\n\t\tdetails.MainArgs = []string{HelidonMain}\n\tdefault:\n\t\t// no main or app type specified, use DCS\n\t\tdetails.MainArgs = []string{DCS}\n\t}\n\n\t// Check for any main class arguments\n\tma, found := details.LookupEnv(v1.EnvVarAppMainArgs)\n\tif found {\n\t\tif ma != \"\" {\n\t\t\tdetails.MainArgs = append(details.MainArgs, strings.Split(ma, \" \")...)\n\t\t}\n\t}\n\n\t// Configure the Coherence member's role\n\tdetails.SetSystemPropertyFromEnvVarOrDefault(v1.EnvVarCohRole, \"-Dcoherence.role\", \"storage\")\n\t// Configure whether this member is storage enabled\n\tdetails.AddArgFromEnvVar(v1.EnvVarCohStorage, \"-Dcoherence.distributed.localstorage\")\n\n\t// Configure Coherence Tracing\n\tratio := details.Getenv(v1.EnvVarCohTracingRatio)\n\tif ratio != \"\" {\n\t\tq, err := resource.ParseQuantity(ratio)\n\t\tif err == nil {\n\t\t\td := q.AsDec()\n\t\t\tdetails.AddArg(\"-Dcoherence.tracing.ratio=\" + d.String())\n\t\t} else {\n\t\t\tfmt.Printf(\"ERROR: Coherence tracing ratio \\\"%s\\\" is invalid - %s\\n\", ratio, err.Error())\n\t\t\tos.Exit(1)\n\t\t}\n\t}\n\n\t// Configure whether Coherence management is enabled\n\thasMgmt := details.IsEnvTrue(v1.EnvVarCohMgmtPrefix + v1.EnvVarCohEnabledSuffix)\n\tfmt.Printf(\"INFO: Coherence Management over REST (%s%s=%t)\\n\", v1.EnvVarCohMgmtPrefix, v1.EnvVarCohEnabledSuffix, hasMgmt)\n\tif hasMgmt {\n\t\tfmt.Println(\"INFO: Configuring Coherence Management over REST\")\n\t\tdetails.AddArg(\"-Dcoherence.management.http=all\")\n\t\tif details.CoherenceHome != \"\" {\n\t\t\t// If management is enabled and the COHERENCE_HOME environment variable is set\n\t\t\t// then $COHERENCE_HOME/lib/coherence-management.jar will be added to the classpath\n\t\t\tdetails.AddClasspath(details.CoherenceHome + \"/lib/coherence-management.jar\")\n\t\t}\n\t}\n\n\t// Configure whether Coherence metrics is enabled\n\thasMetrics := details.IsEnvTrue(v1.EnvVarCohMetricsPrefix + v1.EnvVarCohEnabledSuffix)\n\tfmt.Printf(\"INFO: Coherence Metrics (%s%s=%t)\\n\", v1.EnvVarCohMetricsPrefix, v1.EnvVarCohEnabledSuffix, hasMgmt)\n\tif hasMetrics 
{\n\t\tdetails.AddArg(\"-Dcoherence.metrics.http.enabled=true\")\n\t\tfmt.Println(\"INFO: Configuring Coherence Metrics\")\n\t\tif details.CoherenceHome != \"\" {\n\t\t\t// If metrics is enabled and the COHERENCE_HOME environment variable is set\n\t\t\t// then $COHERENCE_HOME/lib/coherence-metrics.jar will be added to the classpath\n\t\t\tdetails.AddClasspath(details.CoherenceHome + \"/lib/coherence-metrics.jar\")\n\t\t}\n\t}\n\n\t// Configure whether to add third-party modules to the classpath if management over rest\n\t// or metrics are enabled and the directory pointed to by the DEPENDENCY_MODULES environment\n\t// variable exists.\n\tif hasMgmt || hasMetrics {\n\t\tdm := details.Getenv(v1.EnvVarCohDependencyModules)\n\t\tif dm != \"\" {\n\t\t\tstat, err := os.Stat(dm)\n\t\t\tif err == nil && stat.IsDir() {\n\t\t\t\t// dependency modules directory exists\n\t\t\t\tdetails.AddClasspath(dm + \"/*\")\n\t\t\t}\n\t\t}\n\t}\n\n\tif details.IsEnvTrueOrBlank(v1.EnvVarJvmShowSettings) {\n\t\tdetails.AddArg(\"-XshowSettings:all\")\n\t\tdetails.AddArg(\"-XX:+PrintCommandLineFlags\")\n\t\tdetails.AddArg(\"-XX:+PrintFlagsFinal\")\n\t}\n\n\t// Add GC logging parameters if required\n\tif details.IsEnvTrue(v1.EnvVarJvmGcLogging) {\n\t\tdetails.AddArg(\"-verbose:gc\")\n\t\tdetails.AddArg(\"-XX:+PrintGCDetails\")\n\t\tdetails.AddArg(\"-XX:+PrintGCTimeStamps\")\n\t\tdetails.AddArg(\"-XX:+PrintHeapAtGC\")\n\t\tdetails.AddArg(\"-XX:+PrintTenuringDistribution\")\n\t\tdetails.AddArg(\"-XX:+PrintGCApplicationStoppedTime\")\n\t\tdetails.AddArg(\"-XX:+PrintGCApplicationConcurrentTime\")\n\t}\n}", "func (server Server) Run() error {\n\terr := server.supervisor.SpawnClient()\n\tif err != nil {\n\t\tserver.logger.Fatalf(\"Error in starting client: %s\", err)\n\t}\n\n\tgo listenForSMS(server.upstreamChannel, server.logger)\n\tserver.logger.Info(\"Listening for SMS\")\n\tserver.logger.Info(\"Starting Webserver\")\n\n\treturn server.webserver.Server.ListenAndServe()\n}", "func (s *SamFSServer) Run() error {\n\tlis, err := net.Listen(\"tcp\", s.port)\n\tif err != nil {\n\t\tglog.Fatalf(\"falied to listen on port :: %s(err=%s)\", s.port, err.Error())\n\t\treturn err\n\t}\n\n\trand.Seed(time.Now().UnixNano())\n\ts.sessionID = rand.Int63()\n\tglog.Infof(\"starting new server with sessionID %d\", s.sessionID)\n\n\tgs := grpc.NewServer()\n\tpb.RegisterNFSServer(gs, s)\n\ts.grpcServer = gs\n\treturn gs.Serve(lis)\n}", "func startServerMode() {\n\t// Create or open log directory\n\tf, err := os.OpenFile(WORKDIR+`/server.log`, os.O_APPEND|os.O_CREATE|os.O_WRONLY, 0644)\n\tif err != nil {\n\t\tl(err.Error(), true, true)\n\t}\n\tdefer f.Close()\n\tlog.SetOutput(f)\n\tl(\"Starting server...\", false, true)\n\tvar listener net.Listener\n\tif appConfig.Tls {\n\t\tcert, err := tls.LoadX509KeyPair(WORKDIR+\"/cert.pem\", WORKDIR+\"/key.pem\")\n\t\tcheckErr(\"Unable to import TLS certificates\", err, true)\n\t\tconfig := tls.Config{Certificates: []tls.Certificate{cert}}\n\t\tnow := time.Now()\n\t\tconfig.Time = func() time.Time { return now }\n\t\tconfig.Rand = rand.Reader\n\t\tlistener, err = tls.Listen(\"tcp\", appConfig.Server.Address+\":\"+appConfig.Server.Port, &config)\n\t\tcheckErr(\"Unable to create TLS listener\", err, false)\n\t} else {\n\t\tvar err error\n\t\tlistener, err = net.Listen(\"tcp\", appConfig.Server.Address+\":\"+appConfig.Server.Port)\n\t\tcheckErr(\"Unable to create listener\", err, true)\n\t}\n\tgo server.start()\n\tif len(appConfig.Api.Port) > 0 {\n\t\tgo startHttpServer()\n\t}\n\tfor {\n\t\tconnection, err := 
listener.Accept()\n\t\tcheckErr(\"Unable to accept incoming connection\", err, true)\n\t\tclient := &Client{socket: connection, data: make(chan Job)}\n\t\tserver.register <- client\n\t\tgo server.receive(client)\n\t\tgo server.send(client)\n\t}\n}", "func (s *grpcServer) Run(ctx context.Context, ready func()) error {\n\tlogger := log.WithContext(ctx)\n\ts.server.Init(ctx, nil)\n\tlistener, err := net.Listen(\"tcp\", s.cfg.Address)\n\tif err != nil {\n\t\tlogger.WithError(err).WithField(\"address\", s.cfg.Address).Error(\"unable to listen tcp address\")\n\t\treturn err\n\t}\n\n\tlogger.Info(\"starting of grpc server...\")\n\ts.server.Init(ctx, nil)\n\tmaster.RegisterMasterServer(s.server.server, s.server)\n\tif err := s.server.server.Serve(listener); err != nil {\n\t\treturn err\n\t}\n\treturn nil\n}", "func StartCLCRunnerServer(extraHandlers map[string]http.Handler) error {\n\t// create the root HTTP router\n\tr := mux.NewRouter()\n\n\t// IPC REST API server\n\tv1.SetupHandlers(r.PathPrefix(\"/api/v1\").Subrouter())\n\n\t// Register extra hanlders\n\tfor path, handler := range extraHandlers {\n\t\tr.Handle(path, handler)\n\t}\n\n\t// Validate token for every request\n\tr.Use(validateCLCRunnerToken)\n\n\t// get the transport we're going to use under HTTP\n\tvar err error\n\tclcListener, err = getCLCRunnerListener()\n\tif err != nil {\n\t\treturn fmt.Errorf(\"unable to create the clc runner api server: %v\", err)\n\t}\n\n\t// CLC Runner token\n\t// Use the Cluster Agent token\n\terr = util.InitDCAAuthToken()\n\tif err != nil {\n\t\treturn err\n\t}\n\n\thosts := []string{\"127.0.0.1\", \"localhost\", config.Datadog.GetString(\"clc_runner_host\")}\n\t_, rootCertPEM, rootKey, err := security.GenerateRootCert(hosts, 2048)\n\tif err != nil {\n\t\treturn fmt.Errorf(\"unable to start TLS server: %v\", err)\n\t}\n\n\t// PEM encode the private key\n\trootKeyPEM := pem.EncodeToMemory(&pem.Block{\n\t\tType: \"RSA PRIVATE KEY\", Bytes: x509.MarshalPKCS1PrivateKey(rootKey),\n\t})\n\n\t// Create a TLS cert using the private key and certificate\n\trootTLSCert, err := tls.X509KeyPair(rootCertPEM, rootKeyPEM)\n\tif err != nil {\n\t\treturn fmt.Errorf(\"invalid key pair: %v\", err)\n\t}\n\n\ttlsConfig := tls.Config{\n\t\tCertificates: []tls.Certificate{rootTLSCert},\n\t\tMinVersion: tls.VersionTLS13,\n\t}\n\n\t// Use a stack depth of 4 on top of the default one to get a relevant filename in the stdlib\n\tlogWriter, _ := config.NewLogWriter(4, seelog.WarnLvl)\n\n\tsrv := &http.Server{\n\t\tHandler: r,\n\t\tErrorLog: stdLog.New(logWriter, \"Error from the clc runner http API server: \", 0), // log errors to seelog,\n\t\tTLSConfig: &tlsConfig,\n\t\tWriteTimeout: config.Datadog.GetDuration(\"clc_runner_server_write_timeout\") * time.Second,\n\t\tReadHeaderTimeout: config.Datadog.GetDuration(\"clc_runner_server_readheader_timeout\") * time.Second,\n\t}\n\ttlsListener := tls.NewListener(clcListener, &tlsConfig)\n\n\tgo srv.Serve(tlsListener) //nolint:errcheck\n\treturn nil\n}", "func Start(e *echo.Echo) {\n Connect()\n e.Logger.Fatal(e.Start(\":\" + os.Getenv(\"PORT\")))\n}", "func (r *RuntimeServer) Start(errchan chan error) {\n\tgo func() {\n\t\tlis, err := net.Listen(\"tcp\", fmt.Sprintf(\"%s:%d\", r.conf.HostIP, r.conf.ServerPort))\n\t\tif err != nil {\n\t\t\tlogrus.Errorf(\"failed to listen: %v\", err)\n\t\t\terrchan <- err\n\t\t}\n\t\tif err := r.server.Serve(lis); err != nil {\n\t\t\terrchan <- err\n\t\t}\n\t}()\n\tif err := r.registServer(); err != nil {\n\t\terrchan <- err\n\t}\n\tlogrus.Infof(\"runtime 
server start success\")\n}", "func main() {\n\tif runtime.GOOS != \"linux\" {\n\t\tfmt.Println(aurora.Red(\"Sorry mate, this is a Linux app\"))\n\t\treturn\n\t}\n\n\te := echo.New()\n\te.HideBanner = true\n\te.Debug = true\n\te.Server.ReadTimeout = 1 * time.Minute\n\te.Server.WriteTimeout = 1 * time.Minute\n\n\tport := \"10591\"\n\tif os.Getenv(\"PORT\") != \"\" {\n\t\tport = os.Getenv(\"PORT\")\n\t}\n\n\t// Middleware\n\te.Use(middleware.Logger())\n\te.Use(middleware.Recover())\n\te.Use(middleware.CORS())\n\n\te.GET(\"info\", getInfo)\n\te.POST(\"upgrade\", doUpgrade)\n\n\t// Start the service\n\te.Logger.Fatal(e.Start(\":\" + port))\n}", "func (a *Server) Start(ctx context.Context) {\n\ta.log.Infof(\"starting tap API server on %s\", a.Server.Addr)\n\tif err := a.ServeTLS(a.listener, \"\", \"\"); err != nil {\n\t\tif errors.Is(err, http.ErrServerClosed) {\n\t\t\treturn\n\t\t}\n\t\ta.log.Fatal(err)\n\t}\n}", "func StartServer(cleanUpChan chan int){\n\tGrpcServer = &Server{\n CleanUpChan:cleanUpChan ,\n\t GrpcServer: grpc.NewServer(),\n\t}\n\tregisterGrpcServices(GrpcServer.GrpcServer)\n\tif err := GrpcServer.GrpcServer.Serve(getListner(port)); err != nil {\n\t\tpanic(err)\n\t}\n}", "func (ds *dserver) Start() error {\n\tvar cfg *config.Config\n\tif err := ds.cont.Invoke(func(c *config.Config) { cfg = c }); err != nil {\n\t\treturn err\n\t}\n\treturn ds.router.Run(fmt.Sprintf(\":%s\", cfg.Port))\n}", "func (s *FrontendServer) Run(tls bool, certFile string,\n\tkeyFile string) error {\n\tlis, err := net.Listen(\"tcp\", fmt.Sprintf(\"%s:%d\", s.hostname, s.port))\n\tif err != nil {\n\t\tlog.Fatalf(\"failed to listen: %v\", err)\n\t}\n\tvar lis2 net.Listener\n\tif (s.hostnameGw != \"\") && (s.portGw != 0) {\n\t\tlis2, err = net.Listen(\"tcp\", fmt.Sprintf(\"%s:%d\", s.hostnameGw, s.portGw))\n\t\tif err != nil {\n\t\t\tlog.Fatalf(\"failed to listen on second address: %v\", err)\n\t\t}\n\t}\n\tvar opts []grpc.ServerOption\n\tif tls {\n\t\t// if caFile == \"\" {\n\t\t// \tset default caFile path\n\t\t// }\n\t\t// if keyFile == \"\" {\n\t\t// \tset default keyFile path\n\t\t// }\n\t\tcreds, err := credentials.NewServerTLSFromFile(certFile, keyFile)\n\t\tif err != nil {\n\t\t\tlog.Fatalf(\"Failed to generate credentials %v\", err)\n\t\t}\n\t\topts = []grpc.ServerOption{grpc.Creds(creds)}\n\t}\n\tgrpcServer := grpc.NewServer(opts...)\n\tpb.RegisterFrontendServer(grpcServer, s)\n\tgo grpcServer.Serve(lis)\n\tif lis2 != nil {\n\t\tgo grpcServer.Serve(lis2)\n\t}\n\treturn nil\n}", "func Start() error {\n\t// start the microbox server\n\tif err := server.Setup(); err != nil {\n\t\treturn err\n\t}\n\n\t// run a provider setup\n\treturn provider.Setup()\n}", "func RunServer(port int, ctx context.Context) error {\n\t// Set up and start the file watcher.\n\twatcher, err := fsnotify.NewWatcher()\n\tif watcher == nil || err != nil {\n\t\tklog.Errorf(\"failed to create file watcher, cert/key rotation will be disabled %v\", err)\n\t} else {\n\t\tdefer watcher.Close()\n\n\t\t// In HyperShift the CA is mounted in\n\t\tif !ntoconfig.InHyperShift() {\n\t\t\tif err := util.Mkdir(authCADir); err != nil {\n\t\t\t\treturn fmt.Errorf(\"failed to create directory %q: %v\", authCADir, err)\n\t\t\t}\n\t\t}\n\n\t\tif err = watcher.Add(authCADir); err != nil {\n\t\t\tklog.Errorf(\"failed to add %v to watcher, CA client authentication and rotation will be disabled: %v\", authCADir, err)\n\t\t} else {\n\t\t\tif ok, _ := fileExistsAndNotEmpty(authCAFile); !ok {\n\t\t\t\t// authCAFile does not exist (or is empty); wait for it to 
be created.\n\t\t\t\tselect {\n\t\t\t\tcase <-ctx.Done():\n\t\t\t\t\treturn nil\n\t\t\t\tcase event := <-watcher.Events:\n\t\t\t\t\tklog.V(2).Infof(\"event from filewatcher on file: %v, event: %v\", event.Name, event.Op)\n\n\t\t\t\t\tif event.Name == authCAFile {\n\t\t\t\t\t\tif ok, _ := fileExistsAndNotEmpty(authCAFile); ok {\n\t\t\t\t\t\t\t// authCAFile is now created and is not empty.\n\t\t\t\t\t\t\tbreak\n\t\t\t\t\t\t}\n\t\t\t\t\t}\n\n\t\t\t\tcase err = <-watcher.Errors:\n\t\t\t\t\tklog.Warningf(\"error from metrics server CA client authentication file watcher: %v\", err)\n\t\t\t\t}\n\t\t\t}\n\t\t}\n\n\t\tif err = watcher.Add(tlsSecretDir); err != nil {\n\t\t\tklog.Errorf(\"failed to add %v to watcher, cert/key rotation will be disabled: %v\", tlsSecretDir, err)\n\t\t}\n\t}\n\n\tsrv := buildServer(port)\n\tif srv == nil {\n\t\treturn fmt.Errorf(\"failed to build server with port %d\", port)\n\t}\n\n\tgo startServer(srv)\n\n\torigCertChecksum := checksumFile(tlsCert)\n\torigKeyChecksum := checksumFile(tlsKey)\n\torigAuthCAChecksum := checksumFile(authCAFile)\n\n\tfor {\n\t\tselect {\n\t\tcase <-ctx.Done():\n\t\t\tstopServer(srv)\n\t\t\treturn nil\n\t\tcase event := <-watcher.Events:\n\t\t\tklog.V(2).Infof(\"event from filewatcher on file: %v, event: %v\", event.Name, event.Op)\n\n\t\t\tif event.Op == fsnotify.Chmod || event.Op == fsnotify.Remove {\n\t\t\t\tcontinue\n\t\t\t}\n\n\t\t\tif certsChanged(origCertChecksum, origKeyChecksum, origAuthCAChecksum) {\n\t\t\t\t// Update file checksums with latest files.\n\t\t\t\torigCertChecksum = checksumFile(tlsCert)\n\t\t\t\torigKeyChecksum = checksumFile(tlsKey)\n\t\t\t\torigAuthCAChecksum = checksumFile(authCAFile)\n\n\t\t\t\t// restart server\n\t\t\t\tklog.Infof(\"restarting metrics server to rotate certificates\")\n\t\t\t\tstopServer(srv)\n\t\t\t\tsrv = buildServer(port)\n\t\t\t\tgo startServer(srv)\n\t\t\t}\n\t\tcase err = <-watcher.Errors:\n\t\t\tklog.Warningf(\"error from metrics server certificate file watcher: %v\", err)\n\t\t}\n\t}\n}", "func (m ManagedServer) Start(port int, rawPrivateKeys [][]byte, ciphers, macs []string) {\n\tm.lg.InfoD(\"starting-server\", meta{\n\t\t\"port\": port,\n\t\t\"ciphers\": ciphers,\n\t\t\"macs\": macs,\n\t})\n\n\tprivateKeys := []ssh.Signer{}\n\tfor i, rawKey := range rawPrivateKeys {\n\t\tprivateKey, err := ssh.ParsePrivateKey(rawKey)\n\t\tif err != nil {\n\t\t\tm.errorAndAlert(\"private-key-parse\", meta{\"index\": i, \"error\": err.Error()})\n\t\t\tos.Exit(1)\n\t\t}\n\t\tprivateKeys = append(privateKeys, privateKey)\n\t}\n\n\tlistener, err := net.Listen(\"tcp\", fmt.Sprintf(\"0.0.0.0:%v\", port))\n\tproxyList := proxyproto.Listener{Listener: listener}\n\n\tif err != nil {\n\t\tm.errorAndAlert(\"listen-fail\", meta{\n\t\t\t\"msg\": \"failed to open socket\",\n\t\t\t\"error\": err.Error(),\n\t\t\t\"port\": port})\n\t}\n\tm.lg.InfoD(\"listening\", meta{\"address\": proxyList.Addr().String()})\n\n\tfor {\n\t\tnewConn, err := proxyList.Accept()\n\t\tif err != nil {\n\t\t\tm.errorAndAlert(\"listener-accept-fail\", meta{\"error\": err.Error()})\n\t\t\tos.Exit(1)\n\t\t}\n\n\t\tgo func(conn net.Conn) {\n\t\t\tvar driver ServerDriver\n\t\t\tconfig := &ssh.ServerConfig{\n\t\t\t\tConfig: ssh.Config{\n\t\t\t\t\tCiphers: ciphers,\n\t\t\t\t\tMACs: macs,\n\t\t\t\t},\n\t\t\t\tPasswordCallback: func(c ssh.ConnMetadata, pass []byte) (*ssh.Permissions, error) {\n\t\t\t\t\tdriver = m.driverGenerator(LoginRequest{\n\t\t\t\t\t\tUsername: c.User(),\n\t\t\t\t\t\tPassword: string(pass),\n\t\t\t\t\t\tPublicKey: 
\"\",\n\t\t\t\t\t\tRemoteAddr: c.RemoteAddr(),\n\t\t\t\t\t})\n\t\t\t\t\tif driver == nil {\n\t\t\t\t\t\treturn nil, fmt.Errorf(\"password rejected for %q\", c.User())\n\t\t\t\t\t}\n\t\t\t\t\treturn nil, nil\n\t\t\t\t},\n\t\t\t\tPublicKeyCallback: func(c ssh.ConnMetadata, key ssh.PublicKey) (*ssh.Permissions, error) {\n\t\t\t\t\tdriver = m.driverGenerator(LoginRequest{\n\t\t\t\t\t\tUsername: c.User(),\n\t\t\t\t\t\tPassword: \"\",\n\t\t\t\t\t\tPublicKey: strings.TrimSpace(string(ssh.MarshalAuthorizedKey(key))),\n\t\t\t\t\t\tRemoteAddr: c.RemoteAddr(),\n\t\t\t\t\t})\n\t\t\t\t\tif driver == nil {\n\t\t\t\t\t\treturn nil, fmt.Errorf(\"password rejected for %q\", c.User())\n\t\t\t\t\t}\n\t\t\t\t\treturn nil, nil\n\t\t\t\t},\n\t\t\t}\n\t\t\tfor _, privateKey := range privateKeys {\n\t\t\t\tconfig.AddHostKey(privateKey)\n\t\t\t}\n\n\t\t\t_, newChan, requestChan, err := ssh.NewServerConn(conn, config)\n\t\t\tif err != nil {\n\t\t\t\tif err != io.EOF {\n\t\t\t\t\tm.errorAndAlert(\"handshake-failure\", meta{\"error\": err.Error()})\n\t\t\t\t}\n\t\t\t\treturn\n\t\t\t}\n\n\t\t\tgo ssh.DiscardRequests(requestChan)\n\n\t\t\tfor newChannelRequest := range newChan {\n\t\t\t\tif newChannelRequest.ChannelType() != \"session\" {\n\t\t\t\t\tnewChannelRequest.Reject(ssh.UnknownChannelType, \"unknown channel type\")\n\t\t\t\t\tm.errorAndAlert(\"unknown-channel-type\", meta{\"type\": newChannelRequest.ChannelType()})\n\t\t\t\t\tcontinue\n\t\t\t\t}\n\t\t\t\tchannel, requests, err := newChannelRequest.Accept()\n\t\t\t\tif err != nil {\n\t\t\t\t\tif err != io.EOF {\n\t\t\t\t\t\tm.errorAndAlert(\"channel-accept-failure\", meta{\n\t\t\t\t\t\t\t\"err\": err.Error(),\n\t\t\t\t\t\t\t\"type\": newChannelRequest.ChannelType()})\n\t\t\t\t\t}\n\t\t\t\t\treturn\n\t\t\t\t}\n\n\t\t\t\tgo func(in <-chan *ssh.Request) {\n\t\t\t\t\tfor req := range in {\n\t\t\t\t\t\tok := false\n\t\t\t\t\t\tswitch req.Type {\n\t\t\t\t\t\tcase \"subsystem\":\n\t\t\t\t\t\t\tif len(req.Payload) >= 4 {\n\t\t\t\t\t\t\t\t// we reject all SSH requests that are not SFTP\n\t\t\t\t\t\t\t\tif string(req.Payload[4:]) == \"sftp\" {\n\t\t\t\t\t\t\t\t\tok = true\n\t\t\t\t\t\t\t\t}\n\t\t\t\t\t\t\t}\n\t\t\t\t\t\t}\n\t\t\t\t\t\treq.Reply(ok, nil)\n\t\t\t\t\t}\n\t\t\t\t}(requests)\n\n\t\t\t\tserver, err := NewServer(channel, driver)\n\t\t\t\tif err != nil {\n\t\t\t\t\tm.errorAndAlert(\"server-creation-err\", meta{\"err\": err.Error()})\n\t\t\t\t\treturn\n\t\t\t\t}\n\t\t\t\tif err := server.Serve(); err != nil {\n\t\t\t\t\tchannel.Close()\n\t\t\t\t}\n\t\t\t}\n\t\t}(newConn)\n\t}\n}", "func newStartCmd() *cobra.Command {\n\tcmd := &cobra.Command{\n\t\tUse: \"start\",\n\t\tShort: \"Starts listening for cluster connections\",\n\t\tRun: func(cmd *cobra.Command, args []string) {\n\t\t\tif err := config.Read(); err != nil {\n\t\t\t\tlog.Fatalf(\"Failed to read config files: %s\", err)\n\t\t\t}\n\n\t\t\tport, _ := cmd.Flags().GetInt(\"port\")\n\t\t\tid, _ := cmd.Flags().GetString(\"id\")\n\t\t\tbootPeers, _ := cmd.Flags().GetStringSlice(\"boot-peer\")\n\n\t\t\ts := sbs.NewServerWithPort(port)\n\t\t\tif id != \"\" {\n\t\t\t\ts.SetPeerID(id)\n\t\t\t}\n\t\t\tdefer s.Shutdown()\n\n\t\t\tif err := s.Listen(); err != nil {\n\t\t\t\tlog.Fatalf(\"failed to start listening to remote connections: %s\", err)\n\t\t\t\treturn\n\t\t\t}\n\n\t\t\ts.BootPeers(bootPeers)\n\n\t\t\ttime.Sleep(2 * time.Second)\n\n\t\t\tif err := s.AddVolume(&volumesAPI.Volume{\n\t\t\t\tId: \"vol-test\",\n\t\t\t\tAccount: 1,\n\t\t\t\tSize: 10,\n\t\t\t}, s.PeerIDs()); err != nil {\n\t\t\t\tlog.Fatalf(\"failed to 
start volume: %s\", err)\n\t\t\t\treturn\n\t\t\t}\n\n\t\t\ts.Export(\"vol-test\")\n\n\t\t\t//Wait and gracefully shutdown\n\t\t\tutils.BlockUntilSigTerm()\n\t\t},\n\t}\n\n\tcmd.Flags().IntP(\"port\", \"p\", sbs.DefaultListenPort, \"quic listening port\")\n\tcmd.Flags().String(\"id\", \"\", \"manual peer ID. Defaults to machine ID\")\n\tcmd.Flags().StringSlice(\"boot-peer\", []string{}, \"a list of peers to connect to on boot in format 'peerID@addr:port'\")\n\n\treturn cmd\n}", "func (authService *Controller) Start(listeningPort, certFile, keyFile string) error {\n\tif err := authService.configureToken(); err != nil {\n\t\treturn err\n\t}\n\n\t// populate the lists of ClusterRoles to bind in the different peering states.\n\tif err := authService.populatePermission(); err != nil {\n\t\treturn err\n\t}\n\n\trouter := httprouter.New()\n\n\trouter.POST(auth.CertIdentityURI, authService.identity)\n\trouter.GET(auth.IdsURI, authService.ids)\n\n\tvar err error\n\tif authService.useTLS {\n\t\terr = http.ListenAndServeTLS(strings.Join([]string{\":\", listeningPort}, \"\"), certFile, keyFile, router)\n\t} else {\n\t\terr = http.ListenAndServe(strings.Join([]string{\":\", listeningPort}, \"\"), router)\n\t}\n\tif err != nil {\n\t\tklog.Error(err)\n\t\treturn err\n\t}\n\treturn nil\n}", "func start(c *cli.Context) error {\n\tif !isSystemRunning() {\n\t\treturn nil\n\t}\n\n\t_, _, _, controllers := getIPAddresses()\n\tsendCommandToControllers(controllers, \"StartReaders\", \"\")\n\tsendCommandToControllers(controllers, \"StartWriters\", \"\")\n\t//\tsendCommandToControllers(controllers, \"StartServers\", \"\")\n\treturn nil\n}", "func StartClient(port string) {\n\tfmt.Println(\"StartClient\")\n}", "func main() {\n\t// starting server\n\tcuxs.StartServer(engine.Router())\n}", "func startHTTPSServer(ch chan<- bool) {\n\tserver := http.Server{\n\t\tAddr: \":443\",\n\t}\n\tlog.Println(\"HTTPS server started (listening on port 443).\")\n\tlog.Println(\"HTTPS server stopped with error:\", server.ListenAndServeTLS(filepath.Join(rootPath, \"static/certificate/fullchain.cer\"), filepath.Join(rootPath, \"static/certificate/www.yuzuka.tk.key\")))\n\tch <- true\n}", "func Run() error {\n\tgo StartServer()\n\n\tlis, err := net.Listen(\"tcp\", \":50051\")\n\tif err != nil {\n\t\tlog.Fatalf(\"failed to listen: %v\", err)\n\t}\n\n\ts := grpc.NewServer()\n\n\tklessapi.RegisterKlessAPIServer(s, &apiserver.APIServer{})\n\tif err := s.Serve(lis); err != nil {\n\t\tlog.Fatalf(\"failed to serve: %v\", err)\n\t}\n\treturn nil\n}", "func (s *Server) Run() error {\n\ttcpAddr, err := net.ResolveTCPAddr(\"tcp\", s.conf.Addr)\n\tif err != nil {\n\t\treturn err\n\t}\n\tvar listener net.Listener\n\tlistener, err = net.ListenTCP(\"tcp\", tcpAddr)\n\tif err != nil {\n\t\treturn err\n\t}\n\n\tif s.conf.TLSConfig != nil {\n\t\tlistener = tls.NewListener(listener, s.conf.TLSConfig)\n\t}\n\terr = s.Serve(listener)\n\treturn err\n}", "func StartRedfishServer(ctx context.Context, listener net.Listener, machine Machine) error {\n\tserv := &well.HTTPServer{\n\t\tServer: &http.Server{\n\t\t\tHandler: prepareRouter(machine),\n\t\t},\n\t}\n\n\tcertPem, keyPem, err := generateCertificate(\"placemat.com\", 36500*24*time.Hour)\n\tif err != nil {\n\t\treturn fmt.Errorf(\"failed to generate certificate: %w\", err)\n\t}\n\n\tgo func() {\n\t\t<-ctx.Done()\n\t\tlistener.Close()\n\t\tserv.Close()\n\t}()\n\n\tcert, err := tls.X509KeyPair(certPem, keyPem)\n\tif err != nil {\n\t\treturn err\n\t}\n\n\tcfg := &tls.Config{Certificates: 
[]tls.Certificate{cert}}\n\ttlsListener := tls.NewListener(listener, cfg)\n\tif err := serv.Server.Serve(tlsListener); err != nil {\n\t\tlog.Error(\"failed to serve TLS\", map[string]interface{}{\n\t\t\tlog.FnError: err,\n\t\t})\n\t\treturn err\n\t}\n\n\treturn nil\n}", "func main() {\n logging.SetLevel(logging.DEBUG)\n\n no_tls := flag.Bool(\"no-tls\", false, \"Whether to use TLS, default false\")\n crt_path := flag.String(\"crt\", \"cert.crt\", \"Path to the TLS certificate, default `cert.crt`\")\n key_path := flag.String(\"key\", \"key.key\", \"Path to the TLS key, default `key.key`\")\n sname := flag.String(\"srv-name\", \"mycroft\", \"This server's name for SNI\")\n\n flag.Parse()\n\n logging.Info(\"Starting Server ...\")\n\n if *no_tls {\n logging.Warning(\"not using TLS\")\n err := srv.StartListen(1847, false, \"\", \"\", \"\")\n if err != nil {\n logging.Fatal(\"Could not start server: \", err.Error())\n }\n } else {\n err := srv.StartListen(1847, true, *crt_path, *key_path, *sname)\n if err != nil {\n logging.Fatal(\"Could not start server: \", err.Error())\n }\n }\n}", "func main() {\n\tfmt.Println(\"Starting management container...\")\n\tservice.Start()\n\tlog.Info(service.Container().Stats())\n\trouter := service.Container().Router\n\tsetupWebapp := service.SetupWebApplication(router)\n\trouter.PathPrefix(\"/setup\").HandlerFunc(setupWebapp.HandleRequest)\n\tmonitorWebapp := service.MonitorWebApplication(router)\n\trouter.PathPrefix(\"/\").HandlerFunc(monitorWebapp.HandleRequest)\n\tstartup()\n}", "func main() {\n\tserver.StartUp(false)\n}", "func startServer(wg *sync.WaitGroup) {\n\tdefer wg.Done()\n\n\tlistener, err := net.Listen(\"tcp\", fmt.Sprintf(\"%s:%d\", MyHandle.Host, MyHandle.Port))\n\tif err != nil {\n\t\tlog.Fatalf(\"failed to startServer: %v\", err)\n\t}\n\n\tgrpcServer := grpc.NewServer()\n\tapi.RegisterGoChatServer(grpcServer, &chatServer{})\n\n\terr = grpcServer.Serve(listener)\n\tif err != nil {\n\t\tlog.Fatalf(\"failed to serve: %v\", err)\n\t}\n}", "func startDaemon() error {\n\t// Establish multithreading.\n\truntime.GOMAXPROCS(runtime.NumCPU())\n\n\t// Create all of the modules.\n\tgateway, err := gateway.New(config.Siad.RPCaddr, filepath.Join(config.Siad.SiaDir, modules.GatewayDir))\n\tif err != nil {\n\t\treturn err\n\t}\n\tcs, err := consensus.New(gateway, filepath.Join(config.Siad.SiaDir, modules.ConsensusDir))\n\tif err != nil {\n\t\treturn err\n\t}\n\texplorer, err := explorer.New(cs, filepath.Join(config.Siad.SiaDir, modules.ExplorerDir))\n\tif err != nil {\n\t\treturn err\n\t}\n\tsrv, err := api.NewServer(config.Siad.APIaddr, cs, gateway, nil, nil, nil, nil, nil, explorer)\n\tif err != nil {\n\t\treturn err\n\t}\n\n\t// Bootstrap to the network.\n\tif !config.Siad.NoBootstrap {\n\t\tfor i := range modules.BootstrapPeers {\n\t\t\tgo gateway.Connect(modules.BootstrapPeers[i])\n\t\t}\n\t}\n\n\t// Send a struct down the started channel, so the testing package knows\n\t// that daemon startup has completed. 
A gofunc is used with the hope that\n\t// srv.Serve() will start running before the value is sent down the\n\t// channel.\n\tgo func() {\n\t\tstarted <- struct{}{}\n\t}()\n\n\t// Start serving api requests.\n\terr = srv.Serve()\n\tif err != nil {\n\t\treturn err\n\t}\n\treturn nil\n}", "func Start() {\n\tflag.Parse()\n\tlis, err := net.Listen(\"tcp\", fmt.Sprintf(\"localhost:%d\", *port))\n\tif err != nil {\n\t\tlog.Fatalf(\"failed to listen: %v\", err)\n\t}\n\tvar opts []grpc.ServerOption\n\tif *tls {\n\t\tif *certFile == \"\" {\n\t\t\t*certFile = testdata.Path(\"server1.pem\")\n\t\t}\n\t\tif *keyFile == \"\" {\n\t\t\t*keyFile = testdata.Path(\"server1.key\")\n\t\t}\n\t\tcreds, err := credentials.NewServerTLSFromFile(*certFile, *keyFile)\n\t\tif err != nil {\n\t\t\tlog.Fatalf(\"Failed to generate credentials %v\", err)\n\t\t}\n\t\topts = []grpc.ServerOption{grpc.Creds(creds)}\n\t}\n\tfmt.Println(\"Running at localhost:\", *port)\n\tgrpcServer := grpc.NewServer(opts...)\n\tpb.RegisterRatingServiceServer(grpcServer, newServer())\n\tgrpcServer.Serve(lis)\n}", "func Start(config *config.Config) error {\n\tCamsPoolInstance = &CamsPool{config: config}\n\tCamsPoolInstance.boot()\n\n\tcc := &CommandCenter{Port: config.Port}\n\tstartError := cc.Start()\n\n\tif startError != nil {\n\t\treturn startError\n\t}\n\n\treturn nil\n}", "func StartSerfin() error {\n\tvar err error\n\tSerfer, err = NewRPCClient(config.Config.SerfAddr)\n\tif err != nil {\n\t\tlogger.Criticalf(err.Error())\n\t\tos.Exit(1)\n\t}\n\n\tif config.Config.SerfEventAnnounce {\n\t\terr = Serfer.UserEvent(\"goiardi-join\", []byte(config.Config.Hostname), true)\n\t\tif err != nil {\n\t\t\tlogger.Criticalf(err.Error())\n\t\t\tos.Exit(1)\n\t\t}\n\t}\n\n\treturn nil\n}", "func Start() {\n\tgo BlobServerStart()\n\tgo CollectionServerStart()\n\tgo UploadServerStart()\n}", "func (m *Manager) startNorthboundServer() error {\n\ts := northbound.NewServer(northbound.NewServerCfg(\n\t\tm.Config.CAPath,\n\t\tm.Config.KeyPath,\n\t\tm.Config.CertPath,\n\t\tint16(m.Config.GRPCPort),\n\t\ttrue,\n\t\tnorthbound.SecurityConfig{}))\n\n\tif m.Config.AtomixClient == nil {\n\t\tm.Config.AtomixClient = client.NewClient()\n\t}\n\n\ttopoStore, err := store.NewAtomixStore(m.Config.AtomixClient)\n\tif err != nil {\n\t\treturn err\n\t}\n\n\ts.AddService(logging.Service{})\n\ts.AddService(service.NewService(topoStore))\n\n\tdoneCh := make(chan error)\n\tgo func() {\n\t\terr := s.Serve(func(started string) {\n\t\t\tlog.Info(\"Started NBI on \", started)\n\t\t\tclose(doneCh)\n\t\t})\n\t\tif err != nil {\n\t\t\tdoneCh <- err\n\t\t}\n\t}()\n\treturn <-doneCh\n}", "func (srv *Server) Start() {\n\tvar config *rest.Config\n\tvar err error\n\n\tif strings.ToUpper(srv.RunMode) == \"KUBE\" {\n\t\t// Create the Kubernetes in-cluster config\n\t\tconfig, err = rest.InClusterConfig()\n\t\tif err != nil {\n\t\t\tpanic(err.Error())\n\t\t}\n\t} else {\n\t\t// use the current context in kubeconfig\n\t\tconfig, err = clientcmd.BuildConfigFromFlags(\"\", filepath.Join(util.HomeDir(), \".kube\", \"config\"))\n\t\tif err != nil {\n\t\t\tpanic(err.Error())\n\t\t}\n\t}\n\n\t// Create the clientset\n\tclientset, err := kubernetes.NewForConfig(config)\n\tif err != nil {\n\t\tpanic(err.Error())\n\t}\n\n\t// Create a watcher\n\twatcher, err := clientset.CoreV1().Services(\"\").Watch(metav1.ListOptions{})\n\tif err != nil {\n\t\tpanic(err.Error())\n\t}\n\n\t// Create a channel for the events to come in from the watcher\n\teventChannel := watcher.ResultChan()\n\n\t// Start an indefinite 
loop\n\tfor {\n\t\tevt := <-eventChannel\n\t\tsrv.handleEvent(evt)\n\t}\n}", "func (aca *ACA) Start(srv *grpc.Server) {\n\tacaLogger.Info(\"Staring ACA services...\")\n\taca.startACAP(srv)\n\taca.gRPCServer = srv\n\tacaLogger.Info(\"ACA services started\")\n}", "func Serverstart() {\n\n\t//defer connection to database until all db operations are completed\n\tdefer dbmap.Db.Close()\n\trouter := Router()\n\trouter.Run(\":9000\")\n}", "func startServer(grpcServer *grpc.Server, listener net.Listener) {\n\tif err := grpcServer.Serve(listener); err != nil {\n\t\tlog.Fatalf(\"Error running server: %server\", err)\n\t}\n}", "func main() {\n\t// The ccid is assigned to the chaincode on install (using the “peer lifecycle chaincode install <package>” command) for instance\n\n\tif len(os.Args) < 3 {\n\t\tfmt.Println(\"Please supply:\\n- installed chaincodeID (using the “peer lifecycle chaincode install <package>” command)\\n- chaincode address (host:port)\")\n\t\treturn\n\t}\n\n\tccid := os.Args[1]\n\taddress := os.Args[2]\n\n\tserver := &shim.ChaincodeServer{\n\t\tCCID: ccid,\n\t\tAddress: address,\n\t\tCC: new(SimpleChaincode),\n\t\tTLSProps: shim.TLSProperties{\n\t\t\tDisabled: true,\n\t\t},\n\t}\n\n\tfmt.Println(\"Start Chaincode server on \" + address)\n\terr := server.Start()\n\tif err != nil {\n\t\tfmt.Printf(\"Error starting Simple chaincode: %s\", err)\n\t\treturn\n\t}\n}", "func (s *Server) Start(ctx context.Context) error {\n\t// Create a cache\n\tsrv3 := serverv3.NewServer(ctx, s.Cache, nil)\n\n\t// gRPC golang library sets a very small upper bound for the number gRPC/h2\n\t// streams over a single TCP connection. If a proxy multiplexes requests over\n\t// a single connection to the management server, then it might lead to\n\t// availability problems.\n\tvar grpcOptions []grpc.ServerOption\n\tgrpcOptions = append(grpcOptions, grpc.MaxConcurrentStreams(grpcMaxConcurrentStreams))\n\tgrpcServer := grpc.NewServer(grpcOptions...)\n\n\tlis, err := net.Listen(\"tcp\", s.ListenAddress)\n\tif err != nil {\n\t\treturn fmt.Errorf(\"envoy control plane server failed while start listening: %w\", err)\n\t}\n\n\tregisterServer(grpcServer, srv3)\n\n\ts.Log.Infow(\"starting management service\", \"listen-address\", s.ListenAddress)\n\tif err = grpcServer.Serve(lis); err != nil {\n\t\treturn fmt.Errorf(\"envoy control plane server failed while start serving incoming connections: %w\", err)\n\t}\n\t<-ctx.Done()\n\treturn nil\n}", "func (ms *ManagerServer) Start() {\n\tms.log.Trace(\"Starting manager server...\")\n\terr := ms.server.ListenAndServeTLS(ms.certPath, ms.keyPath)\n\tif err != nil {\n\t\tms.log.Error(\"Cannot start the server:\", err)\n\t\tos.Exit(1)\n\t}\n}", "func Start() {\n\twebServer.Engine.Run(\":\" + strconv.Itoa(cfg.Read().App.WebServerPort))\n}", "func (n *Server) Start() error {\n\tlog.Infof(\"Attach server listening on %s:%d\", n.ip, n.port)\n\n\taddr, err := net.ResolveTCPAddr(\"tcp\", fmt.Sprintf(\"%s:%d\", n.ip, n.port))\n\n\tn.l, err = net.ListenTCP(\"tcp\", addr)\n\tif err != nil {\n\t\terr = fmt.Errorf(\"Attach server error %s: %s\", addr, errors.ErrorStack(err))\n\t\tlog.Errorf(\"%s\", err)\n\t\treturn err\n\t}\n\n\t// starts serving requests immediately\n\tn.connServer = NewConnector(n.l)\n\n\treturn nil\n}", "func startServer(dataSlice []string) {\n\te := echo.New()\n\n\te.GET(\"/\", func(f echo.Context) error {\n\t\treturn f.JSON(http.StatusOK, dataSlice)\n\t})\n\n\tfmt.Println(\"Server running: http://localhost:8000\")\n\te.Logger.Fatal(e.Start(\":8000\"))\n}", "func main() 
{\n\tlogrus.SetFormatter(&logrus.TextFormatter{\n\t\tForceColors: true,\n\t\tFullTimestamp: true,\n\t})\n\tmlog := logrus.WithFields(logrus.Fields{\n\t\t\"component\": componentName,\n\t\t\"version\": env.Version(),\n\t})\n\n\tgrpc_logrus.ReplaceGrpcLogger(mlog.WithField(\"component\", componentName+\"_grpc\"))\n\tmlog.Infof(\"Starting %s\", componentName)\n\n\tgrpcServer, err := createGRPCServer(mlog)\n\tif err != nil {\n\t\tmlog.WithError(err).Fatal(\"failed to create grpc server\")\n\t}\n\t// Start go routines\n\tgo handleExitSignals(grpcServer, mlog)\n\tserveGRPC(env.ServiceAddr(), grpcServer, mlog)\n}", "func main() {\n\n\tvar logger *simple.Logger\n\n\tif os.Getenv(\"LOG_LEVEL\") == \"\" {\n\t\tlogger = &simple.Logger{Level: \"info\"}\n\t} else {\n\t\tlogger = &simple.Logger{Level: os.Getenv(\"LOG_LEVEL\")}\n\t}\n\terr := validator.ValidateEnvars(logger)\n\tif err != nil {\n\t\tos.Exit(-1)\n\t}\n\n\t// setup our client connectors (message producer)\n\tconn := connectors.NewClientConnectors(logger)\n\n\t// call the start server function\n\tlogger.Info(\"Starting server on port \" + os.Getenv(\"SERVER_PORT\"))\n\tstartHttpServer(conn)\n}", "func (sw *Switcher) Start() {\n\tsw.Server.Start()\n}", "func main() {\n\n\tfmt.Println(\">>> Connecting to Agones with the SDK\")\n\ts, err := sdk.NewSDK()\n\tif err != nil {\n\t\tlog.Fatalf(\">>> Could not connect to sdk: %v\", err)\n\t}\n\n\tfmt.Println(\">>> Starting health checking\")\n\tgo doHealth(s)\n\tgo doPing(s)\n\tfmt.Println(\">>> Starting wrapper for csgo!\")\n\tcmd := exec.Command(\"/home/csgo/hlserver/csgo.sh\") // #nosec\n\tcmd.Stderr = &interceptor{forward: os.Stderr}\n\tcmd.Stdout = &interceptor{\n\t\tforward: os.Stdout,\n\t\tintercept: func(p []byte) {\n\n\t\t\tstr := strings.TrimSpace(string(p))\n\t\t\t// csgo will say \"Server listening\" 4 times before being ready,\n\t\t\t// once for ipv4 and once for ipv6.\n\t\t\t// but it does it each twice because it loads the maps between\n\t\t\t// each one, and resets state as it does so\n\t\t\tfmt.Println(str)\n\t\t}}\n\terr = cmd.Start()\n\tif err != nil {\n\t\tlog.Fatalf(\">>> Error Starting Cmd %v\", err)\n\t}\n\n\terr = cmd.Wait()\n\tlog.Fatal(\">>> csgo shutdown unexpectantly\", err)\n}", "func Start(ccfg Config) (clus *Cluster, err error) {\n\tif ccfg.Size > 7 {\n\t\treturn nil, fmt.Errorf(\"max cluster size is 7, got %d\", ccfg.Size)\n\t}\n\n\tlg.Infof(\"starting %d Members (root directory %q, root port :%d)\", ccfg.Size, ccfg.RootDir, ccfg.RootPort)\n\n\tdt := ccfg.DialTimeout\n\tif dt == time.Duration(0) {\n\t\tdt = defaultDialTimeout\n\t}\n\n\tclus = &Cluster{\n\t\tembeddedClient: ccfg.EmbeddedClient,\n\t\tStarted: time.Now(),\n\t\tsize: ccfg.Size,\n\t\tMembers: make([]*Member, ccfg.Size),\n\t\tclientHostToIndex: make(map[string]int, ccfg.Size),\n\t\tclientDialTimeout: dt,\n\t\tstopc: make(chan struct{}),\n\t\trootCtx: ccfg.RootCtx,\n\t\trootCancel: ccfg.RootCancel,\n\n\t\tbasePort: ccfg.RootPort,\n\t\trootDir: ccfg.RootDir,\n\t\tccfg: ccfg,\n\t}\n\n\tif !existFileOrDir(ccfg.RootDir) {\n\t\tlg.Infof(\"creating root directory %q\", ccfg.RootDir)\n\t\tif err = mkdirAll(ccfg.RootDir); err != nil {\n\t\t\treturn nil, err\n\t\t}\n\t} else {\n\t\tlg.Infof(\"removing root directory %q\", ccfg.RootDir)\n\t\tos.RemoveAll(ccfg.RootDir)\n\t}\n\n\tlg.Infof(\"getting default host\")\n\tdhost, err := netutil.GetDefaultHost()\n\tif err != nil {\n\t\tlg.Warn(err)\n\t\tlg.Warn(\"overwriting default host with 'localhost\")\n\t\tdhost = \"localhost\"\n\t}\n\tlg.Infof(\"detected default host %q\", 
dhost)\n\n\tif !ccfg.PeerTLSInfo.Empty() && ccfg.PeerAutoTLS {\n\t\treturn nil, fmt.Errorf(\"choose either auto peer TLS or manual peer TLS\")\n\t}\n\tif !ccfg.ClientTLSInfo.Empty() && ccfg.ClientAutoTLS {\n\t\treturn nil, fmt.Errorf(\"choose either auto client TLS or manual client TLS\")\n\t}\n\n\tstartPort := ccfg.RootPort\n\tfor i := 0; i < ccfg.Size; i++ {\n\t\tcfg := embed.NewConfig()\n\n\t\tcfg.ClusterState = embed.ClusterStateFlagNew\n\n\t\tcfg.Name = fmt.Sprintf(\"node%d\", i+1)\n\t\tcfg.Dir = filepath.Join(ccfg.RootDir, cfg.Name+\".data-dir-etcd\")\n\t\tcfg.WalDir = filepath.Join(ccfg.RootDir, cfg.Name+\".data-dir-etcd\", \"wal\")\n\n\t\t// this is fresh cluster, so remove any conflicting data\n\t\tos.RemoveAll(cfg.Dir)\n\t\tlg.Infof(\"removed %q\", cfg.Dir)\n\t\tos.RemoveAll(cfg.WalDir)\n\t\tlg.Infof(\"removed %q\", cfg.WalDir)\n\n\t\tcurl := url.URL{Scheme: ccfg.ClientScheme(), Host: fmt.Sprintf(\"localhost:%d\", startPort)}\n\t\tcfg.ACUrls = []url.URL{curl}\n\t\tcfg.LCUrls = []url.URL{curl}\n\t\tif dhost != \"localhost\" {\n\t\t\t// expose default host to other machines in listen address (e.g. Prometheus dashboard)\n\t\t\tcurl2 := url.URL{Scheme: ccfg.ClientScheme(), Host: fmt.Sprintf(\"%s:%d\", dhost, startPort)}\n\t\t\tcfg.LCUrls = append(cfg.LCUrls, curl2)\n\t\t\tlg.Infof(\"%q is set up to listen on client url %q (default host)\", cfg.Name, curl2.String())\n\t\t}\n\t\tlg.Infof(\"%q is set up to listen on client url %q\", cfg.Name, curl.String())\n\n\t\tpurl := url.URL{Scheme: ccfg.PeerScheme(), Host: fmt.Sprintf(\"localhost:%d\", startPort+1)}\n\t\tcfg.APUrls = []url.URL{purl}\n\t\tcfg.LPUrls = []url.URL{purl}\n\t\tlg.Infof(\"%q is set up to listen on peer url %q\", cfg.Name, purl.String())\n\n\t\tcfg.ClientAutoTLS = ccfg.ClientAutoTLS\n\t\tcfg.ClientTLSInfo = ccfg.ClientTLSInfo\n\t\tcfg.PeerAutoTLS = ccfg.PeerAutoTLS\n\t\tcfg.PeerTLSInfo = ccfg.PeerTLSInfo\n\n\t\t// auto-compaction every hour\n\t\tcfg.AutoCompactionMode = embed.CompactorModePeriodic\n\t\tcfg.AutoCompactionRetention = \"1h\"\n\n\t\tcfg.Logger = \"zap\"\n\t\tcfg.LogOutputs = []string{embed.StdErrLogOutput}\n\n\t\tclus.Members[i] = &Member{\n\t\t\tclus: clus,\n\t\t\tcfg: cfg,\n\t\t\tstatus: clusterpb.MemberStatus{\n\t\t\t\tName: cfg.Name,\n\t\t\t\tEndpoint: curl.String(),\n\t\t\t\tIsLeader: false,\n\t\t\t\tState: clusterpb.StoppedMemberStatus,\n\t\t\t},\n\t\t}\n\n\t\tclus.clientHostToIndex[curl.Host] = i\n\n\t\tstartPort += 2\n\t}\n\tclus.basePort = startPort\n\n\tfor i := 0; i < clus.size; i++ {\n\t\tclus.Members[i].cfg.InitialCluster = clus.initialCluster()\n\t}\n\n\tvar g errgroup.Group\n\tfor i := 0; i < clus.size; i++ {\n\t\tidx := i\n\t\tg.Go(func() error { return clus.Members[idx].Start() })\n\t}\n\tif gerr := g.Wait(); gerr != nil {\n\t\treturn nil, gerr\n\t}\n\n\ttime.Sleep(time.Second)\n\n\treturn clus, clus.WaitForLeader()\n}", "func (s *echoServer) Start() {\n\te := s.Instance\n\n\t// Currently this server is only used for the core API so the logic below\n\t// is fine here. 
If we need to expand this to be used in multiple locations\n\t// the below can be done via first-class functions\n\te.Pre(middleware.HTTPSRedirect())\n\n\te.Use(middleware.RequestID())\n\te.Use(middleware.Recover())\n\te.Use(middleware.LoggerWithConfig(middleware.LoggerConfig{\n\t\tOutput: logger.Instance().Logger().Writer(),\n\t}))\n\n\trouter.Setup(e)\n\n\tport := utils.GetVariable(consts.API_PORT)\n\tport = fmt.Sprintf(\":%s\", port)\n\n\tcertDir := utils.GetVariable(consts.CERT_DIR)\n\te.Logger.Fatal(e.StartTLS(port,\n\t\tfmt.Sprintf(\"%s/%s\", certDir, utils.GetVariable(consts.API_CERT)),\n\t\tfmt.Sprintf(\"%s/%s\", certDir, utils.GetVariable(consts.API_KEY))))\n}", "func Start() error {\n\tfmt.Printf(\"RPC listening on port %d\\n\", cfg.Port())\n\tlis, err := kcp.ListenWithOptions(fmt.Sprintf(\"[%s]:%d\", \"::\", cfg.Port()), nil, 10, 3)\n\tif err != nil {\n\t\treturn err\n\t}\n\tserver := grpc.NewServer(grpc.MaxRecvMsgSize(cfg.MaxSendRecvMsgSize()), grpc.MaxSendMsgSize(cfg.MaxSendRecvMsgSize()))\n\tpb_center.RegisterFyerCenterServer(server, new(rpcImpl))\n\treturn server.Serve(lis)\n}", "func (m *MetaNode) startServer() (err error) {\n\t// initialize and start the server.\n\tm.httpStopC = make(chan uint8)\n\tln, err := net.Listen(\"tcp\", \":\"+m.listen)\n\tif err != nil {\n\t\treturn\n\t}\n\tgo func(stopC chan uint8) {\n\t\tdefer ln.Close()\n\t\tfor {\n\t\t\tconn, err := ln.Accept()\n\t\t\tselect {\n\t\t\tcase <-stopC:\n\t\t\t\treturn\n\t\t\tdefault:\n\t\t\t}\n\t\t\tif err != nil {\n\t\t\t\tcontinue\n\t\t\t}\n\t\t\tgo m.serveConn(conn, stopC)\n\t\t}\n\t}(m.httpStopC)\n\tlog.LogInfof(\"start server over...\")\n\treturn\n}", "func (s *server) Start() error {\n\t// Start C2 server\n\tdbType := \"E4C2_DB_TYPE=sqlite3\"\n\tdbName := fmt.Sprintf(\"E4C2_DB_FILE=%s\", s.dbPath)\n\tbroker := fmt.Sprintf(\"E4C2_MQTT_BROKER=tcp://%s\", s.mqttEndpoint)\n\tesEnable := \"E4C2_ES_ENABLE=false\"\n\tpassphrase := \"E4C2_DB_ENCRYPTION_PASSPHRASE=very_secure_testpass\"\n\tcryptoMode := \"E4C2_CRYPTO_MODE=symkey\"\n\n\tfmt.Fprintf(os.Stderr, \"Database set to %s\\n\", dbName)\n\tfmt.Fprintf(os.Stderr, \"Broker set to %s\\n\", broker)\n\n\tenv := []string{\n\t\tdbType,\n\t\tdbName,\n\t\tbroker,\n\t\tesEnable,\n\t\tpassphrase,\n\t\tcryptoMode,\n\t}\n\n\ts.cmd = exec.Command(\"bin/c2\")\n\ts.cmd.Env = append(os.Environ(), env...)\n\n\ts.cmd.Stdout = os.Stderr\n\ts.cmd.Stderr = os.Stderr\n\n\tif err := s.cmd.Start(); err != nil {\n\t\treturn fmt.Errorf(\"failed to start server: %v\", err)\n\t}\n\n\t// Wait for server to be ready\n\tretryTimeout := 100 * time.Millisecond\n\tmaxRetryCount := 100\n\tretryCount := 0\n\n\tticker := time.NewTicker(retryTimeout)\n\tdefer ticker.Stop()\n\n\tfor range ticker.C {\n\t\tif CheckC2Online(\"127.0.0.1\", 5555, 8888) {\n\t\t\treturn nil\n\t\t}\n\n\t\tif retryCount > maxRetryCount {\n\t\t\ts.Stop()\n\t\t\treturn errors.New(\"timeout while waiting for server to start\")\n\t\t}\n\n\t\tretryCount++\n\t}\n\n\treturn nil\n}", "func (s *Server) Run(mux *http.ServeMux, port string) error {\n\ts.callbackServer = &http.Server{\n\t\tAddr: \":\" + port,\n\t\tMaxHeaderBytes: 1 << 20,\n\t\tReadTimeout: 10 * time.Second,\n\t\tWriteTimeout: 10 * time.Second,\n\t\tHandler: mux,\n\t}\n\treturn s.callbackServer.ListenAndServeTLS(\"/cert/server.crt\", \"/cert/server.key\")\n}", "func (server *SServer) Start() {\n\tlistener, err := network.Listen(server.config.GetAcraConnectionString())\n\tif err != nil {\n\t\tlog.WithError(err).Errorln(\"can't start listen 
connections\")\n\t\treturn\n\t}\n\tdefer listener.Close()\n\tserver.addListener(listener)\n\tlog.Infof(\"start listening %s\", server.config.GetAcraConnectionString())\n\tfor {\n\t\tconnection, err := listener.Accept()\n\t\tif err != nil {\n\t\t\tlog.WithError(err).Errorln(\"can't accept new connection\")\n\t\t\tcontinue\n\t\t}\n\t\t// unix socket and value == '@'\n\t\tif len(connection.RemoteAddr().String()) == 1 {\n\t\t\tlog.Infof(\"new connection to acraserver: <%v>\", connection.LocalAddr())\n\t\t} else {\n\t\t\tlog.Infof(\"new connection to acraserver: <%v>\", connection.RemoteAddr())\n\t\t}\n\t\tgo server.handleConnection(connection)\n\t}\n}", "func RunServer(configFile string) {\n\tc := make(chan os.Signal, 1)\n\tsignal.Notify(c, syscall.SIGTERM)\n\tserver, err := NewServer(configFile)\n\tif err != nil {\n\t\tlog.Fatal(err)\n\t}\n\tlog.Info(\"Gohan no jikan desuyo (It's time for dinner!)\")\n\tlog.Info(\"Build version: %s\", version.Build.Version)\n\tlog.Info(\"Build timestamp: %s\", version.Build.Timestamp)\n\tlog.Info(\"Build host: %s\", version.Build.Host)\n\tlog.Info(\"Starting Gohan Server...\")\n\taddress := server.address\n\tif strings.HasPrefix(address, \":\") {\n\t\taddress = \"localhost\" + address\n\t}\n\tprotocol := \"http\"\n\tif server.tls != nil {\n\t\tprotocol = \"https\"\n\t}\n\tlog.Info(\" API Server %s://%s/\", protocol, address)\n\tlog.Info(\" Web UI %s://%s/webui/\", protocol, address)\n\tgo func() {\n\t\tfor range c {\n\t\t\tlog.Info(\"Stopping the server...\")\n\t\t\tlog.Info(\"Tearing down...\")\n\t\t\tlog.Info(\"Stopping server...\")\n\t\t\tserver.Stop()\n\t\t}\n\t}()\n\tserver.running = true\n\tserver.masterCtx, server.masterCtxCancel = context.WithCancel(context.Background())\n\n\tserver.startSyncProcesses()\n\n\tstartCRONProcess(server)\n\tmetrics.StartMetricsProcess()\n\terr = server.Start()\n\tif err != nil {\n\t\tlog.Fatal(err)\n\t}\n}", "func main() {\n\tflag.Parse()\n\n\tclusterNodeConfig := config.NewClusterNodeConfig(nodeID, raftDir, raftAddr, httpAddr, joinHttpAddr)\n\n\tnode := server.NewClusterNode(clusterNodeConfig)\n\n\t// Request to join cluster leader node if not bootstrap\n\tif !clusterNodeConfig.IsBootstrap() {\n\t\tif err := node.RequestJoinCluster(clusterNodeConfig.JoinHttpAddr, clusterNodeConfig.RaftAddr, clusterNodeConfig.NodeID); err != nil {\n\t\t\tpanic(err.Error())\n\t\t}\n\t}\n\n\t// Start http handler\n\thttpHandler := server.NewNodeHttpHandler(node, clusterNodeConfig.HttpAddr)\n\tif err := httpHandler.Start(); err != nil {\n\t\tpanic(err.Error())\n\t}\n\n\tnode.Logger.Println(\"Cluster Node Id \\\"\", nodeID, \"\\\" started successfully\")\n\n\tterminate := make(chan os.Signal, 1)\n\tsignal.Notify(terminate, os.Kill, os.Interrupt, syscall.SIGHUP, syscall.SIGINT, syscall.SIGTERM, syscall.SIGQUIT)\n\t<-terminate\n\n\t// snapshot when process terminated\n\tnode.SnapshotClusterNode()\n\tnode.Logger.Println(\"Cluster Node Id \\\"\", nodeID, \"\\\" exiting\")\n\n}", "func (teaAgentServer *TeaAgentServer) Start() {\n\t// Configure Root Handler\n\tconfigureRootHandler(teaAgentServer)\n\t// Configure AgentExtenderInfo handler\n\tconfigureAgentExtenderInfoHandler(teaAgentServer)\n\t// Configure AgentExtenderDetails handler\n\tconfigureAgentExtenderDetailsHandler(teaAgentServer)\n\t// Configure Ping handler\n\tconfigurePingHandler(teaAgentServer)\n\t// Configure Task handler\n\tconfigureTaskHandler(teaAgentServer)\n\t\t\n\terr := http.ListenAndServe(\":\" + strconv.Itoa(teaAgentServer.Port), nil)\n\tif err != nil {\n \t\tlog.Fatal(\"Error 
Configuring Agent Details: \", err)\n \t}\n\n}", "func StartAddama(configFile *goconf.ConfigFile) {\n\thostname := GetRequiredString(configFile, \"default\", \"hostname\")\n\n\taddamaConn := AddamaConnection{\n\t\ttarget: GetRequiredString(configFile, \"addama\", \"target\"),\n\t\tconnectionFile: GetRequiredString(configFile, \"addama\", \"connectionFile\"),\n\t\tserviceHost: GetRequiredString(configFile, \"addama\", \"host\"),\n\t\tserviceName: GetRequiredString(configFile, \"addama\", \"service\"),\n\t\turi: GetRequiredString(configFile, \"addama\", \"uri\"),\n\t\tlabel: GetRequiredString(configFile, \"addama\", \"label\"),\n\t\tapikey: GetRequiredString(configFile, \"default\", \"password\")}\n\n\thttp.Handle(\"/\", NewAddamaProxy(addamaConn))\n\n\tListenAndServeTLSorNot(hostname)\n}", "func main() {\n\n\tgo func() {\n\t\tdataBase.ConnectDataBase()\n\t}()\n\n\tlis, err := net.Listen(\"tcp\", fmt.Sprintf(\":%d\", 7777))\n\tif err != nil {\n\t\tlog.Fatalf(\"failed to listen: %v\", err)\n\t}\n\ts := controller.Server{}\n\tgrpcServer := grpc.NewServer()\n\tproto.RegisterAuthenticationServer(grpcServer, &s)\n\n\tif err := grpcServer.Serve(lis); err != nil {\n\t\tlog.Fatalf(\"failed to serve: %s\", err)\n\t}\n}", "func (s *Server) Run(cmd *cobra.Command, args []string) {\n\tcertManager := autocert.Manager{\n\t\tPrompt: autocert.AcceptTOS,\n\t\tHostPolicy: autocert.HostWhitelist(\"www.my-domain.go\"), //your domain here\n\t\tCache: autocert.DirCache(\"certs\"), //folder for storing certificates\n\t}\n\n\t// // read configuration\n\t// configfile := flag.String(\"config\", \"config.json\", \"Configuration file\")\n\t// flag.Parse()\n\n\tconfig := FromFile(*configfile)\n\n\tmux := http.NewServeMux()\n\tRewriteRequest(config, mux)\n\n\tserver := &http.Server{\n\t\tAddr: \":https\",\n\t\tTLSConfig: &tls.Config{\n\t\t\tGetCertificate: certManager.GetCertificate,\n\t\t},\n\t\tHandler: mux,\n\t}\n\n\terr := server.ListenAndServeTLS(\"\", \"\") //key and cert are comming from Let's Encrypt\n\tif err != nil {\n\t\tlog.Println(err)\n\t}\n}", "func (s Serf) Start() error {\n\teventCh := make(chan serf.Event, 64)\n\n\t// Get the bind address\n\tbindAddr := os.Getenv(\"SERVUS_BIND_ADDRESS\")\n\tif len(bindAddr) == 0 {\n\t\tbindAddr = defaultBindAddr + \":\" + defaultBindPort\n\t}\n\taddr, strPort, err := net.SplitHostPort(bindAddr)\n\tif err != nil {\n\t\treturn err\n\t}\n\tif len(strPort) == 0 {\n\t\tstrPort = defaultBindPort\n\t}\n\tport, err := strconv.Atoi(strPort)\n\tif err != nil {\n\t\treturn err\n\t}\n\n\tconf := serf.DefaultConfig()\n\t// Get tags from config settigns\n\tconf.Tags = map[string]string{\n\t\t\"role\": \"servus\",\n\t\t\"tag1\": \"foo\",\n\t\t\"tag2\": \"bar\",\n\t}\n\n\tconf.Init()\n\n\t// Get these parameters from config settings\n\tconf.MemberlistConfig.BindAddr = addr\n\tconf.MemberlistConfig.BindPort = port\n\tconf.NodeName = bindAddr\n\n\tconf.EventCh = eventCh\n\ts.eventCh = eventCh\n\n\tsrf, err := serf.Create(conf)\n\tif err != nil {\n\t\treturn err\n\t}\n\ts.serf = srf\n\n\t// Join, move to func\n\t// Get members in the clusters to join\n\tleader := os.Getenv(\"SERVUS_LEADER_ADDRESS\")\n\tinitMembers := []string{leader}\n\n\tif len(leader) != 0 {\n\t\ts.initMembers = initMembers\n\t\tnum, err := s.serf.Join(s.initMembers, true)\n\t\tif err != nil {\n\t\t\treturn err\n\t\t}\n\t\t// Log?:\n\t\tfmt.Printf(\"Node join to the cluster with %d nodes\", num)\n\t} else {\n\t\t// Log?:\n\t\tfmt.Print(\"First node in the cluster\\n\")\n\t}\n\n\tgo s.serfEventHandlerLoop()\n\n\treturn 
nil\n}", "func (s *Server) Run(ctx context.Context) error {\n\tvar httpsServer *http.Server\n\tvar httpHandler http.Handler\n\tif s.conf.SecurePort > 0 {\n\t\tif s.conf.SecureHostname == \"\" {\n\t\t\treturn errors.New(\"SECUREHOSTNAME not set\")\n\t\t}\n\t\tvar certDir string\n\t\tif s.conf.CertDir != \"\" {\n\t\t\tcertDir = s.conf.CertDir\n\t\t} else {\n\t\t\tcacheDir, err := os.UserCacheDir()\n\t\t\tif err != nil {\n\t\t\t\treturn err\n\t\t\t}\n\t\t\tcertDir = filepath.Join(cacheDir, \"wuerfler-certs\")\n\t\t}\n\t\tif !strings.Contains(strings.Trim(s.conf.SecureHostname, \".\"), \".\") {\n\t\t\treturn errors.New(\"acme/autocert aha: server name component count invalid\")\n\t\t}\n\t\tm := &autocert.Manager{\n\t\t\tCache: autocert.DirCache(certDir),\n\t\t\tPrompt: autocert.AcceptTOS,\n\t\t\tHostPolicy: autocert.HostWhitelist(s.conf.SecureHostname),\n\t\t}\n\t\thttpsServer = &http.Server{\n\t\t\tAddr: fmt.Sprintf(\":%d\", s.conf.SecurePort),\n\t\t\tTLSConfig: m.TLSConfig(),\n\t\t\tHandler: s.router,\n\t\t}\n\t\thttpHandler = m.HTTPHandler(http.HandlerFunc(redirect))\n\t} else {\n\t\thttpHandler = s.router\n\t}\n\n\thttpServer := &http.Server{\n\t\tAddr: fmt.Sprintf(\":%d\", s.conf.Port),\n\t\tHandler: httpHandler,\n\t}\n\n\terrCh := make(chan error)\n\tif httpsServer != nil {\n\t\tgo func() {\n\t\t\ts.log.Println(\"Starting HTTPS server.\")\n\t\t\terr := httpsServer.ListenAndServeTLS(\"\", \"\")\n\t\t\ts.log.Println(\"Ended HTTPS server.\")\n\t\t\terrCh <- fmt.Errorf(\"httpsServer.ListenAndServeTLS: %v\", err)\n\t\t}()\n\t}\n\tgo func() {\n\t\ts.log.Println(\"Starting HTTP server.\")\n\t\terr := httpServer.ListenAndServe()\n\t\ts.log.Println(\"Ended HTTP server.\")\n\t\terrCh <- fmt.Errorf(\"httpServer.ListenAndServe: %v\", err)\n\t}()\n\n\tprometheus.MustRegister(rooms.RoomsGauge)\n\tprometheus.MustRegister(ConnectionsGauge)\n\n\tselect {\n\tcase <-ctx.Done():\n\t\tif httpsServer != nil {\n\t\t\terr := httpsServer.Close()\n\t\t\tif err != nil {\n\t\t\t\tlog.Println(\"httpsServer.Close:\", err)\n\t\t\t}\n\t\t}\n\t\terr := httpServer.Close()\n\t\tif err != nil {\n\t\t\tlog.Println(\"httpServer.Close:\", err)\n\t\t}\n\t\treturn nil\n\tcase err := <-errCh:\n\t\treturn err\n\t}\n}", "func (s *server) Start() {\n\t// Already started?\n\tif atomic.AddInt32(&s.started, 1) != 1 {\n\t\tlogging.CPrint(logging.INFO, \"started exit\", logging.LogFormat{\"started\": s.started})\n\t\treturn\n\t}\n\n\tlogging.CPrint(logging.TRACE, \"starting server\", logging.LogFormat{})\n\n\t// srvrLog.Trace(\"Starting server\")\n\tlogging.CPrint(logging.INFO, \"begin to start any com\", logging.LogFormat{})\n\n\t// Start SyncManager\n\ts.syncManager.Start()\n\n\ts.wg.Add(1)\n\n}", "func main() {\n\tserver.New().Start()\n}", "func startHTTPServer(ch chan<- bool) {\n\tserver := http.Server{\n\t\tAddr: \":80\",\n\t}\n\tlog.Println(\"HTTP server started (listening on port 80).\")\n\tlog.Println(\"HTTP server stopped with error:\", server.ListenAndServe())\n\tch <- true\n}", "func (s *ManageServer) StartServer() bool {\n\t// raft server\n\tvar err error\n\ts.RNode, err = newRaftNode(*s.Advertise, *s.Cluster, false, *s.MetaFolder)\n\tif err != nil {\n\t\tglog.Fatalf(\"manage node server failed to new raft node: %v\", err)\n\t}\n\t// start a manage node server\n\taddress, _ := url.Parse(*s.Advertise)\n\traftPost, _ := strconv.Atoi(address.Port())\n\thttpPost := raftPost-100\n\n\tlistener, e := util.NewListener( address.Hostname() + \":\" + strconv.Itoa(httpPost), 0)\n\tif e != nil {\n\t\tglog.Fatalf(\"manage node server 
startup error: %v\", e)\n\t}\n\n\tm := cmux.New(listener)\n\tgrpcL := m.MatchWithWriters(cmux.HTTP2MatchHeaderFieldSendSettings(\"content-type\", \"application/grpc\"))\n\thttpL := m.Match(cmux.Any())\n\n\t// Create your protocol servers.\n\tgrpcS := grpc.NewServer()\n\tpb.RegisterSeaweedServer(grpcS, s)\n\treflection.Register(grpcS)\n\n\thttpS := &http.Server{Handler: s.Router}\n\n\tgo grpcS.Serve(grpcL)\n\tgo httpS.Serve(httpL)\n\n\tif err := m.Serve(); err != nil {\n\t\tglog.Fatalf(\"manage node server failed to serve: %v\", err)\n\t}\n\n\treturn true\n}", "func StartServer(config map[string]interface{}) {\n\tsetupConfig(config)\n\tconnectToDB()\n\tmodels.BootStrap()\n\tserver.LoadInternalResources()\n\tviews.BootStrap()\n\tactions.BootStrap()\n\tcontrollers.BootStrap()\n\tmenus.BootStrap()\n\tserver.PostInit()\n\tsrv := server.GetServer()\n\tlog.Info(\"YEP is up and running\")\n\tsrv.Run()\n}", "func (api *API) Start() error {\n\treturn api.Server.Start(fmt.Sprintf(\":%d\", api.config.Port))\n}", "func Start() error {\n\te := echo.New()\n\te.SetDebug(config.Conf.Debug)\n\n\t// Middleware\n\te.Use(middleware.Logger())\n\te.Use(middleware.Recover())\n\n\t// setup access logger\n\tlogPath := \"/var/log/vuls/access.log\"\n\tif _, err := os.Stat(logPath); os.IsNotExist(err) {\n\t\tif _, err := os.Create(logPath); err != nil {\n\t\t\treturn err\n\t\t}\n\t}\n\tf, err := os.OpenFile(logPath, os.O_APPEND|os.O_WRONLY, 0600)\n\tif err != nil {\n\t\treturn err\n\t}\n\tdefer f.Close()\n\tlogconf := middleware.DefaultLoggerConfig\n\tlogconf.Output = f\n\te.Use(middleware.LoggerWithConfig(logconf))\n\n\t// Routes\n\te.Get(\"/health\", health())\n\te.Get(\"/cves/:id\", getCve())\n\te.Post(\"/cpes\", getCveByCpeName())\n\n\tbindURL := fmt.Sprintf(\"%s:%s\", config.Conf.Bind, config.Conf.Port)\n\tlog.Infof(\"Listening on %s\", bindURL)\n\n\te.Run(standard.New(bindURL))\n\treturn nil\n}", "func (as *AgentServer) Run() {\n\t//register agent\n\tkillHeartBeaterChan := make(chan bool, 1)\n\tgo client.NewHeartBeater(*as.Option.Host, *as.Option.Port, as.Master).StartAgentHeartBeat(killHeartBeaterChan, func(values url.Values) {\n\t\tresource.AddToValues(values, as.computeResource, as.allocatedResource)\n\t\tvalues.Add(\"dataCenter\", *as.Option.DataCenter)\n\t\tvalues.Add(\"rack\", *as.Option.Rack)\n\t})\n\n\tfor {\n\t\t// Listen for an incoming connection.\n\t\tconn, err := as.listener.Accept()\n\t\tif err != nil {\n\t\t\tfmt.Println(\"Error accepting: \", err.Error())\n\t\t\tcontinue\n\t\t}\n\t\t// Handle connections in a new goroutine.\n\t\tas.wg.Add(1)\n\t\tgo func() {\n\t\t\tdefer as.wg.Done()\n\t\t\tdefer conn.Close()\n\t\t\tif err = conn.SetDeadline(time.Time{}); err != nil {\n\t\t\t\tfmt.Printf(\"Failed to set timeout: %v\\n\", err)\n\t\t\t}\n\t\t\tif c, ok := conn.(*net.TCPConn); ok {\n\t\t\t\tc.SetKeepAlive(true)\n\t\t\t}\n\t\t\tas.handleRequest(conn)\n\t\t}()\n\t}\n}", "func main() {\n if len(os.Args) != 2 {\n log.Panic(\"args:\", \"<port>\")\n }\n port := os.Args[1]\n startServer(port)\n}", "func StartApplicatin() {\n\tmapUrls()\n\trouter.Run(\":8080\")\n}", "func (authService *Controller) Start(ctx context.Context, address string, useTLS bool, certPath, keyPath string) error {\n\tif err := authService.configureToken(); err != nil {\n\t\treturn err\n\t}\n\n\t// populate the lists of ClusterRoles to bind in the different peering states.\n\tpermissions, err := peeringroles.GetPeeringPermission(ctx, authService.clientset)\n\tif err != nil {\n\t\tklog.Errorf(\"Unable to populate peering permission: 
%w\", err)\n\t\treturn err\n\t}\n\tauthService.peeringPermission = *permissions\n\n\trouter := httprouter.New()\n\n\trouter.POST(auth.CertIdentityURI, authService.identity)\n\trouter.GET(auth.IdsURI, authService.ids)\n\n\tif useTLS {\n\t\terr = http.ListenAndServeTLS(address, certPath, keyPath, router)\n\t} else {\n\t\terr = http.ListenAndServe(address, router)\n\t}\n\tif err != nil {\n\t\tklog.Error(err)\n\t\treturn err\n\t}\n\treturn nil\n}", "func startConsul(workingDirectoryPath string) (*exec.Cmd, error) {\n\n\texecutable := filepath.Join(workingDirectoryPath, \"consul\")\n\tdataDir := filepath.Join(workingDirectoryPath, \"consul-data\")\n\n\t// First check if consul is already running\n\tcmd := exec.Command(executable, \"members\")\n\terr := cmd.Run()\n\tif err == nil {\n\t\tfmt.Println(\"Consul is already running\")\n\t\treturn nil, nil\n\t}\n\tconsulLogPath := filepath.Join(workingDirectoryPath, \"consul.log\")\n\tconsulLogFile, err := os.OpenFile(consulLogPath, os.O_RDWR|os.O_CREATE|os.O_TRUNC, 0600)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\tfmt.Printf(\"Starting Consul (logs: %s)\", consulLogPath)\n\t// Consul startup options\n\t// Specifying a bind address or it would fail on a setup with multiple\n\t// IPv4 addresses configured\n\tcmdArgs := \"agent -server -bootstrap-expect 1 -bind 127.0.0.1 -data-dir \" + dataDir\n\tcmd = exec.Command(executable, strings.Split(cmdArgs, \" \")...)\n\tcmd.Stdout = consulLogFile\n\terr = cmd.Start()\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\n\t// Wait for a new leader to be elected, else Yorc could try to access Consul\n\t// when it is not yet ready\n\twaitForConsulReadiness(\"http://127.0.0.1:8500\")\n\tfmt.Println(\" Consul started!\")\n\treturn cmd, err\n}" ]
[ "0.65186095", "0.64201957", "0.6405957", "0.63669497", "0.62811464", "0.62719184", "0.6230946", "0.62305397", "0.62067384", "0.6179799", "0.60812765", "0.60644907", "0.6047146", "0.59924704", "0.59824425", "0.5971501", "0.5946161", "0.59405077", "0.59334403", "0.59077084", "0.5902697", "0.5897694", "0.5863809", "0.586232", "0.58587664", "0.58534175", "0.5836198", "0.58126193", "0.58125365", "0.5804914", "0.5804783", "0.5792049", "0.57802904", "0.57789993", "0.5775196", "0.5765904", "0.57653254", "0.5755067", "0.5754748", "0.5750458", "0.57444423", "0.57410765", "0.57321894", "0.57303566", "0.5723927", "0.5721091", "0.5703523", "0.5701002", "0.569946", "0.56903404", "0.5686699", "0.56763744", "0.5669197", "0.56679106", "0.5663858", "0.56616825", "0.5660865", "0.5660178", "0.5651757", "0.5648854", "0.5644431", "0.5643492", "0.5642718", "0.56414354", "0.5640356", "0.5636131", "0.5635499", "0.56185794", "0.56093156", "0.56079566", "0.5593859", "0.55933464", "0.5591184", "0.55842084", "0.5582456", "0.55807936", "0.55791813", "0.5576643", "0.5573043", "0.55726916", "0.5572685", "0.5572381", "0.55714357", "0.5571164", "0.5564679", "0.5563791", "0.5558531", "0.55580944", "0.55570245", "0.5544666", "0.55409896", "0.55364573", "0.55355537", "0.5533269", "0.5531895", "0.55316424", "0.55296177", "0.552875", "0.5523047", "0.5522839" ]
0.61519676
10
Stop the server WARNING: This forcefully closes the listening socket and may cause requests in transit to fail, and so is only used for testing. A graceful shutdown will be supported with golang 1.8.
func (s *Server) Stop() error { // Stop operations server err := s.Operations.Stop() if err != nil { return err } if s.listener == nil { return nil } _, port, err := net.SplitHostPort(s.listener.Addr().String()) if err != nil { return err } err = s.closeListener() if err != nil { return err } if s.wait == nil { return nil } for i := 0; i < 10; i++ { select { case <-s.wait: log.Debugf("Stop: successful stop on port %s", port) close(s.wait) s.wait = nil return nil default: log.Debugf("Stop: waiting for listener on port %s to stop", port) time.Sleep(time.Second) } } log.Debugf("Stop: timed out waiting for stop notification for port %s", port) // make sure DB is closed err = s.closeDB() if err != nil { log.Errorf("Close DB failed: %s", err) } return nil }
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "func (s *Server) Stop(ctx context.Context) {\n\ts.shutdownFuncsM.Lock()\n\tdefer s.shutdownFuncsM.Unlock()\n\ts.shutdownOnce.Do(func() {\n\t\tclose(s.shuttingDown)\n\t\t// Shut down the HTTP server in parallel to calling any custom shutdown functions\n\t\twg := sync.WaitGroup{}\n\t\twg.Add(1)\n\t\tgo func() {\n\t\t\tdefer wg.Done()\n\t\t\tif err := s.srv.Shutdown(ctx); err != nil {\n\t\t\t\tslog.Debug(ctx, \"Graceful shutdown failed; forcibly closing connections 👢\")\n\t\t\t\tif err := s.srv.Close(); err != nil {\n\t\t\t\t\tslog.Critical(ctx, \"Forceful shutdown failed, exiting 😱: %v\", err)\n\t\t\t\t\tpanic(err) // Something is super hosed here\n\t\t\t\t}\n\t\t\t}\n\t\t}()\n\t\tfor _, f := range s.shutdownFuncs {\n\t\t\tf := f // capture range variable\n\t\t\twg.Add(1)\n\t\t\tgo func() {\n\t\t\t\tdefer wg.Done()\n\t\t\t\tf(ctx)\n\t\t\t}()\n\t\t}\n\t\twg.Wait()\n\t})\n}", "func (a *App) Stop() {\n\t// Create a context to attempt a graceful 5 second shutdown.\n\tconst timeout = 5 * time.Second\n\tctx, cancel := context.WithTimeout(context.Background(), timeout)\n\tdefer cancel()\n\n\t// Attempt the graceful shutdown by closing the listener and\n\t// completing all inflight requests.\n\tif err := a.server.Shutdown(ctx); err != nil {\n\t\ta.logger.Printf(\"Could not stop server gracefully: %v\", err)\n\t\ta.logger.Printf(\"Initiating hard shutdown\")\n\t\tif err := a.server.Close(); err != nil {\n\t\t\ta.logger.Printf(\"Could not stop http server: %v\", err)\n\t\t}\n\t}\n}", "func (s *server) Stop() error {\n\t// Bail if we're already shutting down.\n\tif atomic.AddInt32(&s.shutdown, 1) != 1 {\n\t\treturn nil\n\t}\n\n\t// Shutdown the wallet, funding manager, and the rpc server.\n\ts.chainNotifier.Stop()\n\ts.rpcServer.Stop()\n\ts.fundingMgr.Stop()\n\ts.chanRouter.Stop()\n\ts.htlcSwitch.Stop()\n\ts.utxoNursery.Stop()\n\ts.breachArbiter.Stop()\n\ts.discoverSrv.Stop()\n\ts.lnwallet.Shutdown()\n\n\t// Signal all the lingering goroutines to quit.\n\tclose(s.quit)\n\ts.wg.Wait()\n\n\treturn nil\n}", "func (srv *Server) Stop() {\n Warn(fmt.Sprintf(\"stopping server %s\", srv.addrURL.String()))\n srv.mu.Lock()\n if srv.httpServer == nil {\n srv.mu.Unlock()\n return\n }\n graceTimeOut := time.Duration(50)\n ctx, cancel := context.WithTimeout(context.Background(), graceTimeOut)\n defer cancel()\n if err := srv.httpServer.Shutdown(ctx); err != nil {\n Debug(\"Wait is over due to error\")\n if err := srv.httpServer.Close(); err != nil {\n Debug(err.Error())\n }\n Debug(err.Error())\n }\n close(srv.stopc)\n <-srv.donec\n srv.mu.Unlock()\n Warn(fmt.Sprintf(\"stopped server %s\", srv.addrURL.String()))\n}", "func (s *HTTPServer) Stop(ctx context.Context) {\n\ts.guard.Lock()\n\tif err := s.httpSrv.Shutdown(ctx); err != nil {\n\t\ts.guard.Unlock()\n\t\tlog.Fatalf(\"unable to stop a server graceful: %v\", err)\n\t}\n\ts.guard.Unlock()\n}", "func (w *Webserver) Stop() error {\n\tw.logger.Infof(\"gracefully shutting down http server at %d...\", w.config.Port)\n\n\terr := w.Server.Shutdown(context.Background())\n\tif err != nil {\n\t\treturn err\n\t}\n\n\tclose(w.jobs)\n\treturn nil\n}", "func (s *server) Stop() error {\n\t// Make sure this only happens once.\n\tif atomic.AddInt32(&s.shutdown, 1) != 1 {\n\t\tlogging.CPrint(logging.INFO, \"server is already in the process of shutting down\", logging.LogFormat{})\n\t\treturn nil\n\t}\n\n\ts.syncManager.Stop()\n\n\t// Signal the remaining goroutines to quit.\n\tclose(s.quit)\n\n\ts.wg.Done()\n\n\treturn nil\n}", "func (s *server) Stop() {\n\tctx, cancel := 
context.WithTimeout(context.Background(), 10*time.Second)\n\tdefer cancel()\n\n\ts.httpServer.SetKeepAlivesEnabled(false)\n\terr := s.httpServer.Shutdown(ctx)\n\tif err != nil {\n\t\ts.logger.Fatalf(\"could not gracefully shutdown the server: %v\\n\", err)\n\t}\n\n\t<-s.done\n}", "func (s *Server) Stop() error {\n\ts.logger.Log(\"msg\", \"stopping\")\n\tctx, cancelFn := context.WithTimeout(context.Background(), 5*time.Second)\n\tdefer cancelFn()\n\n\terr := s.encoder.(system.Stoppable).Stop()\n\tif err != nil {\n\t\treturn err\n\t}\n\n\terr = s.mqtt.(system.Stoppable).Stop()\n\tif err != nil {\n\t\treturn err\n\t}\n\n\terr = s.db.Stop()\n\tif err != nil {\n\t\treturn err\n\t}\n\n\treturn s.srv.Shutdown(ctx)\n}", "func (s *Server) Stop() {\n\ts.stopAPIServer()\n\ts.stopFUSEServer()\n\ts.shutdown()\n}", "func (s *Server) Stop() error {\n\n\tizap.Logger.Info(\"Stopping http server\", zap.String(\"address\", s.srv.Addr))\n\treturn s.srv.Shutdown(context.Background())\n}", "func (srv *Server) Stop() error {\n\tif err := srv.app.Shutdown(); err != nil {\n\t\treturn err\n\t}\n\tif err := srv.config.StorageDriver.Disconnect(); err != nil {\n\t\treturn err\n\t}\n\treturn nil\n}", "func (srv *Server) Stop() {\n\terr := srv.httpServer.Shutdown(context.Background())\n\tif err != nil {\n\t\tsrv.log.Errorf(\"Unexpected error while shutting down HTTP server - %s\", err)\n\t}\n\tdefer srv.runCancel()\n}", "func (s *Server) Stop(ctx context.Context) error {\n\tlog.Info(\"[HTTP] server stopping\")\n\treturn s.Shutdown(ctx)\n}", "func stopServer(w http.ResponseWriter, r *http.Request) {\n\tgo localServer.Shutdown(context.Background())\n}", "func (s *Server) Stop() {\n\tctx, cancel := context.WithTimeout(context.Background(), time.Second*10)\n\tdefer cancel()\n\n\ts.logger.Info(\"Shutting down\")\n\tif err := s.server.Shutdown(ctx); err != nil {\n\t\ts.logger.Errorw(\"HTTP server shutdown\",\n\t\t\t\"error\", err,\n\t\t)\n\t}\n}", "func (ts *Server) Stop() error {\n\tif ts.Server == nil {\n\t\treturn nil\n\t}\n\tif err := ts.Server.Shutdown(context.Background()); err != nil {\n\t\treturn err\n\t}\n\tts.Server = nil\n\treturn nil\n}", "func (s Server) stop(ctx context.Context) {\n\ts.grpcServer.Stop()\n\terr := s.httpServer.Shutdown(ctx)\n\tif err != nil {\n\t\tlog.Err(err).Msg(\"error shutting down the http server\")\n\t}\n}", "func (g GrpcServer) Stop() {\n\tg.server.GracefulStop()\n}", "func (server *HTTPServer) Stop(ctx context.Context) error {\n\treturn server.Serv.Shutdown(ctx)\n}", "func (server *HTTPServer) Stop(ctx context.Context) error {\n\treturn server.Serv.Shutdown(ctx)\n}", "func (s *Server) Stop() error {\n\tif s.conn == nil {\n\t\t// nothing to do, return silently\n\t\treturn nil\n\t}\n\n\ts.stopping = true\n\n\terr := s.closeConn()\n\tif err != nil {\n\t\treturn wrapErrPrint(err, \"Couldn't close UDP listening socket\")\n\t}\n\n\t// We've just closed the listening socket.\n\t// Worker thread should exit right after it tries to read from the socket.\n\ts.mutex.Lock()\n\tfor s.running {\n\t\ts.cond.Wait()\n\t}\n\ts.mutex.Unlock()\n\n\ts.dbStore()\n\treturn nil\n}", "func (srv *Server) Stop() {\n\tsrv.s.Close()\n\tsrv.wg.Wait()\n}", "func (s *Server) Stop() {\n\ts.quit.Fire()\n\tdefer func() {\n\t\ts.done.Fire()\n\t}()\n\n\ts.mu.Lock()\n\tconnMgr := s.connMgr\n\ts.connMgr = nil\n\ts.mu.Unlock()\n\n\tconnMgr.close()\n\n\t// Wait for all the connections to close\n\ts.serveWG.Wait()\n}", "func (s *Server) Shutdown(graceful bool) {\n\ts.yorkieServiceCancel()\n\n\tif graceful 
{\n\t\ts.grpcServer.GracefulStop()\n\t} else {\n\t\ts.grpcServer.Stop()\n\t}\n}", "func Stop() error {\r\n\tlog.Logf(\"Stopping server\")\r\n\treturn DefaultServer.Stop()\r\n}", "func (s *Server) Stop() {\n\tif s.listener != nil {\n\t\ts.listener.Close()\n\t}\n}", "func (s *Server) Stop() {\n\tif s.listener != nil {\n\t\ts.listener.Close()\n\t}\n}", "func (hSvr *HTTPServer) Stop(ctx context.Context) error {\n\treturn hSvr.svr.Shutdown(ctx)\n}", "func (server *Server) Stop() {\n\tserver.listener.Close()\n\tclose(server.client)\n\tclose(server.message)\n}", "func (s *Server) Stop() error {\n\treturn nil\n}", "func (s *Server) Stop() error {\n\tif err := s.conn.Close(); err != nil {\n\t\treturn fmt.Errorf(\"Could not close connection. Err: %s\", err)\n\t}\n\n\terr := s.agonesSDK.Shutdown()\n\tif err != nil {\n\t\treturn fmt.Errorf(\"Could not shutdown Agones. Err: %s\", err)\n\t}\n\n\treturn nil\n}", "func (ks *KaiServer) Stop() error {\n\tks.logger.Info(\"stop-server\")\n\tdefer ks.logger.Info(\"stop\")\n\n\tif ks.listenNetwork == \"unix\" {\n\t\tif err := os.Remove(ks.listenAddr); err != nil {\n\t\t\tks.logger.Infof(\"failed-to-stop-server listenAddr: %s\", ks.listenAddr)\n\t\t\treturn err\n\t\t}\n\t}\n\n\treturn ks.Listener.Close()\n}", "func (r *runtime) Stop() {\n\tr.logger.Info(\"stopping broker server...\")\n\tdefer r.cancel()\n\n\tr.Shutdown()\n\n\tif r.httpServer != nil {\n\t\tr.logger.Info(\"stopping http server...\")\n\t\tif err := r.httpServer.Close(r.ctx); err != nil {\n\t\t\tr.logger.Error(\"shutdown http server error\", logger.Error(err))\n\t\t} else {\n\t\t\tr.logger.Info(\"stopped http server successfully\")\n\t\t}\n\t}\n\n\t// close registry, deregister broker node from active list\n\tif r.registry != nil {\n\t\tr.logger.Info(\"closing discovery-registry...\")\n\t\tif err := r.registry.Deregister(r.node); err != nil {\n\t\t\tr.logger.Error(\"unregister broker node error\", logger.Error(err))\n\t\t}\n\t\tif err := r.registry.Close(); err != nil {\n\t\t\tr.logger.Error(\"unregister broker node error\", logger.Error(err))\n\t\t} else {\n\t\t\tr.logger.Info(\"closed discovery-registry successfully\")\n\t\t}\n\t}\n\n\tif r.master != nil {\n\t\tr.logger.Info(\"stopping master...\")\n\t\tr.master.Stop()\n\t}\n\n\tif r.stateMachineFactory != nil {\n\t\tr.stateMachineFactory.Stop()\n\t}\n\n\tif r.repo != nil {\n\t\tr.logger.Info(\"closing state repo...\")\n\t\tif err := r.repo.Close(); err != nil {\n\t\t\tr.logger.Error(\"close state repo error, when broker stop\", logger.Error(err))\n\t\t} else {\n\t\t\tr.logger.Info(\"closed state repo successfully\")\n\t\t}\n\t}\n\tif r.stateMgr != nil {\n\t\tr.stateMgr.Close()\n\t}\n\tif r.srv.channelManager != nil {\n\t\tr.logger.Info(\"closing write channel manager...\")\n\t\tr.srv.channelManager.Close()\n\t\tr.logger.Info(\"closed write channel successfully\")\n\t}\n\n\tif r.factory.connectionMgr != nil {\n\t\tif err := r.factory.connectionMgr.Close(); err != nil {\n\t\t\tr.logger.Error(\"close connection manager error, when broker stop\", logger.Error(err))\n\t\t} else {\n\t\t\tr.logger.Info(\"closed connection manager successfully\")\n\t\t}\n\t}\n\tr.logger.Info(\"close connections successfully\")\n\n\t// finally, shutdown rpc server\n\tif r.grpcServer != nil {\n\t\tr.logger.Info(\"stopping grpc server...\")\n\t\tr.grpcServer.Stop()\n\t\tr.logger.Info(\"stopped grpc server successfully\")\n\t}\n\n\tr.state = server.Terminated\n\tr.logger.Info(\"stopped broker server successfully\")\n}", "func (s *Server) Stop() error 
{\n\tlevel.Info(s.logger).Log(\"msg\", \"stopping server\")\n\treturn s.server.Close()\n}", "func (s *Server) Stop() error {\n\t// Check if the server is running\n\tif !s.IsRunning() {\n\t\treturn errors.New(\"Attempted to stop a non-running server\")\n\t}\n\n\t// Shut down server gracefully, but wait no longer than a configured amount of seconds before halting\n\tctx, _ := context.WithTimeout(context.Background(), time.Duration(config.GetInstance().Server.Timeouts.ShutDown)*time.Second)\n\tif err := s.instance.Shutdown(ctx); err != nil {\n\t\treturn err\n\t}\n\n\ts.running = false\n\n\treturn nil\n}", "func (s *GrpcServer) Stop() {\n\ts.server.GracefulStop()\n}", "func (server *Server) Stop() {\n\tctx, cancel := context.WithTimeout(context.Background(), 5*time.Second)\n\tif cancel != nil {\n\t\tserver.srv.Shutdown(ctx)\n\t\tserver.srv = nil\n\t}\n\tif server.hub != nil {\n\t\tserver.hub.stop()\n\t}\n}", "func (s *Server) Stop() {\n\tclose(s.channelQuit)\n}", "func Stop() error {\n\tDefaultServer.Options().Logger.Logf(log.InfoLevel, \"Stopping server\")\n\treturn DefaultServer.Stop()\n}", "func (s *Server) Stop() {\n\tdefer log.WithoutContext().Info(\"Server stopped\")\n\n\ts.tcpEntryPoints.Stop()\n\ts.udpEntryPoints.Stop()\n\n\ts.stopChan <- true\n}", "func (s *Server) Stop(timeout int) error {\n\tctx, cancel := context.WithTimeout(context.Background(), time.Second*time.Duration(timeout))\n\tdefer cancel()\n\treturn s.HTTP.Shutdown(ctx)\n}", "func Stop() error {\n\terr := applicationServer.Stop()\n\treturn err\n}", "func (o *HttpServer) GracefulStop() error {\n\treturn o.Server.Shutdown(context.Background())\n}", "func (server *HTTPRouterServer) Stop() error {\n\tvar err error\n\tif server.srv != nil {\n\t\tlog.Println(\"Shutdown server\")\n\t\tserver.router = httprouter.New()\n\t\terr = server.srv.Shutdown(context.Background())\n\t\tserver.srv = nil\n\t}\n\treturn err\n}", "func (s *Server) Stop() {\n\ts.stopChan <- struct{}{}\n\t_ = s.listener.Close()\n}", "func (s *GenericGrpcServer) Stop() {\n\tif s.Listener != nil {\n\t\tlog.Infof(\"Stopping service at %s\", s.Listener.Addr())\n\t}\n\tif s.Server != nil {\n\t\ts.Server.GracefulStop()\n\t}\n}", "func (s *Server) Shutdown() {\n\tclose(stop)\n}", "func (s *Server) Stop() {\n\ts.server.Stop()\n}", "func Stop(r *registry.Registry) error {\n\treturn r.Server.Shutdown(context.Background())\n}", "func (s *Server) Stop() error {\n\tif err := s.ctx.Gateway.Close(); err != nil {\n\t\treturn fmt.Errorf(\"close gateway backend error: %s\", err)\n\t}\n\tif err := s.ctx.Application.Close(); err != nil {\n\t\treturn fmt.Errorf(\"close application backend error: %s\", err)\n\t}\n\tif err := s.ctx.Controller.Close(); err != nil {\n\t\treturn fmt.Errorf(\"close network-controller backend error: %s\", err)\n\t}\n\n\tlog.Info(\"waiting for pending actions to complete\")\n\ts.wg.Wait()\n\treturn nil\n}", "func (s *daemonServer) Stop() {\n\ts.grpcServer.Stop()\n}", "func (r *server) Stop() {\n\t// TODO: pass context in as a parameter.\n\tctx, cancel := context.WithTimeout(context.Background(), 5*time.Second)\n\tdefer cancel()\n\n\tif err := r.stopHTTPServers(ctx); err != nil {\n\t\tlog.WithError(err).Error(\"Some HTTP servers failed to shutdown.\")\n\t}\n\n\tr.Server.Stop()\n}", "func (s *Server) Stop() error {\n\tif s.listener == nil {\n\t\treturn errors.New(\"Server not started\")\n\t}\n\n\tif err := s.listener.Close(); err != nil {\n\t\treturn err\n\t}\n\n\treturn s.lastError\n}", "func (s *Server) Stop() error {\n\tif s.listener == nil {\n\t\treturn 
errors.New(\"Server not started\")\n\t}\n\n\tif err := s.listener.Close(); err != nil {\n\t\treturn err\n\t}\n\n\treturn s.lastError\n}", "func (s *Server) Stop() error {\n\tserverLog.Info(\"Stopping server\")\n\treturn s.server.Close()\n}", "func (s *Server) Stop() {\n\tlog.Infof(\"Stopping http server...\")\n\ts.server.Shutdown(context.Background())\n\tlog.Infof(\"HTTP server stopped.\")\n}", "func (s *Server) Stop() {\n\ts.quit <- true\n\t<-s.quit\n}", "func (s *Server) Shutdown(ctx context.Context) error {\n\t// logInfo(\"%v %v Shutdown...\", s.Handler.LogTag(), s.Listener.Addr())\n\tdefer logInfo(\"%v %v Shutdown\", s.Handler.LogTag(), s.Listener.Addr())\n\ts.running = false\n\ts.Listener.Close()\n\tselect {\n\tcase <-s.chStop:\n\tcase <-ctx.Done():\n\t\treturn ErrTimeout\n\t}\n\treturn nil\n}", "func (wsServer *WsServer) Stop() {\n\tif atomic.AddInt32(&wsServer.shutdown, 1) != 1 {\n\t\tLogger.log.Info(\"RPC server is already in the process of shutting down\")\n\t}\n\tLogger.log.Info(\"RPC server shutting down\")\n\tif wsServer.started != 0 {\n\t\twsServer.server.Close()\n\t}\n\tfor _, listen := range wsServer.config.HttpListenters {\n\t\tlisten.Close()\n\t}\n\tLogger.log.Warn(\"RPC server shutdown complete\")\n\twsServer.started = 0\n\twsServer.shutdown = 1\n}", "func (s *SimpleServer) Stop() {\n\tif s != nil {\n\t\ts.shutdownReq <- true\n\t}\n}", "func (f *RemoteRuntime) Stop() {\n\tf.server.Stop()\n}", "func (s *Server) Stop(log *logrus.Entry) {\n\ts.gracefulServer.Shutdown(context.Background())\n}", "func (s *Server) Stop() {\n\tclose(s.quit)\n\ts.listener.Close()\n\ts.eventLogger.Info(uint32(windows.NO_ERROR), fmt.Sprintf(\"remove all %+v\", s.proxy.portMappings))\n\tif err := s.proxy.removeAll(); err != nil {\n\t\ts.eventLogger.Warning(uint32(windows.ERROR_EXCEPTION_IN_SERVICE), err.Error())\n\t}\n\ts.stopped = true\n}", "func (htmlServer *HTMLServer) Stop() error {\n\n\tconst timeout = 5 * time.Second\n\tctx, cancel := context.WithTimeout(context.Background(), timeout)\n\tdefer cancel()\n\n\tlog.Println(Detail(\"SERVER : Service stopping.\"))\n\n\tif e := htmlServer.server.Shutdown(ctx); e != nil {\n\n\t\tif e := htmlServer.server.Close(); e != nil {\n\t\t\tlog.Printf(Warn(\"SERVER : Service stopping : Error=%s\"), e)\n\t\t\treturn e\n\t\t}\n\t}\n\n\thtmlServer.wg.Wait()\n\tlog.Println(Detail(\"SERVER : Stopped\"))\n\treturn nil\n}", "func (serv *Server) Stop() {\n\tfmt.Printf(\"Stopping listening\\n\")\n\tserv.grpcServer.Stop()\n\tfmt.Printf(\"Stopped...\\n\")\n}", "func (m *DevicePluginStub) Stop() error {\n\tglog.V(2).Infof(\"Stopping server %s\", m.SocketName())\n\n\tm.server.Stop()\n\tclose(m.stop)\n\n\tif err := m.waitTimeout(); err != nil {\n\t\treturn err\n\t}\n\n\treturn m.cleanup()\n}", "func (s *Server) Stop() {\n\ts.logger.Notice(\"Shutting down %v server...\", s.name)\n\ts.killChan <- true\n\ts.logger.Notice(\"Server %v shutdown complete.\", s.name)\n}", "func StopServer() {\n\tif stopCh != nil {\n\t\tclose(stopCh)\n\t}\n}", "func (s * Service)Stop() {\n\tlog.Println(\"Stopping Server!\")\n\tctx, cancel := context.WithTimeout(context.Background(), 5*time.Second)\n\tdefer cancel()\n\tif s.Server != nil {\n\t\tlog.Println(\" Initiating Server Shutdown!\")\n\t\tif err := s.Server.Shutdown(ctx); err != nil {\n\t\t\t// handle err\n\t\t\tlog.Println(\"Error while stopping Server!\", err)\n\t\t}\n\t}\n}", "func (server *Server) stop() {\n\tfor address, connection := range server.connections {\n\t\tif server.breakConnection(connection) {\n\t\t\tserver.Logger.Info(\"Close 
connection at\", address)\n\t\t} else {\n\t\t\tserver.Logger.Warning(\"Impossible to close connection at\", address)\n\t\t}\n\t}\n\tif server.tcp_socket != nil {\n//\t\tfor conn_type, socket := range server.tcp_socket {\n//\t\t\terr := socket.Close()\n//\t\t\tif err != nil {\n//\t\t\t\tserver.Logger.Error(\"Error occured during closing \" + conn_type + \" socket:\", err)\n//\t\t\t}\n//\t\t}\n\t\terr := server.tcp_socket.Close()\n\t\tif err != nil {\n\t\t\tserver.Logger.Error(\"Error occured during closing \" + \"tcp\" + \" socket:\", err)\n\t\t}\n\t\tserver.tcp_socket = nil\n\t} else {\n\t\tserver.Logger.Error(\"Server can't be stoped, because socket is undefined.\")\n\t}\n\tserver.Logger.Info(\"Waiting for ending process of goroutines...\")\n\tserver.Wait()\n\tserver.storage.FlushAll()\n}", "func (s *Server) Shutdown() {\n\t// TODO(aditya) shut down workers and socket readers\n\ts.logger.Info(\"Shutting down server gracefully\")\n\tclose(s.shutdown)\n\tif s.FlushOnShutdown {\n\t\tctx, cancel := context.WithTimeout(context.Background(), s.Interval)\n\t\ts.Flush(ctx)\n\t\tcancel()\n\t}\n\tgraceful.Shutdown()\n\tfor _, source := range s.sources {\n\t\tsource.source.Stop()\n\t}\n\n\t// Close the gRPC connection for forwarding\n\tif s.grpcForwardConn != nil {\n\t\ts.grpcForwardConn.Close()\n\t}\n}", "func (s *Server) Stop() (err error) {\n\tif runtime.GOOS != \"windows\" {\n\t\t// force connections to close after timeout\n\t\tdone := make(chan struct{})\n\t\tgo func() {\n\t\t\ts.dnsWg.Done() // decrement our initial increment used as a barrier\n\t\t\ts.dnsWg.Wait()\n\t\t\tclose(done)\n\t\t}()\n\n\t\t// Wait for remaining connections to finish or\n\t\t// force them all to close after timeout\n\t\tselect {\n\t\tcase <-time.After(s.graceTimeout):\n\t\tcase <-done:\n\t\t}\n\t}\n\n\t// Close the listener now; this stops the server without delay\n\ts.m.Lock()\n\tfor _, s1 := range s.server {\n\t\t// We might not have started and initialized the full set of servers\n\t\tif s1 != nil {\n\t\t\terr = s1.Shutdown()\n\t\t}\n\t}\n\ts.m.Unlock()\n\treturn\n}", "func (g *RESTFrontend) Stop() {\n\tif g.IsRunning {\n\t\tg.Server.Shutdown(context.TODO())\n\t\tg.Listener.Close()\n\t}\n}", "func (server *Server) Stop() {\n\tserver.grpcServer.Stop()\n}", "func (s *Server) Stop() {\n\tclose(s.stopChan)\n\tfor _, l := range s.listeners {\n\t\tl.Stop()\n\t}\n\tif s.Statistics != nil {\n\t\ts.Statistics.Stop()\n\t}\n\ts.health.Deregister() //nolint:errcheck\n\ts.Started = false\n}", "func (s *GrpcServer) Stop() {\n\ts.lock.Lock()\n\tdefer s.lock.Unlock()\n\n\tif !s.running {\n\t\treturn\n\t}\n\n\ts.server.Stop()\n\ts.wg.Wait()\n\ts.running = false\n}", "func (p *PrivNegAPI) Stop() {\n\tif err := p.server.Shutdown(nil); err != nil {\n\t\tpanic(err)\n\t}\n}", "func shutDown(ctx context.Context, logger *log.Logger, srv *http.Server) {\n\tquit := make(chan os.Signal, 1)\n\tsignal.Notify(quit, os.Interrupt)\n\t<-quit\n\n\tlogger.Info(\"msg\", \"Shutting down HTTP/REST gateway server...\")\n\n\tctx, cancel := context.WithTimeout(ctx, 5*time.Second)\n\tdefer cancel()\n\n\tif err := srv.Shutdown(ctx); err != nil {\n\t\tlogger.Error(\"err\", fmt.Sprintf(\"Shutdown HTTP/REST gateway server: %s\", err.Error()))\n\t}\n\n\tlogger.Info(\"msg\", \"Shutdown done HTTP/REST gateway server\")\n}", "func (s *ServerFx) Stop() error {\n\treturn s.app.Stop(context.Background())\n}", "func (s *Server) Stop() error {\n\terr := syscall.Kill(-s.cmd.Process.Pid, syscall.SIGTERM)\n\tif err != nil {\n\t\treturn 
err\n\t}\n\n\ttime.AfterFunc(3*time.Second, func() {\n\t\tif s.Alive() {\n\t\t\ts.cmd.Process.Signal(syscall.SIGKILL)\n\t\t}\n\t})\n\n\t_ = s.cmd.Wait()\n\treturn nil\n}", "func (service *APIService) Stop() {\n\tservice.GrpcServer.GracefulStop()\n\tservice.HttpServer.Close()\n}", "func (app *App) Stop() error {\n\tctx, cancel := context.WithTimeout(context.Background(), 1*time.Second)\n\tdefer cancel()\n\n\tif app.Server() != nil {\n\t\treturn app.Server().Shutdown(ctx)\n\t}\n\n\treturn nil\n}", "func (s *Server) Stop() {\n\tdefer jww.INFO.Println(\"Server stopped\")\n\ts.stopChan <- true\n}", "func (e *Server) Stop() {\n\tlogx.Close()\n}", "func (gw *GrpcWrapper) Stop() error {\n\tif gw.server == nil {\n\t\treturn nil\n\t}\n\tgw.server.GracefulStop()\n\tportErr := gw.ln.Close()\n\tgrpcErr := gw.WaitForTermination()\n\tgw.server = nil\n\tgw.ln = nil\n\tif grpcErr != nil {\n\t\treturn grpcErr\n\t}\n\treturn portErr\n}", "func (s *RegistryServer) Stop() error {\n\treturn s.listener.Close()\n}", "func (s *Server) Stop() {\n\ts.cmd.Process.Kill()\n\tos.RemoveAll(s.DataDir)\n}", "func (s *Server) Stop(ctx context.Context) error {\n\treturn s.Router.Shutdown(ctx)\n}", "func (t *TcpServer) Stop() {\n\tt.isRunning = false\n}", "func (bs *BusinessServer) Stop() {\n\t// close datamanager server gRPC connection when server exit.\n\tif bs.dataMgrConn != nil {\n\t\tbs.dataMgrConn.Close()\n\t}\n\n\t// close templateserver gRPC connection when server exit.\n\tif bs.templateSvrConn != nil {\n\t\tbs.templateSvrConn.Close()\n\t}\n\n\t// close bcs controller gRPC connection when server exit.\n\tif bs.bcsControllerConn != nil {\n\t\tbs.bcsControllerConn.Close()\n\t}\n\n\t// close gse controller gRPC connection when server exit.\n\tif bs.gseControllerConn != nil {\n\t\tbs.gseControllerConn.Close()\n\t}\n\n\t// unregister service.\n\tif bs.service != nil {\n\t\tbs.service.UnRegister()\n\t}\n\n\t// close logger.\n\tlogger.CloseLogs()\n}", "func (s *Server) Stop() {\n\t// TODO write to `s.stop` channel\n}", "func (s *Server) Stop() {\n\ts.quitMtx.Lock()\n\tselect {\n\tcase <-s.quit:\n\t\ts.quitMtx.Unlock()\n\t\treturn\n\tdefault:\n\t}\n\n\t// Stop the connected wallet and chain server, if any.\n\ts.handlerMu.Lock()\n\twallet := s.wallet\n\tchainClient := s.chainClient\n\ts.handlerMu.Unlock()\n\tif wallet != nil {\n\t\twallet.Stop()\n\t}\n\tif chainClient != nil {\n\t\tchainClient.Stop()\n\t}\n\n\t// Stop all the listeners.\n\tfor _, listener := range s.listeners {\n\t\terr := listener.Close()\n\t\tif err != nil {\n\t\t\tlog.Errorf(\"Cannot close listener `%s`: %v\",\n\t\t\t\tlistener.Addr(), err)\n\t\t}\n\t}\n\n\t// Signal the remaining goroutines to stop.\n\tclose(s.quit)\n\ts.quitMtx.Unlock()\n\n\t// First wait for the wallet and chain server to stop, if they\n\t// were ever set.\n\tif wallet != nil {\n\t\twallet.WaitForShutdown()\n\t}\n\tif chainClient != nil {\n\t\tchainClient.WaitForShutdown()\n\t}\n\n\t// Wait for all remaining goroutines to exit.\n\ts.wg.Wait()\n}", "func StopServer() {\n\tserverMutex.Lock()\n\tdefer serverMutex.Unlock()\n\tif !serverStarted {\n\t\treturn\n\t}\n\terr := server.Shutdown(context.Background())\n\tif err == nil {\n\t\tlog.Println(\"server: stopped\")\n\t} else {\n\t\tlog.Println(\"server:\", err)\n\t}\n}", "func (l *listener) Stop() error {\n\tif l.server == nil {\n\t\treturn l.catch(ErrEmptyListener)\n\t}\n\n\tctx, cancel := context.WithTimeout(context.Background(), l.shutdownTimeout)\n\tdefer cancel()\n\n\tch := make(chan error, 1)\n\n\tgo func() {\n\t\tch <- 
l.catch(l.server.Shutdown(ctx))\n\t}()\n\n\tselect {\n\tcase <-ctx.Done():\n\t\treturn nil\n\tcase err := <-ch:\n\t\treturn err\n\t}\n}", "func (s *Server) Shutdown(ctx context.Context) error {\n\twg := &sync.WaitGroup{}\n\tc := make(chan struct{})\n\n\tgo func() {\n\t\tdefer close(c)\n\t\ts.GracefulStop()\n\t\twg.Wait()\n\t}()\n\n\tselect {\n\tcase <-c:\n\t\treturn nil\n\tcase <-ctx.Done():\n\t\treturn ctx.Err()\n\t}\n}", "func (w *Web) Stop() error {\n\tw.L(\"Stopping web server on %s:%s\", w.Address, w.Port)\n\tctx, cancel := context.WithTimeout(context.Background(), nonZeroDuration(w.Timeouts.Shutdown, time.Second*30))\n\tdefer cancel()\n\terr := w.Shutdown(ctx)\n\tif err != nil {\n\t\treturn err\n\t}\n\n\tw.running = false\n\treturn nil\n}", "func (rm *RouteMachine) Stop() error {\n\treturn rm.server.Shutdown(context.Background())\n}", "func Stop(server *TestDex) {\n\tserver.webServer.CloseClientConnections()\n\tserver.webServer.Close()\n}", "func (s *Server) Close() error {\n\ts.mixerContext.Server.GracefulStop()\n\t<-s.shutdown\n\tclose(s.shutdown)\n\ts.mixerContext = nil\n\treturn nil\n}" ]
[ "0.74565756", "0.7377595", "0.73199743", "0.73053926", "0.730005", "0.72805965", "0.72759473", "0.7261128", "0.72586036", "0.7253675", "0.7249387", "0.72427094", "0.72398686", "0.7236667", "0.7225987", "0.7202721", "0.71949255", "0.71947384", "0.7167611", "0.7159789", "0.7159789", "0.71319675", "0.7099483", "0.7094652", "0.7092021", "0.7091931", "0.7091898", "0.7091898", "0.7087458", "0.708608", "0.7085011", "0.7051409", "0.7025988", "0.70190114", "0.7018603", "0.7014941", "0.7014394", "0.7010269", "0.6974289", "0.69733125", "0.6962267", "0.696177", "0.69507724", "0.69468224", "0.69432473", "0.6943028", "0.69430053", "0.69372755", "0.6936108", "0.69342124", "0.69326586", "0.6925442", "0.6918816", "0.6915428", "0.6915428", "0.6898838", "0.6894494", "0.6884626", "0.6879914", "0.68772215", "0.68747944", "0.6874172", "0.6862367", "0.68334454", "0.68332326", "0.68315876", "0.68275857", "0.6824847", "0.6821711", "0.68179524", "0.6813221", "0.6807998", "0.68011594", "0.67998433", "0.67953956", "0.6775727", "0.67727154", "0.67694205", "0.6767914", "0.6759668", "0.6753822", "0.673791", "0.6735874", "0.6735497", "0.67105085", "0.67037797", "0.66999304", "0.6696099", "0.6686369", "0.66806465", "0.6673014", "0.6663874", "0.6653885", "0.6651033", "0.66480637", "0.6622976", "0.66202736", "0.662027", "0.6612326", "0.6608321" ]
0.70794505
31
RegisterBootstrapUser registers the bootstrap user with appropriate privileges
func (s *Server) RegisterBootstrapUser(user, pass, affiliation string) error { // Initialize the config, setting defaults, etc log.Debugf("Register bootstrap user: name=%s, affiliation=%s", user, affiliation) if user == "" || pass == "" { return errors.New("Empty identity name and/or pass not allowed") } id := CAConfigIdentity{ Name: user, Pass: pass, Type: "client", Affiliation: affiliation, MaxEnrollments: 0, // 0 means to use the server's max enrollment setting Attrs: map[string]string{ attr.Roles: "*", attr.DelegateRoles: "*", attr.Revoker: "true", attr.IntermediateCA: "true", attr.GenCRL: "true", attr.RegistrarAttr: "*", attr.AffiliationMgr: "true", }, } registry := &s.CA.Config.Registry registry.Identities = append(registry.Identities, id) log.Debugf("Registered bootstrap identity: %+v", id) return nil }
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "func registerUser() {\n\tgoes.Register(\n\t\t&User{},\n\t\tFirstNameUpdatedV1{},\n\t\tCreatedV1{},\n\t)\n}", "func (r *RBAC) RegisterUser(system, uid string, roles ...string) error {\n\tu := model.NewUserPermModel(system, uid, roles...)\n\treturn r.User.CreateUserPermModel(u)\n}", "func Register(r *http.Request) (bool, error) {\n\tusername := r.FormValue(\"username\")\n\tnewPassword := r.FormValue(\"password\")\n\tconfirmPassword := r.FormValue(\"confirm_password\")\n\tu, err := models.GetUserByUsername(username)\n\t// If we have an error which is not simply indicating that no user was found, report it\n\tif err != nil {\n\t\tfmt.Println(err)\n\t\treturn false, err\n\t}\n\tu = models.User{}\n\t// If we've made it here, we should have a valid username given\n\t// Check that the passsword isn't blank\n\tif newPassword == \"\" {\n\t\treturn false, ErrEmptyPassword\n\t}\n\t// Make sure passwords match\n\tif newPassword != confirmPassword {\n\t\treturn false, ErrPasswordMismatch\n\t}\n\t// Let's create the password hash\n\th, err := bcrypt.GenerateFromPassword([]byte(newPassword), bcrypt.DefaultCost)\n\tif err != nil {\n\t\treturn false, err\n\t}\n\tu.Username = username\n\tu.Hash = string(h)\n\tu.ApiKey = GenerateSecureKey()\n\terr = models.PutUser(&u)\n\tif err != nil {\n\t\tfmt.Println(err)\n\t\treturn false, err\n\t}\n\treturn true, nil\n}", "func doRegisterUser (w http.ResponseWriter, r *http.Request ) {\n\tusername := r.FormValue(\"username\")\n\temail := r.FormValue(\"email\")\n\tpassword := r.FormValue(\"password\")\n\tconfirm := r.FormValue(\"confirm\")\n\n\t// validate everything\n\tif len(username) < 6 {\n\t\thttp.Error(w, \"username too short\", http.StatusBadRequest)\n\t\treturn\n\t}\n\tif len(password) < 6 {\n\t\thttp.Error(w, \"password too short\", http.StatusBadRequest)\n\t\treturn\n\t}\n\tif password != confirm {\n\t\thttp.Error(w, \"password doesn't match\", http.StatusBadRequest)\n\t\treturn\n\t}\n\n\t// go ahead and create the user\n\tpwBytes, bErr := bcrypt.GenerateFromPassword([]byte(password), 14)\n\tif bErr != nil {\n\t\thttp.Error(w, bErr.Error(), http.StatusBadRequest)\n\t\treturn\n\t}\n\tpwHash := string(pwBytes)\n\t// registering a free account\n\tu, cErr := CreateNewUser(r, username, pwHash, email, 0, \"free\")\n\tif cErr != nil {\n\t\tif (cErr.Code == ERR_ACCOUNT_ALREADY_EXISTS) {\n\t\t\tlogger.StdLogger.LOG(logger.INFO, webber.GetCorrelationId(r), fmt.Sprintf(\"Attempt to create existing username: %s\", username), nil)\n\t\t\thttp.Error(w, cErr.Code, http.StatusBadRequest)\n\t\t} else {\n\t\t\tlogger.StdLogger.LOG(logger.ERROR, webber.GetCorrelationId(r), fmt.Sprintf(\"Error creating user: %s : %s\", username, cErr.Error()), nil)\n\t\t\thttp.Error(w, cErr.Code, http.StatusInternalServerError)\n\t\t}\n\t\treturn\n\t}\n\n\t// okay, everything worked, create a session\n\tsessionData := UserSessionData{Username:u.Username}\n\t_, err := webber.MakeSession(w, sessionData);\n\tif err != nil {\n\t\tlogger.StdLogger.LOG(logger.ERROR, \"\", fmt.Sprintf(\"unable to create session: %s\", err.Error()), nil)\n\t\thttp.Error(w, \"Please Log in\", http.StatusUnauthorized)\n\t}\n\twebber.ReturnJson(w,u)\n\treturn\t\t\n\n}", "func Register(gate module.Gate, dataStorage *module.DataStorage) {\n\tmodule := NewUser(dataStorage)\n\t// user manage\n\tgate.RegisterRoute(\"/getUsers\", \"POST\", module.GetUsers)\n\tgate.RegisterRoute(\"/addUsers\", \"POST\", module.AddUsers)\n\tgate.RegisterRoute(\"/updateUsers\", \"POST\", module.UpdateUsers)\n\tgate.RegisterRoute(\"/delUsers\", 
\"POST\", module.DelUsers)\n\tgate.RegisterRoute(\"/submitRecord\", \"POST\", module.GetSubmitRecord)\n}", "func Register(authMod common.Authorizer, d models.UserStore, w http.ResponseWriter, r *http.Request) {\n\n\t//get data from request\n\tdecoder := json.NewDecoder(r.Body)\n\tbody := models.User{}\n\terr := decoder.Decode(&body)\n\tif err != nil {\n\t\tcommon.DisplayAppError(w, err, \"Invalid user data\", http.StatusInternalServerError)\n\t\treturn\n\t}\n\n\t//create new user\n\tuserId, err := d.CreateUser(body)\n\tif err != nil {\n\t\tcommon.DisplayAppError(w, err, \"Invalid user data\", http.StatusInternalServerError)\n\t\treturn\n\t}\n\n\t//create JWT\n\tjwt, err := authMod.GenerateJWT(\n\t\tbody.UserName,\n\t\tuserId,\n\t)\n\tif err != nil {\n\t\tcommon.DisplayAppError(w, err, \"fail up\", http.StatusInternalServerError)\n\t\treturn\n\t}\n\treturnUser := CreatedUser{\n\t\tbody.UserName,\n\t\tbody.Email,\n\t\tjwt,\n\t}\n\tcommon.WriteJson(w, \"Succesfully registered user\", returnUser, http.StatusCreated)\n}", "func (s *Service) Register(user User) error {\n\tpassword := user.Password\n\tencrypted, err := s.encrypt.Encrypt(password)\n\tif err != nil {\n\t\tlog.Errorf(\"Password encryption failed: %s\", err)\n\t\treturn ErrCouldNotEncryptPassword\n\t}\n\tuser.Password = encrypted\n\ts.store.SaveUser(user)\n\treturn nil\n}", "func (rsh *routeServiceHandler) Register(w http.ResponseWriter, r *http.Request) {\n\n\tuser := &models.User{}\n\terr := json.NewDecoder(r.Body).Decode(user)\n\tif err != nil {\n\t\tsendResponse(w, r, StatusError, err.Error(), nil)\n\t\treturn\n\t}\n\n\tpass, err := bcrypt.GenerateFromPassword([]byte(user.Password), bcrypt.DefaultCost)\n\tif err != nil {\n\t\tsendResponse(w, r, StatusError, err.Error(), nil)\n\t\treturn\n\t}\n\n\tuser.Password = string(pass)\n\n\tcreatedUser, err := rsh.ctlr.Register(*user)\n\tif err != nil {\n\t\tsendResponse(w, r, StatusError, err.Error(), createdUser)\n\t\treturn\n\t}\n\n\tsendResponse(w, r, StatusSuccess, \"\", createdUser)\n\treturn\n}", "func (t *SimpleChaincode) registerUser(stub shim.ChaincodeStubInterface, args []string) ([]byte, error) {\n\tvar err error\n\n\tif len(args) != 16 {\n\t\treturn nil, errors.New(\"Incorrect number of arguments. 
Expecting 8\")\n\t}\n\n\t//input sanitation\n\tfmt.Println(\"- start registration\")\n\tif len(args[0]) <= 0 {\n\t\treturn nil, errors.New(\"0th argument must be a non-empty string\")\n\t}\n\tif len(args[1]) <= 0 {\n\t\treturn nil, errors.New(\"1st argument must be a non-empty string\")\n\t}\n\tif len(args[2]) <= 0 {\n\t\treturn nil, errors.New(\"2nd argument must be a non-empty string\")\n\t}\n\tif len(args[3]) <= 0 {\n\t\treturn nil, errors.New(\"3rd argument must be a non-empty string\")\n\t}\n\tif len(args[4]) <= 0 {\n\t\treturn nil, errors.New(\"4th argument must be a non-empty string\")\n\t}\n\tif len(args[5]) <= 0 {\n\t\treturn nil, errors.New(\"5th argument must be a non-empty string\")\n\t}\n\n\tif len(args[8]) <= 0 {\n\t\treturn nil, errors.New(\"8th argument must be a non-empty string\")\n\t}\n\tif len(args[13]) <= 0 {\n\t\treturn nil, errors.New(\"13th argument must be a non-empty string\")\n\t}\n\tif len(args[14]) <= 0 {\n\t\treturn nil, errors.New(\"14th argument must be a non-empty string\")\n\t}\n\tif len(args[15]) <= 0 {\n\t\treturn nil, errors.New(\"15th argument must be a non-empty string\")\n\t}\n\tuser := User{}\n\tuser.Id, err = strconv.Atoi(args[0])\n\tif err != nil {\n\t\treturn nil, errors.New(\"Failed to get id as cannot convert it to int\")\n\t}\n\tuser.UserType = args[1]\n\tuser.FisrtName = args[2]\n\tuser.LastName = args[3]\n\tuser.Email = args[4]\n\tuser.Password = args[5]\n\t//user.ReTypePassword=args[6]\n\tuser.Operationalemail = args[6]\n\tuser.Phone, err = strconv.Atoi(args[7])\n\tif err != nil {\n\t\treturn nil, errors.New(\"Failed to get phone as cannot convert it to int\")\n\t}\n\tuser.RelationshipManagerEmail = args[8]\n\tuser.CustomersLimit, err = strconv.Atoi(args[9])\n\tif err != nil {\n\t\treturn nil, errors.New(\"Failed to get CustomersLimit as cannot convert it to int\")\n\t}\n\tuser.FeePercentage, err = strconv.Atoi(args[10])\n\tif err != nil {\n\t\treturn nil, errors.New(\"Failed to get FeePercentage as cannot convert it to int\")\n\t}\n\tuser.InterestEarning, err = strconv.Atoi(args[11])\n\tif err != nil {\n\t\treturn nil, errors.New(\"Failed to get InterestEarning as cannot convert it to int\")\n\t}\n\tuser.AccountNo, err = strconv.Atoi(args[12])\n\tif err != nil {\n\t\treturn nil, errors.New(\"Failed to get AccountNo as cannot convert it to int\")\n\t}\n\tuser.IfscCode = args[13]\n\tuser.Pan = args[14]\n\n\tuser.Address = args[15]\n\n\tfmt.Println(\"user\", user)\n\t// get users data from chaincode\n\tUserAsBytes, err := stub.GetState(\"getvfmuser\")\n\tif err != nil {\n\t\treturn nil, errors.New(\"Failed to get users\")\n\t}\n\tvar allusers AllUsers\n\tjson.Unmarshal(UserAsBytes, &allusers) //un stringify it aka JSON.parse()\n\n\tallusers.Userlist = append(allusers.Userlist, user)\n\tfmt.Println(\"allusers\", allusers.Userlist) //append usersdetails to allusers[]\n\tfmt.Println(\"! 
appended user to allusers\")\n\tjsonAsBytes, _ := json.Marshal(allusers)\n\tfmt.Println(\"json\", jsonAsBytes)\n\terr = stub.PutState(\"getvfmuser\", jsonAsBytes) //rewrite allusers[]\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\tfmt.Println(\"- end user_register\")\n\treturn nil, nil\n}", "func (env *Env) RegisterUser(c *gin.Context) {\n\n\ttype registerRequest struct {\n\t\tUsername string `json:\"username\"`\n\t\tPassword string `json:\"password\"`\n\t\tDeviceID string `json:\"device_id\"`\n\t}\n\n\ttype registerResponse struct {\n\t\tAccessToken string `json:\"access_token\"`\n\t\tRefreshToken string `json:\"refresh_token\"`\n\t\tUser mysql.User `json:\"user\"`\n\t\tResetCode string `json:\"reset_code\"`\n\t}\n\n\t//decode request body\n\tjsonData, err := ioutil.ReadAll(c.Request.Body)\n\tif err != nil {\n\t\tLog.WithField(\"module\", \"handler\").WithError(err)\n\t\tc.AbortWithStatusJSON(http.StatusBadRequest, errs.RQST001)\n\t\treturn\n\t}\n\n\tvar request registerRequest\n\terr = json.Unmarshal(jsonData, &request)\n\tif err != nil {\n\t\tLog.WithField(\"module\", \"handler\").WithError(err)\n\t\tc.AbortWithStatusJSON(http.StatusBadRequest, errs.RQST001)\n\t\treturn\n\t}\n\n\tif request.Username == \"\" || request.Password == \"\" || request.DeviceID == \"\" {\n\t\tLog.WithField(\"module\", \"handler\").Error(\"Empty Fields in Request Body\")\n\t\tc.AbortWithStatusJSON(http.StatusBadRequest, errs.RQST002)\n\t\treturn\n\t}\n\n\tvar empty int64\n\tresult := env.db.Model(&mysql.User{}).Count(&empty)\n\tif result.Error != nil {\n\t\tLog.WithField(\"module\", \"handler\").WithError(result.Error)\n\t\tc.AbortWithStatusJSON(http.StatusInternalServerError, errs.DBSQ001)\n\t\treturn\n\t}\n\n\tuser := mysql.User{}\n\tperms := mysql.Permissions{}\n\tdefaultGroup := mysql.UserGroup{}\n\n\tif empty == 0 {\n\n\t\tperms.Admin = true\n\t\tperms.CanEdit = true\n\n\t\tdefaultGroupPerms := mysql.Permissions{CanEdit: false, Admin: false}\n\n\t\tdefaultGroup.Name = \"default\"\n\n\t\tresult = env.db.Save(&defaultGroupPerms)\n\t\tif result.Error != nil {\n\t\t\tLog.WithField(\"module\", \"handler\").WithError(result.Error)\n\t\t\tc.AbortWithStatusJSON(http.StatusInternalServerError, errs.DBSQ001)\n\t\t\treturn\n\t\t}\n\n\t\tdefaultGroup.Permissions = defaultGroupPerms\n\n\t\tresult = env.db.Save(&defaultGroup)\n\t\tif result.Error != nil {\n\t\t\tLog.WithField(\"module\", \"handler\").WithError(result.Error)\n\t\t\tc.AbortWithStatusJSON(http.StatusInternalServerError, errs.DBSQ001)\n\t\t\treturn\n\t\t}\n\n\t} else {\n\t\tvar exists int64\n\t\t//Check if Username already exists in Database\n\t\tresult = env.db.Model(&user).Where(\"upper(username) = upper(?)\", user.Username).Count(&exists)\n\t\tif result.Error != nil {\n\t\t\tLog.WithField(\"module\", \"handler\").WithError(result.Error)\n\t\t\tc.AbortWithStatusJSON(http.StatusInternalServerError, errs.DBSQ001)\n\t\t\treturn\n\t\t}\n\t\tLog.WithField(\"module\", \"handler\").Debug(\"Users found: \", exists)\n\n\t\tif exists != 0 {\n\t\t\tLog.WithField(\"module\", \"handler\").Error(\"Username already exists in Database\")\n\t\t\tc.AbortWithStatusJSON(http.StatusForbidden, errs.AUTH004)\n\t\t\treturn\n\t\t}\n\n\t\tperms.Admin = false\n\t\tperms.CanEdit = false\n\n\t\tdefaultGroup.Name = \"default\"\n\t\tresult = env.db.Model(&defaultGroup).Find(&defaultGroup)\n\t\tif result.Error != nil {\n\t\t\tLog.WithField(\"module\", \"handler\").WithError(result.Error)\n\t\t\tc.AbortWithStatusJSON(http.StatusInternalServerError, 
errs.DBSQ001)\n\t\t\treturn\n\t\t}\n\n\t}\n\n\t//Create permission entry for new user in permissions table\n\tresult = env.db.Save(&perms)\n\tif result.Error != nil {\n\t\tLog.WithField(\"module\", \"sql\").WithError(result.Error)\n\t\tc.AbortWithStatusJSON(http.StatusInternalServerError, errs.DBSQ001)\n\t\treturn\n\t}\n\n\tuser.Username = request.Username\n\tuser.Password = request.Password\n\tuser.AvatarID = \"default\"\n\tuser.PermID = perms.ID\n\tuser.UserGroups = append(user.UserGroups, &defaultGroup)\n\tuser.ResetCode = utils.GenerateCode()\n\n\t//Save new user to users database\n\tresult = env.db.Save(&user)\n\tif result.Error != nil {\n\t\tLog.WithField(\"module\", \"sql\").WithError(result.Error)\n\t\tc.AbortWithStatusJSON(http.StatusInternalServerError, errs.DBSQ001)\n\t\treturn\n\t}\n\n\t//Generate JWT AccessToken\n\taccessToken, err := utils.JWTAuthService(config.JWTAccessSecret).GenerateToken(user.ID, request.DeviceID, time.Hour*24)\n\tif err != nil {\n\t\tLog.WithField(\"module\", \"jwt\").WithError(err)\n\t\tc.AbortWithStatusJSON(http.StatusInternalServerError, errs.AUTH002)\n\t\treturn\n\t}\n\n\t//Add AccessToken to Redis\n\terr = env.rdis.AddPair(fmt.Sprint(user.ID), accessToken, time.Hour*24)\n\tif err != nil {\n\t\tLog.WithField(\"module\", \"redis\").WithError(err).Error(\"Error adding AccessToken to Redis.\")\n\t\terr = nil\n\t}\n\n\t//Generate RefreshToken\n\trefreshToken, err := utils.JWTAuthService(config.JWTRefreshSecret).GenerateToken(user.ID, request.DeviceID, time.Hour*24)\n\tif err != nil {\n\t\tLog.WithField(\"module\", \"jwt\").WithError(err)\n\t\tc.AbortWithStatusJSON(http.StatusInternalServerError, errs.AUTH002)\n\t\treturn\n\t}\n\n\tuser.RefreshToken = refreshToken\n\n\t//Save RefreshToken to Database\n\tresult = env.db.Save(&user)\n\tif result.Error != nil {\n\t\tLog.WithField(\"module\", \"sql\").WithError(result.Error)\n\t\tc.AbortWithStatusJSON(http.StatusInternalServerError, errs.DBSQ002)\n\t\treturn\n\t}\n\n\tc.JSON(200, registerResponse{AccessToken: accessToken, RefreshToken: refreshToken, User: user, ResetCode: user.ResetCode})\n}", "func (a ServerAgent) RegisterUser(c *gin.Context) {\n\tauthorization, ok := a.authorize(c)\n\tif !ok {\n\t\treturn\n\t}\n\n\tif !authorization.Admin {\n\t\tc.AbortWithStatusJSON(http.StatusUnauthorized, gin.H{\"error\": \"this endpoint is only accessible to users with administrative priveleges\"})\n\t\treturn\n\t}\n\n\tvar req RegistrationRequest\n\terr := c.ShouldBindJSON(&req)\n\tif err != nil {\n\t\t//TODO is this error message safe to expose?\n\t\tc.AbortWithStatusJSON(http.StatusBadRequest, gin.H{\"error\": err.Error()})\n\t\treturn\n\t}\n\n\terr = a.dbClient.RegisterUser(c, req.UserUUID, req.Email)\n\tif err != nil {\n\t\ta.logger.Errorf(\"register user `%s` failed: %s\", req.UserUUID, err.Error())\n\t\tc.AbortWithStatusJSON(http.StatusInternalServerError, gin.H{\"error\": \"user registration failed - see logs for details\"})\n\t\treturn\n\t}\n\n\tc.JSON(http.StatusOK, gin.H{\n\t\t\"user_uuid\": req.UserUUID,\n\t\t\"email\": req.Email,\n\t})\n}", "func Register(\n\tc *gin.Context,\n\tuserService service.UserCommander,\n\tdispatcher queue.Publisher,\n) {\n\tvar req ar.RegisterRequest\n\tif isValid, errors := validation.ValidateRequest(c, &req); !isValid {\n\t\thttp.BadRequest(c, http.Errors(errors))\n\t\treturn\n\t}\n\n\tuser, err := userService.Create(c.Request.Context(), request.UserCreateRequest{\n\t\tFirstName: req.FirstName,\n\t\tLastName: req.LastName,\n\t\tEmail: req.Email,\n\t\tPassword: 
req.Password,\n\t\tRole: identityEntity.RoleConsumer,\n\t})\n\n\tif err != nil {\n\t\thttp.BadRequest(c, http.Errors{err.Error()})\n\t\treturn\n\t}\n\n\traiseSuccessfulRegistration(user.GetID(), dispatcher)\n\n\thttp.Created(c, http.Data{\n\t\t\"User\": user,\n\t}, nil)\n}", "func RegisterUser(db *gorm.DB, w http.ResponseWriter, r *http.Request) {\n\tfmt.Println(\"register\")\n\truser := model.RUser{}\n\tdecoder := json.NewDecoder(r.Body)\n\tif err := decoder.Decode(&ruser); err != nil {\n\t\tRespondError(w, http.StatusBadRequest, \"\")\n\t\tlog.Println(\"decode:\", err.Error())\n\t\treturn\n\t}\n\tdefer r.Body.Close()\n\n\thashedPassword, err := bcrypt.GenerateFromPassword([]byte(ruser.Password), 8)\n\tif err != nil {\n\t\tRespondError(w, http.StatusInternalServerError, \"\")\n\t\tlog.Println(\"hash:\", err.Error())\n\t\treturn\n\t}\n\n\tid, err := uuid.NewUUID()\n\tif err != nil {\n\t\tRespondError(w, http.StatusInternalServerError, \"\")\n\t}\n\n\tuser := model.User{\n\t\tName: ruser.Name,\n\t\tUsername: ruser.Username,\n\t\tPassword: string(hashedPassword),\n\t\tUUID: id.String(),\n\t}\n\n\tif err := db.Save(&user).Error; err != nil {\n\t\tRespondError(w, http.StatusInternalServerError, \"\")\n\t\tlog.Println(\"save:\", err.Error())\n\t\treturn\n\t}\n\tRespondJSON(w, http.StatusCreated, user)\n}", "func UserRegister(router *gin.RouterGroup) {\n\trouter.POST(\"/register\", Register)\n\trouter.POST(\"/login\", Login)\n\trouter.POST(\"/logout\", Logout)\n}", "func (h *Handler) Register(w http.ResponseWriter, r *http.Request) {\n\n\tvar d UserCreateRequest\n\tif err := json.NewDecoder(r.Body).Decode(&d); err != nil {\n\t\trender.BadRequest(w, r, \"invalid json string\")\n\t\treturn\n\t}\n\tuser, err := h.Client.User.Create().\n\t\tSetEmail(d.Email).\n\t\tSetName(d.Name).\n\t\tSetPassword(d.Password).\n\t\tSave(r.Context())\n\tif err != nil {\n\t\tfmt.Printf(\"%v\", err.Error())\n\t\trender.InternalServerError(w, r, \"Failed to register the user\")\n\t\treturn\n\t}\n\tfmt.Println(\"User registered successfully\")\n\trender.OK(w, r, user)\n}", "func AcceptRegisterUser(w http.ResponseWriter, r *http.Request) {\n\n\tcontentType := \"application/json\"\n\tcharset := \"utf-8\"\n\tw.Header().Add(\"Content-Type\", fmt.Sprintf(\"%s; charset=%s\", contentType, charset))\n\n\t// Grab url path variables\n\turlVars := mux.Vars(r)\n\tregUUID := urlVars[\"uuid\"]\n\n\t// Grab context references\n\trefStr := gorillaContext.Get(r, \"str\").(stores.Store)\n\trefUserUUID := gorillaContext.Get(r, \"auth_user_uuid\").(string)\n\n\tru, err := auth.FindUserRegistration(regUUID, auth.PendingRegistrationStatus, refStr)\n\tif err != nil {\n\n\t\tif err.Error() == \"not found\" {\n\t\t\terr := APIErrorNotFound(\"User registration\")\n\t\t\trespondErr(w, err)\n\t\t\treturn\n\t\t}\n\n\t\terr := APIErrGenericInternal(err.Error())\n\t\trespondErr(w, err)\n\t\treturn\n\t}\n\n\tuserUUID := uuid.NewV4().String() // generate a new userUUID to attach to the new project\n\ttoken, err := auth.GenToken() // generate a new user token\n\tcreated := time.Now().UTC()\n\t// Get Result Object\n\tres, err := auth.CreateUser(userUUID, ru.Name, ru.FirstName, ru.LastName, ru.Organization, ru.Description,\n\t\t[]auth.ProjectRoles{}, token, ru.Email, []string{}, created, refUserUUID, refStr)\n\n\tif err != nil {\n\t\tif err.Error() == \"exists\" {\n\t\t\terr := APIErrorConflict(\"User\")\n\t\t\trespondErr(w, err)\n\t\t\treturn\n\t\t}\n\n\t\terr := APIErrGenericInternal(err.Error())\n\t\trespondErr(w, err)\n\t\treturn\n\t}\n\n\t// update 
the registration\n\terr = auth.UpdateUserRegistration(regUUID, auth.AcceptedRegistrationStatus, refUserUUID, created, refStr)\n\tif err != nil {\n\t\tlog.Errorf(\"Could not update registration, %v\", err.Error())\n\t}\n\n\t// Output result to JSON\n\tresJSON, err := res.ExportJSON()\n\tif err != nil {\n\t\terr := APIErrExportJSON()\n\t\trespondErr(w, err)\n\t\treturn\n\t}\n\n\t// Write response\n\trespondOK(w, []byte(resJSON))\n}", "func (w *ServerInterfaceWrapper) RegisterUser(ctx echo.Context) error {\n\tvar err error\n\n\t// Invoke the callback with all the unmarshalled arguments\n\terr = w.Handler.RegisterUser(ctx)\n\treturn err\n}", "func RegisterUser(rw http.ResponseWriter, r *http.Request, enc encoding.Encoder) string {\n\tvar err error\n\tvar user customer.CustomerUser\n\tuser.Name = r.FormValue(\"name\")\n\tuser.Email = r.FormValue(\"email\")\n\tuser.CustomerID, _ = strconv.Atoi(r.FormValue(\"customerID\"))\n\t// user.Active, _ = strconv.ParseBool(r.FormValue(\"isActive\"))\n\tuser.Location.Id, _ = strconv.Atoi(r.FormValue(\"locationID\"))\n\tuser.Sudo, _ = strconv.ParseBool(r.FormValue(\"isSudo\"))\n\tuser.CustID, _ = strconv.Atoi(r.FormValue(\"cust_ID\"))\n\tuser.NotCustomer, _ = strconv.ParseBool(r.FormValue(\"notCustomer\"))\n\tuser.Current = user.NotCustomer\n\tuser.Active = true // forcing active status\n\n\tgenPass := r.FormValue(\"generatePass\")\n\tpass := r.FormValue(\"pass\")\n\taccountNumber := r.FormValue(\"account_ID\")\n\tblnGenPass := false\n\tif genPass == \"true\" {\n\t\tblnGenPass = true\n\t}\n\n\tif user.Email == \"\" || (pass == \"\" && !blnGenPass) {\n\t\terr = errors.New(\"Email and password are required.\")\n\t\tapierror.GenerateError(\"Email and password are required\", err, rw, r)\n\t\treturn \"\"\n\t}\n\n\tif blnGenPass {\n\t\tuser.Password = encryption.GeneratePassword()\n\t} else {\n\t\tuser.Password = pass\n\t}\n\n\tuser.OldCustomerID = user.CustomerID\n\tif accountNumber != \"\" { // Account Number is optional\n\t\t// fetch the customerID from the account number\n\t\tvar cust customer.Customer\n\t\terr = cust.GetCustomerIdsFromAccountNumber(accountNumber)\n\t\tif cust.Id == 0 || err != nil {\n\t\t\tif err == nil {\n\t\t\t\terr = errors.New(\"Account Number is not associated to any customer\")\n\t\t\t}\n\t\t\tapierror.GenerateError(\"Invalid Account Number:\", err, rw, r)\n\t\t\treturn \"\"\n\t\t}\n\t\tuser.OldCustomerID = cust.CustomerId\n\t\tuser.CustomerID = cust.Id\n\t\tuser.CustID = cust.Id\n\t}\n\n\t//check for existence of user\n\terr = user.FindByEmail()\n\tif err == nil {\n\t\tapierror.GenerateError(\"A user with that email address already exists.\", err, rw, r)\n\t\treturn \"\"\n\t}\n\terr = nil\n\n\tuser.Brands, err = brand.GetUserBrands(user.CustID)\n\tif err != nil {\n\t\tapierror.GenerateError(\"Trouble getting user brands.\", err, rw, r)\n\t\treturn \"\"\n\t}\n\tvar brandIds []int\n\tfor _, brand := range user.Brands {\n\t\tif brand.ID == 1 || brand.ID == 3 || brand.ID == 4 {\n\t\t\tbrandIds = append(brandIds, brand.ID)\n\t\t}\n\t}\n\n\tif err = user.Create(brandIds); err != nil {\n\t\tapierror.GenerateError(\"Trouble registering new customer user\", err, rw, r)\n\t\treturn \"\"\n\t}\n\n\t//email\n\tif err = user.SendRegistrationEmail(); err != nil {\n\t\tapierror.GenerateError(\"Trouble emailing new customer user\", err, rw, r)\n\t\treturn \"\"\n\t}\n\n\tif err = user.SendRegistrationRequestEmail(); err != nil {\n\t\tapierror.GenerateError(\"Trouble emailing webdevelopment regarding new customer user\", err, rw, r)\n\t\treturn 
\"\"\n\t}\n\n\treturn encoding.Must(enc.Encode(user))\n}", "func RegisterUser(w http.ResponseWriter, r *http.Request) {\n\n\t// Init output\n\toutput := []byte(\"\")\n\n\t// Add content type header to the response\n\tcontentType := \"application/json\"\n\tcharset := \"utf-8\"\n\tw.Header().Add(\"Content-Type\", fmt.Sprintf(\"%s; charset=%s\", contentType, charset))\n\n\t// Grab url path variables\n\n\t// Grab context references\n\trefStr := gorillaContext.Get(r, \"str\").(stores.Store)\n\n\t// Read POST JSON body\n\tbody, err := ioutil.ReadAll(r.Body)\n\tif err != nil {\n\t\terr := APIErrorInvalidRequestBody()\n\t\trespondErr(w, err)\n\t\treturn\n\t}\n\n\t// Parse pull options\n\trequestBody := auth.UserRegistration{}\n\terr = json.Unmarshal(body, &requestBody)\n\tif err != nil {\n\t\terr := APIErrorInvalidArgument(\"User\")\n\t\trespondErr(w, err)\n\t\treturn\n\t}\n\n\t// check if a user with that name already exists\n\tif auth.ExistsWithName(requestBody.Name, refStr) {\n\t\terr := APIErrorConflict(\"User\")\n\t\trespondErr(w, err)\n\t\treturn\n\t}\n\n\tuuid := uuid.NewV4().String()\n\tregistered := time.Now().UTC().Format(\"2006-01-02T15:04:05Z\")\n\ttkn, err := auth.GenToken()\n\tif err != nil {\n\t\terr := APIErrGenericInternal(\"\")\n\t\trespondErr(w, err)\n\t\treturn\n\t}\n\n\tur, err := auth.RegisterUser(uuid, requestBody.Name, requestBody.FirstName, requestBody.LastName, requestBody.Email,\n\t\trequestBody.Organization, requestBody.Description, registered, tkn, auth.PendingRegistrationStatus, refStr)\n\n\tif err != nil {\n\t\terr := APIErrGenericInternal(err.Error())\n\t\trespondErr(w, err)\n\t\treturn\n\t}\n\n\toutput, err = json.MarshalIndent(ur, \"\", \" \")\n\tif err != nil {\n\t\terr := APIErrGenericInternal(err.Error())\n\t\trespondErr(w, err)\n\t\treturn\n\t}\n\n\trespondOK(w, output)\n}", "func (a *App) Register(w http.ResponseWriter, r *http.Request) {\n\tvar registerInfo uvm.UserRegisterVM\n\n\tdecoder := json.NewDecoder(r.Body)\n\tvar err error\n\n\tif err = decoder.Decode(&registerInfo); err != nil {\n\t\trespondWithError(w, http.StatusBadRequest, \"Invalid request body\")\n\t\treturn\n\t}\n\n\t//TODO validate user data\n\n\tregisterInfo.Password, err = utils.HashPassword(registerInfo.Password)\n\n\tif err != nil {\n\t\trespondWithError(w, http.StatusInternalServerError, \"Could not register user\")\n\t\treturn\n\t}\n\n\tvar user models.User\n\n\tuser, err = a.UserStore.AddUser(registerInfo)\n\n\tif err != nil {\n\t\trespondWithError(w, http.StatusInternalServerError, err.Error())\n\t}\n\n\ttoken := auth.GenerateToken(a.APISecret, user)\n\n\tresult := models.UserResult{\n\t\tUsername: user.Username,\n\t\tPicture: user.Picture,\n\t\tRole: user.Role.Name,\n\t\tToken: token,\n\t}\n\n\trespondWithJSON(w, http.StatusOK, result)\n}", "func (s *Service) RegisterUser(username, plainTextPassword string) (*string, error) {\n\n\tid := uuid.New().String()\n\n\tencryotedPwd, err := bcrypt.GenerateFromPassword([]byte(plainTextPassword), 10)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\tb64Pwd := base64.StdEncoding.EncodeToString(encryotedPwd)\n\n\tif _, err := s.createUserStmnt.Exec(id, username, b64Pwd); err != nil {\n\t\treturn nil, err\n\t}\n\n\treturn &id, nil\n}", "func UserRegisterPost(w http.ResponseWriter, r *http.Request) {\n\t// Get session\n\tsess := session.Instance(r)\n\n\t// Prevent brute force login attempts by not hitting MySQL and pretending like it was invalid :-)\n\tif sess.Values[\"register_attempt\"] != nil && sess.Values[\"register_attempt\"].(int) >= 5 
{\n\t\tlog.Println(\"Brute force register prevented\")\n\t\thttp.Redirect(w, r, \"/not_found\", http.StatusFound)\n\t\treturn\n\t}\n\n\tbody, readErr := ioutil.ReadAll(r.Body)\n\tif readErr != nil {\n\t\tlog.Println(readErr)\n\t\tReturnError(w, readErr)\n\t\treturn\n\t}\n\n\tvar regResp webpojo.UserCreateResp\n\tif len(body) == 0 {\n\t\tlog.Println(\"Empty json payload\")\n\t\tRecordRegisterAttempt(sess)\n\t\tsess.Save(r, w)\n\t\tregResp = webpojo.UserCreateResp{constants.StatusCode_400, constants.Msg_400}\n\t\tbs, err := json.Marshal(regResp)\n\t\tif err != nil {\n\t\t\tReturnError(w, err)\n\t\t\treturn\n\t\t}\n\t\tfmt.Fprint(w, string(bs))\n\t\treturn\n\t}\n\n\t//log.Println(\"r.Body\", string(body))\n\tregReq := webpojo.UserCreateReq{}\n\tjsonErr := json.Unmarshal(body, &regReq)\n\tif jsonErr != nil {\n\t\tlog.Println(jsonErr)\n\t\tReturnError(w, jsonErr)\n\t\treturn\n\t}\n\tlog.Println(regReq.Email)\n\n\t// Validate with required fields\n\tif validate, _ := validateRegisterInfo(r, &regReq, constants.DefaultRole); !validate {\n\t\tlog.Println(\"Invalid reg request! Missing field\")\n\t\tRecordRegisterAttempt(sess)\n\t\tsess.Save(r, w)\n\t\tregResp = webpojo.UserCreateResp{constants.StatusCode_400, constants.Msg_400}\n\t\tbs, err := json.Marshal(regResp)\n\t\tif err != nil {\n\t\t\tReturnError(w, err)\n\t\t\treturn\n\t\t}\n\t\tfmt.Fprint(w, string(bs))\n\t\treturn\n\t}\n\n\tpassword, errp := passhash.HashString(regReq.Password)\n\n\t// If password hashing failed\n\tif errp != nil {\n\t\tlog.Println(errp)\n\t\tRecordRegisterAttempt(sess)\n\t\tsess.Save(r, w)\n\t\tregResp = webpojo.UserCreateResp{constants.StatusCode_500, constants.Msg_500}\n\t\tbs, err := json.Marshal(regResp)\n\t\tif err != nil {\n\t\t\tReturnError(w, err)\n\t\t\treturn\n\t\t}\n\t\tfmt.Fprint(w, string(bs))\n\t\treturn\n\t}\n\n\t// Get database result\n\t_, err := model.UserByEmail(regReq.Email)\n\n\tif err == model.ErrNoResult { // If success (no user exists with that email)\n\t\tex := model.UserCreate(regReq.FirstName, regReq.LastName, regReq.Email, password)\n\t\t// Will only error if there is a problem with the query\n\t\tif ex != nil {\n\t\t\tlog.Println(ex)\n\t\t\tRecordRegisterAttempt(sess)\n\t\t\tsess.Save(r, w)\n\t\t\tregResp = webpojo.UserCreateResp{constants.StatusCode_500, constants.Msg_500}\n\t\t\tbs, err := json.Marshal(regResp)\n\t\t\tif err != nil {\n\t\t\t\tReturnError(w, err)\n\t\t\t\treturn\n\t\t\t}\n\t\t\tfmt.Fprint(w, string(bs))\n\t\t} else {\n\t\t\tlog.Println(\"Account created successfully for: \" + regReq.Email)\n\t\t\tRecordRegisterAttempt(sess)\n\t\t\tsess.Save(r, w)\n\t\t\tregResp = webpojo.UserCreateResp{constants.StatusCode_200, constants.Msg_200}\n\t\t\tbs, err := json.Marshal(regResp)\n\t\t\tif err != nil {\n\t\t\t\tReturnError(w, err)\n\t\t\t\treturn\n\t\t\t}\n\t\t\tfmt.Fprint(w, string(bs))\n\t\t}\n\t} else if err != nil { // Catch all other errors\n\t\tlog.Println(err)\n\t\tRecordRegisterAttempt(sess)\n\t\tsess.Save(r, w)\n\t\tregResp = webpojo.UserCreateResp{constants.StatusCode_500, constants.Msg_500}\n\t\tbs, err := json.Marshal(regResp)\n\t\tif err != nil {\n\t\t\tReturnError(w, err)\n\t\t\treturn\n\t\t}\n\t\tfmt.Fprint(w, string(bs))\n\t} else { // Else the user already exists\n\t\tlog.Println(\"User already existed!!!\")\n\t\tRecordRegisterAttempt(sess)\n\t\tsess.Save(r, w)\n\t\tregResp = webpojo.UserCreateResp{constants.StatusCode_400, constants.Msg_400}\n\t\tbs, err := json.Marshal(regResp)\n\t\tif err != nil {\n\t\t\tReturnError(w, err)\n\t\t\treturn\n\t\t}\n\t\tfmt.Fprint(w, 
string(bs))\n\t}\n}", "func UserRegister(res http.ResponseWriter, req *http.Request) {\n\n\t// get user form from user register form\n\t// insert data to DB\n\t// First step would be Firstname, lastname and password..\n\t/*\n\t* encrypting password from frontend and decrypt at this end...\n\t* Password matching ( re entering)\n\t* Inserting to db ( firstname,lastname,email,password,registered_at)\n\t */\n\n\trequestID := req.FormValue(\"uid\")\n\tfirstName := req.FormValue(\"first_name\")\n\tlastName := req.FormValue(\"last_name\")\n\temail := req.FormValue(\"email\")\n\tpassword := req.FormValue(\"password\")\n\n\tlogs.WithFields(logs.Fields{\n\t\t\"Service\": \"User Service\",\n\t\t\"package\": \"register\",\n\t\t\"function\": \"UserRegister\",\n\t\t\"uuid\": requestID,\n\t\t\"email\": email,\n\t}).Info(\"Received data to insert to users table\")\n\n\t// check user entered same email address\n\thasAccount := Checkmail(email, requestID)\n\n\tif hasAccount != true {\n\n\t\tdb := dbConn()\n\n\t\t// Inserting token to login_token table\n\t\tinsertUser, err := db.Prepare(\"INSERT INTO users (email,first_name,last_name,password) VALUES(?,?,?,?)\")\n\t\tif err != nil {\n\t\t\tlogs.WithFields(logs.Fields{\n\t\t\t\t\"Service\": \"User Service\",\n\t\t\t\t\"package\": \"register\",\n\t\t\t\t\"function\": \"UserRegister\",\n\t\t\t\t\"uuid\": requestID,\n\t\t\t\t\"Error\": err,\n\t\t\t}).Error(\"Couldnt prepare insert statement for users table\")\n\t\t}\n\t\tinsertUser.Exec(email, firstName, lastName, password)\n\n\t\t// Inserting email to emails table\n\n\t\tinsertEmail, err := db.Prepare(\"INSERT INTO emails (email,isActive) VALUES(?,?)\")\n\t\tif err != nil {\n\t\t\tlogs.WithFields(logs.Fields{\n\t\t\t\t\"Service\": \"User Service\",\n\t\t\t\t\"package\": \"register\",\n\t\t\t\t\"function\": \"UserRegister\",\n\t\t\t\t\"uuid\": requestID,\n\t\t\t\t\"Error\": err,\n\t\t\t}).Error(\"Couldnt prepare insert statement for emails table\")\n\t\t}\n\t\tinsertEmail.Exec(email, 1)\n\n\t\t_, err = http.PostForm(\"http://localhost:7070/response\", url.Values{\"uid\": {requestID}, \"service\": {\"User Service\"},\n\t\t\t\"function\": {\"UserRegister\"}, \"package\": {\"Register\"}, \"status\": {\"1\"}})\n\n\t\tif err != nil {\n\t\t\tlog.Println(\"Error response sending\")\n\t\t}\n\n\t\tdefer db.Close()\n\t\treturn\n\t} // user has an account\n\n\tlogs.WithFields(logs.Fields{\n\t\t\"Service\": \"User Service\",\n\t\t\"package\": \"register\",\n\t\t\"function\": \"UserRegister\",\n\t\t\"uuid\": requestID,\n\t\t\"email\": email,\n\t}).Error(\"User has an account for this email\")\n\n\t_, err := http.PostForm(\"http://localhost:7070/response\", url.Values{\"uid\": {requestID}, \"service\": {\"User Service\"},\n\t\t\"function\": {\"sendLoginEmail\"}, \"package\": {\"Check Email\"}, \"status\": {\"0\"}})\n\n\tif err != nil {\n\t\tlog.Println(\"Error response sending\")\n\t}\n}", "func (serv *Server) RegisterUser(creds Credentials) (err error) {\n row := serv.db.QueryRow(\"select uid from users where username = ?;\", creds.Username)\n\n var uid int\n if row.Scan(&uid) == sql.ErrNoRows {\n salt := make([]byte, SaltLength)\n rand.Read(salt)\n\n saltedHash, err := HashAndSaltPassword([]byte(creds.Password), salt)\n if err != nil {\n return err\n }\n\n _, err = serv.db.Exec(\n `insert into users (username, salt, saltedhash) values (?, ?, ?);`,\n creds.Username, salt, saltedHash)\n\n if err != nil {\n err = ErrRegistrationFailed\n }\n } else {\n err = ErrUsernameTaken\n }\n\n return\n}", "func AdminInitOnBoot(s 
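Editor's note: the registration handlers quoted above (RegisterUser, UserRegisterPost, UserRegister) all walk the same path — read the request, validate it, refuse duplicates, hash the password, insert the row, and write a response. The sketch below is a minimal, generic version of that flow using only net/http, encoding/json, and bcrypt; the registersketch package name, the UserStore interface and its method names, and the 6-character minimum are assumptions for illustration, and bcrypt stands in for the passhash/encryption helpers the entries actually call.

package registersketch

import (
	"encoding/json"
	"net/http"

	"golang.org/x/crypto/bcrypt"
)

// UserStore is a hypothetical persistence interface standing in for the
// MySQL/Mongo/gorm repositories used by the handlers in this list.
type UserStore interface {
	EmailExists(email string) (bool, error)
	Insert(email string, hashedPassword []byte) error
}

type registerRequest struct {
	Email    string `json:"email"`
	Password string `json:"password"`
}

// RegisterHandler decodes the JSON body, validates it, rejects duplicates,
// hashes the password with bcrypt, and stores the new user.
func RegisterHandler(store UserStore) http.HandlerFunc {
	return func(w http.ResponseWriter, r *http.Request) {
		var req registerRequest
		if err := json.NewDecoder(r.Body).Decode(&req); err != nil {
			http.Error(w, "invalid request body", http.StatusBadRequest)
			return
		}
		if req.Email == "" || len(req.Password) < 6 {
			http.Error(w, "email and a password of at least 6 characters are required", http.StatusBadRequest)
			return
		}
		exists, err := store.EmailExists(req.Email)
		if err != nil {
			http.Error(w, "storage error", http.StatusInternalServerError)
			return
		}
		if exists {
			http.Error(w, "a user with that email already exists", http.StatusConflict)
			return
		}
		hash, err := bcrypt.GenerateFromPassword([]byte(req.Password), bcrypt.DefaultCost)
		if err != nil {
			http.Error(w, "could not hash password", http.StatusInternalServerError)
			return
		}
		if err := store.Insert(req.Email, hash); err != nil {
			http.Error(w, "could not create user", http.StatusInternalServerError)
			return
		}
		w.WriteHeader(http.StatusCreated)
	}
}

Returning 409 for the duplicate case matches what several entries do with APIErrorConflict or an explicit "already exists" message.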
*mgo.Session) error {\n\n\tfmt.Println(\"/////////////////////////////////////////////////\")\n\tfmt.Println(\"AdminInitFromBoot\")\n\tfmt.Println(\"/////////////////////////////////////////////////\")\n\n\tc := s.DB(DatabaseName).C(CollUser)\n\n\tvar result model.User\n\tc.Find(bson.M{\"userid\": \"[email protected]\"}).One(&result)\n\tif result.UserNo != \"\" {\n\t\treturn nil\n\t}\n\n\tnow := time.Now()\n\tfmt.Println(\"admin inserted at \" + now.String())\n\tvar insert = model.User{\n\t\tUserId: \"admin\",\n\t\tPassword: \"admin\",\n\t\tUserType: 4,\n\t\tCreateDateTime: now,\n\t\tState: 1,\n\t\tActivated: 0,\n\t}\n\terr := c.Insert(&insert)\n\treturn err\n}", "func (u *User) Register(ctx context.Context, user model.User) (*model.User, error) {\n\tspan, _ := jtrace.Tracer.SpanFromContext(ctx, \"register_user\")\n\tdefer span.Finish()\n\tspan.SetTag(\"register\", \"register user model\")\n\n\ttx := mysql.Storage.GetDatabase().Begin()\n\n\tif err := tx.Create(&user).Error; err != nil {\n\t\tlog := logger.GetZapLogger(false)\n\t\tlogger.Prepare(log).\n\t\t\tAppend(zap.Any(\"error\", fmt.Sprintf(\"register user: %s\", err))).\n\t\t\tLevel(zap.ErrorLevel).\n\t\t\tDevelopment().\n\t\t\tCommit(\"env\")\n\t\ttx.Rollback()\n\t\treturn nil, err\n\t}\n\tdefer tx.Commit()\n\n\treturn &user, nil\n}", "func (s *Session) createUser() error {\n\tlog := s.context.Log()\n\n\t// Create local user\n\tcommandArgs := []string{\"net\", \"user\", \"/add\", appconfig.DefaultRunAsUserName}\n\tcmd := exec.Command(appconfig.PowerShellPluginCommandName, commandArgs...)\n\tif err := cmd.Run(); err != nil {\n\t\tlog.Errorf(\"Failed to create %s: %v\", appconfig.DefaultRunAsUserName, err)\n\t\treturn err\n\t}\n\tlog.Infof(\"Successfully created %s\", appconfig.DefaultRunAsUserName)\n\n\t// Add to admins group\n\tcommandArgs = []string{\"net\", \"localgroup\", administrators, appconfig.DefaultRunAsUserName, \"/add\"}\n\tcmd = exec.Command(appconfig.PowerShellPluginCommandName, commandArgs...)\n\tif err := cmd.Run(); err != nil {\n\t\tlog.Errorf(\"Failed to add %s to %s group: %v\", appconfig.DefaultRunAsUserName, administrators, err)\n\t\treturn err\n\t}\n\tlog.Infof(\"Successfully added %s to %s group\", appconfig.DefaultRunAsUserName, administrators)\n\treturn nil\n}", "func Register(name string, fc func(request *http.Request, user interface{}) bool) {\n\trole.Register(name, fc)\n}", "func InitializeUser(logger *logrus.Logger, grpcServer *grpc.Server, db *sql.DB) {\n\n\tumr := repository.NewUserMariadbRepository(db)\n\tuu := usecase.NewUserUsecase(logger, umr)\n\n\tgrpcUserServiceServer := handler.NewUserGRPCHandler(uu)\n\tpb.RegisterUserServiceServer(grpcServer, grpcUserServiceServer)\n}", "func Registration(c echo.Context) error {\n\tu := new(models.User)\n\tif err := c.Bind(u); err != nil {\n\t\treturn c.JSON(http.StatusBadRequest, err.Error())\n\t}\n\t// encrypt password\n\tpassword, err := utils.EncryptPassword(os.Getenv(\"SALT\"), c.FormValue(\"password\"))\n\n\tif err != nil {\n\t\treturn c.JSON(http.StatusInternalServerError, err)\n\t}\n\n\tu.Password = password\n\n\tfmt.Println(password)\n\n\tif res := db.DBCon.Create(u); res.Error != nil {\n\t\treturn c.JSON(http.StatusBadRequest, res.Error)\n\t}\n\treturn c.JSON(http.StatusCreated, u)\n}", "func (u *UserHandler) Register(parentGroup *gin.RouterGroup) error {\n\tuserGroup := parentGroup.Group(\"users\")\n\tauthenticatedUserGroup := parentGroup.Group(\"users\")\n\tauthenticatedUserGroup.Use(u.AuthMiddleware.HandleAuth())\n\t{\n\t\tuserGroup.POST(\"\", 
u.RegisterUser)\n\t\tuserGroup.GET(\"/:userid\", u.GetUser)\n\t\tauthenticatedUserGroup.GET(\"\", u.GetAllUsers)\n\t}\n\treturn nil\n}", "func (c *Credentials) Register(username, password string) {\n\tuser := EncryptUser(username, password)\n\tc.whitelist[user] = struct{}{}\n}", "func (a *Server) CreateUser(w http.ResponseWriter, r *http.Request) {\n\tfmt.Println(\"create a new user\")\n}", "func (c *Client) RegisterUser(loginName, password string, attrs *map[string]interface{}) (bool, error) {\n\t// Create a request object.\n\treqobj := newMap(attrs)\n\treqobj[\"loginName\"] = loginName\n\treqobj[\"password\"] = password\n\n\terr := c.Send(c.appPath(\"users\"), \"POST\", reqobj,\n\t\t\"application/vnd.kii.RegistrationRequest+json\",\n\t\tfunc(resp *http.Response) error {\n\t\t\tswitch resp.StatusCode {\n\t\t\tcase 201:\n\t\t\t\treturn nil\n\t\t\tdefault:\n\t\t\t\treturn ToError(resp)\n\t\t\t}\n\t\t})\n\tif err != nil {\n\t\treturn false, err\n\t}\n\n\treturn true, nil\n}", "func Register(w http.ResponseWriter, r *http.Request, opt router.UrlOptions, sm session.ISessionManager, s store.IStore) {\n\tswitch r.Method {\n\tcase \"GET\":\n\t\tutils.RenderTemplate(w, r, \"register\", sm, make(map[string]interface{}))\n\n\tcase \"POST\":\n\t\tvar newUser *user.User\n\n\t\tdfc := s.GetDataSource(\"persistence\")\n\n\t\tp, ok := dfc.(persistence.IPersistance)\n\t\tif !ok {\n\t\t\tlogger.Log(\"Invalid store\")\n\t\t\treturn\n\t\t}\n\n\t\tc := p.GetCollection(\"users\")\n\n\t\tapiServer := r.PostFormValue(\"api-server\")\n\t\tusername := r.PostFormValue(\"username\")\n\t\tpassword := hash.EncryptString(r.PostFormValue(\"password\"))\n\n\t\tnewUser = &user.User{\n\t\t\tID: bson.NewObjectId(),\n\t\t\tUsername: username,\n\t\t\tPassword: password,\n\t\t\tAPIServerIP: apiServer,\n\t\t}\n\n\t\terr := c.Insert(newUser)\n\t\tif err != nil {\n\t\t\tfmt.Println(err)\n\t\t\tlogger.Log(\"Error registering user '\" + username + \"'\")\n\t\t\treturn\n\t\t}\n\t\tlogger.Log(\"Registered user '\" + username + \"'\")\n\n\t\tlogger.Log(\"Registering user in API server \" + apiServer)\n\t\tform := url.Values{}\n\t\tform.Add(\"username\", username)\n\t\tform.Add(\"password\", password)\n\n\t\tregisterURL := \"http://\" + apiServer + \":\" + os.Getenv(\"SH_API_SRV_PORT\") + \"/login/register\"\n\t\t_, err = http.PostForm(registerURL, form)\n\n\t\tif err != nil {\n\t\t\tfmt.Println(err)\n\t\t\tlogger.Log(\"Error registering user in endpoint \" + registerURL)\n\t\t}\n\n\t\thttp.Redirect(w, r, \"/\", http.StatusSeeOther)\n\tdefault:\n\t}\n}", "func (h *auth) Register(c echo.Context) error {\n\t// Filter params\n\tvar params service.RegisterParams\n\tif err := c.Bind(&params); err != nil {\n\t\treturn c.JSON(http.StatusUnauthorized, sferror.New(\"Could not get user's params.\"))\n\t}\n\tparams.UserAgent = c.Request().UserAgent()\n\tparams.Session = currentSession(c)\n\n\tif params.Email == \"\" {\n\t\treturn c.JSON(http.StatusUnauthorized, sferror.New(\"No email provided.\"))\n\t}\n\tif params.RegistrationPassword == \"\" {\n\t\treturn c.JSON(http.StatusUnauthorized, sferror.New(\"No password provided.\"))\n\t}\n\tif params.PasswordNonce == \"\" {\n\t\treturn c.JSON(http.StatusUnauthorized, sferror.New(\"No nonce provided.\"))\n\t}\n\tif libsf.VersionLesser(libsf.APIVersion20200115, params.APIVersion) && params.PasswordCost <= 0 {\n\t\treturn c.JSON(http.StatusUnauthorized, sferror.New(\"No password cost provided.\"))\n\t}\n\n\tservice := service.NewUser(h.db, h.sessions, params.APIVersion)\n\tregister, err := 
service.Register(params)\n\tif err != nil {\n\t\treturn err\n\t}\n\n\treturn c.JSON(http.StatusOK, register)\n}", "func RegisterUser(w http.ResponseWriter, r *http.Request) {\n\tbody, err := ioutil.ReadAll(r.Body)\n\tif err != nil {\n\t\thttp.Error(w, \"No input found!\", http.StatusBadRequest)\n\t\treturn\n\t}\n\tvar newReq User\n\terr = json.Unmarshal(body, &newReq)\n\tif err != nil {\n\t\thttp.Error(w, err.Error(), http.StatusInternalServerError)\n\t\treturn\n\t}\n\tvar username = newReq.UserID\n\tif _, ok := userData[username]; ok {\n\t\thttp.Error(w, \"User already exists!\", http.StatusBadRequest)\n\t\treturn\n\t}\n\n\t// log.Println(util.StringWithCharset(random.Intn(20)+10, charset))\n\tpreHashString := newReq.UserID + util.StringWithCharset(random.Intn(20)+10, util.Charset)\n\thashedString := crypto.CreateSHA256Hash(preHashString)\n\tuserData[username] = hashedString\n\thashOutput := UserHash{hashedString}\n\tlog.Println(userData)\n\toutJSON, err := json.Marshal(hashOutput)\n\tif err != nil {\n\t\thttp.Error(w, err.Error(), http.StatusInternalServerError)\n\t\treturn\n\t}\n\tw.Header().Set(\"Content-Type\", \"application/json\")\n\tw.Write(outJSON)\n}", "func Register (w http.ResponseWriter, r *http.Request) {\n\tw.Header().Set(\"content-type\", \"application/json\")\n\n\tvar user models.User\n\tvar res models.ResponseResult\n\n\tbody, _ := ioutil.ReadAll(r.Body)\n\terr := json.Unmarshal(body, &user)\n\n\tif err != nil {\n\t\tres.Error = err.Error()\n\t\t_ = json.NewEncoder(w).Encode(res)\n\t\treturn\n\t}\n\n\tif msg, validationResult := user.Valid(); !validationResult {\n\t\tres.Error = msg\n\t\t_ = json.NewEncoder(w).Encode(res)\n\t\treturn\n\t}\n\n\n\thash, err := bcrypt.GenerateFromPassword([]byte(user.Password), 10)\n\tif err != nil {\n\t\tre := models.ResponseError{\n\t\t\tCode: constants.ErrCodeHashError,\n\t\t\tMessage: constants.MsgHashError,\n\t\t\tOriginalError: err,\n\t\t}\n\t\tres.Error = re\n\t\t_ = json.NewEncoder(w).Encode(res)\n\t\treturn\n\t}\n\n\tuser.Password = string(hash)\n\t_, err = models.InsertOne(models.UserCollection, user)\n\n\tif err != nil {\n\t\tre := models.ResponseError{\n\t\t\tCode: constants.ErrCodeInsertOne,\n\t\t\tMessage: strings.Replace(constants.MsgErrorInsertOne, \"%COLLECTION%\", models.UserCollection, -1),\n\t\t\tOriginalError: err,\n\t\t}\n\t\tres.Error = re\n\t\t_ = json.NewEncoder(w).Encode(res)\n\t\treturn\n\t}\n\n\tres.Error = false\n\tres.Result = strings.Replace(constants.MsgSuccessInsertedOne, \"%COLLECTION%\", models.UserCollection, -1)\n\t_ = json.NewEncoder(w).Encode(res)\n\n\treturn\n\n}", "func Register(w http.ResponseWriter, r *http.Request) {\n\tt:= models.Users{}\n\n\terr := json.NewDecoder(r.Body).Decode(&t)\n\n\tif err != nil {\n\t\thttp.Error(w, \"Error en los datos recibidos \"+err.Error(), 400)\n\t\treturn\n\t}\n\tif len(t.Login) < 6 {\n\t\thttp.Error(w, \"Error en los datos recibidos, ingrese un login mayor a 5 digitos \", 400)\n\t\treturn\n\t}\n\tif len(t.Password) < 6 {\n\t\thttp.Error(w, \"Ingrese una contraseña mayor a 5 digitos \", 400)\n\t\treturn\n\t}\n\n\t_, found, _ := bd.CheckUser(t.Login)\n\tif found == true {\n\t\thttp.Error(w, \"Ya existe un usuario registrado con ese login\", 400)\n\t\treturn\n\t}\n\n\tif t.Id_role == 3 {\n\t\tcod := bd.CodFamiliar(t.Cod_familiar)\n\t\tif cod == false {\n\t\t\thttp.Error(w, \"Debe ingresar un codigo de familia correcto\", 400)\n\t\t\treturn\n\t\t}\n\t}\n\n\tif t.Id_role == 1 {\n\t\thttp.Error(w, \"Usted no esta autorizado para crear este tipo de usuario\", 
400)\n\t\treturn\n\t}\n\n\t_, status, err := bd.InsertRegister(t)\n\tif err != nil {\n\t\thttp.Error(w, \"Ocurrió un error al intentar realizar el registro de usuario \"+err.Error(), 400)\n\t\treturn\n\t}\n\n\tif status == false {\n\t\thttp.Error(w, \"No se ha logrado insertar el registro del usuario\", 400)\n\t\treturn\n\t}\n\n\tw.WriteHeader(http.StatusCreated)\n}", "func Register(w http.ResponseWriter, r *http.Request, opt router.UrlOptions, sm session.ISessionManager, s store.IStore) {\n\tswitch r.Method {\n\tcase \"GET\":\n\t\tparams := make(map[string]interface{})\n\t\tutils.RenderTemplate(w, r, \"register\", sm, s, params)\n\n\tcase \"POST\":\n\t\tvar newUser *user.User\n\n\t\tdfc := s.GetDataSource(\"persistence\")\n\n\t\tp, ok := dfc.(persistence.IPersistance)\n\t\tif !ok {\n\t\t\tutl.Log(\"Invalid store\")\n\t\t\treturn\n\t\t}\n\n\t\tc := p.GetCollection(\"users\")\n\n\t\tnewUser = &user.User{\n\t\t\tID: bson.NewObjectId(),\n\t\t\tUsername: r.PostFormValue(\"username\"),\n\t\t\tPassword: utils.HashString(r.PostFormValue(\"password\")),\n\t\t}\n\n\t\terr := c.Insert(newUser)\n\t\tif err != nil {\n\t\t\tutl.Log(err)\n\t\t}\n\n\t\tutl.Log(\"Registered user\", newUser)\n\n\t\thttp.Redirect(w, r, \"/\", http.StatusSeeOther)\n\tdefault:\n\t}\n}", "func setupUser(config *initConfig) error {\n\t// Set up defaults.\n\tdefaultExecUser := user.ExecUser{\n\t\tUid: syscall.Getuid(),\n\t\tGid: syscall.Getgid(),\n\t\tHome: \"/\",\n\t}\n\tpasswdPath, err := user.GetPasswdPath()\n\tif err != nil {\n\t\treturn err\n\t}\n\tgroupPath, err := user.GetGroupPath()\n\tif err != nil {\n\t\treturn err\n\t}\n\texecUser, err := user.GetExecUserPath(config.User, &defaultExecUser, passwdPath, groupPath)\n\tif err != nil {\n\t\treturn err\n\t}\n\n\tvar addGroups []int\n\tif len(config.Config.AdditionalGroups) > 0 {\n\t\taddGroups, err = user.GetAdditionalGroupsPath(config.Config.AdditionalGroups, groupPath)\n\t\tif err != nil {\n\t\t\treturn err\n\t\t}\n\t}\n\t// before we change to the container's user make sure that the processes STDIO\n\t// is correctly owned by the user that we are switching to.\n\tif err := fixStdioPermissions(execUser); err != nil {\n\t\treturn err\n\t}\n\tsuppGroups := append(execUser.Sgids, addGroups...)\n\tif err := syscall.Setgroups(suppGroups); err != nil {\n\t\treturn err\n\t}\n\n\tif err := system.Setgid(execUser.Gid); err != nil {\n\t\treturn err\n\t}\n\tif err := system.Setuid(execUser.Uid); err != nil {\n\t\treturn err\n\t}\n\t// if we didn't get HOME already, set it based on the user's HOME\n\tif envHome := os.Getenv(\"HOME\"); envHome == \"\" {\n\t\tif err := os.Setenv(\"HOME\", execUser.Home); err != nil {\n\t\t\treturn err\n\t\t}\n\t}\n\treturn nil\n}", "func init() {\n\tregister(\"POST\", \"/user/register\", controllers.Register, nil)\n\tregister(\"POST\", \"/user/login\", controllers.Login, nil)\n\tregister(\"GET\", \"/user/info\", controllers.UserInfo, auth.TokenMiddleware)\n\tregister(\"POST\", \"/user/logout\", controllers.Logout, auth.TokenMiddleware)\n\tregister(\"POST\", \"/user/shift\", controllers.CreateUserSchedule, nil)\n\tregister(\"POST\", \"/admin/month\", controllers.SetMonth, nil)\n}", "func RegisterUser(user models.User)(string,bool,error) {\n\n\tctx, cancel := context.WithTimeout(context.Background(), 15*time.Second)\n\tdefer cancel()\n\tdb := MongoC.Database(\"twitter\")\n\tcol := db.Collection(\"users\")\n\tuser.Password,_ = EncriptPassw(user.Password)\n\tresult, err := col.InsertOne(ctx,user)\n\tif err != nil{\n\t\treturn \"\",false,err\n\t}\n\tObjID,_ 
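Editor's note: the (serv *Server).RegisterUser entry earlier in this list keys its duplicate check on sql.ErrNoRows and only then inserts a salted hash. Below is a compact sketch of that shape with database/sql; bcrypt (which embeds its own salt) is used in place of that entry's manual salt plus HashAndSaltPassword helper, and the table, column, and error names are placeholders.

package registersketch

import (
	"database/sql"
	"errors"

	"golang.org/x/crypto/bcrypt"
)

var ErrUsernameTaken = errors.New("username already taken")

// CreateUser inserts a new row only when no row with the same username exists.
func CreateUser(db *sql.DB, username, password string) error {
	var id int64
	err := db.QueryRow("SELECT id FROM users WHERE username = ?", username).Scan(&id)
	switch {
	case err == nil:
		// A row came back: the name is taken.
		return ErrUsernameTaken
	case errors.Is(err, sql.ErrNoRows):
		// No existing user: fall through and insert.
	default:
		return err
	}

	hash, err := bcrypt.GenerateFromPassword([]byte(password), bcrypt.DefaultCost)
	if err != nil {
		return err
	}
	_, err = db.Exec("INSERT INTO users (username, saltedhash) VALUES (?, ?)", username, hash)
	return err
}

Note that the lookup-then-insert sequence still races with concurrent requests; a UNIQUE constraint plus checking the driver's duplicate-key error (as the pq "23505" entry further down does) closes that gap.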
:= result.InsertedID.(primitive.ObjectID)\n\treturn ObjID.String(),true,nil\n}", "func (*RegDBService) AddUser(reg *Registration) error {\n\terr := rdb.Create(&reg).Error\n\treturn err\n}", "func (h *Handler) SignUpUser(args []string) error {\n\tif h.CurrentUser.Root {\n\t\t//fmt.Println(args)\n\t\tswitch len(args) {\n\t\tcase 3:\n\t\t\tname := args[1]\n\t\t\tpassword := args[2]\n\t\t\terr := models.CreateUser(name, password, false)\n\t\t\tif err != nil {\n\t\t\t\treturn err\n\t\t\t}\n\t\t\t_, err = h.writeToUser(\"Password for %s is set!\", name)\n\t\t\treturn err\n\t\t\t//TODO - not working, because `h.Term.ReadPassword` behaves unconsistent\n\t\t\t//\t\tcase 2:\n\t\t\t//\t\t\tname := args[1]\n\t\t\t//\t\t\t//\t\t\tfmt.Println(\"Waiting for password\")\n\t\t\t//\t\t\t//\t\t\th.Term.Write([]byte(fmt.Sprintf(\"\\nEnter password for ordinary user %s:\\n\", name)))\n\t\t\t//\t\t\tpassword, err := h.Term.ReadPassword(fmt.Sprintf(\"Enter password for user `%s`:\", name))\n\t\t\t//\t\t\th.Term.SetPrompt(h.PrintPrompt())\n\t\t\t//\t\t\tif err != nil {\n\t\t\t//\t\t\t\treturn err\n\t\t\t//\t\t\t}\n\t\t\t//\t\t\t//\t\t\tfmt.Println(\"Password recieved\", password)\n\t\t\t//\t\t\t//\t\t\tfmt.Println(\"Prompt updated, creating user\")\n\t\t\t//\t\t\treturn models.CreateUser(name, password, false)\n\n\t\tdefault:\n\t\t\treturn fmt.Errorf(\"Try `\\\\r someUserName [newPassword]` to sign up or change password for somebody!\")\n\t\t}\n\t}\n\treturn fmt.Errorf(\"You have to be root to signing up/registering/changing password!\")\n}", "func Register(db *gorm.DB, ctx echo.Context, username, email, password string) (*User, error) {\n\treqTimeSec, err := strconv.ParseInt(ctx.Request().Header.Get(\"REQUEST_TIME\"), 10, 64)\n\treqTime := time.Now()\n\tif err == nil {\n\t\treqTime = time.Unix(reqTimeSec, 0)\n\t}\n\n\tip := web.IP(ctx)\n\tlocation := web.Location(ctx, ip)\n\n\tu := &User{\n\t\tUsername: username,\n\t\tEmail: email,\n\t\tPassword: password,\n\t\tCreateIP: ip,\n\t\tCreateLocation: location,\n\t\tLastLoginTime: uint64(reqTime.Unix()),\n\t\tLastLoginIP: ip,\n\t\tLastLoginLocation: location,\n\t}\n\n\terr = db.Save(u).Error\n\treturn u, err\n}", "func (s *Shell) CreateUser(c *cli.Context) (err error) {\n\tresp, err := s.HTTP.Get(\"/v2/users/\", nil)\n\tif err != nil {\n\t\treturn s.errorOut(err)\n\t}\n\tdefer func() {\n\t\tif cerr := resp.Body.Close(); cerr != nil {\n\t\t\terr = multierr.Append(err, cerr)\n\t\t}\n\t}()\n\tvar links jsonapi.Links\n\tvar users AdminUsersPresenters\n\tif err := s.deserializeAPIResponse(resp, &users, &links); err != nil {\n\t\treturn s.errorOut(err)\n\t}\n\tfor _, user := range users {\n\t\tif strings.EqualFold(user.Email, c.String(\"email\")) {\n\t\t\treturn s.errorOut(fmt.Errorf(\"user with email %s already exists\", user.Email))\n\t\t}\n\t}\n\n\tfmt.Println(\"Password of new user:\")\n\tpwd := s.PasswordPrompter.Prompt()\n\n\trequest := struct {\n\t\tEmail string `json:\"email\"`\n\t\tRole string `json:\"role\"`\n\t\tPassword string `json:\"password\"`\n\t}{\n\t\tEmail: c.String(\"email\"),\n\t\tRole: c.String(\"role\"),\n\t\tPassword: pwd,\n\t}\n\n\trequestData, err := json.Marshal(request)\n\tif err != nil {\n\t\treturn s.errorOut(err)\n\t}\n\n\tbuf := bytes.NewBuffer(requestData)\n\tresponse, err := s.HTTP.Post(\"/v2/users\", buf)\n\tif err != nil {\n\t\treturn s.errorOut(err)\n\t}\n\tdefer func() {\n\t\tif cerr := response.Body.Close(); cerr != nil {\n\t\t\terr = multierr.Append(err, cerr)\n\t\t}\n\t}()\n\n\treturn s.renderAPIResponse(response, 
&AdminUsersPresenter{}, \"Successfully created new API user\")\n}", "func Register(ctx echo.Context) error {\n\treq := new(registerRequest)\n\tif err := ctx.Bind(req); err != nil {\n\t\treturn err\n\t}\n\tuser := User{Username: req.Username, Password: md5Pwd(req.Password), Type: req.Type}\n\terr := db.Model(&User{}).First(&user, &User{Username: req.Username}).Error\n\tif err == gorm.ErrRecordNotFound {\n\t\te := db.Create(&user).Error\n\t\tif e == nil {\n\t\t\tctx.SetCookie(&http.Cookie{Name: cookieKey, Value: user.Base.ID.String()})\n\t\t\treturn ctx.JSON(http.StatusOK, &response{\n\t\t\t\tCode: 0,\n\t\t\t\tMsg: \"\",\n\t\t\t\tData: registerResponse{\n\t\t\t\t\tUsername: user.Username,\n\t\t\t\t\tType: user.Type,\n\t\t\t\t\tID: user.Base.ID,\n\t\t\t\t},\n\t\t\t})\n\t\t}\n\t\treturn e\n\t}\n\tif err != nil {\n\t\treturn err\n\t}\n\tres := &response{\n\t\tCode: 1,\n\t\tMsg: \"User name has been taken\",\n\t}\n\treturn ctx.JSON(http.StatusBadRequest, res)\n}", "func registerUser(stub shim.ChaincodeStubInterface, args []string) ([]byte, error) {\n\n\tvar user_obj sbiStruct.user\t\n\tvar err error\n\n\tfmt.Println(\"Entering registerUser\")\n\n\tif (len(args) < 1) {\n\t\tfmt.Println(\"Invalid number of args\")\n\t\treturn nil, errors.New(\"Expected atleast one arguments for initiate Transaction\")\n\t}\n\n\tfmt.Println(\"Args [0] is : %v\\n\",args[0])\n\tfmt.Println(\"Args [1] is : %v\\n\",args[1])\n\t\n\t//unmarshal transaction initiation data from UI to \"transactionInitiation\" struct\n\terr = json.Unmarshal([]byte(args[1]), &user_obj)\n\tif err != nil {\n\t\tfmt.Printf(\"Unable to unmarshal createTransaction input transaction initiation : %s\\n\", err)\n\t\treturn nil, nil\n\t}\n\n\tfmt.Println(\"TransactionInitiation object refno variable value is : %s\\n\",trans_obj.TransRefNo);\n\t\n\tGetUserMap(stub)\t\n\n\tuser_map[user_obj.uname] = user_obj\t\n\n\tSetUserMap(stub)\t\n\t\n\tfmt.Printf(\"final user map : %v \\n\", user_map)\t\t\n\t\n\treturn nil, nil\n}", "func LoadViewCreateUser(w http.ResponseWriter, r *http.Request) {\n\tutils.RunTemplate(w, \"register.html\", nil)\n}", "func Register(user models.User) (string, bool, error){\n\tctx, cancel := context.WithTimeout(context.Background(), 15*time.Second)\n\t\n\tdefer cancel()\n\n\tdb := MongoClient.Database(\"test-api-go\")\n\tcol := db.Collection(\"users\")\n\n\tuser.Password, _ = PasswordEncrypt(user.Password)\n\n\tresult, err := col.InsertOne(ctx, user)\n\n\tif err != nil {\n\t\treturn \"\", false, err\n\t}\n\n\tObjID, _ := result.InsertedID.(primitive.ObjectID)\n\treturn ObjID.String(), true, nil\n}", "func (u *UserHandler) Register(w http.ResponseWriter, r *http.Request, _ httprouter.Params) {\n\tvar registerReq domain.RegisterRequest\n\terr := json.NewDecoder(r.Body).Decode(&registerReq)\n\tif err != nil {\n\t\tlog.Warnf(\"Error decode user body when register : %s\", err)\n\t\thttp.Error(w, err.Error(), http.StatusBadRequest)\n\t\treturn\n\t}\n\terrors := u.Validator.Validate(registerReq)\n\tif errors != nil {\n\t\tlog.Warnf(\"Error validate register : %s\", err)\n\t\tw.WriteHeader(http.StatusBadRequest)\n\t\tw.Header().Set(\"Content-Type\", \"application/json\")\n\t\tjson.NewEncoder(w).Encode(errors)\n\t\treturn\n\t}\n\tuser := domain.User{\n\t\tName: registerReq.Name,\n\t\tEmail: registerReq.Email,\n\t\tPassword: registerReq.Password,\n\t}\n\terr = u.UserSerivce.Register(r.Context(), &user)\n\tif err != nil {\n\t\tlog.Warnf(\"Error register user : %s\", err)\n\t\thttp.Error(w, err.Error(), 
http.StatusBadRequest)\n\t\treturn\n\t}\n\tresponse := SuccessResponse{\n\t\tMessage: \"Success Register User\",\n\t}\n\tw.Header().Set(\"Content-Type\", \"application/json\")\n\tjson.NewEncoder(w).Encode(response)\n\treturn\n}", "func RegisterNewUser(c *soso.Context) {\n\treq := c.RequestMap\n\trequest := &auth_protocol.NewUserRequest{}\n\n\tif value, ok := req[\"source\"].(string); ok {\n\t\trequest.Source = value\n\t}\n\n\tif value, ok := req[\"phone\"].(string); ok {\n\t\trequest.PhoneNumber = value\n\t}\n\n\tif value, ok := req[\"instagram_username\"].(string); ok {\n\t\tvalue = strings.Trim(value, \" \\r\\n\\t\")\n\t\tif !nameValidator.MatchString(value) {\n\t\t\tlog.Debug(\"name '%v' isn't valid\", value)\n\t\t\tc.ErrorResponse(http.StatusBadRequest, soso.LevelError, errors.New(\"Invalid instagram name\"))\n\t\t\treturn\n\t\t}\n\t\trequest.InstagramUsername = value\n\t}\n\n\tif value, ok := req[\"username\"].(string); ok {\n\t\tvalue = strings.Trim(value, \" \\r\\n\\t\")\n\t\tif !nameValidator.MatchString(value) {\n\t\t\tlog.Debug(\"name '%v' isn't valid\", value)\n\t\t\tc.ErrorResponse(http.StatusBadRequest, soso.LevelError, errors.New(\"Invalid user name\"))\n\t\t\treturn\n\t\t}\n\t\trequest.Username = value\n\t}\n\n\tif request.InstagramUsername == \"\" && request.Username == \"\" {\n\t\tc.ErrorResponse(http.StatusBadRequest, soso.LevelError, errors.New(\"User name or instagram name is required\"))\n\t\treturn\n\t}\n\n\tif request.PhoneNumber == \"\" {\n\t\tc.ErrorResponse(http.StatusBadRequest, soso.LevelError, errors.New(\"User phone number is required\"))\n\t\treturn\n\t}\n\n\tctx, cancel := rpc.DefaultContext()\n\tdefer cancel()\n\tresp, err := authClient.RegisterNewUser(ctx, request)\n\n\tif err != nil {\n\t\tc.ErrorResponse(http.StatusBadRequest, soso.LevelError, err)\n\t\treturn\n\t}\n\n\tc.SuccessResponse(map[string]interface{}{\n\t\t\"ErrorCode\": resp.ErrorCode,\n\t\t\"ErrorMessage\": resp.ErrorMessage,\n\t})\n}", "func (db *Database) CreateUser(username, password string, permissions []string) error {\n\t// TODO: Authorization.\n\n\tc := &createDBUserCommand{\n\t\tDatabase: db.Name(),\n\t\tUsername: username,\n\t\tPassword: password,\n\t\tPermissions: permissions,\n\t}\n\t_, err := db.server.broadcast(createDBUserMessageType, c)\n\treturn err\n}", "func RegisterUser(c *gin.Context) {\n\t// Check Password confirmation\n\tpassword := c.PostForm(\"password\")\n\tconfirmedPassword := c.PostForm(\"confirmed_password\")\n\ttoken, _ := RandomToken()\n\n\t// Return Error if not confirmed\n\tif password != confirmedPassword {\n\t\tc.JSON(500, gin.H{\n\t\t\t\"status\": \"error\",\n\t\t\t\"message\": \"password not confirmed\"})\n\t\tc.Abort()\n\t\treturn\n\t}\n\n\t// Hash the password\n\thash, _ := HashPassword(password)\n\n\t// Get Form\n\titem := models.User{\n\t\tUserName: c.PostForm(\"user_name\"),\n\t\tFullName: c.PostForm(\"full_name\"),\n\t\tEmail: c.PostForm(\"email\"),\n\t\tPassword: hash,\n\t\tVerificationToken: token,\n\t}\n\n\tif err := config.DB.Create(&item).Error; err != nil {\n\t\tc.JSON(500, gin.H{\n\t\t\t\"status\": \"error\",\n\t\t\t\"message\": err})\n\t\tc.Abort()\n\t\treturn\n\t}\n\n\t// I want to send email for activation\n\n\tc.JSON(200, gin.H{\n\t\t\"status\": \"successfuly register user, please check your email\",\n\t\t\"data\": item,\n\t})\n}", "func (user User) Register(c appengine.Context) (User, error) {\n\n\t// If existing user return error\n\tpotential_user, err := getUserFromUsername(c, user.Username)\n\tif err != nil {\n\t\treturn user, 
err\n\t}\n\n\tif potential_user != (User{}) {\n\t\treturn user, errors.New(\"User with this username exists\")\n\t}\n\n\thashed_password, err := bcrypt.GenerateFromPassword([]byte(user.Password), COST)\n\tif err != nil {\n\t\treturn user, err\n\t}\n\tuser.Password = string(hashed_password)\n\n\t// save the user\n\tkey := datastore.NewIncompleteKey(c, \"Users\", nil)\n\t_, err = datastore.Put(c, key, &user)\n\n\tif err != nil {\n\t\treturn user, err\n\t}\n\n\treturn user, nil\n}", "func Register() gin.HandlerFunc {\n\treturn func(c *gin.Context) {\n\t\tvar userdb models.UsersDB\n\t\tclient := IntiateMongoConn() //init mongoDB connection\n\t\t_ = json.NewDecoder(c.Request.Body).Decode(&userdb)\n\t\t// check if username is alreadyused\n\t\talreadyUsed, err := checkAlreadyused(&userdb, client)\n\t\tif err != nil {\n\t\t\tif alreadyUsed {\n\t\t\t\tc.String(http.StatusNotAcceptable, err.Error())\n\n\t\t\t} else {\n\t\t\t\tlog.Fatal(err)\n\t\t\t\tc.String(http.StatusInternalServerError, err.Error())\n\n\t\t\t}\n\t\t}\n\t\tif !alreadyUsed {\n\t\t\t// add this user to database if username is not ever used\n\t\t\taddOneUser(&userdb, client)\n\t\t\tc.String(http.StatusOK, \"Successfully registered, your username:\"+userdb.Username+\", your password:\"+userdb.Password)\n\n\t\t}\n\n\t}\n\n}", "func (env *Env) Register(w http.ResponseWriter, r *http.Request) {\n\tdata := &RegisterRequest{}\n\tif err := render.Bind(r, data); err != nil {\n\t\trender.Render(w, r, ErrInvalidRequest(err))\n\t\treturn\n\t}\n\n\tif !emailRegexp.MatchString(data.Email) {\n\t\trender.Render(w, r, ErrRender(errors.New(\"invalid email\")))\n\t\treturn\n\t}\n\n\tpassword := data.User.Password\n\t_, err := env.userRepository.CreateNewUser(r.Context(), data.User)\n\tif err != nil {\n\t\trender.Render(w, r, ErrRender(err))\n\t\treturn\n\t}\n\n\tdata.User.Password = password\n\ttokenString, err := loginLogic(r.Context(), env.userRepository, data.User)\n\tif err != nil {\n\t\trender.Render(w, r, ErrUnauthorized(err))\n\t\treturn\n\t}\n\n\trender.JSON(w, r, tokenString)\n}", "func Register(w http.ResponseWriter, r *http.Request) {\n\tvar dataResource UserResource\n\t//Decode the incoming User json\n\n\terr := json.NewDecoder(r.Body).Decode(&dataResource)\n\tif err != nil {\n\t\tcommon.DisplayAppError(\n\t\t\tw,\n\t\t\terr,\n\t\t\t\"Invalid User data\",\n\t\t\t500,\n\t\t)\n\t\treturn\n\t}\n\tuser := &dataResource.Data\n\tcontext := NewContext()\n\tdefer context.Close()\n\n\tc := context.DbCollection(\"users\")\n\trepo := &data.UserRespository{c}\n\n\t//insert User document\n\trepo.CreateUser(user)\n\t//Clean-up the hashpassword to eliminate it from response\n\tuser.HashPassword = nil\n\n\tif j, err := json.Marshal(UserResource{Data: *user}); err != nil {\n\t\tcommon.DisplayAppError(\n\t\t\tw,\n\t\t\terr,\n\t\t\t\"An unexpected error has occurred\",\n\t\t\t500,\n\t\t)\n\t\treturn\n\t} else {\n\t\tw.Header().Set(\"Content-Type\", \"application/json\")\n\t\tw.WriteHeader(http.StatusCreated)\n\t\tw.Write(j)\n\t}\n\n}", "func SetupUser(mgr ctrl.Manager, o controller.Options) error {\n\tname := managed.ControllerName(svcapitypes.UserGroupKind)\n\topts := []option{\n\t\tfunc(e *external) {\n\t\t\tc := &custom{client: e.client, kube: e.kube, external: e}\n\t\t\te.isUpToDate = c.isUpToDate\n\t\t\te.preCreate = c.preCreate\n\t\t\te.postCreate = postCreate\n\t\t\te.preObserve = preObserve\n\t\t\te.preDelete = preDelete\n\t\t\te.postObserve = c.postObserve\n\t\t\te.preUpdate = c.preUpdate\n\t\t\te.postUpdate = c.postUpdate\n\t\t},\n\t}\n\n\tcps := 
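Editor's note: the gin RegisterUser entry just above pairs two concerns that are easy to pull out into helpers — rejecting a mismatched password confirmation and generating a verification token to email to the new account. A small stand-alone sketch of both; RandomToken in that entry is not shown, so crypto/rand is used here as an assumed equivalent, and the 32-byte length is an arbitrary choice.

package registersketch

import (
	"crypto/rand"
	"encoding/hex"
	"errors"
)

var ErrPasswordMismatch = errors.New("password and confirmation do not match")

// CheckConfirmation mirrors the confirmed_password comparison done before
// any hashing or database work.
func CheckConfirmation(password, confirmation string) error {
	if password != confirmation {
		return ErrPasswordMismatch
	}
	return nil
}

// NewVerificationToken returns a random hex token suitable for an account
// activation email ("please check your email" in the entry above).
func NewVerificationToken() (string, error) {
	buf := make([]byte, 32)
	if _, err := rand.Read(buf); err != nil {
		return "", err
	}
	return hex.EncodeToString(buf), nil
}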
[]managed.ConnectionPublisher{managed.NewAPISecretPublisher(mgr.GetClient(), mgr.GetScheme())}\n\tif o.Features.Enabled(features.EnableAlphaExternalSecretStores) {\n\t\tcps = append(cps, connection.NewDetailsManager(mgr.GetClient(), v1alpha1.StoreConfigGroupVersionKind))\n\t}\n\n\treturn ctrl.NewControllerManagedBy(mgr).\n\t\tNamed(name).\n\t\tWithOptions(o.ForControllerRuntime()).\n\t\tFor(&svcapitypes.User{}).\n\t\tComplete(managed.NewReconciler(mgr,\n\t\t\tresource.ManagedKind(svcapitypes.UserGroupVersionKind),\n\t\t\tmanaged.WithInitializers(managed.NewDefaultProviderConfig(mgr.GetClient())),\n\t\t\tmanaged.WithExternalConnecter(&connector{kube: mgr.GetClient(), opts: opts}),\n\t\t\tmanaged.WithPollInterval(o.PollInterval),\n\t\t\tmanaged.WithLogger(o.Logger.WithValues(\"controller\", name)),\n\t\t\tmanaged.WithRecorder(event.NewAPIRecorder(mgr.GetEventRecorderFor(name))),\n\t\t\tmanaged.WithConnectionPublishers(cps...)))\n}", "func (db *Database) RegisterUser(name string, passhash string, ip string) error {\n\treturn db.RegisterUserOwner(name, passhash, false, ip)\n}", "func (m *Manager) RegisterUser(username string, password string) bool {\n\t_, found := m.users[username]\n\tif !found {\n\t\tpwd := m.getSaltedHashedPassword(password)\n\t\tm.users[username] = credentials{username: username, password: pwd}\n\t\tlog.Printf(\"Registering: %s\", username)\n\t\treturn true\n\t}\n\tlog.Printf(\"User already exists: %s\", username)\n\treturn false\n}", "func Register(r * http.Request, response * APIResponse) {\n\tif AllowsRegister {\n\t\tif r.FormValue(\"username\") != \"\" && r.FormValue(\"password\") != \"\" && r.FormValue(\"name\") != \"\" {\n\t\t\tusername := r.FormValue(\"username\")\n\t\t\tpassword := r.FormValue(\"password\")\n\t\t\trealName := r.FormValue(\"name\")\n\t\t\tif len(password) > 5 && userNameIsValid(username) && nameIsValid(realName) {\n\t\t\t\tif !UserForUserNameExists(username) {\n\t\t\t\t\t//The password is acceptable, the username is untake and acceptable\n\t\t\t\t\t//Sign up user\n\t\t\t\t\tuser := User{}\n\t\t\t\t\tuser.Username = username\n\t\t\t\t\tuser.HashedPassword = hashString(password)\n\t\t\t\t\tuser.UserImageURL = \"userImages/default.png\"\n\t\t\t\t\tuser.RealName = realName\n\t\t\t\t\tAddUser(&user)\n\t\t\t\t\n\t\t\t\t\t//Log the user in\n\t\t\t\t\tLogin(r, response)\n\t\t\t\t} else {\n\t\t\t\t\tresponse.Message = \"Username already taken\"\n\t\t\t\t\te(\"API\", \"Username already taken\")\n\t\t\t\t\tresponse.SuccessCode = 400\n\t\t\t\t}\n\t\t\t} else {\n\t\t\t\tresponse.Message = \"Values do not meet requirements\"\n\t\t\t\te(\"API\", \"Password is too short or username is invalid\")\n\t\t\t\tresponse.SuccessCode = 400\n\t\t\t}\n\t\t} else {\n\t\t\tresponse.Message = \"More information required\"\n\t\t\te(\"API\", \"Couldn't register user - not enough detail\")\n\t\t\tresponse.SuccessCode = 400\n\t\t}\n\t} else {\n\t\tresponse.SuccessCode = 400\n\t\tresponse.Message = \"Server doesn't allow registration\"\n\t}\n}", "func RegisterHandler(w http.ResponseWriter, r *http.Request) {\n\tr.ParseForm()\n\tusername := r.PostFormValue(\"username\")\n\temail := r.PostFormValue(\"email\")\n\tpassword := r.PostFormValue(\"password\")\n\tuser, err := models.RegisterUser(username, email, password)\n\tif err != nil {\n\t\tlog.Print(err)\n\t} else {\n\t\tlog.Print(user)\n\t}\n}", "func RegisterUser(w http.ResponseWriter, r *http.Request) {\n\t//var dadosLogin = mux.Vars(r)\n\tname := r.FormValue(\"name\")\n\temail := r.FormValue(\"email\")\n\tuser := 
r.FormValue(\"usuario\")\n\tpass := r.FormValue(\"pass\")\n\n\tpass, _ = helpers.HashPassword(pass)\n\n\tsql := \"INSERT INTO users (nome, email, login, pass) VALUES (?, ?, ?, ?) \"\n\tstmt, err := cone.Db.Exec(sql, name, email, user, pass)\n\tif err != nil {\n\t\tlog.Fatal(err.Error())\n\t\treturn\n\t}\n\t_, errs := stmt.RowsAffected()\n\tif errs != nil {\n\t\tlog.Fatal(err.Error())\n\t\treturn\n\t}\n\thttp.Redirect(w, r, \"/\", 301)\n}", "func (u *User) Create(c echo.Context, req *schemago.ReqCreateUser) (*schemago.SUser, error) {\n\t// if err := u.rbac.AccountCreate(c, req.RoleID, req.CompanyID, req.LocationID); err != nil {\n\t// \treturn nil, err\n\t// }\n\treq.Password = u.sec.Hash(req.Password)\n\treturn u.udb.Create(u.db, u.ce, req)\n}", "func (AuthenticationController) Register(c *gin.Context) {\n\tvar registrationPayload forms.RegistrationForm\n\tif validationErr := c.BindJSON(&registrationPayload); validationErr != nil {\n\t\tutils.CreateError(c, http.StatusBadRequest, validationErr.Error())\n\t\treturn\n\t}\n\tif hashedPass, hashErr := utils.EncryptPassword(registrationPayload.Password); hashErr != nil {\n\t\tutils.CreateError(c, http.StatusInternalServerError, \"Failed to hash password.\")\n\t} else {\n\t\tregistrationPayload.Password = hashedPass\n\t\tuser, prismaErr := client.CreateUser(prisma.UserCreateInput{\n\t\t\tEmail: registrationPayload.Email,\n\t\t\tName: registrationPayload.Name,\n\t\t\tUsername: registrationPayload.Username,\n\t\t\tPassword: registrationPayload.Password,\n\t\t\tRole: prisma.RoleDefault,\n\t\t}).Exec(contextB)\n\n\t\tif prismaErr != nil {\n\t\t\tlog.Print(prismaErr)\n\t\t\tutils.CreateError(c, http.StatusNotAcceptable, \"Failed to save profile.\")\n\t\t\treturn\n\t\t}\n\t\t// setting session keys\n\t\tsession := sessions.Default(c)\n\t\tsession.Set(\"uuid\", user.ID)\n\t\tsession.Set(\"email\", user.Email)\n\t\tsession.Set(\"username\", user.Username)\n\t\tsession.Set(\"role\", string(user.Role))\n\n\t\tif sessionErr := session.Save(); sessionErr != nil {\n\t\t\tutils.CreateError(c, http.StatusInternalServerError, sessionErr.Error())\n\t\t\tc.Abort()\n\t\t\treturn\n\t\t}\n\t\tc.JSON(http.StatusOK, gin.H{\n\t\t\t\"name\": user.Name,\n\t\t\t\"username\": user.Username,\n\t\t\t\"role\": user.Role,\n\t\t})\n\t}\n}", "func (a *ApiDB) UserRegister(w http.ResponseWriter, r *http.Request) {\n\tw.Header().Add(\"Content-Type\", \"application/json\")\n\tp := MODELS.RequestRegister{}\n\terr1 := json.NewDecoder(r.Body).Decode(&p)\n\tif err1 != nil {\n\t\tio.WriteString(w, `{\"message\": \"wrong format!\"}`)\n\t\treturn\n\t}\n\n\t_, err := BUSINESS.Register(a.Db, p)\n\tif err == nil {\n\t\tio.WriteString(w, `{\n\t\t\t \t\"status\": 200,\n\t\t\t\t\"message\":\"Register success\",\n\t\t\t\t\"data\": {\n\t\t\t\t\t\"status\": 1\n\t\t\t\t}\n\t\t\t}`)\n\t} else {\n\t\tio.WriteString(w, `{\"message\":\"Register fail\"}`)\n\t}\n}", "func RegisterNewUser(user *model.RegisterUserRequest) error {\n\tdb, err := connectDB()\n\tif err != nil {\n\t\tlog.Println(err.Error())\n\t\treturn errors.New(\"Cannat connect to database\")\n\t}\n\n\tdefer db.Close()\n\tinsertStmt := `INSERT INTO users.members \n\t\t\t\t\t\t(p_token, username, firstname, lastname, email, phone) \n\t\t\t\t VALUES \n\t\t\t\t\t\t($1, $2, $3, $4, $5, $6);`\n\n\t_, err = db.Exec(insertStmt, user.PToken, user.UserName, user.FirstName, user.LastName, user.Email, user.Phone)\n\tif err != nil {\n\t\tlog.Println(err.Error())\n\t\treturn errors.New(\"Cannot register new user\")\n\t}\n\n\treturn nil\n}", "func 
RegisterUser(serverURI, username, password, email string) {\n\tregisterUserBody := struct {\n\t\tUsername string `json:\"username\"`\n\t\tPassword string `json:\"password\"`\n\t\tEmail string `json:\"email\"`\n\t\tConfirmPassword string `json:\"confirmPassword\"`\n\t}{\n\t\tusername,\n\t\tpassword,\n\t\temail,\n\t\tpassword,\n\t}\n\tdata, merr := json.Marshal(registerUserBody)\n\tif merr != nil {\n\t\tΩ(merr).ShouldNot(HaveOccurred())\n\t}\n\n\tres, err := http.Post(serverURI+\"/v3/user/auth/local/register\", \"application/json\", bytes.NewBuffer(data))\n\tΩ(err).ShouldNot(HaveOccurred())\n\tΩ(res.StatusCode).ShouldNot(BeNumerically(\">=\", 300))\n}", "func (auth *Authenticator) RegisterNewUser(username, email string) (User, error) {\n\tuser, err := auth.NewUser(username, base.GenerateRandomSecret(), base.Set{})\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\tuser.SetEmail(email)\n\terr = auth.Save(user)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\treturn user, err\n}", "func RegisterNewUser(dataNewUser NewUser) (User, error) {\n\tuser := User{}\n\n\tuser.Name = strings.TrimSpace(dataNewUser.Name)\n\tuser.LastName = strings.TrimSpace(dataNewUser.LastName)\n\tuser.Password = EncryptPassword(strings.TrimSpace(dataNewUser.Password))\n\tuser.Email = strings.TrimSpace(dataNewUser.Email)\n\tuser.Address = strings.TrimSpace(dataNewUser.Address)\n\tuser.City = \"BOGOTÁ\"\n\tuser.MobilePhone = dataNewUser.MobilePhone\n\tuser.Credential = strings.TrimSpace(dataNewUser.Credential)\n\tuser.ProfilePic = strings.TrimSpace(dataNewUser.ProfilePic)\n\tuser.Role = dataNewUser.Role\n\n\terr := shared.GetDb().Create(&user).Error\n\tif err != nil {\n\t\treturn user, err\n\t}\n\treturn user, nil\n}", "func (c *Client) RegisterUser(ctx context.Context, username, password, firstname, lastname, email, gender string) (string, error) {\n\tr, err := c.service.UserRegister(\n\t\tctx,\n\t\t&pb.UserRegisterRequest{\n\t\t\tRegister: &pb.Register{\n\t\t\t\tUsername: username,\n\t\t\t\tPassword: password,\n\t\t\t\tFirstname: firstname,\n\t\t\t\tLastname: lastname,\n\t\t\t\tGender: gender,\n\t\t\t\tStatus: \"\",\n\t\t\t},\n\t\t})\n\tif err != nil {\n\t\treturn \"\", nil\n\t}\n\tlog.Println(r.User)\n\treturn \"created\", nil\n}", "func RegisterUser(c echo.Context) error {\n\tuser := new(model.RegisterUserRequest)\n\tif err := validateRequest(c, user); err != nil {\n\t\treturn c.JSON(fasthttp.StatusBadRequest, model.NewErrorResponse(\"2002\", err.Error()))\n\t}\n\tif err := service.RegisterUser(user); err != nil {\n\t\treturn c.JSON(fasthttp.StatusInternalServerError, model.NewErrorResponse(\"2002\", err.Error()))\n\t}\n\treturn c.JSON(fasthttp.StatusOK, model.NewRegisterUserResponse(\"0\", \"Success\"))\n}", "func Register(c echo.Context) error {\n\n\tRegisterAttempt := types.RegisterAttempt{}\n\terr := c.Bind(&RegisterAttempt)\n\tif err != nil {\n\t\treturn c.JSON(http.StatusBadRequest, map[string]string{\"result\": \"error\", \"details\": \"Error binding register attempt.\"})\n\t}\n\n\tuserCreated, err := util.NewUser(RegisterAttempt.User, RegisterAttempt.Pass, RegisterAttempt.PassCheck)\n\tif err != nil {\n\t\tmsgUser := err.Error()\n\t\treturn c.JSON(http.StatusOK, msgUser)\n\t}\n\n\tif userCreated {\n\t\tmsgUser := fmt.Sprintf(\"User %s created!\", RegisterAttempt.User)\n\t\treturn c.String(http.StatusOK, msgUser)\n\t}\n\n\tmsgUser := fmt.Sprintf(\"User already exists or passwords don't match!\")\n\treturn c.String(http.StatusOK, msgUser)\n}", "func HandleUserRegister(context *gin.Context) {\n\n\tuserAcc := 
context.PostForm(\"user_acc\")\n\tuserAvatar := context.PostForm(\"user_avatar\")\n\tuserNickName := context.PostForm(\"user_nick_name\")\n\tuserPassword := context.PostForm(\"user_password\")\n\tuserPhone := context.PostForm(\"user_phone\")\n\tuserEmail := context.PostForm(\"user_email\")\n\tuserGender := context.PostForm(\"user_gender\")\n\tuserSign := context.PostForm(\"user_sign\")\n\n\tuserType := context.PostForm(\"user_type\")\n\tuserTypeInt, _ := strconv.Atoi(userType)\n\n\tif userAcc == \"\" || userNickName == \"\" || userPassword == \"\"{\n\t\tcontext.JSON(http.StatusBadRequest, gin.H{\n\t\t\t\"status\": \"invalid\",\n\t\t\t\"code\": http.StatusBadRequest,\n\t\t\t\"msg\": \"user_acc, user_nick_name, user_password must not be none\",\n\t\t\t\"data\": \"\",\n\t\t})\n\t}\n\tuser := models.User{\n\t\tUserAcc:userAcc,\n\t\tUserAvatar:userAvatar,\n\t\tUserNickName:userNickName,\n\t\tUserPassword:userPassword,\n\t\tUserPhone:userPhone,\n\t\tUserEmail:userEmail,\n\t\tUserGender:userGender,\n\t\tUserSign:userSign,\n\t\tUserType:models.UserType(userTypeInt),\n\t}\n\tuserTry := models.User{}\n\tif db.DB.Where(\"user_acc=?\", userAcc).First(&userTry).RecordNotFound(){\n\t\t// user not found, create it\n\t\tdb.DB.Create(&user)\n\t\tuAddr := utils.GenAddr(user.ID)\n\t\tuser.UserAddr = \"usr\" + uAddr\n\n\t\tlog.Infof(\"FUCK GenAddr: %s gened: %s\", user.UserAddr, uAddr)\n\t\tdb.DB.Save(&user)\n\n\t\t// should return a token to user, as well as login\n\t\tclaims := make(map[string]interface{})\n\t\tclaims[\"id\"] = user.ID\n\t\tclaims[\"msg\"] = \"hiding egg\"\n\t\tclaims[\"user_addr\"] = user.UserAddr\n\t\ttoken, _ := utils.Encrypt(claims)\n\t\tlog.Infof(\"Request new user: %s, it is new.\", user)\n\t\tdata := map[string]interface{}{\"token\": token, \"id\": user.ID, \"user_addr\": user.UserAddr}\n\t\tcontext.JSON(200, gin.H{\n\t\t\t\"status\": \"success\",\n\t\t\t\"code\": http.StatusOK,\n\t\t\t\"msg\": \"user register succeed.\",\n\t\t\t\"data\": data,\n\t\t})\n\t}else{\n\t\tlog.Info(\"user exist.\")\n\t\tcontext.JSON(200, gin.H{\n\t\t\t\"status\": \"conflict\",\n\t\t\t\"code\": http.StatusConflict,\n\t\t\t\"msg\": \"user already exist.\",\n\t\t\t\"data\": nil,\n\t\t})\n\t}\n}", "func Register(write http.ResponseWriter, request *http.Request) {\n\tvar object models.User\n\terr := json.NewDecoder(request.Body).Decode(&object)\n\tif err != nil {\n\t\thttp.Error(write, \"An error ocurred in user register. 
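Editor's note: HandleUserRegister above finishes by packing the new user's id and address into a claims map, signing it (utils.Encrypt), and returning the token in the response; other entries do the same with a JWT library or a server-side session. The sketch below shows only that signing/verification step, using the standard library (HMAC-SHA256 over a JSON payload). It is a simplified stand-in, not the utils.Encrypt implementation, and key management is left to the caller.

package registersketch

import (
	"crypto/hmac"
	"crypto/sha256"
	"encoding/base64"
	"encoding/json"
	"errors"
	"fmt"
	"strings"
)

// SignClaims produces a compact "payload.signature" token from an arbitrary
// claims map (user id, address, and so on).
func SignClaims(secret []byte, claims map[string]interface{}) (string, error) {
	payload, err := json.Marshal(claims)
	if err != nil {
		return "", err
	}
	mac := hmac.New(sha256.New, secret)
	mac.Write(payload)
	enc := base64.RawURLEncoding
	return fmt.Sprintf("%s.%s", enc.EncodeToString(payload), enc.EncodeToString(mac.Sum(nil))), nil
}

// VerifyClaims checks the signature and returns the embedded claims.
func VerifyClaims(secret []byte, token string) (map[string]interface{}, error) {
	payloadB64, sigB64, ok := strings.Cut(token, ".")
	if !ok {
		return nil, errors.New("malformed token")
	}
	enc := base64.RawURLEncoding
	payload, err := enc.DecodeString(payloadB64)
	if err != nil {
		return nil, err
	}
	sig, err := enc.DecodeString(sigB64)
	if err != nil {
		return nil, err
	}
	mac := hmac.New(sha256.New, secret)
	mac.Write(payload)
	if !hmac.Equal(sig, mac.Sum(nil)) {
		return nil, errors.New("invalid signature")
	}
	var claims map[string]interface{}
	if err := json.Unmarshal(payload, &claims); err != nil {
		return nil, err
	}
	return claims, nil
}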
\"+err.Error(), 400)\n\t\treturn\n\t}\n\n\t//Validations\n\tif len(object.Email) == 0 {\n\t\thttp.Error(write, \"Email is required.\", 400)\n\t\treturn\n\t}\n\tif len(object.Password) < 6 {\n\t\thttp.Error(write, \"Password invalid, must be at least 6 characters.\", 400)\n\t\treturn\n\t}\n\n\t_, userFounded, _ := bd.CheckExistUser(object.Email)\n\n\tif userFounded {\n\t\thttp.Error(write, \"The email has already been registered.\", 400)\n\t\treturn\n\t}\n\n\t_, status, err := bd.InsertRegister(object)\n\n\tif err != nil {\n\t\thttp.Error(write, \"An error occurred in insert register user.\"+err.Error(), 400)\n\t\treturn\n\t}\n\n\tif !status {\n\t\thttp.Error(write, \"Not insert user register.\"+err.Error(), 400)\n\t\treturn\n\t}\n\n\twrite.WriteHeader(http.StatusCreated)\n}", "func RegisterlUser(username, password, department string) error {\n\tsdk := fabsdk.FabricSDK{}\n\tmspClient, err := msp.New(sdk.Context(), msp.WithOrg(\"Org1\"))\n\tif err != nil {\n\t\tfmt.Printf(\"Failed to create msp client: %s\\n\", err)\n\t}\n\trequest := &msp.RegistrationRequest{\n\t\tName: username,\n\t\tType: \"user\",\n\t\tAffiliation: department,\n\t\tSecret: password,\n\t}\n\n\tsecret, err := mspClient.Register(request)\n\tif err != nil {\n\t\tfmt.Printf(\"register %s [%s]\\n\", username, err)\n\t\treturn err\n\t}\n\tfmt.Printf(\"register %s successfully,with password %s\\n\", username, secret)\n\treturn nil\n}", "func (m Users) Register(user User) error {\n\tif !isValidPass(user.Password) {\n\t\treturn ErrInvalidPass\n\t}\n\tif !validEmail.MatchString(user.Email) {\n\t\treturn ErrInvalidEmail\n\t}\n\thash, err := hashPassword(user.Password)\n\tif err != nil {\n\t\treturn err\n\t}\n\tsqlStatement := `INSERT INTO users (email, password) VALUES($1, $2) RETURNING id, created_at;`\n\t_, err = m.DB.Exec(sqlStatement, user.Email, hash)\n\tif err, ok := err.(*pq.Error); ok {\n\t\tif err.Code == \"23505\" {\n\t\t\treturn ErrUserAlreadyExist\n\t\t}\n\t}\n\n\treturn err\n}", "func init() {\n\tuserFields := schema.User{}.Fields()\n\t_ = userFields\n\t// userDescLogin is the schema descriptor for login field.\n\tuserDescLogin := userFields[0].Descriptor()\n\t// user.LoginValidator is a validator for the \"login\" field. It is called by the builders before save.\n\tuser.LoginValidator = userDescLogin.Validators[0].(func(string) error)\n\t// userDescName is the schema descriptor for name field.\n\tuserDescName := userFields[1].Descriptor()\n\t// user.NameValidator is a validator for the \"name\" field. It is called by the builders before save.\n\tuser.NameValidator = userDescName.Validators[0].(func(string) error)\n\t// userDescPassword is the schema descriptor for password field.\n\tuserDescPassword := userFields[2].Descriptor()\n\t// user.PasswordValidator is a validator for the \"password\" field. 
It is called by the builders before save.\n\tuser.PasswordValidator = userDescPassword.Validators[0].(func(string) error)\n\t// userDescIsActive is the schema descriptor for isActive field.\n\tuserDescIsActive := userFields[3].Descriptor()\n\t// user.DefaultIsActive holds the default value on creation for the isActive field.\n\tuser.DefaultIsActive = userDescIsActive.Default.(bool)\n}", "func CreateUserHandler(w http.ResponseWriter, req *http.Request) {\n // Validate internal token.\n if internalToken := req.Header.Get(app.Config.AuthHeaderName); internalToken != app.Config.RestApiToken {\n respond.Error(w, errmsg.Unauthorized())\n return\n }\n\n // Parse & validate payload.\n var pl payload.CreateUserPayload\n\n if !pl.Validate(req) {\n respond.Error(w, errmsg.InvalidPayload())\n return\n }\n\n // Check if the executor is using the USER_CREATION_HASH to create this user.\n usingUserCreationPw := pl.ExecutorEmail == \"\" && app.Config.UserCreationHash != \"\" &&\n crypt.VerifySha256(pl.ExecutorPassword, app.Config.UserCreationHash)\n\n // If not using USER_CREATION_HASH for auth, verify executor exists using email/pw.\n if !usingUserCreationPw {\n // Get executor user by email.\n executorUser, err := usersvc.FromEmail(pl.ExecutorEmail)\n\n if err != nil {\n app.Log.Errorln(err.Error())\n respond.Error(w, errmsg.UserNotFound())\n return\n }\n\n // Ensure executor user's password is correct.\n if !crypt.VerifyBcrypt(pl.ExecutorPassword, executorUser.HashedPw) {\n app.Log.Errorln(\"error creating new User: invalid executor user password\")\n respond.Error(w, errmsg.Unauthorized())\n return\n }\n\n // Only admin users can create other users.\n if !executorUser.Admin {\n app.Log.Errorln(\"error creating new User: executor user must be an admin\")\n respond.Error(w, errmsg.Unauthorized())\n return\n }\n }\n\n // Hash provided user password.\n hashedPw, err := crypt.BcryptHash(pl.NewPassword)\n\n if err != nil {\n app.Log.Errorf(\"error creating new User: bcrypt password hash failed with %s\\n\", err.Error())\n respond.Error(w, errmsg.ISE())\n return\n }\n\n // Create new User.\n newUser, err := usersvc.Create(pl.NewEmail, hashedPw, pl.Admin)\n\n if err != nil {\n app.Log.Errorln(err.Error())\n pqError, ok := err.(*pq.Error)\n\n if ok && pqError.Code.Name() == \"unique_violation\" {\n respond.Error(w, errmsg.EmailNotAvailable())\n } else {\n respond.Error(w, errmsg.UserCreationFailed())\n }\n\n return\n }\n\n // Create response payload and respond.\n respData := successmsg.UserCreationSuccess\n respData[\"uid\"] = newUser.Uid\n\n respond.Created(w, respData)\n}", "func (dbHandler *DbHandler) CreateUser(user user.User) (err error) {\n\tpsw, _ := bcrypt.GenerateFromPassword([]byte(user.Password), bcrypt.DefaultCost)\n\tuser.Password = string(psw)\n\terr = dbHandler.handlers[UserCollection].GetCollection().Insert(user)\n\treturn\n}", "func (a *API) addUser(w http.ResponseWriter, req *http.Request) {\n\t// NOTE(kiennt): Who can signup (create new user)?\n\tif err := req.ParseForm(); err != nil {\n\t\ta.respondError(w, apiError{\n\t\t\tcode: http.StatusBadRequest,\n\t\t\terr: err,\n\t\t})\n\t\treturn\n\t}\n\tusername := req.Form.Get(\"username\")\n\tpassword := req.Form.Get(\"password\")\n\tif username == \"\" || password == \"\" {\n\t\ta.respondError(w, apiError{\n\t\t\tcode: http.StatusBadRequest,\n\t\t\terr: errors.New(\"Incorrect sign up form\"),\n\t\t})\n\t\treturn\n\t}\n\n\t// Check to see if the user is already taken\n\tpath := common.Path(model.DefaultUsersPrefix, common.Hash(username, 
crypto.MD5))\n\tresp, err := a.etcdcli.DoGet(path)\n\tif err != nil {\n\t\ta.respondError(w, apiError{\n\t\t\tcode: http.StatusInternalServerError,\n\t\t\terr: err,\n\t\t})\n\t\treturn\n\t}\n\tif len(resp.Kvs) != 0 {\n\t\ta.respondError(w, apiError{\n\t\t\tcode: http.StatusBadRequest,\n\t\t\terr: errors.New(\"The username is already taken\"),\n\t\t})\n\t\treturn\n\t}\n\t// Do not store the plain text password, encrypt it!\n\thashed, err := common.GenerateBcryptHash(password, config.Get().PasswordHashingCost)\n\tif err != nil {\n\t\ta.respondError(w, apiError{\n\t\t\tcode: http.StatusInternalServerError,\n\t\t\terr: errors.Wrap(err, \"Something went wrong\"),\n\t\t})\n\t\treturn\n\t}\n\n\tuser := &model.User{\n\t\tUsername: username,\n\t\tPassword: hashed,\n\t}\n\t_ = user.Validate()\n\tr, _ := json.Marshal(&user)\n\t_, err = a.etcdcli.DoPut(path, string(r))\n\tif err != nil {\n\t\ta.respondError(w, apiError{\n\t\t\tcode: http.StatusInternalServerError,\n\t\t\terr: errors.Wrap(err, \"Unable to put a key-value pair into etcd\"),\n\t\t})\n\t\treturn\n\t}\n\t// Add user permission to view clouds\n\tif ok, err := a.policyEngine.AddPolicy(username, \"/clouds\", \"GET\"); !ok || err != nil {\n\t\ta.respondError(w, apiError{\n\t\t\tcode: http.StatusInternalServerError,\n\t\t\terr: errors.Wrap(err, \"Unable to add view cloud permission\"),\n\t\t})\n\t\treturn\n\t}\n\ta.respondSuccess(w, http.StatusOK, nil)\n}", "func Register(w http.ResponseWriter, r *http.Request) {\n\n\tmessages := make([]string, 0)\n\ttype MultiErrorMessages struct {\n\t\tMessages []string\n\t}\n\n\t//Get Formdata\n\tusername := r.FormValue(\"username\")\n\tpassword := r.FormValue(\"password\")\n\temailadress := r.FormValue(\"email\")\n\trepeatPassword := r.FormValue(\"repeatpassword\")\n\n\t//Check Password\n\tif password != repeatPassword {\n\n\t\t//Add error Message\n\t\tmessages = append(messages, \"Passwort ist nicht richtig wiedeholt worden.\")\n\n\t}\n\n\t//Check Email\n\temail, err := mail.ParseAddress(emailadress)\n\tif err != nil || !strings.Contains(email.Address, \".\") {\n\n\t\t//Add error Message\n\t\tmessages = append(messages, \"Dies ist keine gültige Emailadresse.\")\n\n\t}\n\n\t//Fill Model\n\tuser := model.User{}\n\tuser.Name = username\n\tuser.Password = password\n\tuser.Email = emailadress\n\tuser.Type = \"User\"\n\n\t//Try and check Creating User\n\terr = user.CreateUser()\n\tif err != nil {\n\n\t\t//Write Data\n\t\tmessages = append(messages, err.Error())\n\n\t}\n\n\t//Check if any Error Message was assembled\n\tif len(messages) != 0 {\n\n\t\tresponseModel := MultiErrorMessages{\n\t\t\tMessages: messages,\n\t\t}\n\n\t\tresponseJSON, err := json.Marshal(responseModel)\n\t\tif err != nil {\n\n\t\t\tw.WriteHeader(http.StatusInternalServerError)\n\t\t\tw.Write([]byte(err.Error()))\n\n\t\t}\n\n\t\tw.Header().Set(\"Content-Type\", \"application/json; charset=UTF-8\")\n\t\tw.WriteHeader(http.StatusConflict)\n\t\tw.Write(responseJSON)\n\t\treturn\n\t}\n\n\t//Hash Username\n\tmd5HashInBytes := md5.Sum([]byte(user.Name))\n\tmd5HashedUsername := hex.EncodeToString(md5HashInBytes[:])\n\n\t//Create Session\n\tsession, _ := store.Get(r, \"session\")\n\tsession.Values[\"authenticated\"] = true\n\tsession.Values[\"username\"] = username\n\tsession.Values[\"hashedusername\"] = md5HashedUsername\n\tsession.Save(r, w)\n\n\t//Write Respone\n\thttp.Redirect(w, r, \"/users?action=userdata\", http.StatusFound)\n}", "func (r *mutationResolver) CreateUserss(ctx context.Context, input *model.RegisterInput) (*model.Register, error) 
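Editor's note: input validation across these entries ranges from a bare length check to mail.ParseAddress with an extra dot test (the handler just above) and a hand-rolled email regexp. The helper below collects the email and password-strength checks in one place; the specific rules (minimum 6 characters, dot required in the parsed address) are copied from handlers in this list, not a general recommendation.

package registersketch

import (
	"errors"
	"net/mail"
	"strings"
	"unicode/utf8"
)

var (
	ErrInvalidEmail = errors.New("invalid email address")
	ErrWeakPassword = errors.New("password must be at least 6 characters")
)

// ValidateCredentials runs the same pre-insert checks several handlers above
// perform inline: a parseable email containing a dot and a minimum password length.
func ValidateCredentials(email, password string) error {
	addr, err := mail.ParseAddress(strings.TrimSpace(email))
	if err != nil || !strings.Contains(addr.Address, ".") {
		return ErrInvalidEmail
	}
	if utf8.RuneCountInString(password) < 6 {
		return ErrWeakPassword
	}
	return nil
}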
{\n\tuser := &model.Register{\n\t\tID: fmt.Sprintf(\"T%d\", rand.Int()),\n\t\tName: input.Name,\n\t\tLastname: input.Lastname,\n\t\tPassword: input.Password,\n\t}\n\tr.user = append(r.user, user)\n\treturn user, nil\n}", "func Register(c *gin.Context) {\n\tdb := c.MustGet(\"db\").(*gorm.DB)\n\n\tvar input models.CreateUserInput\n\tif err := c.ShouldBindJSON(&input); err != nil {\n\t\tc.JSON(http.StatusBadRequest, gin.H{\"error\": err.Error(), \"message\": \"signUp\", \"status\": false})\n\t\treturn\n\t}\n\t//ensure unique\n\tvar user models.User\n\tif err := db.Where(\"email = ?\", input.Email).First(&user).Error; err == nil {\n\t\tc.JSON(http.StatusBadRequest, gin.H{\"error\": \"Email Taken!\", \"message\": \"signUp\", \"status\": false})\n\t\treturn\n\t}\n\t//create user\n\thashedPassword, _ := bcrypt.GenerateFromPassword([]byte(input.Password), bcrypt.DefaultCost)\n\thashPass := string(hashedPassword)\n\ttk := models.Token{Email: input.Email}\n\ttoken := jwt.NewWithClaims(jwt.GetSigningMethod(\"HS256\"), tk)\n\ttokenString, _ := token.SignedString([]byte(os.Getenv(\"token_password\")))\n\tuser2 := models.User{UserName: input.UserName, Email: input.Email, Password: hashPass, UserType: input.UserType, Token: tokenString}\n\tdb.Create(&user2)\n\tc.JSON(http.StatusOK, gin.H{\"user\": user2, \"message\": \"signUp\", \"status\": true})\n}", "func (s Server) RegisterUser(ctx context.Context, data *userRPC.RegisterRequest) (*userRPC.LoginResponse, error) {\n\tuserID, url, tmpToken, err := s.service.CreateNewAccount(ctx, registerRequestToAccount(data), data.GetPassword())\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\n\treturn &userRPC.LoginResponse{\n\t\tStatus: userRPC.Status_NOT_ACTIVATED,\n\t\tToken: tmpToken,\n\t\tURL: url,\n\t\tUserId: userID,\n\t}, nil\n}", "func (rbac *RBAC) RegisterUser(u User) bool {\n\trbac.mutex.Lock()\n\tdefer rbac.mutex.Unlock()\n\n\t_, ok := rbac.registeredUsers[u]\n\tif ok {\n\t\treturn false\n\t}\n\trbac.registeredUsers[u] = struct{}{}\n\treturn true\n}", "func AddUser(w http.ResponseWriter, r *http.Request, db *gorp.DbMap, c *businesscontext.Ctx) error {\n\t//returns forbidden if LDAP mode is activated\n\tif _, ldap := router.authDriver.(*auth.LDAPClient); ldap {\n\t\treturn sdk.ErrForbidden\n\t}\n\n\tcreateUserRequest := sdk.UserAPIRequest{}\n\tif err := UnmarshalBody(r, &createUserRequest); err != nil {\n\t\treturn err\n\t}\n\n\tif createUserRequest.User.Username == \"\" {\n\t\treturn sdk.WrapError(sdk.ErrInvalidUsername, \"AddUser: Empty username is invalid\")\n\t}\n\n\tif !user.IsValidEmail(createUserRequest.User.Email) {\n\t\treturn sdk.WrapError(sdk.ErrInvalidEmail, \"AddUser: Email address %s is not valid\", createUserRequest.User.Email)\n\t}\n\n\tu := createUserRequest.User\n\tu.Origin = \"local\"\n\n\t// Check that user does not already exists\n\tquery := `SELECT * FROM \"user\" WHERE username = $1`\n\trows, err := db.Query(query, u.Username)\n\tif err != nil {\n\t\treturn sdk.WrapError(err, \"AddUsers: Cannot check if user %s exist\", u.Username)\n\t}\n\tdefer rows.Close()\n\tif rows.Next() {\n\t\treturn sdk.WrapError(sdk.ErrUserConflict, \"AddUser: User %s already exists\", u.Username)\n\t}\n\n\ttokenVerify, hashedToken, errg := user.GeneratePassword()\n\tif errg != nil {\n\t\treturn sdk.WrapError(errg, \"AddUser: Error while generate Token Verify for new user\")\n\t}\n\n\tauth := sdk.NewAuth(hashedToken)\n\n\tnbUsers, errc := user.CountUser(db)\n\tif errc != nil {\n\t\treturn sdk.WrapError(errc, \"AddUser: Cannot count user\")\n\t}\n\tif nbUsers == 
0 {\n\t\tu.Admin = true\n\t} else {\n\t\tu.Admin = false\n\t}\n\n\tif err := user.InsertUser(db, &u, auth); err != nil {\n\t\treturn sdk.WrapError(err, \"AddUser: Cannot insert user\")\n\t}\n\n\tgo mail.SendMailVerifyToken(createUserRequest.User.Email, createUserRequest.User.Username, tokenVerify, createUserRequest.Callback)\n\n\t// If it's the first user, add him to shared.infra group\n\tif nbUsers == 0 {\n\t\tif err := group.AddAdminInGlobalGroup(db, u.ID); err != nil {\n\t\t\treturn sdk.WrapError(err, \"AddUser: Cannot add user in global group\")\n\t\t}\n\t}\n\n\treturn WriteJSON(w, r, u, http.StatusCreated)\n}", "func (a *App) CreateUser(w http.ResponseWriter, r *http.Request) {\n\thandler.CreateUser(a.DB, w, r)\n}", "func CreateUser(response http.ResponseWriter, request *http.Request) {\n\n\t\n\t\trequest.ParseForm()\n\t\tdecoder := json.NewDecoder(request.Body)\n\t\tvar newUser User\n\t\t\n\t\terr := decoder.Decode(&newUser)\n\t\tif err != nil {\n\t\t\tpanic(err)\n\t\t}\n\t\t\n newUser.Password=hashAndSalt([]byte(newUser.Password))\n\t\t\n\t\tinsertUser(newUser)\n\t\n}", "func CreateUser(db *mongo.Client, w http.ResponseWriter, r *http.Request) {\n\tuser, err := parseUser(r)\n\tif err != nil {\n\t\tlog.Fatal(err)\n\t}\n\tuser.Password = encryptPassword([]byte(user.Password))\n\t_, err = db.Database(\"charon\").Collection(\"users\").InsertOne(context.TODO(), user)\n\tif err != nil {\n\t\tlog.Fatal(err)\n\t}\n\tio.WriteString(w, \"User successfully added.\")\n}", "func (u *UserService) Register(ctx context.Context, in *userpbgw.RegisterRequest) (*userpbgw.Response, error) {\n\tisExisted, err := user.CheckExistingEmail(in.Email)\n\tif err != nil {\n\t\treturn &userpbgw.Response{\n\t\t\tError: 2222,\n\t\t\tMessage: fmt.Sprintf(\"Error: %s\", err),\n\t\t}, nil\n\t}\n\tif isExisted {\n\t\treturn &userpbgw.Response{\n\t\t\tError: 2222,\n\t\t\tMessage: \"Given email is existing\",\n\t\t}, nil\n\t}\n\tnewuser := user.User{\n\t\tFullname: in.Fullname,\n\t\tEmail: in.Email,\n\t\tPassword: in.Password,\n\t}\n\terr = user.Insert(&newuser)\n\tif err != nil {\n\t\treturn &userpbgw.Response{\n\t\t\tError: 2222,\n\t\t\tMessage: \"Register Error\",\n\t\t}, nil\n\t}\n\treturn &userpbgw.Response{\n\t\tError: 0,\n\t\tMessage: \"Register Sucessfull\",\n\t}, nil\n}", "func InitUserController() {\n\tmanager = managers.NewUserManager()\n}", "func (c *Client) CreateUser(ctx context.Context, user, pass, role string) (ok bool, err error) {\n\tok, metadata, err := bmc.CreateUserFromInterfaces(ctx, user, pass, role, c.Registry.GetDriverInterfaces())\n\tc.setMetadata(metadata)\n\treturn ok, err\n}", "func (cs *UserSetup) Execute() (execError error) {\n\tcs.wg.Add(1)\n\tdefer func() {\n\t\tcs.err = execError\n\t\tcs.wg.Done()\n\t}()\n\n\t// check if we are root\n\tif os.Geteuid() != 0 {\n\t\treturn fmt.Errorf(\"Not running as root, cannot setup user %s\", cs.config.Name())\n\t}\n\n\tconf := cs.config\n\tif conf.Container == \"\" {\n\t\treturn fmt.Errorf(\"User %s must have container specified.\", conf.Name())\n\t}\n\n\tconf.Container = ensureSlashPrefix(conf.Container)\n\tif !cs.waiter.CheckHasContainer(conf.Container) {\n\t\treturn fmt.Errorf(\"User %s: no such container: %s\", conf.Name(), conf.Container)\n\t}\n\n\tle := log.WithField(\"user\", conf.Name())\n\tshellPath, err := pathToSkiffCore()\n\tif err != nil {\n\t\treturn err\n\t}\n\n\t// ensure only one routine managing users at a time\n\teuser, err := func() (*user.User, error) {\n\t\tglobalCreateHostUserMtx.Lock()\n\t\tdefer 
globalCreateHostUserMtx.Unlock()\n\n\t\teuser, eusererr := user.Lookup(conf.Name())\n\t\tif eusererr != nil {\n\t\t\tif _, ok := eusererr.(user.UnknownUserError); !ok {\n\t\t\t\treturn nil, eusererr\n\t\t\t}\n\t\t}\n\n\t\tif euser == nil {\n\t\t\tif !cs.create {\n\t\t\t\treturn nil, fmt.Errorf(\"User %s: not found, and create-users is not enabled.\", conf.Name())\n\t\t\t}\n\n\t\t\t// attempt to create the user\n\t\t\tle.Debug(\"Creating user\")\n\t\t\terr = execCmd(\n\t\t\t\t\"adduser\",\n\t\t\t\t\"-G\",\n\t\t\t\t\"docker\",\n\t\t\t\t\"-D\",\n\t\t\t\t//\"-c\",\n\t\t\t\t// fmt.Sprintf(\"Skiff-Core user %s\", cs.config.Name()),\n\t\t\t\t// \"-m\",\n\t\t\t\t\"-s\",\n\t\t\t\tshellPath,\n\t\t\t\tcs.config.Name(),\n\t\t\t)\n\t\t\tif err != nil {\n\t\t\t\treturn nil, err\n\t\t\t}\n\t\t\teuser, err = user.Lookup(cs.config.Name())\n\t\t\tif err != nil {\n\t\t\t\treturn nil, err\n\t\t\t}\n\t\t} else {\n\t\t\t// Set the shell for the user\n\t\t\tle.WithField(\"path\", shellPath).Debug(\"Setting shell\")\n\t\t\tif err := execCmd(\"chsh\", \"-s\", shellPath, cs.config.Name()); err != nil {\n\t\t\t\treturn nil, err\n\t\t\t}\n\t\t}\n\t\treturn euser, nil\n\t}()\n\tif err != nil {\n\t\treturn err\n\t}\n\n\tuid, err := strconv.Atoi(euser.Uid)\n\tif err != nil {\n\t\treturn err\n\t}\n\tgid, err := strconv.Atoi(euser.Gid)\n\tif err != nil {\n\t\treturn err\n\t}\n\n\t// Set password\n\tvar nextPassword string\n\tvar allowEmptyPassword bool\n\tvar lock bool\n\tif cs.config.Auth != nil {\n\t\tnextPassword = cs.config.Auth.Password\n\t\tallowEmptyPassword = cs.config.Auth.AllowEmptyPassword\n\t\tlock = cs.config.Auth.Locked\n\t}\n\n\tif lock {\n\t\tle.Debug(\"Locking user\")\n\t\tif err := execCmd(\"passwd\", \"-l\", cs.config.Name()); err != nil {\n\t\t\tle.WithError(err).Warn(\"error while locking user\")\n\t\t\treturn err\n\t\t}\n\t} else {\n\t\tif nextPassword == \"\" && !allowEmptyPassword {\n\t\t\tle.Debug(\"Setting password to a long random value due to AllowEmptyPassword=false\")\n\t\t\tnextPassword = randomPassword()\n\t\t}\n\t\tif nextPassword == \"\" {\n\t\t\tle.Debug(\"Disabling password for user (setting to empty password)\")\n\t\t\tif err := execCmd(\"passwd\", \"-d\", cs.config.Name()); err != nil {\n\t\t\t\tle.WithError(err).Warn(\"error while unsetting user password\")\n\t\t\t\t// return err\n\t\t\t}\n\t\t} else {\n\t\t\tle.Debug(\"Setting password\")\n\t\t\tpasswd := strings.Replace(nextPassword, \"\\n\", \"\", -1)\n\t\t\tpasswordSet := strings.NewReader(fmt.Sprintf(\"%s\\n%s\\n\", passwd, passwd))\n\t\t\tcmd := exec.Command(\"passwd\", cs.config.Name())\n\t\t\tcmd.Stdin = passwordSet\n\t\t\tif err := cmd.Run(); err != nil {\n\t\t\t\treturn err\n\t\t\t}\n\t\t}\n\t}\n\n\tle.Debug(\"Setting up SSH keys\")\n\tsshDir := path.Join(euser.HomeDir, \".ssh\")\n\tauthorizedKeysPath := path.Join(sshDir, \"authorized_keys\")\n\tif _, err := os.Stat(euser.HomeDir); os.IsNotExist(err) {\n\t\tif err := os.MkdirAll(euser.HomeDir, 0755); err != nil {\n\t\t\treturn err\n\t\t}\n\t\tif err := os.Chown(euser.HomeDir, uid, gid); err != nil {\n\t\t\treturn err\n\t\t}\n\t}\n\tif _, err := os.Stat(sshDir); os.IsNotExist(err) {\n\t\tif err := os.MkdirAll(sshDir, 0700); err != nil {\n\t\t\treturn err\n\t\t}\n\t}\n\tif err := os.Chmod(sshDir, 0700); err != nil {\n\t\treturn err\n\t}\n\tif err := os.Chown(sshDir, uid, gid); err != nil {\n\t\treturn err\n\t}\n\n\tf, err := os.OpenFile(authorizedKeysPath, os.O_WRONLY|os.O_CREATE|os.O_TRUNC, 0600)\n\tif err != nil {\n\t\treturn err\n\t}\n\tdefer f.Close()\n\n\tauthConf := 
cs.config.Auth\n\tif authConf != nil {\n\t\tif authConf.CopyRootKeys {\n\t\t\trkf, err := os.Open(\"/root/.ssh/authorized_keys\")\n\t\t\tif err != nil {\n\t\t\t\treturn err\n\t\t\t}\n\t\t\t_, err = io.Copy(f, rkf)\n\t\t\trkf.Close()\n\t\t\tif err != nil {\n\t\t\t\treturn err\n\t\t\t}\n\t\t\tf.WriteString(\"\\n\")\n\t\t}\n\t\tfor _, key := range authConf.SSHKeys {\n\t\t\tf.WriteString(key)\n\t\t\tf.WriteString(\"\\n\")\n\t\t}\n\t}\n\n\tf.Sync()\n\tf.Close()\n\tif err := os.Chown(authorizedKeysPath, uid, gid); err != nil {\n\t\treturn err\n\t}\n\n\tsetupPath := path.Join(euser.HomeDir, config.UserLogFile)\n\tlogFile, err := os.OpenFile(setupPath, os.O_TRUNC|os.O_APPEND|os.O_CREATE|os.O_WRONLY, 0644)\n\tif err != nil {\n\t\treturn err\n\t}\n\tdefer logFile.Close()\n\tlogFile.Sync()\n\tlogFile.Chown(uid, gid)\n\n\tcontainerId, err := cs.waiter.WaitForContainer(cs.config.Container, logFile)\n\tif err != nil {\n\t\treturn err\n\t}\n\n\tif conf.ContainerUser != \"\" && conf.CreateContainerUser {\n\t\tglobalCreateContainerUserMtx.Lock()\n\t\t// Check if user exists.\n\t\tvar outp bytes.Buffer\n\t\t_ = cs.waiter.ExecCmdContainer(\n\t\t\tcontainerId,\n\t\t\t\"root\",\n\t\t\tnil, nil, &outp, // catch stderr only\n\t\t\t\"id\", conf.ContainerUser,\n\t\t)\n\t\terrStr := strings.TrimSpace(outp.String())\n\t\tif strings.HasSuffix(errStr, \"no such user\") {\n\t\t\tule := le.\n\t\t\t\tWithField(\"container-user\", conf.ContainerUser).\n\t\t\t\tWithField(\"container-id\", containerId)\n\t\t\tule.Debug(\"Creating container user...\")\n\t\t\terr = cs.waiter.ExecCmdContainer(\n\t\t\t\tcontainerId, \"root\",\n\t\t\t\tnil, os.Stderr, os.Stderr,\n\t\t\t\t\"useradd\", conf.ContainerUser,\n\t\t\t)\n\t\t\tif err != nil {\n\t\t\t\tule.\n\t\t\t\t\tWithError(err).\n\t\t\t\t\tWarn(\"Unable to create container user\")\n\t\t\t}\n\t\t}\n\n\t\tglobalCreateContainerUserMtx.Unlock()\n\t}\n\n\tuserConfPath := path.Join(euser.HomeDir, config.UserConfigFile)\n\tle.WithField(\"path\", userConfPath).Debug(\"Writing user config...\")\n\tuserConf := cs.config.ToConfigUserShell(containerId)\n\tuserConfData, err := userConf.Marshal()\n\tif err != nil {\n\t\treturn err\n\t}\n\n\tuserConfFile, err := os.OpenFile(userConfPath, os.O_CREATE|os.O_TRUNC|os.O_WRONLY, 0640)\n\tif err != nil {\n\t\treturn err\n\t}\n\tif _, err := userConfFile.Write(userConfData); err != nil {\n\t\treturn err\n\t}\n\tuserConfFile.Close()\n\n\treturn os.Chown(userConfPath, uid, gid)\n}", "func init() {\n\torm.RegisterModel(new(User))\n}", "func Register(w http.ResponseWriter, r *http.Request) {\n\tvar t models.User\n\terr := json.NewDecoder(r.Body).Decode((&t))\n\tif err != nil {\n\t\thttp.Error(w, \"Error en los datos recibidos \"+err.Error(), 400)\n\t\treturn\n\t}\n\tif len(t.Email) == 0 {\n\t\thttp.Error(w, \"El email es requerido\", 400)\n\t\treturn\n\t}\n\tif len(t.Password) < 6 {\n\t\thttp.Error(w, \"El password tiene que tener un mínimo de 6 caracteres\", 400)\n\t\treturn\n\t}\n\n\t_, found, _ := bd.CheckUserExist(t.Email)\n\n\tif found {\n\t\thttp.Error(w, \"Usuario ya existe\", 400)\n\t\treturn\n\t}\n\n\t_, status, err := bd.InsertRegister(t)\n\tif err != nil {\n\t\thttp.Error(w, \"Ocurrió un error al momento de registrar usuario\"+err.Error(), 400)\n\t\treturn\n\t}\n\n\tif !status {\n\t\thttp.Error(w, \"Ocurrió un error al momento de registrar usuario\", 400)\n\t\treturn\n\t}\n\n\tw.WriteHeader(http.StatusCreated)\n\n}", "func setupUserWithPIN(ctx, ctxForCleanUp context.Context, userName string, cmdRunner *hwsecremote.CmdRunnerRemote, helper 
*hwsecremote.CmdHelperRemote, userParam pinWeaverWithAuthAPIParam) error {\n\tcryptohomeHelper := helper.CryptohomeClient()\n\n\t// Start an Auth session and get an authSessionID.\n\t_, authSessionID, err := cryptohomeHelper.StartAuthSession(ctx, userName, false /*ephemeral*/, uda.AuthIntent_AUTH_INTENT_DECRYPT)\n\tif err != nil {\n\t\treturn errors.Wrap(err, \"failed to start auth session for PIN authentication\")\n\t}\n\tdefer cryptohomeHelper.InvalidateAuthSession(ctx, authSessionID)\n\n\tif err = cryptohomeHelper.CreatePersistentUser(ctx, authSessionID); err != nil {\n\t\treturn errors.Wrap(err, \"failed to create persistent user with auth session\")\n\t}\n\n\tif err = cryptohomeHelper.PreparePersistentVault(ctx, authSessionID, false); err != nil {\n\t\treturn errors.Wrap(err, \"failed to prepare persistent user with auth session\")\n\t}\n\tdefer cryptohomeHelper.Unmount(ctx, userName)\n\n\tif userParam.useAuthFactor {\n\t\terr = cryptohomeHelper.AddAuthFactor(ctx, authSessionID, passwordAuthFactorLabel, passwordAuthFactorSecret)\n\t} else {\n\t\terr = cryptohomeHelper.AddCredentialsWithAuthSession(ctx, userName, passwordAuthFactorSecret, passwordAuthFactorLabel, authSessionID, false /*kiosk*/)\n\t}\n\n\tif err != nil {\n\t\treturn errors.Wrap(err, \"failed to add password auth factor\")\n\t}\n\n\tleCredsBeforeAdd, err := getLeCredsFromDisk(ctx, cmdRunner)\n\tif err != nil {\n\t\treturn errors.Wrap(err, \"failed to get le creds from disk before add\")\n\t}\n\n\t// Add a PIN auth factor to the user.\n\tif userParam.useLegacyAddAPIForPin {\n\t\terr = cryptohomeHelper.AddVaultKey(ctx, userName, passwordAuthFactorSecret, passwordAuthFactorLabel, correctPINSecret, authFactorLabelPIN, true)\n\t} else {\n\t\tif userParam.useAuthFactor {\n\t\t\terr = cryptohomeHelper.AddPinAuthFactor(ctx, authSessionID, authFactorLabelPIN, correctPINSecret)\n\t\t} else {\n\t\t\terr = cryptohomeHelper.AddPinCredentialsWithAuthSession(ctx, authFactorLabelPIN, correctPINSecret, authSessionID)\n\t\t}\n\t}\n\tif err != nil {\n\t\treturn errors.Wrap(err, \"failed to add le credential\")\n\t}\n\tleCredsAfterAdd, err := getLeCredsFromDisk(ctx, cmdRunner)\n\tif err != nil {\n\t\treturn errors.Wrap(err, \"failed to get le creds from disk after add\")\n\t}\n\n\tif diff := cmp.Diff(leCredsAfterAdd, leCredsBeforeAdd); diff == \"\" {\n\t\treturn errors.Wrap(err, \"le cred file did not change after add\")\n\t}\n\treturn nil\n}", "func (us *UserRpcService) Register(ctx context.Context,\n\tin *pb.User) (*pb.User, error) {\n\n\tLog.INFO(\"Got a new user registeration request: %v\", in)\n\n\t// Get store from context, it is added by middleware.\n\tstore := ctx.Value(\"user_store\").(UserStoreService)\n\tif store == nil {\n\t\tLog.ERROR(\"Failed to read user store from context\")\n\t\treturn nil, internalError\n\t}\n\n\t// Create the user in store.\n\tuser, err := store.Create(ctx, in)\n\tif err != nil {\n\t\tLog.ERROR(\"Failed to create user in store, err=%s\", err.Error())\n\t\treturn nil, internalError\n\t}\n\n\tLog.DEBUG(\"Created user with %v\", user)\n\treturn user, nil\n}" ]
[ "0.60788596", "0.58267033", "0.5673958", "0.5545096", "0.55214345", "0.5498959", "0.54986537", "0.5447019", "0.5431856", "0.54241097", "0.54189456", "0.54034674", "0.5401712", "0.53734887", "0.53705066", "0.53661627", "0.5347052", "0.5342317", "0.5341975", "0.5338753", "0.5334135", "0.5320559", "0.531316", "0.53008926", "0.5297743", "0.5278358", "0.52772844", "0.5271561", "0.5270068", "0.5259992", "0.5256473", "0.5234568", "0.5232678", "0.5220869", "0.5211129", "0.5206682", "0.52061266", "0.5194393", "0.5193136", "0.51882946", "0.5158814", "0.51563674", "0.51518476", "0.51497424", "0.5141282", "0.5127674", "0.5120099", "0.51114726", "0.5108868", "0.5108691", "0.5103751", "0.5097776", "0.50933146", "0.5092941", "0.5090816", "0.5088455", "0.5086727", "0.50779545", "0.50734013", "0.5073309", "0.5071131", "0.5071047", "0.50686747", "0.5066896", "0.50605327", "0.5059256", "0.5053853", "0.5053703", "0.5051533", "0.5044005", "0.50359267", "0.50298464", "0.5025734", "0.5010154", "0.5005322", "0.5004337", "0.50041324", "0.5003844", "0.49876148", "0.49809587", "0.49753574", "0.4971896", "0.49682516", "0.49642378", "0.49559373", "0.49463037", "0.49393103", "0.49297452", "0.49283585", "0.49228615", "0.49217948", "0.4919154", "0.4913376", "0.49111652", "0.49092394", "0.48999774", "0.48936892", "0.48917812", "0.48864508", "0.48799732" ]
0.7111279
0
initConfig initializes the configuration for the server
func (s *Server) initConfig() (err error) { // Home directory is current working directory by default if s.HomeDir == "" { s.HomeDir, err = os.Getwd() if err != nil { return errors.Wrap(err, "Failed to get server's home directory") } } // Make home directory absolute, if not already absoluteHomeDir, err := filepath.Abs(s.HomeDir) if err != nil { return fmt.Errorf("Failed to make server's home directory path absolute: %s", err) } s.HomeDir = absoluteHomeDir // Create config if not set if s.Config == nil { s.Config = new(ServerConfig) } s.CA.server = s s.CA.HomeDir = s.HomeDir err = s.initMultiCAConfig() if err != nil { return err } revoke.SetCRLFetcher(s.fetchCRL) // Make file names absolute s.makeFileNamesAbsolute() compModeStr := os.Getenv("FABRIC_CA_SERVER_COMPATIBILITY_MODE_V1_3") if compModeStr == "" { compModeStr = "true" // TODO: Change default to false once all clients have been updated to use the new authorization header } s.Config.CompMode1_3, err = strconv.ParseBool(compModeStr) if err != nil { return errors.WithMessage(err, "Invalid value for boolean environment variable 'FABRIC_CA_SERVER_COMPATIBILITY_MODE_V1_3'") } return nil }
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "func (bs *BusinessServer) initConfig() {\n\tcfg := config{}\n\tviper, err := cfg.init(bs.setting.Configfile)\n\tif err != nil {\n\t\tlog.Fatal(err)\n\t}\n\tbs.viper = viper\n}", "func initConfig() {\n\tvar config Configuration\n\tconfig.Databases = initAllDBConfig()\n\tconfig.Services = initAllServiceConfig()\n}", "func configInit() {\n\tLoadConfig(configPath, \"config\")\n\tConfig().SetDefault(\"log-level\", \"debug\")\n\tConfig().SetDefault(\"addr\", \"localhost:8081\")\n}", "func InitConfig() {\n\tglobalConfig.BackendServerAddr = \"http://localhost:8080\"\n}", "func initConfig() {\n\t// viper.AutomaticEnv() // read in environment variables that match\n\n\t// If a config file is found, read it in.\n\tviper.SetConfigFile(configFile)\n\tif err := viper.ReadInConfig(); err != nil {\n\t\tpanic(err)\n\t}\n\n\t// Load config to memory.\n\tconfig.Server = viper.GetString(\"server\")\n\tif config.Server == \"\" {\n\t\tconfig.Server = defaultServer\n\t}\n\n\tconfig.LogDir = viper.GetString(\"log_dir\")\n\tif config.LogDir == \"\" {\n\t\tconfig.LogDir = defaultLogDir\n\t}\n\n\tconfig.LogLevel = viper.GetString(\"log_level\")\n\tif config.LogLevel == \"\" {\n\t\tconfig.LogLevel = defaultLogLevel\n\t}\n\n\tlogging.Init(config.LogDir, defaultLogFilename, config.LogLevel, 1, false)\n}", "func initConfig() {\n\tif RootConfig.clusterID == \"\" {\n\t\tlog.Fatal(\"A cluster id must be provided.\")\n\t}\n\tif RootConfig.internal {\n\t\tRootConfig.config = insideCluster()\n\t} else {\n\t\tRootConfig.config = outsideCluster()\n\t}\n}", "func initConfig() {\n\tcfg = config.NewConfig()\n\t// server info\n\tflag.StringVar(&cfg.ServerInfo.Ip, \"ip\", \"0.0.0.0\", \"ip address to listen\")\n\tflag.UintVar(&cfg.ServerInfo.ListenPort, \"listen_port\", 8888, \"listen port\")\n\tflag.UintVar(&cfg.ServerInfo.ManagePort, \"manage_port\", 9999, \"manage port\")\n\n\t// db info\n\tflag.StringVar(&cfg.DbInfo.DbIp, \"db_ip\", \"0.0.0.0\", \"db ip\")\n\tflag.UintVar(&cfg.DbInfo.DbPort, \"db_port\", 6033, \"db port\")\n\tflag.StringVar(&cfg.DbInfo.DbUser, \"db_user\", \"root\", \"db user\")\n\tflag.StringVar(&cfg.DbInfo.DbPassword, \"db_password\", \"root-root\", \"db password\")\n\tflag.StringVar(&cfg.DbInfo.DbName, \"db_name\", \"lottery\", \"db name\")\n\n\t// redis info\n\tflag.StringVar(&cfg.RedisInfo.RedisIp, \"redis_ip\", \"10.23.101.22\", \"redis ip\")\n\tflag.UintVar(&cfg.RedisInfo.RedisPort, \"redis_port\", 6379, \"redis port\")\n\tflag.StringVar(&cfg.RedisInfo.RedisUser, \"redis_user\", \"root\", \"redis user\")\n\tflag.StringVar(&cfg.RedisInfo.RedisPassword, \"redis_password\", \"root-root\", \"redis password\")\n\n\t// parse\n\tflag.Parse()\n}", "func (baseCfg *BaseConfig) InitConfig(configPath string) {\n\tconfig, err := LoadConfig(configPath)\n\tif err != nil {\n\t\tlog.Fatal(err)\n\t}\n\n\tbaseCfg.PN_GLOBAL_ROUTER, err = jsonparser.GetString(config, \"PN_GLOBAL_ROUTER\")\n\tif err != nil {\n\t\tlog.Fatalf(\"PN_GLOBAL_ROUTER %v:\", err)\n\t}\n\n\tbaseCfg.PN_GLOBAL_PORTAL, err = jsonparser.GetString(config, \"PN_GLOBAL_PORTAL\")\n\tif err != nil {\n\t\tlog.Fatalf(\"PN_GLOBAL_PORTAL %v:\", err)\n\t}\n\n\tbaseCfg.PN_GLOBAL_JWT_PASSPHRASE, err = jsonparser.GetString(config, \"PN_GLOBAL_JWT_PASSPHRASE\")\n\tif err != nil {\n\t\tlog.Fatalf(\"PN_GLOBAL_JWT_PASSPHRASE %v:\", err)\n\t}\n\n\tbaseCfg.MY_POD_NAMESPACE = os.Getenv(\"MY_POD_NAMESPACE\")\n\n\tbaseCfg.DB_PATH = os.Getenv(\"DB_PATH\")\n\tif baseCfg.DB_PATH == \"\" {\n\t\tbaseCfg.DB_PATH = \"./marvin-connector.db\"\n\t}\n\n\tbaseCfg.CONNECTOR_ADDRESS = 
os.Getenv(\"CONNECTOR_ADDRESS\")\n\n\tbaseCfg.NEW_PUBLISHED_POLLING_INTERVAL = os.Getenv(\"NEW_PUBLISHED_POLLING_INTERVAL\")\n\tif baseCfg.NEW_PUBLISHED_POLLING_INTERVAL == \"\" {\n\t\tbaseCfg.NEW_PUBLISHED_POLLING_INTERVAL = \"2h\"\n\t}\n\n\tb, err := strconv.ParseBool(os.Getenv(\"CHECK_UPGRADE_STATUS_ENABLED\"))\n\tif err != nil {\n\t\tlog.Printf(\"[Warning] parse bool CHECK_UPGRADE_STATUS_ENABLED failed. Not a boolean\")\n\t\tbaseCfg.CHECK_UPGRADE_STATUS_ENABLED = false\n\t} else {\n\t\tbaseCfg.CHECK_UPGRADE_STATUS_ENABLED = b\n\t}\n\n\tbaseCfg.UPGRADE_STATUS_POLLING_INTERVAL = os.Getenv(\"UPGRADE_STATUS_POLLING_INTERVAL\")\n\tif baseCfg.UPGRADE_STATUS_POLLING_INTERVAL == \"\" {\n\t\tbaseCfg.UPGRADE_STATUS_POLLING_INTERVAL = \"1m\"\n\t}\n\n\tbaseCfg.JOB_TIMEOUT = os.Getenv(\"JOB_TIMEOUT\")\n\tif baseCfg.JOB_TIMEOUT == \"\" {\n\t\tbaseCfg.JOB_TIMEOUT = \"2m\"\n\t}\n\n\tlog.Printf(\"SECRET.PN_GLOBAL_ROUTER %v\", baseCfg.PN_GLOBAL_ROUTER)\n\tlog.Printf(\"SECRET.PN_GLOBAL_PORTAL %v\", baseCfg.PN_GLOBAL_PORTAL)\n\tlog.Printf(\"ENV.MY_POD_NAMESPACE %v\", baseCfg.MY_POD_NAMESPACE)\n\tlog.Printf(\"ENV.DB_PATH %v\", baseCfg.DB_PATH)\n\tlog.Printf(\"ENV.CONNECTOR_ADDRESS %v\", baseCfg.CONNECTOR_ADDRESS)\n\tlog.Printf(\"ENV.NEW_PUBLISHED_POLLING_INTERVAL %v\", baseCfg.NEW_PUBLISHED_POLLING_INTERVAL)\n\tlog.Printf(\"ENV.UPGRADE_STATUS_POLLING_INTERVAL %v\", baseCfg.UPGRADE_STATUS_POLLING_INTERVAL)\n\tlog.Printf(\"ENV.JOB_TIMEOUT %v\", baseCfg.JOB_TIMEOUT)\n\n}", "func (e *EngineOperations) InitConfig(cfg *config.Common) {\n\te.CommonConfig = cfg\n}", "func InitConfig(t string, klogmax int, ktaskmax int, ratelimit int) {\n\tif t != \"\" {\n\t\ttarget = t\n\t}\n\n\ttempRemote, err := url.Parse(target)\n\tif err != nil {\n\t\tlog.Fatal(err)\n\t}\n\tremote = tempRemote\n\tmaxCharsKrakenLog = klogmax\n\tmaxCharsFailedTask = ktaskmax\n\trateLimit = ratelimit\n}", "func init() {\n\tinitconf(configLocation)\n}", "func initConfig() {\n\tif cfgFile != \"\" { // enable ability to specify config file via flag\n\t\tviper.SetConfigFile(cfgFile)\n\t}\n\n\tviper.SetConfigName(constants.APIServerConfigFile) // name of config file (without extension)\n\tviper.AddConfigPath(constants.ServerConfigPath) // adding home directory as first search path\n\tviper.AutomaticEnv() // read in environment variables that match\n\n\t// If a config file is found, read it in.\n\tif err := viper.ReadInConfig(); err == nil {\n\t\tfmt.Println(\"Using config file:\", viper.ConfigFileUsed())\n\t}\n}", "func initConfig() {\n\tif cfgFile != \"\" { // enable ability to specify config file via flag\n\t\tviper.SetConfigFile(cfgFile)\n\t}\n\n\tviper.SetConfigName(\".go-webserver\") // name of config file (without extension)\n\tviper.AddConfigPath(os.Getenv(\"HOME\")) // adding home directory as first search path\n\tviper.AutomaticEnv() // read in environment variables that match\n\n\t// If a config file is found, read it in.\n\tif err := viper.ReadInConfig(); err == nil {\n\t\tfmt.Println(\"Using config file:\", viper.ConfigFileUsed())\n\t}\n}", "func Init(configFile string) {\n\tinitializer := configInitializer{\n\t\tCommon: newDefaultCommonConfig(),\n\t\tServer: newDefaultServerConfig(),\n\t\tClient: newDefaultClientConfig(),\n\t}\n\n\tif configFile == \"\" {\n\t\tconfigFile = \"./cfg/dtail.json\"\n\t}\n\n\tif _, err := os.Stat(configFile); !os.IsNotExist(err) {\n\t\tinitializer.parseConfig(configFile)\n\t}\n\n\t// Assign pointers to global variables, so that we can access the\n\t// configuration from any place of the program.\n\tCommon = 
initializer.Common\n\tServer = initializer.Server\n\tClient = initializer.Client\n\n\tif Server.MapreduceLogFormat == \"\" {\n\t\tServer.MapreduceLogFormat = \"default\"\n\t}\n}", "func initConfig() {\n\tif verbose {\n\t\tutils.EnableVerboseMode()\n\t\tt := time.Now()\n\t\tutils.Logf(\"Executed ImportExportCLI (%s) on %v\\n\", utils.MICmd, t.Format(time.RFC1123))\n\t}\n\n\tutils.Logln(utils.LogPrefixInfo+\"Insecure:\", insecure)\n\tif insecure {\n\t\tutils.Insecure = true\n\t}\n}", "func initConfig() {\n\t// allow to read in from environment\n\tviper.SetEnvPrefix(\"voskhod\")\n\tviper.AutomaticEnv() // read in environment variables that match\n\n\tif cfg.File != \"\" {\n\t\tviper.SetConfigFile(cfg.File)\n\n\t\t// do not forget to read in the config\n\t\tif err := viper.ReadInConfig(); err != nil {\n\t\t\tlog.Fatalf(errors.Wrap(err, \"cannot read config\").Error())\n\t\t}\n\t}\n\n\t// unmarshal to config\n\tif err := viper.Unmarshal(&cfg); err != nil {\n\t\tlog.Fatalf(errors.Wrap(err, \"cannot unmarshal config\").Error())\n\t}\n\n\t// config logger\n\tlogConfig(cfg)\n}", "func InitConfig() (err error) {\n\tConf = NewConfig()\n\tgconf = goconf.New()\n\tif err = gconf.Parse(confFile); err != nil {\n\t\treturn err\n\t}\n\tif err = gconf.Unmarshal(Conf); err != nil {\n\t\treturn err\n\t}\n\tvar serverIDi int64\n\tfor _, serverID := range gconf.Get(\"comet.addrs\").Keys() {\n\t\taddr, err := gconf.Get(\"comet.addrs\").String(serverID)\n\t\tif err != nil {\n\t\t\treturn err\n\t\t}\n\t\tserverIDi, err = strconv.ParseInt(serverID, 10, 32)\n\t\tif err != nil {\n\t\t\treturn err\n\t\t}\n\n\t\tConf.Comets[int32(serverIDi)] = addr\n\t}\n\treturn\n}", "func (c *Configurations) Init() error {\n\tc.Version = Version\n\tc.Location = \"Local\"\n\tc.Debug = Debug\n\n\t// server\n\tc.Server = &Server{}\n\tc.Server.Init()\n\n\t// redis init\n\tc.RedisConf = &RedisConf{}\n\tc.RedisConf.Init()\n\n\treturn nil\n}", "func initConfig(objAPI ObjectLayer) error {\n\tif objAPI == nil {\n\t\treturn errServerNotInitialized\n\t}\n\n\tif globalEtcdClient != nil {\n\t\tctx, cancel := context.WithTimeout(context.Background(), 20*time.Second)\n\t\tresp, err := globalEtcdClient.Get(ctx, getConfigFile())\n\t\tcancel()\n\t\tif err == nil && resp.Count > 0 {\n\t\t\treturn migrateConfig()\n\t\t}\n\t} else {\n\t\tif isFile(getConfigFile()) {\n\t\t\tif err := migrateConfig(); err != nil {\n\t\t\t\treturn err\n\t\t\t}\n\t\t}\n\t\t// Migrates ${HOME}/.minio/config.json or config.json.deprecated\n\t\t// to '<export_path>/.minio.sys/config/config.json'\n\t\t// ignore if the file doesn't exist.\n\t\tif err := migrateConfigToMinioSys(objAPI); err != nil && !os.IsNotExist(err) {\n\t\t\treturn err\n\t\t}\n\t}\n\n\tif err := checkServerConfig(context.Background(), objAPI); err != nil {\n\t\tif err == errConfigNotFound {\n\t\t\t// Config file does not exist, we create it fresh and return upon success.\n\t\t\tif err = newConfig(objAPI); err != nil {\n\t\t\t\treturn err\n\t\t\t}\n\t\t} else {\n\t\t\treturn err\n\t\t}\n\t}\n\n\tif err := migrateMinioSysConfig(objAPI); err != nil {\n\t\treturn err\n\t}\n\n\treturn loadConfig(objAPI)\n}", "func initConfig() {\n}", "func initConfig() {\n}", "func initConfig() {\n}", "func initConfig() {\n}", "func initConfig() {\n}", "func initConfig() {\n}", "func (cfg *Config) init() {\n\tcfg.Version = viper.GetString(\"version\")\n\tcfg.setLogLevel(viper.GetString(\"log_level\"))\n\tcfg.AppName = viper.GetString(\"app_name\")\n\tcfg.AppShortName = viper.GetString(\"app_short_name\")\n\n\tcfg.API.UsingHttps = 
viper.GetBool(\"api.usingHttps\")\n\tcfg.API.Port = viper.GetInt(\"api.port\")\n\tcfg.API.AllowedMethods = viper.GetStringSlice(\"api.allowed_methods\")\n\tcfg.API.AllowedHeaders = viper.GetStringSlice(\"api.allowed_headers\")\n\tcfg.API.AllowedOrigins = viper.GetStringSlice(\"api.allowed_origins\")\n\n\tcfg.Database.Host = viper.GetString(\"database.host\")\n\tcfg.Database.Port = viper.GetInt(\"database.port\")\n\tcfg.Database.Db = viper.GetString(\"database.database\")\n\tcfg.Database.User = viper.GetString(\"database.user\")\n\tcfg.Database.Password = viper.GetString(\"database.password\")\n\tcfg.Database.SSLMode = viper.GetString(\"database.sslmode\")\n\n\tcfg.Keys.CSRFKey = viper.GetString(\"secrets.csrf\")\n\tcfg.Keys.JWTSecret = viper.GetString(\"secrets.jwtsecret\")\n\tcfg.Keys.ApiLogin = viper.GetString(\"secrets.api_login\")\n}", "func initConfig() {\n\n\tviper.SetDefault(\"server_port\", 3000)\n\tviper.SetDefault(\"database_file\", \"instagram_bot.db\")\n\n\tif cfgFile != \"\" {\n\t\t// Use config file from the flag.\n\t\tviper.SetConfigFile(cfgFile)\n\t} else {\n\t\t// Find home directory.\n\t\thome, err := homedir.Dir()\n\t\tif err != nil {\n\t\t\tfmt.Println(err)\n\t\t\tos.Exit(1)\n\t\t}\n\n\t\tviper.SetConfigName(\"config\") // name of config file (without extension)\n\t\tviper.AddConfigPath(home) // call multiple times to add many search paths\n\t\tviper.AddConfigPath(\".\") // optionally look for config in the working directory\n\t\terr = viper.ReadInConfig() // Find and read the config file\n\t}\n\n\tviper.AutomaticEnv() // read in environment variables that match\n\n\t// If a config file is found, read it in.\n\tif err := viper.ReadInConfig(); err == nil {\n\t\tfmt.Println(\"Using config file:\", viper.ConfigFileUsed())\n\t} else {\n\n\t}\n}", "func initConfig() {\n\tif cfgFile == \"\" {\n\t\t// Use default.\n\t\tcfgFile = filepath.Join(os.Getenv(\"HOME\"), util.ConfigFileName)\n\t}\n\tviper.SetConfigFile(cfgFile)\n\n\tviper.AutomaticEnv()\n\n\tviper.SetDefault(util.SettingServer, util.DefaultAPIServer)\n\n\terr := viper.ReadInConfig()\n\tif err == nil {\n\t\tfmt.Println(\"[alauda] Using config file:\", viper.ConfigFileUsed())\n\t} else {\n\t\tfmt.Println(\"[alauda] WARNING: Unable to read config file:\", cfgFile)\n\t}\n}", "func initConfig() {\n\tif cfgFile != \"\" {\n\t\tviper.SetConfigFile(cfgFile)\n\t} else {\n\t\tviper.SetConfigName(\"sensupluginssensu\")\n\t\tviper.AddConfigPath(\"/etc/sensuplugins/conf.d\")\n\t}\n\n\tviper.AutomaticEnv()\n\tif err := viper.ReadInConfig(); err == nil {\n\t} else {\n\t\tsyslogLog.WithFields(logrus.Fields{\n\t\t\t\"check\": \"sensupluginssensu\",\n\t\t\t\"client\": host,\n\t\t\t//\"version\": version.AppVersion(),\n\t\t\t\"error\": err,\n\t\t\t\"cfgFile\": cfgFile,\n\t\t}).Error(`Could not read in the configuration file.`)\n\t}\n}", "func (w *Webserver) Init(config cfg.Config, logger zap.SugaredLogger) error {\n\tvar err error\n\n\tw.config = *defaultConfig()\n\terr = config.Populate(&w.config)\n\tif err != nil {\n\t\treturn err\n\t}\n\n\t// Init Dynamic Selectors\n\tfor key, value := range w.config.Paths {\n\t\tvalue.pipelineSelector, err = config.NewSelector(value.Pipeline)\n\t\tif err != nil {\n\t\t\treturn err\n\t\t}\n\t\tw.config.Paths[key] = value\n\t}\n\tw.jobs = make(chan job.Input)\n\tw.logger = logger\n\n\tw.buildServer()\n\n\treturn nil\n}", "func initConfig() {\n\tif cfgFile != \"\" {\n\t\t// Use config file from the flag.\n\t\tviper.SetConfigFile(cfgFile)\n\t} else {\n\t\t// Find home directory.\n\t\thome, err := 
homedir.Dir()\n\t\tif err != nil {\n\t\t\tfmt.Println(err)\n\t\t\tos.Exit(1)\n\t\t}\n\n\t\t// Search config in home directory with name \".handler\" (without extension).\n\t\tviper.AddConfigPath(home)\n\t\tviper.SetConfigName(\".handler\")\n\t}\n\n\t// Read in any config file\n\tviper.ReadInConfig()\n\n\tviper.AutomaticEnv() // read in environment variables that match\n\n\tll, err := logrus.ParseLevel(viper.GetString(\"log_level\"))\n\tif err == nil {\n\t\tcore.SetLogLevel(ll)\n\t}\n}", "func initConfig() {\n\tif cfgFile != \"\" {\n\t\t// Use config file from the flag.\n\t\tviper.SetConfigFile(cfgFile)\n\t} else {\n\t\t// Find home directory.\n\t\thome, err := homedir.Dir()\n\t\tif err != nil {\n\t\t\tfmt.Println(err)\n\t\t\tos.Exit(1)\n\t\t}\n\n\t\t// Search config in home directory with name \".server\" (without extension).\n\t\tviper.AddConfigPath(home)\n\t\tviper.SetConfigName(\".server\")\n\t}\n\n\tviper.AutomaticEnv() // read in environment variables that match\n\n\t// If a config file is found, read it in.\n\tif err := viper.ReadInConfig(); err == nil {\n\t\tfmt.Println(\"Using config file:\", viper.ConfigFileUsed())\n\t}\n}", "func InitConfig() Config {\n\tfmt.Println(\"Init config...\")\n\n\t// Load environment variables\n\terr := godotenv.Load(\".env\")\n\tif err != nil {\n\t\tlog.Fatal(\"Error reading .env file\", err)\n\t}\n\n\treturn Config{\n\t\tMain: Main{\n\t\t\tAPIPort: os.Getenv(\"APP_PORT\"),\n\t\t},\n\t\tDatabase: map[string]Database{\n\t\t\t\"stone_work\": Database{\n\t\t\t\tDriver: \"mysql\",\n\t\t\t\tName: os.Getenv(\"DB_NAME\"),\n\t\t\t\tUser: os.Getenv(\"DB_USERNAME\"),\n\t\t\t\tPassword: os.Getenv(\"DB_PASSWORD\"),\n\t\t\t},\n\t\t},\n\t\tAuth: JWTConfig{\n\t\t\tSigningMethod: os.Getenv(\"AUTH_SIGNING_METHOD\"),\n\t\t\tSecretToken: os.Getenv(\"AUTH_SECRET_TOKEN\"),\n\t\t\tLoginTime: os.Getenv(\"AUTH_LOGIN_TIME\"),\n\t\t},\n\t}\n}", "func initConfig() error {\n\tviper.AddConfigPath(\"configs\")\n\tviper.SetConfigName(\"config\")\n\treturn viper.ReadInConfig()\n}", "func initConfig() {\n\n\t// Configure logging\n\tconst logNameConsole = \"console\"\n\tlevel, _ := glog.NewLogLevel(rootFlags.logLevel)\n\tglog.ClearBackends()\n\tglog.SetBackend(logNameConsole, glog.NewWriterBackend(os.Stderr, \"\", level, \"\"))\n\n\t// Load config from file\n\tresticmanager.AppConfig.Load(rootFlags.appConfigFile)\n\tresticmanager.AppConfig.DryRun = rootFlags.dryrun\n\n\t// Add file logging if required\n\tif logConfig := resticmanager.AppConfig.LoggingConfig(); (!rootFlags.noFileLogging) && (logConfig != nil) {\n\t\tconst logNameFile = \"appfile\"\n\n\t\tglog.SetBackend(logNameFile,\n\t\t\tglog.NewFileBackend(\n\t\t\t\tlogConfig.Filename,\n\t\t\t\tlogConfig.Append,\n\t\t\t\t\"\",\n\t\t\t\tlogConfig.Level,\n\t\t\t\t\"\", // Use default logging format\n\t\t\t))\n\n\t}\n\n}", "func init() {\n\n\tviper.SetConfigName(\"config\") // name of config file (without extension)\n\tviper.SetConfigType(\"json\")\n\tviper.AddConfigPath(\".\") // path to look for the config file in\n\tviper.AddConfigPath(\"./config/\")\n\terr := viper.ReadInConfig() // Find and read the config file\n\tif err != nil { // Handle errors reading the config file\n\t\tpanic(fmt.Errorf(\"fatal error config file: %w\", err))\n\t}\n\n\tServer.Host = viper.GetString(\"host\")\n\tServer.DBUname = viper.GetString(\"dbuname\")\n\tServer.DBPword = viper.GetString(\"dbpword\")\n\tServer.DBAddress = viper.GetString(\"dbaddress\")\n\tServer.Port = viper.GetString(\"port\")\n\n\t// Setup global logging format\n\tlog.SetFlags(log.Ldate | 
log.Ltime | log.Lshortfile | log.Lmicroseconds)\n\n\t// Seed default random source\n\trand.Seed(time.Now().UnixNano())\n\n\tif !Debug.LongLog {\n\t\tlog.SetFlags(log.Ldate | log.Ltime)\n\t\tlog.Printf(\"Switching to short log format.\")\n\t}\n}", "func initConfig() {\n\tif cfgFile != \"\" {\n\t\tviper.SetConfigFile(cfgFile)\n\t} else {\n\t\thome, err := homedir.Dir()\n\t\tif err != nil {\n\t\t\tfmt.Println(err)\n\t\t\tos.Exit(1)\n\t\t}\n\n\t\tviper.AddConfigPath(home)\n\t\tviper.SetConfigName(\".worker-ops\")\n\t}\n\n\tviper.AutomaticEnv()\n\n\tif err := viper.ReadInConfig(); err == nil {\n\t\tfmt.Println(\"Using config file:\", viper.ConfigFileUsed())\n\t}\n}", "func (c *config) Init() error {\n\t// Validate the ports\n\tif c.ListenPort <= 0 || c.ListenPort > maxPort {\n\t\treturn fmt.Errorf(\"invalid listen port: %v\", c.ListenPort)\n\t}\n\tif c.DestinationPort <= 0 || c.DestinationPort > maxPort {\n\t\treturn fmt.Errorf(\"invalid destination port: %v\", c.DestinationPort)\n\t}\n\n\t// Create the listen address and destination address.\n\tc.listenAddress = fmt.Sprintf(\"%s:%v\", c.ListenHost, c.ListenPort)\n\tc.destinationAddress = fmt.Sprintf(\"%s:%v\", c.DestinationHost, c.DestinationPort)\n\n\t// Prepare the passwd path.\n\tc.PasswdFile = filepath.Clean(c.PasswdFile)\n\tif !strings.HasPrefix(c.PasswdFile, \"/\") {\n\t\tc.PasswdFile = filepath.Clean(lookupDir + \"/\" + c.PasswdFile)\n\t}\n\n\t// Check if the passwd file exists.\n\te, err := exists(c.PasswdFile)\n\tif err != nil {\n\t\treturn err\n\t} else if !e {\n\t\treturn fmt.Errorf(\"watchman passwd file '%s' is missing!\", c.PasswdFile)\n\t}\n\n\treturn nil\n}", "func InitConfig() {\n\terr := gonfig.GetConf(\"./config.json\", &Configuration)\n\tutils.Check(err)\n}", "func initConfig() {\n\t// setting up default config, in case no config file is present\n\t// if any map parameter is used, then whole map will get overwrite\n\t// that's why in case of log, log.file is used which viper does not overwrite\n\t// log.file create a internal map in result it joins it\n\tdefaultConfig()\n\n\tif cfgFile != \"\" {\n\t\tviper.SetConfigFile(cfgFile)\n\t} else {\n\t\thome, err := homedir.Dir()\n\t\tif err != nil {\n\t\t\tlog.Fatalln(err)\n\t\t}\n\n\t\t// Search config in home directory with name \".watermark-service\" (without extension).\n\t\tviper.AddConfigPath(home)\n\t\tviper.SetConfigName(\".watermark-service\")\n\t}\n\n\t//viper.AutomaticEnv() // read in environment variables that match\n\n\t// If a config file is found, read it in.\n\tif err := viper.ReadInConfig(); err == nil {\n\t\t//log.Println(\"Using config file:\", viper.ConfigFileUsed())\n\t}\n}", "func (c *Config) Init() {\n\tc.Address = \"127.0.0.1:8500\"\n\tc.TagSeparator = \",\"\n\tc.Scheme = \"http\"\n}", "func initConfig() {\n\toptions.db = *flag.String(\"db\", \"test/test.db\", \"SQLite database file\")\n\toptions.host = *flag.String(\"bind\", \"localhost\", \"HTTP server host\")\n\toptions.port = *flag.Uint(\"listen\", 8000, \"HTTP server listen port\")\n\tflag.Parse()\n}", "func initConfig(centralConfig corecfg.CentralConfig) (interface{}, error) {\n\trootProps := RootCmd.GetProperties()\n\t// Parse the config from bound properties and setup gateway config\n\tgatewayConfig = &config.GatewayConfig{\n\t\tSpecPath: rootProps.StringPropertyValue(\"3scale-api-gateway.specPath\"),\n\t\tConfigKey1: rootProps.StringPropertyValue(\"3scale-api-gateway.config_key_1\"),\n\t\tConfigKey2: rootProps.StringPropertyValue(\"3scale-api-gateway.config_key_2\"),\n\t\tConfigKey3: 
rootProps.StringPropertyValue(\"3scale-api-gateway.config_key_3\"),\n\t}\n\n\tagentConfig := config.AgentConfig{\n\t\tCentralCfg: centralConfig,\n\t\tGatewayCfg: gatewayConfig,\n\t}\n\treturn agentConfig, nil\n}", "func Init() error {\n\tconfigServerURL, err := GetConfigServerEndpoint()\n\tif err != nil {\n\t\topenlog.Warn(\"can not get config server endpoint: \" + err.Error())\n\t\treturn err\n\t}\n\n\tvar enableSSL bool\n\ttlsConfig, tlsError := getTLSForClient(configServerURL)\n\tif tlsError != nil {\n\t\topenlog.Error(fmt.Sprintf(\"Get %s.%s TLS config failed, err:[%s]\",\n\t\t\tconfigServerName, common.Consumer, tlsError.Error()))\n\t\treturn tlsError\n\t}\n\n\t/*This condition added because member discovery can have multiple ip's with IsHTTPS\n\thaving both true and false value.*/\n\tif tlsConfig != nil {\n\t\tenableSSL = true\n\t}\n\n\tinterval := config.GetConfigServerConf().RefreshInterval\n\tif interval == 0 {\n\t\tinterval = 30\n\t}\n\n\terr = initConfigServer(configServerURL, enableSSL, tlsConfig, interval)\n\tif err != nil {\n\t\topenlog.Error(\"failed to init config server: \" + err.Error())\n\t\treturn err\n\t}\n\n\topenlog.Warn(\"config server init success\")\n\treturn nil\n}", "func initConfig() {\n\tif cfgFile != \"\" { // enable ability to specify config file via flag\n\t\tviper.SetConfigFile(cfgFile)\n\t} else {\n\t\tviper.SetConfigName(\".remoteAudio\") // name of config file (without extension)\n\t\tviper.AddConfigPath(\"$HOME\") // adding home directory as first search path\n\t}\n\n\tviper.AutomaticEnv() // read in environment variables that match\n}", "func InitConfig() (err error) {\n\tConf = NewConfig()\n\tgconf = goconf.New()\n\tif err = gconf.Parse(confFile); err != nil {\n\t\treturn err\n\t}\n\tif err := gconf.Unmarshal(Conf); err != nil {\n\t\treturn err\n\t}\n\treturn nil\n}", "func initConfig() {\n\tviper.AutomaticEnv() // read in environment variables that match\n\n\t// bind all of our flags so we can access them with viper\n\tviper.BindPFlags(RootCmd.Flags())\n\n\t// If a config file is found, read it in.\n\tif err := viper.ReadInConfig(); err == nil {\n\t\tfmt.Println(\"Using config file:\", viper.ConfigFileUsed())\n\t}\n\n\t// we have to set this here since it uses other values\n\tviper.SetDefault(\"tempFolder\", \"/tmp\")\n\n\t// if we just want to print the version we do that and exit\n\tif viper.GetBool(\"version\") == true {\n\t\tfmt.Println(PrettyVersion(GetVersionParts()))\n\t\tos.Exit(0)\n\t}\n}", "func initConfig() {\n\tcredsViper = getViper(credsCfgFile, \"credentials\")\n\tconfigViper = getViper(configCfgFile, \"config\")\n\n\t// If a config file is found, read it in.\n\t_ = credsViper.ReadInConfig()\n\t_ = configViper.ReadInConfig()\n}", "func Init_Config() {\n\tconf, err := os.Open(ConfigFileName)\n\n\tif err != nil {\n\t\tif os.IsNotExist(err) {\n\t\t\tcreate_Config()\n\t\t\treturn\n\t\t} else {\n\t\t\tfmt.Printf(\"init_config err: %s\\n\", err);\n\t\t}\n\t}\n\n\tdefer conf.Close();\n\n\t//start decoding\n\tdecoder := json.NewDecoder(conf);\n\terr = decoder.Decode(&Config)\n\tif err != nil {\n\t\tfmt.Printf(\"init_config decoder err: %s\\n\", err);\n\t}\n}", "func initConfig() {\n\tif configFile != \"\" {\n\t\tviper.SetConfigFile(configFile)\n\t} else {\n\t\t// Search config in application directory with name {defaultConfigName} (without extension)\n\t\tviper.AddConfigPath(\".\")\n\t\tviper.SetConfigName(defaultConfigName)\n\t}\n\n\t// If a config file is found, read it in.\n\tif err := viper.ReadInConfig(); err == nil 
{\n\t\ttraceInfo(\"Using config file: \" + viper.ConfigFileUsed())\n\t}\n}", "func initConfig() {\n\tviper.BindEnv(\"CNI_CONF_NAME\")\n\tviper.BindEnv(\"CNI_ETC_DIR\")\n\tviper.BindEnv(\"CNI_BIN_DIR\")\n\tviper.BindEnv(\"COIL_PATH\")\n\tviper.BindEnv(\"CNI_NETCONF_FILE\")\n\tviper.BindEnv(\"CNI_NETCONF\")\n\tviper.BindEnv(\"COIL_BOOT_TAINT\")\n\n\tviper.SetDefault(\"CNI_CONF_NAME\", defaultCniConfName)\n\tviper.SetDefault(\"CNI_ETC_DIR\", defaultCniEtcDir)\n\tviper.SetDefault(\"CNI_BIN_DIR\", defaultCniBinDir)\n\tviper.SetDefault(\"COIL_PATH\", defaultCoilPath)\n}", "func InitConfig(){\n\n\tviper.SetConfigName(\"config\")\n\tviper.AddConfigPath(\"others\")\n\n\n\tviper.SetConfigType(\"toml\")\n\terr := viper.ReadInConfig()\n\n\tif err != nil {\n\n\t\tfmt.Println(err.Error())\n\t\tfmt.Printf(\"Exiting application. got error %v\",err.Error())\n\t\tos.Exit(1)\n\t}\n\n\terr = viper.Unmarshal(&Conf)\n\tif err != nil{\n\t\tfmt.Println(err)\n\t}\n\tfmt.Printf(\"config %v\",Conf)\n\tConf.loaded = true\n\n\tlogFileMaxSize := 100\n\n\terr = logger.Init(Conf.LogConfig.LogDirectory,\n\t\t800,\n\t\t20,\n\t\tuint32(logFileMaxSize),\n\t\tConf.LogConfig.EnableLogTrace,\n\t\tConf.LogConfig.Log)\n\n\tif err != nil {\n\t\tfmt.Println(\"Error in intializing logger, is : \", err)\n\t}\n\n\terr = logger.SetFilenamePrefix(Conf.LogConfig.LogFilePrefix, Conf.LogConfig.LogFilePrefix)\n\tlogger.SetLogThrough(false)\n\n\tif err != nil {\n\t\tfmt.Println(\"Error is : \", err.Error())\n\t}\n\n}", "func initConfig() {\n\tif cfgFile != \"\" {\n\t\tviper.SetConfigFile(cfgFile)\n\t} else {\n\t\thome := findHome()\n\t\tviper.AddConfigPath(home)\n\t\tviper.SetConfigName(\".ccv2ctl\")\n\t}\n\tviper.SetEnvPrefix(\"CCV2\")\n\tviper.AutomaticEnv() // read in environment variables that match\n\n\tif err := viper.ReadInConfig(); err == nil {\n\t}\n}", "func initConfig() {\n\tif cfgFile != \"\" {\n\t\tviper.SetConfigFile(cfgFile)\n\t} else {\n\t\thome, err := os.UserHomeDir()\n\t\tif err != nil {\n\t\t\tlog.Fatalf(\"Reading home directory: %s\", err)\n\t\t}\n\t\tviper.AddConfigPath(home)\n\t\tviper.SetConfigName(\".rproxy\")\n\t}\n\n\tif err := viper.ReadInConfig(); err != nil {\n\t\tlog.Fatalf(\"Loading config: %s\", err)\n\t}\n\n\tproxyConfig = &config.ProxyCfg{}\n\tif err := viper.Unmarshal(proxyConfig); err != nil {\n\t\tlog.Fatalf(\"Parsing config: %s\", err)\n\t}\n\tinitLogging(verbose)\n\tlog.Infof(\"Using config: %+v\", proxyConfig)\n}", "func initConfig() {\n\tif !options.Quite {\n\t\tif !options.NoBanner {\n\t\t\tcore.Banner()\n\t\t}\n\t\tfmt.Fprintf(os.Stderr, \"Jig - Jaeles Intput Generator %v by %v\\n\", core.VERSION, core.AUTHOR)\n\t}\n\n\tif options.Helper {\n\t\tHelpMessage()\n\t\tos.Exit(0)\n\t}\n\n\tcore.InitLog(&options)\n}", "func initConfig() {\n\n\tviper.SetEnvPrefix(`HELM_API`)\n\n\te := viper.BindEnv(\n\t\t`DEBUG`,\n\t)\n\tif e != nil {\n\t\tpanic(e)\n\t}\n\n\te = viper.BindEnv(\n\t\t`LOGLEVEL`,\n\t)\n\tif e != nil {\n\t\tpanic(e)\n\t}\n\tviper.SetDefault(`LOGLEVEL`, `info`)\n\n\te = viper.BindEnv(\n\t\t`TMP`,\n\t)\n\tif e != nil {\n\t\tpanic(e)\n\t}\n\tviper.SetDefault(`TMP`, `/var/tmp/helm-api`)\n\n\te = viper.BindEnv(`PORT`)\n\tviper.SetDefault(`PORT`, `8848`)\n\tif e != nil {\n\t\tpanic(e)\n\t}\n\n\te = viper.BindEnv(`HTTP_PORT`)\n\tviper.SetDefault(`HTTP_PORT`, `8611`)\n\tif e != nil {\n\t\tpanic(e)\n\t}\n\n\tviper.AllowEmptyEnv(true)\n\tviper.AutomaticEnv() // read in environment variables that match\n\n\tif err := viper.ReadInConfig(); err == nil {\n\t\tutils.Error().Err(err).Send()\n\t}\n}", "func 
initConfig() {\n\n\t// Find home directory.\n\thome, err := homedir.Dir()\n\tif err != nil {\n\t\tfmt.Println(err)\n\t\tos.Exit(1)\n\t}\n\n\t// Search config in home directory with name \".etherpad-cli\" (without extension).\n\tviper.AddConfigPath(home)\n\tviper.SetConfigName(configFile)\n\n\tviper.AutomaticEnv() // read in environment variables that match\n\n\t// If a config file is found, read it in.\n\tif err := viper.ReadInConfig(); err != nil {\n\t\tfmt.Printf(\"error reading config file (%s): %s\\n\", viper.ConfigFileUsed(), err)\n\t\tos.Exit(0)\n\t}\n\n\t// setup etherpad client\n\tetherClient = etherpadlite.NewEtherpadLite(viper.GetString(\"etherpad_token\"))\n\tetherClient.BaseURL = viper.GetString(\"etherpad_hostname\")\n}", "func InitConfig() (DbConnection, APIServerInfo) {\n\t// Default configurations\n\tdbConnection := DbConnection{\n\t\tUser: \"tankyou_poc\",\n\t\tDatabase: \"tankyou_poc\",\n\t\tPassword: \"tankyou_poc\",\n\t\tHost: \"0.0.0.0\",\n\t\tPort: \"5432\",\n\t\tDefaultTimeZone: \"Europe/Paris\",\n\t}\n\tAPIServer := APIServerInfo{\n\t\tHostname: \"0.0.0.0\",\n\t\tPort: \"3000\",\n\t\tJWTSecretKey: \"MagicalTokenIsTheBest\",\n\t}\n\t// Default host for DB in Docker containers\n\tif os.Getenv(\"ENVTYPE\") == \"container\" {\n\t\tlog.Print(\"<><><><> Setting host to container default \\n\")\n\t\tdbConnection.Host = \"database\"\n\t}\n\t// Get values set in env\n\tif apiPort := os.Getenv(\"API_PORT\"); apiPort != \"\" {\n\t\tlog.Print(\"<><><><> Setting api port \\n\")\n\t\tAPIServer.Port = apiPort\n\t}\n\tif apiHostname := os.Getenv(\"API_HOST\"); apiHostname != \"\" {\n\t\tlog.Print(\"<><><><> Setting api hostname \\n\")\n\t\tAPIServer.Hostname = apiHostname\n\t}\n\tif jwtSecret := os.Getenv(\"JWT_SECRET\"); jwtSecret != \"\" {\n\t\tlog.Print(\"<><><><> Setting JWT secret \\n\")\n\t\tAPIServer.JWTSecretKey = jwtSecret\n\t}\n\t// Will be erased if user is not root\n\tif dbRootPassword := os.Getenv(\"MYSQL_ROOT_PASSWORD\"); dbRootPassword != \"\" {\n\t\tlog.Print(\"<><><><> Setting db root password \\n\")\n\t\tdbConnection.Password = dbRootPassword\n\t}\n\tif dbUser := os.Getenv(\"MYSQL_USER\"); dbUser != \"\" {\n\t\tlog.Print(\"<><><><> Setting db user and user password \\n\")\n\t\tdbConnection.User = dbUser\n\t\t// Can be empty. 
Should be define when user is define\n\t\tdbConnection.Password = os.Getenv(\"MYSQL_PASSWORD\")\n\t}\n\tif dbName := os.Getenv(\"MYSQL_DATABASE\"); dbName != \"\" {\n\t\tlog.Print(\"<><><><> Setting db name \\n\")\n\t\tdbConnection.Database = dbName\n\t}\n\tif dbPort := os.Getenv(\"MYSQL_PORT\"); dbPort != \"\" {\n\t\tlog.Print(\"<><><><> Setting db port \\n\")\n\t\tdbConnection.Port = dbPort\n\t}\n\tif dbHost := os.Getenv(\"MYSQL_HOST\"); dbHost != \"\" {\n\t\tlog.Print(\"<><><><> Setting db host \\n\")\n\t\tdbConnection.Host = dbHost\n\t}\n\tif defTimeZone := os.Getenv(\"DEFAULT_TIME_ZONE\"); defTimeZone != \"\" {\n\t\tlog.Print(\"<><><><> Setting db host \\n\")\n\t\tdbConnection.DefaultTimeZone = defTimeZone\n\t}\n\n\t// Return new configs\n\treturn dbConnection, APIServer\n}", "func InitConfig() error {\n\t// Viper\n\tinitDefaults()\n\tviper.SetEnvKeyReplacer(strings.NewReplacer(\".\", \"_\")) // For gamers only\n\tviper.AutomaticEnv()\n\t// Consul\n\tprintAll()\n\treturn nil\n}", "func initConfig() {\n\tviper.ReadInConfig()\n}", "func initConfig() {\n\t// Find home directory.\n\thome, err := os.UserHomeDir()\n\tzenDir := home + \"/.zen\"\n\n\tcobra.CheckErr(err)\n\n\t// load the config data\n\tcfg = config.InitConfig(zenDir)\n\n\t// set default exec and runner\n\tcfg.AppCfg.Executor = &plugins.DefaultExecutor{}\n\tcfg.AppCfg.Runner = &plugins.DefaultRunner{}\n\n\t// load plugin from path based on config default\n\tfor _, plugin := range cfg.Plugins.Runners {\n\t\tif plugin.Name == cfg.AppCfg.RunnerID {\n\t\t\tplugins.LoadPlugin(plugin.Path)\n\t\t\tcfg.AppCfg.Runner = plugins.ZenPluginRegistry.Runner\n\t\t}\n\t}\n\n\tfor _, plugin := range cfg.Plugins.Executors {\n\t\tif plugin.Name == cfg.AppCfg.ExecutorID {\n\t\t\tplugins.LoadPlugin(plugin.Path)\n\t\t\tcfg.AppCfg.Executor = plugins.ZenPluginRegistry.Executor\n\t\t}\n\t}\n\n}", "func InitConfig() {\n\tv := viper.New()\n\tv.AddConfigPath(\".\")\n\tv.SetConfigFile(\"config.json\")\n\tv.SetConfigType(\"json\")\n\n\t// default values\n\tv.SetDefault(\"MYSQL_SCHEMA\", \"transcription-service\")\n\tv.SetDefault(\"MYSQL_PORT\", 3306)\n\n\terr := v.ReadInConfig()\n\n\t// local config file does not exist\n\tif err != nil {\n\t\tv.BindEnv(\"MYSQL_ROOT_PWD\")\n\t\tv.BindEnv(\"MYSQL_HOSTNAME\")\n\t\tv.BindEnv(\"MYSQL_USER\")\n\t\tv.BindEnv(\"AWS_BUCKET_NAME\")\n\t} else {\n\t\t// overwrite if env is present\n\t\tv.AutomaticEnv()\n\t}\n\n\tconfig = new(Configuration)\n\n\tbucketName := v.GetString(\"AWS_BUCKET_NAME\")\n\n\tconfig.AwsBucketName = bucketName\n\n\tvar dbConfig DbConfiguration\n\tdbConfig.port = v.GetInt(\"MYSQL_PORT\")\n\tdbConfig.hostname = v.GetString(\"MYSQL_HOSTNAME\")\n\tdbConfig.username = v.GetString(\"MYSQL_USER\")\n\tdbConfig.password = v.GetString(\"MYSQL_ROOT_PWD\")\n\tdbConfig.schemaName = v.GetString(\"MYSQL_SCHEMA\")\n\n\tconfig.DbConfig = &dbConfig\n}", "func initConfig() {\n\tif cfgFile != \"\" {\n\t\tviper.SetConfigFile(cfgFile)\n\t}\n\tviper.AutomaticEnv() // read in environment variables that match\n\n\t// If a config file is found, read it in.\n\tif err := viper.ReadInConfig(); err == nil {\n\t\t_, _ = fmt.Fprintln(os.Stderr, \"Using config file:\", viper.ConfigFileUsed())\n\t}\n\n\tif viper.GetBool(\"SAVE_LOGS\") {\n\t\terrorLogFile, err := os.OpenFile(viper.GetString(\"INTERNAL_LOGS_FILE\"), os.O_RDWR|os.O_CREATE|os.O_APPEND, 0666)\n\t\tif err != nil {\n\t\t\tlogrus.Fatalf(\"Cannot create or open the Error logs file: %s\", viper.GetString(\"INTERNAL_LOGS_FILE\"))\n\t\t}\n\t\tmw := io.MultiWriter(os.Stdout, 
errorLogFile)\n\t\tlogrus.SetOutput(mw)\n\t\tlogrus.SetFormatter(&logrus.TextFormatter{})\n\t}\n\tDisplayProcess = viper.GetBool(\"DISPLAY_INFO\")\n}", "func InitConfig() (*Config, error) {\n\tconfig := &Config{\n\t\tDatabaseURI: viper.GetString(\"DatabaseURI\"),\n\t}\n\tif config.DatabaseURI == \"\" {\n\t\treturn nil, fmt.Errorf(\"DatabaseURI must be set\")\n\t}\n\treturn config, nil\n}", "func Init() {\n\n\tconfig = Config{}\n\t//Read in configuration from a file\n\t// configFile, err := ioutil.ReadFile(\"../config.json\")\n\t// if err != nil {\n\t// \tlog.Println(err)\n\t// \tlog.Panic(\"There was an error loading the config file\")\n\t// }\n\n\t// if err := json.Unmarshal(configFile, config); err != nil {\n\t// \tlog.Panic(\"Error decoding into struct. Please check the config file\")\n\t// }\n\n\tconfig.Port = \"8000\"\n\n\tsession, err := mgo.DialWithInfo(&mgo.DialInfo{\n\t\tAddrs: []string{\"127.0.0.1:27017\", \"127.0.0.1:27018\"},\n\t\tUsername: \"\",\n\t\tPassword: \"\",\n\t})\n\tif err != nil {\n\t\tlog.Panic(\"Error connecting to database\")\n\t}\n\tconfig.Session = session\n\n}", "func (z *ZkClient) InitServiceConfig(path string, config map[string]string) {\n\n}", "func initConfig() {\n\tif privateKey != \"\" {\n\t\t// Use config file from the flag.\n\t\tviper.SetConfigFile(privateKey)\n\t} else {\n\t\t// Find home directory.\n\t\thome, err := homedir.Dir()\n\t\tcobra.CheckErr(err)\n\n\t\t// Search config in home directory with name \".gitserve\" (without extension).\n\t\tviper.AddConfigPath(home)\n\t\tviper.SetConfigName(\".gitserve\")\n\t}\n\n\tviper.AutomaticEnv() // read in environment variables that match\n\n\t// If a config file is found, read it in.\n\tif err := viper.ReadInConfig(); err == nil {\n\t\tfmt.Fprintln(os.Stderr, \"Using config file:\", viper.ConfigFileUsed())\n\t}\n}", "func (c *Config) Init() {\n\terr := godotenv.Load()\n\tif err != nil {\n\t\tpanic(err)\n\t}\n\n\t// Port\n\tc.Port = \"5000\"\n\tport, exists := os.LookupEnv(\"PORT\")\n\tif exists {\n\t\tc.Port = port\n\t}\n\n\t// Database URL\n\tconnection, exists := os.LookupEnv(\"DB_CONN\")\n\tif exists {\n\t\tc.DBConnection = connection\n\t} else {\n\t\tpanic(\"'DB_CONN' key has not been defined in .env file\")\n\t}\n\n\t// Secret key\n\tsecret, exists := os.LookupEnv(\"SECRET\")\n\tif exists {\n\t\tc.Secret = secret\n\t} else {\n\t\tpanic(\"'SECRET' key has not been defined in .env file\")\n\t}\n\n\t// Environment : default is Production\n\tc.Env = Production\n\tenv, exists := os.LookupEnv(\"ENV\")\n\tif exists && env == \"development\" {\n\t\tc.Env = Development\n\t}\n}", "func initConfig() {\n\tif cfgFile != \"\" { // enable ability to specify config file via flag\n\t\tviper.SetConfigFile(cfgFile)\n\t}\n\n\tviper.SetConfigName(\".takethe53\") // name of config file (without extension)\n\tviper.AddConfigPath(\"$HOME\") // adding home directory as first search path\n\tviper.SetEnvPrefix(\"takethe53\")\n\tviper.AutomaticEnv() // read in environment variables that match\n\n\t// If a config file is found, read it in.\n\treadConfig := false\n\tif err := viper.ReadInConfig(); err == nil {\n\t\treadConfig = len(viper.ConfigFileUsed()) > 0\n\t}\n\n\tif viper.GetString(\"log-format\") == \"json\" {\n\t\tlogrus.SetFormatter(&logrus.JSONFormatter{})\n\t}\n\n\tif viper.GetBool(\"verbose\") {\n\t\tlogrus.SetLevel(logrus.DebugLevel)\n\t\tlogrus.Debug(\"debug on\")\n\t}\n\n\tif readConfig {\n\t\tlogrus.Debug(\"Using config file:\", viper.ConfigFileUsed())\n\t}\n}", "func (d *Config) Init() {\n\td.Type = 
\"SRV\"\n\td.RefreshInterval = toml.Duration(30 * time.Second)\n}", "func initConfig() {\n\tif debug {\n\t\tlog.SetLevel(log.DebugLevel)\n\t} else {\n\t\tlog.SetLevel(log.InfoLevel)\n\t}\n\n\tif cfgFile != \"\" {\n\t\t// Use config file from the flag.\n\t\tviper.SetConfigFile(cfgFile)\n\t} else {\n\t\t// Find home directory.\n\t\thome, err := homedir.Dir()\n\t\tif err != nil {\n\t\t\tlog.Error(err)\n\t\t\tos.Exit(1)\n\t\t}\n\n\t\t// Search config in home directory with name \".s3s2\" (with extension!!!).\n\t\tviper.AddConfigPath(home)\n\t\tviper.SetConfigName(\".s3s2\")\n\t}\n\n\tviper.AutomaticEnv() // read in environment variables that match\n\n\t// If a config file is found, read it in.\n\tif err := viper.ReadInConfig(); err == nil {\n\t\tlog.Debug(\"Using config file:\", viper.ConfigFileUsed())\n\t} else {\n\t\t//Uncomment if problems picking up config file.\n\t\t//fmt.Println(err)\n\t}\n}", "func initConfig() {\n\tif cfgFile != \"\" {\n\t\t// Use config file from the flag.\n\t\tviper.SetConfigFile(cfgFile)\n\t} else {\n\t\t// Search config in home directory with name \".handshake\" (without extension).\n\t\tviper.AddConfigPath(\".\")\n\t\tviper.SetConfigName(\"handshake\")\n\t}\n\n\tviper.AutomaticEnv() // read in environment variables that match\n\n\t// If a config file is found, read it in.\n\tif err := viper.ReadInConfig(); err == nil {\n\t\t// fmt.Println(\"Using config file:\", viper.ConfigFileUsed())\n\t}\n}", "func initConfig() *domain.ApplicationConfig {\n\tconfigPath := util.GetEnv(envConfigPath, defaultConfigPath)\n\tprefix := util.GetEnv(envConfigPrefix, defaultConfigPrefix)\n\tcfg, err := util.ReadConfig(configPath, prefix)\n\tif err != nil {\n\t\tlog.Fatal(err)\n\t}\n\treturn cfg\n}", "func initConfig() {\n\tappid, _ = utils.Cfg.GetString(\"wechat\", \"appid\")\n\tsecret, _ = utils.Cfg.GetString(\"wechat\", \"secret\")\n\tbaseUrl, _ := utils.Cfg.GetString(\"beego\", \"base_url\")\n\tredirectUrl = url.QueryEscape(baseUrl + \"/wechat\")\n\twechatAPIHost = \"https://api.weixin.qq.com\"\n}", "func InitConfig() {\n\tenvLoader()\n}", "func initConfig() {\n\tlog.SetHandler(text.New(os.Stderr))\n\tif debug {\n\t\tlog.SetLevel(log.DebugLevel)\n\t}\n\tviper.SetEnvPrefix(\"ecs\")\n\tviper.AutomaticEnv() // read in environment variables that match\n\tif cfgFile != \"\" || environment != \"\" {\n\t\t// Use config file from the flag. 
cfgFile takes precedence over environment\n\t\tif cfgFile != \"\" {\n\t\t\tviper.SetConfigFile(cfgFile)\n\t\t} else {\n\t\t\tif cfg, err := findConfigByEnvironment(environment); err != nil {\n\t\t\t\tlog.WithError(err).Fatal(\"Can't find the config\")\n\t\t\t} else {\n\t\t\t\tviper.SetConfigFile(cfg)\n\t\t\t}\n\t\t}\n\t\t// If a config file is found, read it in.\n\t\tif err := viper.ReadInConfig(); err == nil {\n\t\t\tlog.Infof(\"Using config file: %s\", viper.ConfigFileUsed())\n\t\t} else {\n\t\t\tlog.WithError(err).Fatal(\"Had some errors while parsing the config\")\n\t\t\tos.Exit(1)\n\t\t}\n\t}\n\n}", "func InitConfig() (*Config, error) {\n\tv := viper.New()\n\tv.AutomaticEnv()\n\n\tc := Config{\n\t\tHTTPPort: v.GetString(httpPort),\n\t\tPostgreSQLConfig: PostgreSQLConfig{\n\t\t\tHost: v.GetString(postgreSQLHost),\n\t\t\tUser: v.GetString(postgreSQLUser),\n\t\t\tPassword: v.GetString(postgreSQLPassword),\n\t\t\tDB: v.GetString(postgreSQLDB),\n\t\t\tPort: v.GetInt64(postgreSQLPort),\n\t\t\tSubscriptionsTable: PostgreSQLTableConfig{\n\t\t\t\tSchema: v.GetString(postgreSQLSubscriptionsTableSchema),\n\t\t\t\tName: v.GetString(postgreSQLSubscriptionsTableName),\n\t\t\t},\n\t\t},\n\t}\n\n\tif err := validateConfig(v); err != nil {\n\t\treturn nil, err\n\t}\n\n\treturn &c, nil\n}", "func (s *RPCConfiguration) Init() error {\n\ts.NotificationsServerPort = env_config.Env(\"VELMIE_WALLET_NOTIFICATIONS_RPC_PORT\", \"\")\n\treturn nil\n}", "func initConfig() {\n\tif rootFlags.cfgFile != \"\" {\n\t\t// Use config file from the flag.\n\t\tviper.SetConfigFile(rootFlags.cfgFile)\n\t} else {\n\t\t// Find home directory.\n\t\thome, err := homedir.Dir()\n\t\tif err != nil {\n\t\t\tlog.Print(err)\n\t\t\tos.Exit(1)\n\t\t}\n\n\t\t// Search config in home directory with name \".eqc\" (without extension).\n\t\tviper.AddConfigPath(filepath.Join(home, \".config\"))\n\t\tviper.SetConfigName(\"eqc\")\n\t}\n\n\tif !strings.Contains(rootFlags.svcAddr, \":\") {\n\t\trootFlags.svcAddr += \":37706\"\n\t}\n\n\tviper.AutomaticEnv() // read in environment variables that match\n\n\t// If a config file is found, read it in.\n\tif err := viper.ReadInConfig(); err == nil {\n\t\tlog.Print(\"Using config file:\", viper.ConfigFileUsed())\n\t}\n}", "func initConfig() {\n\terr := gApp.Config.Load(gApp.Context())\n\tif err == nil {\n\t\treturn\n\t}\n\tif _, ok := err.(*fs.PathError); !ok {\n\t\tfmt.Fprintf(os.Stderr, \"failed loading config file: %v\\n\", err)\n\t}\n}", "func InitConfig() {\n\n\tviper.SetDefault(\"Threads\", \"4\")\n\tviper.SetDefault(\"RootDirectory\", \"/opt/lht\")\n\tviper.SetConfigName(\"lht\") // name of config file (without extension)\n\tviper.AddConfigPath(\"/etc\") // adding /etc directory as first search path\n\n\tif !project.CheckInstalled() {\n\t\tfmt.Fprintf(os.Stderr, \"lht is not configured.. 
running installation.\\n\")\n\t\tif err := project.Install(); err != nil {\n\t\t\tfmt.Fprintf(os.Stderr, \"could not install: %v\\n\", err)\n\t\t\tif err == project.ErrNotRoot {\n\t\t\t\tfmt.Fprintf(os.Stderr, \"please run `sudo lht` to install default files\")\n\t\t\t}\n\t\t\tos.Exit(1)\n\t\t}\n\t\tfmt.Fprintf(os.Stderr, \"lht has been installed.\\n\")\n\t\tos.Exit(0)\n\t}\n\n\t// config file should be found since we have checked the installation\n\tif err := viper.ReadInConfig(); err != nil {\n\t\tfmt.Fprintf(os.Stderr, \"could not read configuration file: %v\\n\", err)\n\t\tos.Exit(1)\n\t}\n}", "func init() {\n\tconfig.Read()\n\n\tdao.Server = config.Server\n\tdao.Database = config.Database\n\tdao.Connect()\n}", "func init() {\n\tconfig.Read()\n\n\tdao.Server = config.Server\n\tdao.Database = config.Database\n\tdao.Connect()\n}", "func initConfig() {\n\tif viper.GetBool(\"verbose\") {\n\t\tlog.SetLevel(log.DebugLevel)\n\t}\n\n\t// Set defaults\n\tviper.SetDefault(\"metrics.addr\", \":9100\")\n\tviper.SetDefault(\"metrics.path\", \"/metrics\")\n\tviper.SetDefault(\"redis.pass\", \"\")\n\tviper.SetDefault(\"kafka.tls\", false)\n\tviper.SetDefault(\"kafka.topics.tasks\", \"tasks\")\n\tviper.SetDefault(\"kafka.topics.jobs\", \"jobs\")\n\tviper.SetDefault(\"kafka.topics.states\", \"states\")\n\tviper.SetDefault(\"kafka.groups.schedulers\", \"schedulers\")\n\tviper.SetDefault(\"kafka.groups.aggregators\", \"aggregators\")\n\tviper.SetDefault(\"kafka.groups.workers\", \"workers\")\n\tviper.SetDefault(\"worker.poolsize\", 100)\n\tviper.SetDefault(\"token.ttl\", 3600)\n\tviper.SetDefault(\"redis.pass\", \"\")\n\n\t// Bind environment variables\n\tviper.SetEnvPrefix(\"mtrwrk\")\n\tviper.AutomaticEnv()\n\n\t// Set config search path\n\tviper.AddConfigPath(\"/etc/metronome/\")\n\tviper.AddConfigPath(\"$HOME/.metronome\")\n\tviper.AddConfigPath(\".\")\n\n\t// Load default config\n\tviper.SetConfigName(\"default\")\n\tif err := viper.MergeInConfig(); err != nil {\n\t\tif _, ok := err.(viper.ConfigFileNotFoundError); ok {\n\t\t\tlog.Debug(\"No default config file found\")\n\t\t} else {\n\t\t\tlog.Panicf(\"Fatal error in default config file: %v \\n\", err)\n\t\t}\n\t}\n\n\t// Load worker config\n\tviper.SetConfigName(\"worker\")\n\tif err := viper.MergeInConfig(); err != nil {\n\t\tif _, ok := err.(viper.ConfigFileNotFoundError); ok {\n\t\t\tlog.Debug(\"No worker config file found\")\n\t\t} else {\n\t\t\tlog.Panicf(\"Fatal error in worker config file: %v \\n\", err)\n\t\t}\n\t}\n\n\t// Load user defined config\n\tcfgFile := viper.GetString(\"config\")\n\tif cfgFile != \"\" {\n\t\tviper.SetConfigFile(cfgFile)\n\t\terr := viper.ReadInConfig()\n\t\tif err != nil {\n\t\t\tlog.Panicf(\"Fatal error in config file: %v \\n\", err)\n\t\t}\n\t}\n}", "func initConfig() {\n\thome, err := homedir.Dir()\n\tif err != nil {\n\t\tfmt.Println(err)\n\t\tos.Exit(1)\n\t}\n\n\ttdPath := path.Join(home, \".td\")\n\tif _, err := os.Stat(tdPath); os.IsNotExist(err) {\n\t\tos.MkdirAll(tdPath, os.ModePerm)\n\t}\n\n\tviper.AddConfigPath(tdPath)\n\tviper.Set(\"tdPath\", tdPath)\n\tviper.SetConfigName(\".config\")\n\tviper.AutomaticEnv()\n\n\tif err := viper.ReadInConfig(); err != nil {\n\t\tviper.WriteConfigAs(path.Join(tdPath, \".config.json\"))\n\t}\n\n\tviper.SetDefault(\"boards_file\", path.Join(tdPath, \"boards.json\"))\n\tviper.SetDefault(\"current_board\", \"default\")\n}", "func Init(baseCfg BaseConfig) Config {\n\t// urls := []string{}\n\n\tappConfig := Config{\n\t\tbaseCfg,\n\t\t// urls,\n\t}\n\n\treturn appConfig\n}", 
"func (c *Config) init() {\n\n\tc.logger = logrus.New()\n\n\t// Connect slots\n\tc.ConnectStringSet(func(key string, val string) {\n\t\tc.SetString(key, val)\n\t})\n\tc.ConnectBoolSet(func(key string, val bool) {\n\t\tc.SetBool(key, val)\n\t})\n\tc.ConnectStringValue(func(key string) string {\n\t\treturn c.GetString(key)\n\t})\n\tc.ConnectBoolValue(func(key string) bool {\n\t\treturn c.GetBool(key)\n\t})\n\tc.ConnectSave(func() {\n\t\tc.save()\n\t})\n\tc.ConnectDefaults(func() {\n\t\tc.SetDefaults()\n\t})\n}", "func (s *Server) Init(c Configuration) (o *Server, err error) {\n\to = s\n\n\t// Init UDP server\n\tif err = o.serverUDP.Init(c); err != nil {\n\t\treturn\n\t}\n\n\t// Init HTTP server\n\tif err = o.serverHTTP.Init(c); err != nil {\n\t\treturn\n\t}\n\treturn\n}", "func initConfig() {\n\tif cfgFile != \"\" {\n\t\tviper.SetConfigFile(cfgFile)\n\t} else {\n\t\tviper.AddConfigPath(defaults.EtcPath)\n\t\tviper.AddConfigPath(\".\")\n\t\tviper.SetConfigName(release.NAME)\n\t}\n\n\tviper.AutomaticEnv()\n\n\tif err := viper.ReadInConfig(); err != nil {\n\t\tf := viper.ConfigFileUsed()\n\t\tif f != \"\" {\n\t\t\tlog.Fatal().Err(err).Str(\"config_file\", f).Msg(\"Unable to load config file\")\n\t\t}\n\t}\n}", "func initConfig() {\n\tconfigSuffix := \"\"\n\tswitch viper.Get(\"APP_ENV\") {\n\tcase \"development\":\n\t\tconfigSuffix = \".development\"\n\t\tbreak\n\tdefault:\n\t\tbreak\n\t}\n\n\tviper.SetConfigType(\"yaml\")\n\tviper.AddConfigPath(viper.Get(\"ROOM_CALC_CONFIG_DIRECTORY\").(string))\n\tviper.SetConfigName(\"gutils.config\" + configSuffix)\n\n\tif err := viper.ReadInConfig(); err != nil {\n\t\tfmt.Println(\"Can't read config:\", err)\n\t\tos.Exit(1)\n\t}\n}", "func initConfig() {\n\tif cfgFile != \"\" { // enable ability to specify config file via flag\n\t\tviper.SetConfigFile(cfgFile)\n\t}\n\n\tviper.SetConfigName(\".ttn\") // name of config file (without extension)\n\tviper.AddConfigPath(\"$HOME\") // adding home directory as first search path\n\tviper.SetEnvPrefix(\"ttn\") // set environment prefix\n\tviper.SetEnvKeyReplacer(strings.NewReplacer(\".\", \"_\", \"-\", \"_\"))\n\tviper.AutomaticEnv() // read in environment variables that match\n\n\tviper.BindEnv(\"debug\")\n\n\t// If a config file is found, read it in.\n\tif err := viper.ReadInConfig(); err == nil {\n\t\tfmt.Println(\"Using config file:\", viper.ConfigFileUsed())\n\t}\n}", "func initConfig() {\n\tif cfgFile != \"\" {\n\t\tviper.SetConfigFile(cfgFile)\n\t} else {\n\t\thome, err := homedir.Dir()\n\t\tif err != nil {\n\t\t\tfmt.Println(err)\n\t\t\tos.Exit(1)\n\t\t}\n\n\t\tviper.AddConfigPath(home)\n\t\tviper.SetConfigName(\".compote\")\n\t}\n\n\tviper.AutomaticEnv()\n\n\tif err := viper.ReadInConfig(); err == nil {\n\t\tfmt.Println(\"Using config file:\", viper.ConfigFileUsed())\n\t}\n}", "func initConfig() {\n\tif cfgFile != \"\" { // enable ability to specify config file via flag\n\t\tviper.SetConfigFile(cfgFile)\n\t}\n\n\tviper.SetConfigName(\".k8s-updater\") // name of config file (without extension)\n\tviper.AddConfigPath(\"$HOME\") // adding home directory as first search path\n\tviper.SetEnvPrefix(\"APP\")\n\tviper.AutomaticEnv() // read in environment variables that match\n\n\t// If a config file is found, read it in.\n\tif err := viper.ReadInConfig(); err == nil {\n\t\tfmt.Println(\"Using config file:\", viper.ConfigFileUsed())\n\t}\n}", "func initConfig() {\n\tif cfgFile != \"\" 
{\n\t\tviper.SetConfigFile(cfgFile)\n\t}\n\tviper.SetEnvPrefix(\"pgm\")\n\tviper.SetConfigName(\"pgm\")\n\tviper.AddConfigPath(\".\")\n\tviper.AddConfigPath(\"$HOME\")\n\tviper.AddConfigPath(\"/etc/\")\n\tviper.AutomaticEnv()\n\n\t// If a config file is found, read it in.\n\tif err := viper.ReadInConfig(); err == nil {\n\t\tfmt.Println(\"Using config file:\", viper.ConfigFileUsed())\n\t}\n}", "func initConfig() {\n\tif cfgFile == \"/dev/null\" {\n\t\treturn\n\t}\n\n\tviper.SetConfigName(AppName) // name of config file (without extension)\n\tviper.AddConfigPath(\"$HOME/.config/\" + AppName)\n\tviper.AddConfigPath(\"$HOME/.\" + AppName)\n\n\t// Read in environment variables that match, with a prefix\n\tviper.SetEnvPrefix(AppName)\n\tviper.AutomaticEnv()\n\n\t// Enable ability to specify config file via flag\n\tviper.SetConfigFile(cfgFile)\n\n\t// If a config file is found, read it in.\n\terr := viper.ReadInConfig()\n\tif err != nil {\n\t\tif cfgFile != \"\" {\n\t\t\terrPrint(\"Error: cannot read configuration file '%s': %v\", cfgFile, err)\n\t\t\tos.Exit(-1)\n\t\t}\n\t} else if viper.GetBool(\"verbose\") {\n\t\terrPrint(\"Using config file: %s\", viper.ConfigFileUsed())\n\t}\n}", "func initConfig() {\n\tif cfgFile != \"\" {\n\t\t// Use config file from the flag.\n\t\tviper.SetConfigFile(cfgFile)\n\t} else {\n\t\t// Find home directory.\n\t\thome, err := homedir.Dir()\n\t\tif err != nil {\n\t\t\tfmt.Println(err)\n\t\t\tos.Exit(1)\n\t\t}\n\n\t\t// Search config in home directory with name \".kubeconnect\" (without extension).\n\t\tviper.AddConfigPath(home)\n\t\tviper.SetConfigName(\".kubeconnect\")\n\t}\n\n\tviper.SetEnvPrefix(\"KUBECONNECT\")\n\tviper.AutomaticEnv() // read in environment variables that match\n\n\t// If a config file is found, read it in.\n\tif err := viper.ReadInConfig(); err == nil {\n\t\tfmt.Println(\"Using config file:\", viper.ConfigFileUsed())\n\t}\n}", "func initConfig() {\n\tif cfgFile != \"\" {\n\t\t// Use config file from the flag.\n\t\tviper.SetConfigFile(cfgFile)\n\t} else {\n\t\t// Find home directory.\n\t\thome, err := homedir.Dir()\n\t\tif err != nil {\n\t\t\tfmt.Println(err)\n\t\t\tos.Exit(1)\n\t\t}\n\n\t\t// Search config in home directory with name \".sgacrawl\" (without extension).\n\t\tviper.AddConfigPath(home)\n\t\tviper.SetConfigName(\".sgacrawl\")\n\t}\n\n\tviper.AutomaticEnv() // read in environment variables that match\n\n\t// If a config file is found, read it in.\n\tif err := viper.ReadInConfig(); err == nil {\n\t\tfmt.Println(\"Using config file:\", viper.ConfigFileUsed())\n\t}\n}", "func InitConfig() (*Config, error) {\n\tv := viper.New()\n\n\terr := godotenv.Load()\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\n\tv.SetDefault(\"db_file\", \"db.sqlite3\")\n\tv.SetDefault(\"notification_timeout\", 10)\n\tv.SetDefault(\"monobank_integration\", false)\n\n\tv.SetConfigName(\"config\")\n\tv.AddConfigPath(\".\")\n\tv.AutomaticEnv()\n\n\terr = v.ReadInConfig()\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\n\tconfig := &Config{}\n\tconfig.NotificationTimeout = time.Duration(v.GetInt(\"notification_timeout\")) * time.Second\n\tconfig.DbFile = v.GetString(\"db_file\")\n\tconfig.UserName1 = v.GetString(\"USERNAME_1\")\n\tconfig.UserName2 = v.GetString(\"USERNAME_2\")\n\tconfig.SenderID1 = v.GetInt(\"SENDER_ID_1\")\n\tconfig.SenderID2 = v.GetInt(\"SENDER_ID_2\")\n\tconfig.TelegramToken = os.Getenv(\"TELEGRAM_TOKEN\")\n\tconfig.APIServer = v.GetInt(\"API_SERVER_PORT\")\n\n\tconfig.MonobankIntegrationEnabled = v.GetBool(\"monobank_integration\")\n\tif 
config.MonobankIntegrationEnabled {\n\t\tconfig.MonobankWebhookURL = os.Getenv(\"MONOBANK_WEBHOOK_URL\")\n\t\tconfig.MonobankPort = v.GetInt(\"MONOBANK_PORT\")\n\t\tconfig.MonobankToken1 = os.Getenv(\"MONOBANK_TOKEN_1\")\n\t\tconfig.MonobankToken2 = os.Getenv(\"MONOBANK_TOKEN_2\")\n\t\tconfig.MonobankAccount1 = v.GetString(\"MONOBANK_ACCOUNT_1\")\n\t\tconfig.MonobankAccount2 = v.GetString(\"MONOBANK_ACCOUNT_2\")\n\t}\n\n\treturn config, nil\n}", "func initConfig() {\n\tif cfgFile != \"\" {\n\t\t// Use config file from the flag.\n\t\tviper.SetConfigFile(cfgFile)\n\t} else {\n\t\t// Find home directory.\n\t\thome, err := homedir.Dir()\n\t\tif err != nil {\n\t\t\tfmt.Println(err)\n\t\t\tos.Exit(1)\n\t\t}\n\n\t\t// Search config in home directory with name \".htindex\" (without extension).\n\t\tviper.AddConfigPath(home)\n\t\tviper.SetConfigName(\".htindex\")\n\t}\n\n\tviper.AutomaticEnv() // read in environment variables that match\n\n\t// If a config file is found, read it in.\n\tif err := viper.ReadInConfig(); err == nil {\n\t\tfmt.Println(\"Using config file:\", viper.ConfigFileUsed())\n\t}\n}", "func initConfig() {\n\n\tviper.SetConfigType(\"yaml\")\n\n\tif cfgFile != \"\" {\n\t\t// Use config file from the flag.\n\t\tviper.SetConfigFile(cfgFile)\n\t} else {\n\t\t// Find home directory.\n\t\thome, err := homedir.Dir()\n\t\tif err != nil {\n\t\t\tlog.Fatal(err)\n\t\t}\n\n\t\t// Search config in home directory with name \".lhctl\" (without extension).\n\t\tviper.AddConfigPath(home)\n\t\tviper.SetConfigName(\".lhctl\")\n\t}\n\n\t// If a config file is found, read it in.\n\tif err := viper.ReadInConfig(); err == nil {\n\t\tlog.Printf(\"Using config file: %s\", viper.ConfigFileUsed())\n\t}\n}" ]
[ "0.7635541", "0.74710584", "0.7462024", "0.7440421", "0.7333414", "0.73153716", "0.7142967", "0.7052948", "0.7020853", "0.700271", "0.6976572", "0.6970516", "0.6968374", "0.6919963", "0.6860119", "0.6859224", "0.68516105", "0.6822989", "0.6806939", "0.6792894", "0.6792894", "0.6792894", "0.6792894", "0.6792894", "0.6792894", "0.67807996", "0.6779889", "0.67748433", "0.67629045", "0.6750347", "0.674272", "0.67417216", "0.67352575", "0.6727324", "0.67222863", "0.6720046", "0.6715192", "0.67147875", "0.67068803", "0.6700753", "0.66903186", "0.6683824", "0.6672255", "0.66690105", "0.66667575", "0.6665889", "0.6664817", "0.66634095", "0.6657836", "0.6637149", "0.66348416", "0.66312045", "0.6629682", "0.6625958", "0.6623398", "0.66220045", "0.6619548", "0.66148716", "0.65999866", "0.6597099", "0.6594189", "0.6576194", "0.6570267", "0.6567695", "0.6565778", "0.65504056", "0.6537783", "0.6536941", "0.65262425", "0.65242517", "0.65206844", "0.6516435", "0.6506996", "0.6485734", "0.6481753", "0.64809996", "0.64795804", "0.64703923", "0.64690506", "0.64635867", "0.64625776", "0.64604", "0.64604", "0.64585453", "0.64566755", "0.6455854", "0.64548653", "0.64362437", "0.64325184", "0.64253986", "0.6408657", "0.6405445", "0.6398314", "0.63958055", "0.6392444", "0.6391143", "0.63867015", "0.6386333", "0.63853884", "0.6383313" ]
0.72934073
6
Initialize config related to multiple CAs
func (s *Server) initMultiCAConfig() (err error) {
	cfg := s.Config
	if cfg.CAcount != 0 && len(cfg.CAfiles) > 0 {
		return errors.New("The --cacount and --cafiles options are mutually exclusive")
	}
	if cfg.CAcfg.Intermediate.ParentServer.URL != "" && cfg.CAcount > 0 {
		return errors.New("The --cacount option is not permissible for an intermediate server; use the --cafiles option instead")
	}
	cfg.CAfiles, err = util.NormalizeFileList(cfg.CAfiles, s.HomeDir)
	if err != nil {
		return err
	}
	// Multi-CA related configuration initialization
	s.caMap = make(map[string]*CA)
	if cfg.CAcount >= 1 {
		s.createDefaultCAConfigs(cfg.CAcount)
	}
	if len(cfg.CAfiles) != 0 {
		log.Debugf("Default CA configuration, if necessary, will be used to replace missing values for additional CAs: %+v", s.Config.CAcfg)
		log.Debugf("Additional CAs to be started: %s", cfg.CAfiles)
		caFiles := util.NormalizeStringSlice(cfg.CAfiles)
		for _, caFile := range caFiles {
			err = s.loadCA(caFile, false)
			if err != nil {
				return err
			}
		}
	}
	return nil
}
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "func (d *aciDriver) Config() map[string]string {\n\treturn map[string]string{\n\t\t\"CNAB_AZURE_VERBOSE\": \"Increase verbosity. true, false are supported values\",\n\t\t\"CNAB_AZURE_CLIENT_ID\": \"AAD Client ID for Azure account authentication - used to authenticate to Azure for ACI creation\",\n\t\t\"CNAB_AZURE_CLIENT_SECRET\": \"AAD Client Secret for Azure account authentication - used to authenticate to Azure for ACI creation\",\n\t\t\"CNAB_AZURE_TENANT_ID\": \"Azure AAD Tenant Id Azure account authentication - used to authenticate to Azure for ACI creation\",\n\t\t\"CNAB_AZURE_SUBSCRIPTION_ID\": \"Azure Subscription Id - this is the subscription to be used for ACI creation, if not specified the default subscription is used\",\n\t\t\"CNAB_AZURE_APP_ID\": \"Azure Application Id - this is the application to be used to authenticate to Azure\",\n\t\t\"CNAB_AZURE_RESOURCE_GROUP\": \"The name of the existing Resource Group to create the ACI instance in, if not specified a Resource Group will be created\",\n\t\t\"CNAB_AZURE_LOCATION\": \"The location to create the ACI Instance in\",\n\t\t\"CNAB_AZURE_NAME\": \"The name of the ACI instance to create - if not specified a name will be generated\",\n\t\t\"CNAB_AZURE_DELETE_RESOURCES\": \"Delete RG and ACI instance created - default is true useful to set to false for debugging - only deletes RG if it was created by the driver\",\n\t\t\"CNAB_AZURE_MSI_TYPE\": \"This can be set to user or system\",\n\t\t\"CNAB_AZURE_SYSTEM_MSI_ROLE\": \"The role to be asssigned to System MSI User - used if CNAB_AZURE_ACI_MSI_TYPE == system, if this is null or empty then the role defaults to contributor\",\n\t\t\"CNAB_AZURE_SYSTEM_MSI_SCOPE\": \"The scope to apply the role to System MSI User - will attempt to set scope to the Resource Group that the ACI Instance is being created in if not set\",\n\t\t\"CNAB_AZURE_USER_MSI_RESOURCE_ID\": \"The resource Id of the MSI User - required if CNAB_AZURE_ACI_MSI_TYPE == User \",\n\t\t\"CNAB_AZURE_PROPAGATE_CREDENTIALS\": \"If this is set to true the credentials used to Launch the Driver are propagated to the invocation image in an ENV variable, the CNAB_AZURE prefix will be relaced with AZURE_, default is false\",\n\t\t\"CNAB_AZURE_USE_CLIENT_CREDS_FOR_REGISTRY_AUTH\": \"If this is set to true the CNAB_AZURE_CLIENT_ID and CNAB_AZURE_CLIENT_SECRET are also used for authentication to ACR\",\n\t\t\"CNAB_AZURE_REGISTRY_USERNAME\": \"The username for authenticating to the container registry\",\n\t\t\"CNAB_AZURE_REGISTRY_PASSWORD\": \"The password for authenticating to the container registry\",\n\t\t\"CNAB_AZURE_STATE_FILESHARE\": \"The File Share for Azure State volume\",\n\t\t\"CNAB_AZURE_STATE_STORAGE_ACCOUNT_NAME\": \"The Storage Account for the Azure State File Share\",\n\t\t\"CNAB_AZURE_STATE_STORAGE_ACCOUNT_KEY\": \"The Storage Key for the Azure State File Share\",\n\t\t\"CNAB_AZURE_STATE_MOUNT_POINT\": \"The mount point location for state volume\",\n\t\t\"CNAB_AZURE_DELETE_OUTPUTS_FROM_FILESHARE\": \"Any Outputs Created in the fileshare are deleted on completion\",\n\t\t\"CNAB_AZURE_DEBUG_CONTAINER\": \"Replaces /cnab/app/run with tail -f /dev/null so that container can be connected to and debugged\",\n\t}\n}", "func ArbitrateConfigs(c *Configure) {\n\t//check the ClusterName, ClusterName is used to Identify the clusters in the Local NetWork\n\tif c.HttpPort == c.MsgPort {\n\t\tpanic(\"port conflict\")\n\t}\n\tif c.HttpPort > math.MaxInt16 || c.HttpPort < 1024 {\n\t\tpanic(fmt.Errorf(\"illegal http port %d\", 
c.HttpPort))\n\t}\n\n\tif c.MsgPort > math.MaxInt16 || c.MsgPort < 1024 {\n\t\tpanic(fmt.Errorf(\"illegal msg port %d\", c.MsgPort))\n\t}\n\n\tif c.Retry > 10 {\n\t\tc.Retry = 10\n\t}\n\tif c.Retry < 1 {\n\t\tc.Retry = 1\n\t}\n\tif c.SyncType < 0 || c.SyncType > 2 {\n\t\tc.SyncType = 0\n\t}\n\tif c.Threshold < 1000 {\n\t\tc.Threshold = 1000\n\t}\n\tif c.Threshold > 1000000 {\n\t\tc.Threshold = 1000000\n\t}\n}", "func init() {\n\tinitconf(configLocation)\n}", "func InitConfig() (err error) {\n\tConf = NewConfig()\n\tgconf = goconf.New()\n\tif err = gconf.Parse(confFile); err != nil {\n\t\treturn err\n\t}\n\tif err = gconf.Unmarshal(Conf); err != nil {\n\t\treturn err\n\t}\n\tvar serverIDi int64\n\tfor _, serverID := range gconf.Get(\"comet.addrs\").Keys() {\n\t\taddr, err := gconf.Get(\"comet.addrs\").String(serverID)\n\t\tif err != nil {\n\t\t\treturn err\n\t\t}\n\t\tserverIDi, err = strconv.ParseInt(serverID, 10, 32)\n\t\tif err != nil {\n\t\t\treturn err\n\t\t}\n\n\t\tConf.Comets[int32(serverIDi)] = addr\n\t}\n\treturn\n}", "func initConfig() {\n\tif RootConfig.clusterID == \"\" {\n\t\tlog.Fatal(\"A cluster id must be provided.\")\n\t}\n\tif RootConfig.internal {\n\t\tRootConfig.config = insideCluster()\n\t} else {\n\t\tRootConfig.config = outsideCluster()\n\t}\n}", "func (c *Config) Init() {\n\tif c.DNSNames == nil {\n\t\tc.DNSNames = []string{\"localhost\", \"127.0.0.1\", \"::1\"}\n\t}\n\tif c.TLS == nil {\n\t\tc.TLS = &DefaultTLSOptions\n\t}\n\tif c.AuthorityConfig == nil {\n\t\tc.AuthorityConfig = &AuthConfig{}\n\t}\n\tif c.CommonName == \"\" {\n\t\tc.CommonName = \"Step Online CA\"\n\t}\n\tif c.CRL != nil && c.CRL.Enabled && c.CRL.CacheDuration == nil {\n\t\tc.CRL.CacheDuration = DefaultCRLCacheDuration\n\t}\n\tc.AuthorityConfig.init()\n}", "func init() {\n\tfor group, values := range defaultConfigs {\n\t\tcore.RegisterConfig(group, values)\n\t}\n\tcore.RegisterService(\"indicator\", indicator.Configs, &indicator.IndicatorServiceFactory{})\n\tcore.RegisterService(\"executor\", executor.Configs, &executor.ExecutorServiceFactory{})\n}", "func initConfig() {\n\tcfgBldr := &config.ConfigurationBuilder{}\n\tSettings = &accountControllerConfiguration{}\n\tif err := cfgBldr.Unmarshal(Settings); err != nil {\n\t\tlog.Fatalf(\"Could not load configuration: %s\", err.Error())\n\t}\n\n\t// load up the values into the various settings...\n\terr := cfgBldr.WithEnv(\"AWS_CURRENT_REGION\", \"AWS_CURRENT_REGION\", \"us-east-1\").Build()\n\tif err != nil {\n\t\tlog.Printf(\"Error: %+v\", err)\n\t}\n\tsvcBldr := &config.ServiceBuilder{Config: cfgBldr}\n\n\t_, err = svcBldr.\n\t\t// AWS services...\n\t\tWithDynamoDB().\n\t\tWithSTS().\n\t\tWithS3().\n\t\tWithSNS().\n\t\tWithSQS().\n\t\t// DCE services...\n\t\tWithStorageService().\n\t\tWithDataService().\n\t\tBuild()\n\tif err != nil {\n\t\tpanic(err)\n\t}\n\n\tServices = svcBldr\n\n\tpolicyName = Config.GetEnvVar(\"PRINCIPAL_POLICY_NAME\", \"DCEPrincipalDefaultPolicy\")\n\tartifactsBucket = Config.GetEnvVar(\"ARTIFACTS_BUCKET\", \"DefaultArtifactBucket\")\n\tprincipalPolicyS3Key = Config.GetEnvVar(\"PRINCIPAL_POLICY_S3_KEY\", \"DefaultPrincipalPolicyS3Key\")\n\tprincipalRoleName = Config.GetEnvVar(\"PRINCIPAL_ROLE_NAME\", \"DCEPrincipal\")\n\tprincipalIAMDenyTags = strings.Split(Config.GetEnvVar(\"PRINCIPAL_IAM_DENY_TAGS\", \"DefaultPrincipalIamDenyTags\"), \",\")\n\tprincipalMaxSessionDuration = int64(Config.GetEnvIntVar(\"PRINCIPAL_MAX_SESSION_DURATION\", 100))\n\ttags = []*iam.Tag{\n\t\t{Key: aws.String(\"Terraform\"), Value: 
aws.String(\"False\")},\n\t\t{Key: aws.String(\"Source\"), Value: aws.String(\"github.com/Optum/dce//cmd/lambda/accounts\")},\n\t\t{Key: aws.String(\"Environment\"), Value: aws.String(Config.GetEnvVar(\"TAG_ENVIRONMENT\", \"DefaultTagEnvironment\"))},\n\t\t{Key: aws.String(\"Contact\"), Value: aws.String(Config.GetEnvVar(\"TAG_CONTACT\", \"DefaultTagContact\"))},\n\t\t{Key: aws.String(\"AppName\"), Value: aws.String(Config.GetEnvVar(\"TAG_APP_NAME\", \"DefaultTagAppName\"))},\n\t}\n\taccountCreatedTopicArn = Config.GetEnvVar(\"ACCOUNT_CREATED_TOPIC_ARN\", \"DefaultAccountCreatedTopicArn\")\n\tresetQueueURL = Config.GetEnvVar(\"RESET_SQS_URL\", \"DefaultResetSQSUrl\")\n\tallowedRegions = strings.Split(Config.GetEnvVar(\"ALLOWED_REGIONS\", \"us-east-1\"), \",\")\n}", "func Init() error {\n\t// Logger = elog.DefaultLogger\n\tC.Github = &C.GithubOauth{}\n\tC.Facebook = &C.FacebookOauth{}\n\tC.Minio = &C.MinioConfig{}\n\tC.Seq = &C.Sequence{}\n\tC.JWT = &C.JWTConfig{}\n\n\terr := econf.UnmarshalKey(\"ceres.oauth.github\", C.Github)\n\tif err != nil {\n\t\treturn err\n\t}\n\n\terr = econf.UnmarshalKey(\"ceres.oauth.facebook\", C.Facebook)\n\tif err != nil {\n\t\treturn err\n\t}\n\n\terr = econf.UnmarshalKey(\"ceres.minio\", C.Minio)\n\tif err != nil {\n\t\treturn err\n\t}\n\n\terr = econf.UnmarshalKey(\"ceres.sequence\", C.Seq)\n\tif err != nil {\n\t\treturn err\n\t}\n\n\terr = econf.UnmarshalKey(\"ceres.jwt\", C.JWT)\n\tif err != nil {\n\t\treturn err\n\t}\n\n\treturn nil\n}", "func initConfig() {\n\tviper.BindEnv(\"CNI_CONF_NAME\")\n\tviper.BindEnv(\"CNI_ETC_DIR\")\n\tviper.BindEnv(\"CNI_BIN_DIR\")\n\tviper.BindEnv(\"COIL_PATH\")\n\tviper.BindEnv(\"CNI_NETCONF_FILE\")\n\tviper.BindEnv(\"CNI_NETCONF\")\n\tviper.BindEnv(\"COIL_BOOT_TAINT\")\n\n\tviper.SetDefault(\"CNI_CONF_NAME\", defaultCniConfName)\n\tviper.SetDefault(\"CNI_ETC_DIR\", defaultCniEtcDir)\n\tviper.SetDefault(\"CNI_BIN_DIR\", defaultCniBinDir)\n\tviper.SetDefault(\"COIL_PATH\", defaultCoilPath)\n}", "func (c *AuthConfig) init() {\n\tif c.Provisioners == nil {\n\t\tc.Provisioners = provisioner.List{}\n\t}\n\tif c.Template == nil {\n\t\tc.Template = &ASN1DN{}\n\t}\n\tif c.Backdate == nil {\n\t\tc.Backdate = &provisioner.Duration{\n\t\t\tDuration: DefaultBackdate,\n\t\t}\n\t}\n}", "func (a *AppAQI) Init(cfg app.AppConfig) {\n\ta.app_id = cfg.AppID\n\n\t// parse config\n\tjs, _ := json.Marshal(cfg.Config)\n\tjson.Unmarshal(js, &a.cfg)\n\n\ta.interest_map = make(common.InterestMap)\n\n\ta.tasks = utils.LoadTasksFromFile(a.cfg.TaskPath)\n\tfor t, _ := range a.tasks {\n\t\tvar task = a.tasks[t]\n\t\ttask.TaskTimeSeconds = 20\n\t\ta.tasks[t] = task\n\t}\n\ta.add_tasks()\n\n\ta.ground_truth = utils.LoadGroundTruth(a.cfg.GroundTruthPath)\n}", "func initClients(instanceCreds map[string]AuthBundle, threadPoolSize int) {\n\tvaultClients = make(map[string]*api.Client) // THIS IS THE GLOBAL\n\tmasterAddress := configureMaster(instanceCreds)\n\tbwg := utils.NewBoundedWaitGroup(threadPoolSize)\n\tvar mutex = &sync.Mutex{}\n\t// read access credentials for other vault instances and configure clients\n\tfor addr, bundle := range instanceCreds {\n\t\t// client already configured separately for master\n\t\tif addr != masterAddress {\n\t\t\tbwg.Add(1)\n\t\t\tgo createClient(addr, masterAddress, bundle, &bwg, mutex)\n\t\t}\n\t}\n\tbwg.Wait()\n}", "func initConfig() {\n\n\tif ver {\n\t\t_, err := fmt.Fprintln(os.Stdout, formatVersion(version.New(), false))\n\t\tif err != nil {\n\t\t\tlogrus.Fatal(err)\n\t\t}\n\n\t\tos.Exit(0)\n\t}\n\n\tif configFile != 
\"\" { // enable ability to specify config file via flag\n\t\tviper.SetConfigFile(configFile)\n\t}\n\n\tviper.SetConfigName(\"apphc\") // name of config file (without extension)\n\tviper.AddConfigPath(\"/opt/cisco/apphc/config\")\n\tviper.AddConfigPath(appcommon.ApphcHomePath) // adding home directory as first search path\n\n\t// If a config file is found, read it in.\n\tif err := viper.ReadInConfig(); err != nil {\n\t\tlogrus.Fatal(err)\n\t}\n\n\tlogrus.Info(\"Config file: \", viper.ConfigFileUsed())\n\n\t// Set prefix for environment variables\n\t// It will check for a environment variable with a name matching the key\n\t// uppercased and prefixed with the EnvPrefix if set.\n\tviper.SetEnvPrefix(\"apphc\")\n\n\t// Bind keys to appropriate environment variables\n\terr := viper.BindEnv(appcommon.EnvApphcLogFormat,\n\t\tappcommon.EnvApphSvcsUrlExternalIp,\n\t\tappcommon.EnvApphExternalIp,\n\t\tappcommon.EnvApphcNetworkPort,\n\t\tappcommon.EnvApphcCachePath,\n\t\tappcommon.EnvApphcAppsUpgradePolicyRecreate,\n\t\tappcommon.EnvApphcAppsRollbackEnabled,\n\t\tappcommon.EnvApphcInternalAuthorizationEnabled,\n\t\tappcommon.EnvApphcBearerToken,\n\t\tappcommon.EnvApphcPrivateDockerRegistry,\n\t\tappcommon.EnvApphcAppFlexApiHost,\n\t\tappcommon.EnvApphcAppFlexApiPort,\n\t\tappcommon.EnvApphcAdaptersRancherEnabled,\n\t\tappcommon.EnvApphMasterNodeUser,\n\t\tappcommon.EnvApphMasterNodeIp,\n\t\tappcommon.EnvApphcPurgeAppMetadata,\n\t\trancher.EnvApphcAdaptersRancherClusterName,\n\t\trancher.EnvApphcAdaptersRancherServerEndpoint,\n\t\trancher.EnvApphcAdaptersRancherServerCredsToken,\n\t\trancher.EnvApphcAdaptersRancherCatalogProto,\n\t\trancher.EnvApphcAdaptersRancherCatalogPassword,\n\t\trancher.EnvApphcAdaptersRancherTemplatesCatalogBranch,\n\t\trancher.EnvApphcAdaptersRancherAppsCatalogName,\n\t\trancher.EnvApphcAdaptersRancherAppsCatalogBranch)\n\n\tif err != nil {\n\t\tlogrus.Fatal(err)\n\t}\n\n\t// Set default values\n\tviper.SetDefault(appcommon.EnvApphcLogFormat, appcommon.LogFormatText)\n\tviper.SetDefault(appcommon.EnvApphcNetworkPort, 10000)\n\tviper.SetDefault(appcommon.EnvApphcCachePath, \"/tmp/.cache\")\n\tviper.SetDefault(appcommon.EnvApphcAppsUpgradePolicyRecreate, true)\n\tviper.SetDefault(appcommon.EnvApphcAppsRollbackEnabled, true)\n\tviper.SetDefault(appcommon.EnvApphcInternalAuthorizationEnabled, true)\n\tviper.SetDefault(appcommon.EnvApphcBearerToken, \"YXBwaG9zdGVyLWRlcGxveWVyLTIwMTgK\") // apphoster-deployer-2018 in base64\n\tviper.SetDefault(appcommon.EnvApphcAdaptersRancherEnabled, true)\n\tviper.SetDefault(appcommon.EnvApphMasterNodeUser, \"intucell\")\n\tviper.SetDefault(appcommon.EnvApphcAppFlexApiPort, 7000)\n\tviper.SetDefault(appcommon.EnvApphcPurgeAppMetadata, true)\n\tviper.SetDefault(rancher.EnvApphcAdaptersRancherClusterName, \"apphoster\")\n\tviper.SetDefault(rancher.EnvApphcAdaptersRancherCatalogProto, \"http\")\n\tviper.SetDefault(rancher.EnvApphcAdaptersRancherCatalogPassword, \"catalog\")\n\tviper.SetDefault(rancher.EnvApphcAdaptersRancherTemplatesCatalogBranch, \"master\")\n\tviper.SetDefault(rancher.EnvApphcAdaptersRancherAppsCatalogName, \"son-flex-apps\")\n\tviper.SetDefault(rancher.EnvApphcAdaptersRancherAppsCatalogBranch, \"master\")\n\tviper.SetDefault(rancher.EnvApphcAdaptersRancherServerCredsToken, \"kubeconfig-user-vxg8h:rf94p78gx2mk9fbmchvq7r9xbmzphhz42pltpskj2q8rdsf626n2sf\")\n\tviper.AutomaticEnv()\n\n\tvar formatter logrus.Formatter\n\tif viper.Get(appcommon.EnvApphcLogFormat).(appcommon.LogFormat) == appcommon.LogFormatJson {\n\t\tformatter = 
&logrus.JSONFormatter{}\n\t} else if viper.Get(appcommon.EnvApphcLogFormat).(appcommon.LogFormat) == appcommon.LogFormatText {\n\t\tformatter = &logrus.TextFormatter{}\n\t}\n\n\tlogrus.SetFormatter(formatter)\n\n\t// Set debug level\n\tif levelDebug {\n\t\tlogrus.SetLevel(logrus.DebugLevel)\n\t}\n\n\tverification()\n}", "func (cos ConfigOptions) Init(cmd *cobra.Command) error {\n\tfor _, co := range cos {\n\t\tif err := co.Init(cmd); err != nil {\n\t\t\treturn err\n\t\t}\n\t\tco.SetDeprecated(cmd)\n\t}\n\treturn nil\n}", "func init() {\n\t// bootstrap cosmos-sdk config for kava chain\n\tkavaConfig := sdk.GetConfig()\n\tapp.SetBech32AddressPrefixes(kavaConfig)\n\tapp.SetBip44CoinType(kavaConfig)\n\tkavaConfig.Seal()\n}", "func initConfig() {\n\tif cfgFile != \"\" {\n\t\t// Use config file from the flag.\n\t\tviper.SetConfigFile(cfgFile)\n\t} else {\n\t\t// Find home directory.\n\t\thome, err := homedir.Dir()\n\t\tif err != nil {\n\t\t\tfmt.Println(err)\n\t\t\tos.Exit(1)\n\t\t}\n\n\t\t// Search config in home directory with name \".scarab\" (without extension).\n\t\tviper.AddConfigPath(home)\n\t\tviper.SetConfigName(\".scarab\")\n\t}\n\n\tviper.SetEnvPrefix(\"scarab\")\n\tviper.AutomaticEnv() // read in environment variables that match\n\tif err := viper.BindEnv(\"project\", \"CLOUDSDK_CORE_PROJECT\"); err != nil {\n\t\tlog.Fatal(err)\n\t}\n\tif err := viper.BindEnv(\"region\", \"CLOUDSDK_COMPUTE_REGION\"); err != nil {\n\t\tlog.Fatal(err)\n\t}\n\n\t// If a config file is found, read it in.\n\tif err := viper.ReadInConfig(); err == nil {\n\t\tfmt.Println(\"Using config file:\", viper.ConfigFileUsed())\n\t}\n}", "func initConfig() {\n\tvar config Configuration\n\tconfig.Databases = initAllDBConfig()\n\tconfig.Services = initAllServiceConfig()\n}", "func (c *Config) init() {\n\tif c.CAURL == \"\" {\n\t\tc.CAURL = letsEncryptDirURL\n\t}\n\n\t// Validate and reset state as needed for when dns is enabled\n\t// as a challenge option the DNSChallengeProvider.\n\tif c.EnableDNS01Challenge {\n\t\tif c.DNSProvider == nil {\n\t\t\tc.EnableDNS01Challenge = false\n\t\t\tc.excludedChallenges = []acme.Challenge{acme.DNS01}\n\t\t}\n\t} else {\n\t\tc.excludedChallenges = append(c.excludedChallenges, acme.DNS01)\n\t}\n\n\t// if tls-sni-01 challenge is not enabled, then add into exclusion\n\t// list.\n\tif !c.EnableTLSSNI01Challenge {\n\t\tc.excludedChallenges = append(c.excludedChallenges, acme.TLSSNI01)\n\t}\n\n\t// If http-01 challenge is not enabled, then validate that atleast one\n\t// challenge is enabled, then add http-01 into excluded list, else we must enable\n\t// http-01 has default challenge.\n\tif !c.EnableHTTP01Challenge {\n\t\tif !c.EnableTLSSNI01Challenge && !c.EnableDNS01Challenge {\n\t\t\tc.EnableHTTP01Challenge = true\n\t\t\tc.excludedChallenges = []acme.Challenge{acme.TLSSNI01, acme.DNS01}\n\t\t} else {\n\t\t\tc.excludedChallenges = append(c.excludedChallenges, acme.HTTP01)\n\t\t}\n\t}\n\n\t// if we have no ip/hostname set for the http/tls server then\n\t// set to default systems ip.\n\tif c.ListenerAddr == \"\" {\n\t\tc.ListenerAddr = \"0.0.0.0\"\n\t}\n\n\tif c.HTTPChallengePort == 0 {\n\t\tc.HTTPChallengePort = 80\n\t}\n\n\tif c.TLSSNIChallengePort == 0 {\n\t\tc.HTTPChallengePort = 443\n\t}\n\n\t// If not certificate filesystem is not provided, we will\n\t// utilize a os based filesystem storage.\n\tif c.CertificatesFileSystem == nil {\n\t\tc.CertificatesFileSystem = sysfs.NewSystemZapFS(\"./acme/certs\")\n\t}\n\n\t// If not user filesystem is not provided, we will\n\t// utilize a os 
based filesystem storage.\n\tif c.UsersFileSystem == nil {\n\t\tc.UsersFileSystem = sysfs.NewSystemZapFS(\"./acme/users\")\n\t}\n\n\t// if no tlscache is set then we need to provide one\n\t// ourselves.\n\tif c.TLSCertCache == nil {\n\t\tc.TLSCertCache = memc.New()\n\t}\n}", "func (c *Configurations) Init() error {\n\tc.Version = Version\n\tc.Location = \"Local\"\n\tc.Debug = Debug\n\n\t// server\n\tc.Server = &Server{}\n\tc.Server.Init()\n\n\t// redis init\n\tc.RedisConf = &RedisConf{}\n\tc.RedisConf.Init()\n\n\treturn nil\n}", "func initConfig() {\n\tif viper.GetBool(\"debug\") {\n\t\tlog.SetLevel(log.DebugLevel)\n\t\tlog.Debugf(\"Debug logging enabled!\")\n\t} else {\n\t\tlog.SetLevel(log.InfoLevel)\n\t}\n\n\tlog.Debugf(\"Cago Version: %s\", Version)\n\n\t// Enable overriding of configuration values using environment variables\n\tviper.SetEnvPrefix(\"CAGO\")\n\tviper.AutomaticEnv()\n\n\t// Check for the configuration file in the following order:\n\t// 1. Local configuration file specified using a command line argument\n\t// 2. Remote configuration file specified using the CAGO_CONFIG_URL environment variable\n\t// 3. Local configuration file cached from a previous download\n\t// 4. Local configuration file manually created by user\n\n\t// Step 1: Check to see if the configuration file path is set via command line argument\n\tconfigurationFilePath := viper.GetString(configFileFlagLong)\n\tif configurationFilePath != \"\" {\n\t\tlog.Debugf(\"Configuration file command line argument '%s' is set to: %s\", configFileFlagLong, configurationFilePath)\n\t} else {\n\t\tlog.Debugf(\"Configuration file command line argument '%s' is not set\", configFileFlagLong)\n\t}\n\n\t// Step 2 and 3: Download a remote file or used a previously cached version\n\tif configurationFilePath == \"\" {\n\t\tremoteConfigurationFileURL, ok := os.LookupEnv(\"CAGO_CONFIG_URL\")\n\t\tif ok {\n\t\t\tlog.Debugf(\"Environment variable '%s' is set to: %s\", remoteConfigFileEnvVariable, remoteConfigurationFileURL)\n\t\t\tconfigurationFilePath = getRemoteConfigurationFile(remoteConfigurationFileURL)\n\t\t} else {\n\t\t\tlog.Debugf(\"Environment variable '%s' is not set\", remoteConfigFileEnvVariable)\n\t\t}\n\t}\n\n\t// Step 4: Use a manually created local file\n\tif configurationFilePath == \"\" {\n\t\t// If the didn't load, try finding the remote configuration file\n\t\tconfigurationFilePath = getLocalConfigurationFile()\n\t}\n\n\tif configurationFilePath == \"\" {\n\t\tlog.Errorf(\"Cago could not find a configuration file to use! Here's what Cago checks:\")\n\t\tlog.Errorf(\" 1. Configuration file path specified using command line argument: %s\", configFileFlagLong)\n\t\tlog.Errorf(\" 2. Remote configuration file URL specified using environment variable: %s\", remoteConfigFileEnvVariable)\n\t\tlog.Errorf(\" 3. Previously cached remote configuration file in: %s\", cachedConfigFile)\n\t\tlog.Errorf(\" 4. 
Manually created configuration file here: %s\", localConfigFile)\n\n\t\tos.Exit(1)\n\t}\n\n\t// Read the configuration file\n\tviper.SetConfigFile(configurationFilePath)\n\tif err := viper.ReadInConfig(); err != nil {\n\t\tlog.Fatalf(\"Could not process configuration file (%s), bailing out: %s\", configurationFilePath, err)\n\t\tos.Exit(1)\n\t}\n}", "func Init(baseCfg BaseConfig) Config {\n\t// urls := []string{}\n\n\tappConfig := Config{\n\t\tbaseCfg,\n\t\t// urls,\n\t}\n\n\treturn appConfig\n}", "func initConfig() {\n\tcredsViper = getViper(credsCfgFile, \"credentials\")\n\tconfigViper = getViper(configCfgFile, \"config\")\n\n\t// If a config file is found, read it in.\n\t_ = credsViper.ReadInConfig()\n\t_ = configViper.ReadInConfig()\n}", "func init() {\n\tinitCfgDir()\n\tinitCreds()\n}", "func (conf *ProjectConfig) Init() (err error) {\n\tfor _, taskName := range conf.SortedTaskNames {\n\t\ttask := conf.Tasks[taskName]\n\t\tif err = task.init(); err != nil {\n\t\t\treturn err\n\t\t}\n\t}\n\treturn err\n}", "func initConfig() {\n\n\t_, hasToken := os.LookupEnv(\"PRIVATE_ACCESS_TOKEN\")\n\t_, hasURL := os.LookupEnv(\"CI_PROJECT_URL\")\n\tif !hasToken || !hasURL {\n\t\tlog.Fatal(\"You need to set 'CI_PROJECT_URL' and 'PRIVATE_ACCESS_TOKEN'\")\n\t}\n\n\tviper.Set(\"Token\", os.Getenv(\"PRIVATE_ACCESS_TOKEN\"))\n\tviper.Set(\"ProjectUrl\", os.Getenv(\"CI_PROJECT_URL\"))\n\n\tu, err := url.Parse(viper.GetString(\"ProjectUrl\"))\n\tif err != nil {\n\t\tlog.Fatal(err)\n\t}\n\n\tviper.Set(\"BaseUrl\", fmt.Sprintf(\"%s://%s\", u.Scheme, u.Host))\n\tviper.Set(\"RegistryUrl\", fmt.Sprintf(\"%s/container_registry.json\", viper.GetString(\"ProjectUrl\")))\n\n}", "func Init(configPath ...string) {\n\tmgr = newAwsMgr(configPath...)\n}", "func initConfig(centralConfig corecfg.CentralConfig) (interface{}, error) {\n\trootProps := RootCmd.GetProperties()\n\t// Parse the config from bound properties and setup gateway config\n\tgatewayConfig = &config.GatewayConfig{\n\t\tSpecPath: rootProps.StringPropertyValue(\"3scale-api-gateway.specPath\"),\n\t\tConfigKey1: rootProps.StringPropertyValue(\"3scale-api-gateway.config_key_1\"),\n\t\tConfigKey2: rootProps.StringPropertyValue(\"3scale-api-gateway.config_key_2\"),\n\t\tConfigKey3: rootProps.StringPropertyValue(\"3scale-api-gateway.config_key_3\"),\n\t}\n\n\tagentConfig := config.AgentConfig{\n\t\tCentralCfg: centralConfig,\n\t\tGatewayCfg: gatewayConfig,\n\t}\n\treturn agentConfig, nil\n}", "func AllInit(kubeConfigPath string, verbose bool, with []string) error {\n\n\t// [ Verbosity System ]\n\tif verbose {\n\t\tlogger.BitwiseLevel = logger.LogEverything\n\t\tlogger.Always(\"*** [ Verbose Mode ] ***\")\n\t} else {\n\t\tlogger.BitwiseLevel = logger.LogAlways | logger.LogCritical | logger.LogWarning | logger.LogDeprecated\n\t}\n\n\t// [ Kubeconfig System ]\n\t// 1. Check if environmental variable is set\n\t// 2. Default to the --kubeconfig flag\n\t// 3. 
Follow the logic in the Clientcmd (path, masterURL, inCluster, default)\n\n\t// Format \"~\" in command line string\n\tkubeConfigPath = strings.ReplaceAll(kubeConfigPath, \"~\", homedir.HomeDir())\n\n\t// Here be dragons\n\t// We probably need an entire fucking client package, but for now\n\t// this will get us to 1.0.0\n\tenvVarValue := os.Getenv(KubeconfigEnvironmentalVariable)\n\tif envVarValue == \"\" {\n\t\tkubeConfigPathValue = kubeConfigPath\n\t} else {\n\t\tkubeConfigPathValue = envVarValue\n\t}\n\tlogger.Debug(\"Kubeconfig Value: %s\", kubeConfigPathValue)\n\n\treturn nil\n}", "func ConfigInit(envLookup *env.VarSet, jetstreamConfig *interfaces.PortalConfig) {\n\n\t// Check we are deployed in Cloud Foundry\n\tif !envLookup.IsSet(VCapApplication) {\n\t\treturn\n\t}\n\tisSQLite := jetstreamConfig.DatabaseProviderName == SQLiteProviderName\n\t// If session secret is default, make sure we change it\n\tif jetstreamConfig.SessionStoreSecret == defaultSessionSecret {\n\t\tif isSQLite {\n\t\t\t// If SQLIte - create a random value to use, since each app instance has its own DB\n\t\t\t// and sessions should not be accessible across different instances\n\t\t\tjetstreamConfig.SessionStoreSecret = uuid.NewV4().String()\n\t\t}\n\t\t// If not SQLite then we are using a shared DB\n\t\t// Just drop through and we'll later use a random value and log a warning\n\t\t// This means each instance has a different session secret - this is not a problem\n\t\t// due to session affinity - it means if the instance a user is bound to goes away, their session\n\t\t// will also be lost and they will need to log in again\n\t} else {\n\t\t// Else, if not default and is SQLlite - add the App Index to the secret\n\t\t// This makes sure we use a different Session Secret per App Instance IF using SQLite\n\t\t// Since this is not a shared database across application instances\n\t\tif isSQLite && envLookup.IsSet(\"CF_INSTANCE_INDEX\") {\n\t\t\tappInstanceIndex, ok := envLookup.Lookup(\"CF_INSTANCE_INDEX\")\n\t\t\tif ok {\n\t\t\t\tjetstreamConfig.SessionStoreSecret = jetstreamConfig.SessionStoreSecret + \"_\" + appInstanceIndex\n\t\t\t\tlog.Infof(\"Updated session secret for Cloud Foundry App Instance: %s\", appInstanceIndex)\n\t\t\t}\n\t\t}\n\t}\n\n\t// Update Database migration status depending on app instance index and SQLite\n\tif !isSQLite && envLookup.IsSet(\"CF_INSTANCE_INDEX\") {\n\t\tif appInstanceIndex, ok := envLookup.Lookup(\"CF_INSTANCE_INDEX\"); ok {\n\t\t\tif index, err := strconv.Atoi(appInstanceIndex); err == nil {\n\t\t\t\tjetstreamConfig.CanMigrateDatabaseSchema = (index == 0)\n\t\t\t\tlog.Infof(\"Skipping DB migration => not index 0 (%d)\", index)\n\t\t\t}\n\t\t}\n\t}\n}", "func Init() map[string]interface{} {\n\n\tconfig := make(map[string]interface{})\n\tmaps := make(map[string]interface{})\n\tfor _, v := range common.Protocols {\n\t\tswitch v {\n\t\tcase \"ipfs\":\n\t\t\tmaps[\"ipfs\"] = \"place\"\n\t\t}\n\t}\n\tconfig[\"domains\"] = Domains{\n\t\tPatterns: []string{\"test\"},\n\t\tSeen: []string{\"testtest\"},\n\t}\n\tconfig[\"map\"] = maps\n\treturn config\n}", "func Init(options ...ConfigOption) *Registrar {\n\n\topt := &ConfigOptions{}\n\n\tfor _, op := range options {\n\t\terr := op(opt)\n\t\tif err != nil {\n\t\t\tlog.Fatalf(\"Error rendering configuration: %v\", err)\n\t\t}\n\t}\n\n\thost := defaultHost\n\n\tif opt.host != \"\" {\n\t\thost = opt.host\n\t}\n\n\t// If an environment variable is set, override\n\tif os.Getenv(\"STACK_REG_HOST\") != \"\" {\n\t\thost = 
os.Getenv(\"STACK_REG_HOST\")\n\t}\n\n\tprovider := providers.Kong(host)\n\n\tif opt.provider != nil {\n\t\tprovider = opt.provider\n\t}\n\n\treturn &Registrar{host, provider}\n}", "func initConfig() {\n\tif cfgFile != \"\" {\n\t\tviper.SetConfigFile(cfgFile)\n\t} else {\n\t\thome := findHome()\n\t\tviper.AddConfigPath(home)\n\t\tviper.SetConfigName(\".ccv2ctl\")\n\t}\n\tviper.SetEnvPrefix(\"CCV2\")\n\tviper.AutomaticEnv() // read in environment variables that match\n\n\tif err := viper.ReadInConfig(); err == nil {\n\t}\n}", "func init() {\n\tv := initViper()\n\tconf = Config{\n\t\tApp: appConfig(v),\n\t\tMysql: mysqlConfig(v),\n\t\tRedis: redisConfig(v),\n\t\tLog: logConfig(v),\n\t\tElasticAPM: elasticApmConfig(v),\n\t\tSentry: sentryConfig(v),\n\t}\n}", "func init() {\n\tcallbacks = make(map[ModuleType]*ConfigCallback, 8)\n\tmodules = make(map[string]ModuleType, 32)\n}", "func initConfig() {\n\tviper.SetEnvPrefix(\"EOS_BIOS\")\n\tviper.AutomaticEnv() // read in environment variables that match\n}", "func initConfig() {\n if cfgFile != \"\" {\n // Use config file from the flag.\n viper.SetConfigFile(cfgFile)\n } else {\n // Find home directory.\n // home, err := homedir.Dir()\n // if err != nil {\n // fmt.Println(err)\n // os.Exit(1)\n // }\n\n // Search config in current directory with name \"gobcos_config\" (without extension).\n viper.AddConfigPath(\".\")\n viper.SetConfigName(\"gobcos_config\")\n }\n\n viper.AutomaticEnv() // read in environment variables that match\n\n // If a config file is found, read it in.\n if err := viper.ReadInConfig(); err == nil {\n if viper.IsSet(\"GroupID\") {\n GroupID = uint(viper.GetInt(\"GroupID\"))\n } else {\n fmt.Println(\"GroupID has not been set, please check the config file gobcos_config.yaml\")\n os.Exit(1)\n }\n if viper.IsSet(\"RPCurl\") {\n URL = viper.GetString(\"RPCurl\")\n } else {\n fmt.Println(\"RPCurl has not been set, please check the config file gobcos_config.yaml\")\n os.Exit(1)\n }\n RPC = getClient(URL, GroupID)\n }\n}", "func (s *Server) initConfig() (err error) {\n\t// Home directory is current working directory by default\n\tif s.HomeDir == \"\" {\n\t\ts.HomeDir, err = os.Getwd()\n\t\tif err != nil {\n\t\t\treturn errors.Wrap(err, \"Failed to get server's home directory\")\n\t\t}\n\t}\n\t// Make home directory absolute, if not already\n\tabsoluteHomeDir, err := filepath.Abs(s.HomeDir)\n\tif err != nil {\n\t\treturn fmt.Errorf(\"Failed to make server's home directory path absolute: %s\", err)\n\t}\n\ts.HomeDir = absoluteHomeDir\n\t// Create config if not set\n\tif s.Config == nil {\n\t\ts.Config = new(ServerConfig)\n\t}\n\ts.CA.server = s\n\ts.CA.HomeDir = s.HomeDir\n\terr = s.initMultiCAConfig()\n\tif err != nil {\n\t\treturn err\n\t}\n\trevoke.SetCRLFetcher(s.fetchCRL)\n\t// Make file names absolute\n\ts.makeFileNamesAbsolute()\n\n\tcompModeStr := os.Getenv(\"FABRIC_CA_SERVER_COMPATIBILITY_MODE_V1_3\")\n\tif compModeStr == \"\" {\n\t\tcompModeStr = \"true\" // TODO: Change default to false once all clients have been updated to use the new authorization header\n\t}\n\n\ts.Config.CompMode1_3, err = strconv.ParseBool(compModeStr)\n\tif err != nil {\n\t\treturn errors.WithMessage(err, \"Invalid value for boolean environment variable 'FABRIC_CA_SERVER_COMPATIBILITY_MODE_V1_3'\")\n\t}\n\n\treturn nil\n}", "func init() {\n\tcore.RegisterConfigGroup(defaultConfigs)\n\tcore.RegisterServiceWithConfig(\"api\", &api.ApiServiceFactory{}, api.Configs)\n\tcore.RegisterServiceWithConfig(\"collector\", &collector.CollectorServiceFactory{}, 
collector.Configs)\n}", "func (o *OIDC) Init(config Config) (err error) {\n\tswitch {\n\tcase o.Type == \"\":\n\t\treturn errors.New(\"type cannot be empty\")\n\tcase o.Name == \"\":\n\t\treturn errors.New(\"name cannot be empty\")\n\tcase o.ClientID == \"\":\n\t\treturn errors.New(\"clientID cannot be empty\")\n\tcase o.ConfigurationEndpoint == \"\":\n\t\treturn errors.New(\"configurationEndpoint cannot be empty\")\n\t}\n\n\t// Validate listenAddress if given\n\tif o.ListenAddress != \"\" {\n\t\tif _, _, err := net.SplitHostPort(o.ListenAddress); err != nil {\n\t\t\treturn errors.Wrap(err, \"error parsing listenAddress\")\n\t\t}\n\t}\n\n\t// Decode and validate openid-configuration endpoint\n\tu, err := url.Parse(o.ConfigurationEndpoint)\n\tif err != nil {\n\t\treturn errors.Wrapf(err, \"error parsing %s\", o.ConfigurationEndpoint)\n\t}\n\tif !strings.Contains(u.Path, \"/.well-known/openid-configuration\") {\n\t\tu.Path = path.Join(u.Path, \"/.well-known/openid-configuration\")\n\t}\n\tif err := getAndDecode(u.String(), &o.configuration); err != nil {\n\t\treturn err\n\t}\n\tif err := o.configuration.Validate(); err != nil {\n\t\treturn errors.Wrapf(err, \"error parsing %s\", o.ConfigurationEndpoint)\n\t}\n\t// Replace {tenantid} with the configured one\n\tif o.TenantID != \"\" {\n\t\to.configuration.Issuer = strings.ReplaceAll(o.configuration.Issuer, \"{tenantid}\", o.TenantID)\n\t}\n\t// Get JWK key set\n\to.keyStore, err = newKeyStore(o.configuration.JWKSetURI)\n\tif err != nil {\n\t\treturn err\n\t}\n\n\to.ctl, err = NewController(o, o.Claims, config, o.Options)\n\treturn\n}", "func initConfig() {\n\t// Find home directory.\n\thome, err := os.UserHomeDir()\n\tzenDir := home + \"/.zen\"\n\n\tcobra.CheckErr(err)\n\n\t// load the config data\n\tcfg = config.InitConfig(zenDir)\n\n\t// set default exec and runner\n\tcfg.AppCfg.Executor = &plugins.DefaultExecutor{}\n\tcfg.AppCfg.Runner = &plugins.DefaultRunner{}\n\n\t// load plugin from path based on config default\n\tfor _, plugin := range cfg.Plugins.Runners {\n\t\tif plugin.Name == cfg.AppCfg.RunnerID {\n\t\t\tplugins.LoadPlugin(plugin.Path)\n\t\t\tcfg.AppCfg.Runner = plugins.ZenPluginRegistry.Runner\n\t\t}\n\t}\n\n\tfor _, plugin := range cfg.Plugins.Executors {\n\t\tif plugin.Name == cfg.AppCfg.ExecutorID {\n\t\t\tplugins.LoadPlugin(plugin.Path)\n\t\t\tcfg.AppCfg.Executor = plugins.ZenPluginRegistry.Executor\n\t\t}\n\t}\n\n}", "func init() {\n\tconfig = Config{DB: make(map[int]models.User, 10), Friends: make(map[int][]int, 10)}\n\n\t// Just to make PrivateKey assign on the next line\n\tvar err error\n\n\tconfig.PrivateKey, err = ioutil.ReadFile(\"./config/keys/key.pem\")\n\tif err != nil {\n\t\tlog.Println(\"Error reading private key\")\n\t\tlog.Println(\"private key reading error: \", err)\n\t\treturn\n\t}\n\n\tconfig.CertKey, err = ioutil.ReadFile(\"./config/keys/cert.pem\")\n\tif err != nil {\n\t\tlog.Println(\"Error reading cert key\")\n\t\tlog.Println(\"cert key error: \", err)\n\t\treturn\n\t}\n\n}", "func initConfig() {\n\tif debug {\n\t\tlog.SetLevel(log.DebugLevel)\n\t} else {\n\t\tlog.SetLevel(log.InfoLevel)\n\t}\n\n\tif cfgFile != \"\" {\n\t\t// Use config file from the flag.\n\t\tviper.SetConfigFile(cfgFile)\n\t} else {\n\t\t// Find home directory.\n\t\thome, err := homedir.Dir()\n\t\tif err != nil {\n\t\t\tlog.Error(err)\n\t\t\tos.Exit(1)\n\t\t}\n\n\t\t// Search config in home directory with name \".s3s2\" (with extension!!!).\n\t\tviper.AddConfigPath(home)\n\t\tviper.SetConfigName(\".s3s2\")\n\t}\n\n\tviper.AutomaticEnv() 
// read in environment variables that match\n\n\t// If a config file is found, read it in.\n\tif err := viper.ReadInConfig(); err == nil {\n\t\tlog.Debug(\"Using config file:\", viper.ConfigFileUsed())\n\t} else {\n\t\t//Uncomment if problems picking up config file.\n\t\t//fmt.Println(err)\n\t}\n}", "func initConfig() {\n\tif cfgFile != \"\" { // enable ability to specify config file via flag\n\t\tviper.SetConfigFile(cfgFile)\n\t}\n\n\tviper.SetConfigName(\".kaudit\") // name of config file (without extension)\n\tviper.AddConfigPath(\"$HOME\") // adding home directory as first search path\n\tviper.AutomaticEnv() // read in environment variables that match\n\n\t// If a config file is found, read it in.\n\tif err := viper.ReadInConfig(); err == nil {\n\t\tfmt.Println(\"Using config file:\", viper.ConfigFileUsed())\n\t}\n\n\n\n\n\n\t// use the current context in kubeconfig\n\tconfig, err := clientcmd.BuildConfigFromFlags(\"\", *kubeconfig)\n\tif err != nil {\n\t\tpanic(err.Error())\n\t}\n\n\t// create the clientset\n\tclientset, err = kubernetes.NewForConfig(config)\n\tif err != nil {\n\t\tpanic(err.Error())\n\t}\n}", "func initConfig() {\n\n\t// Find home directory.\n\thome, err := homedir.Dir()\n\tif err != nil {\n\t\tfmt.Println(err)\n\t\tos.Exit(1)\n\t}\n\n\t// Search config in home directory with name \".etherpad-cli\" (without extension).\n\tviper.AddConfigPath(home)\n\tviper.SetConfigName(configFile)\n\n\tviper.AutomaticEnv() // read in environment variables that match\n\n\t// If a config file is found, read it in.\n\tif err := viper.ReadInConfig(); err != nil {\n\t\tfmt.Printf(\"error reading config file (%s): %s\\n\", viper.ConfigFileUsed(), err)\n\t\tos.Exit(0)\n\t}\n\n\t// setup etherpad client\n\tetherClient = etherpadlite.NewEtherpadLite(viper.GetString(\"etherpad_token\"))\n\tetherClient.BaseURL = viper.GetString(\"etherpad_hostname\")\n}", "func init() {\n\t// set default values for tls\n\tviper.SetDefault(\"tls-cert-file\", constants.TLSCertPath)\n\tviper.SetDefault(\"tls-key-file\", constants.TLSKeyPath)\n\tviper.SetDefault(\"san-list\", constants.DefaultTlsSan)\n\n\t// set default values for log\n\tviper.SetDefault(\"log-max-length\", constants.DefaultLogEntryMaxlength)\n\tviper.SetDefault(\"log-enable-stdout\", true)\n\tviper.SetDefault(\"log-level\", \"info\")\n\n\t// set default values for server\n\tviper.SetDefault(\"server-port\", constants.DefaultPort)\n\tviper.SetDefault(\"server-read-timeout\", constants.DefaultReadTimeout)\n\tviper.SetDefault(\"server-read-header-timeout\", constants.DefaultReadHeaderTimeout)\n\tviper.SetDefault(\"server-write-timeout\", constants.DefaultWriteTimeout)\n\tviper.SetDefault(\"server-idle-timeout\", constants.DefaultIdleTimeout)\n\tviper.SetDefault(\"server-max-header-bytes\", constants.DefaultMaxHeaderBytes)\n\n\tviper.SetDefault(\"cms-ca-cert-validity\", constants.DefaultCACertValidity)\n\tviper.SetDefault(\"cms-ca-organization\", constants.DefaultOrganization)\n\tviper.SetDefault(\"cms-ca-locality\", constants.DefaultLocality)\n\tviper.SetDefault(\"cms-ca-province\", constants.DefaultProvince)\n\tviper.SetDefault(\"cms-ca-country\", constants.DefaultCountry)\n\n\tviper.SetDefault(\"aas-tls-cn\", constants.DefaultAasTlsCn)\n\tviper.SetDefault(\"aas-jwt-cn\", constants.DefaultAasJwtCn)\n\tviper.SetDefault(\"aas-tls-san\", constants.DefaultTlsSan)\n\n\tviper.SetDefault(\"token-duration-mins\", constants.DefaultTokenDurationMins)\n}", "func (conf *Config) Init(croniclePath string) error {\n\t// Assign the path for each task or schedule 
repo\n\tconf.PropigateTaskProperties(croniclePath)\n\tif err := conf.Validate(); err != nil {\n\t\treturn err\n\t}\n\n\t//If conf.Repo is a given repo, clone and fetch\n\tif conf.Repo != nil {\n\t\tauth, err := conf.Repo.Auth()\n\t\tif err != nil {\n\t\t\treturn err\n\t\t}\n\t\tg, err := Clone(croniclePath, conf.Repo.URL, &auth)\n\t\tif err != nil {\n\t\t\treturn err\n\t\t}\n\t\tif err := g.Checkout(conf.Repo.Branch, conf.Repo.Commit); err != nil {\n\t\t\treturn err\n\t\t}\n\t}\n\n\tfor _, schedule := range conf.Schedules {\n\t\tfor _, task := range schedule.Tasks {\n\t\t\tif err := task.Validate(); err != nil {\n\t\t\t\treturn err\n\t\t\t}\n\t\t\tif task.Repo != nil {\n\t\t\t\tauth, err := task.Repo.Auth()\n\t\t\t\tif err != nil {\n\t\t\t\t\treturn err\n\t\t\t\t}\n\t\t\t\tif _, err := Clone(task.Path, task.Repo.URL, &auth); err != nil {\n\t\t\t\t\t// if _, err := Clone(task.Path, task.Repo.URL, task.Repo.DeployKey); err != nil {\n\t\t\t\t\treturn err\n\t\t\t\t}\n\t\t\t}\n\t\t}\n\t}\n\treturn nil\n}", "func initConfig() error {\n newConfig := ClientConfig{\n Characters: []CharacterInfo{},\n }\n\n return SaveConfig(newConfig)\n}", "func init() {\n\tif e, ok := os.LookupEnv(EnvConsul); ok && e != \"\" {\n\t\tconsulAddr = e\n\t}\n\tif consulAddr == \"--\" {\n\t\treturn\n\t}\n\tif consulAddr == \"-\" || (env.InTest() && consulAddr == localConsulAdr) {\n\t\tnoConsulTestMode()\n\t\treturn\n\t}\n\tif _, _, err := net.SplitHostPort(consulAddr); err != nil {\n\t\tconsulAddr = consulAddr + \":8500\"\n\t}\n\tif e, ok := os.LookupEnv(EnvFederatedDcs); ok {\n\t\tfederatedDcs = strings.Fields(e)\n\t}\n\trand.Seed(time.Now().UTC().UnixNano())\n\n\tmustConnect()\n\tupdateEnv()\n}", "func Init(conf *viper.Viper) {\n\tfor name, output := range Outputs {\n\t\toutput.LoadConf(conf.Sub(name))\n\t}\n}", "func NewConfig(essentialfiles, commonfiles []string) (*Config, error) {\n\t// created config factory object\n\tfactory, err := goarchaius.NewConfigFactory(lager.Logger)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\tfactory.DeInit()\n\tfactory.Init()\n\n\tfiles := make([]string, 0)\n\t// created file source object\n\tfileSource := filesource.NewYamlConfigurationSource()\n\t// adding all files with file source\n\tfor _, v := range essentialfiles {\n\t\tif err := fileSource.AddFileSource(v, filesource.DefaultFilePriority); err != nil {\n\t\t\tlager.Logger.Errorf(err, \"add file source error.\")\n\t\t\treturn nil, err\n\t\t}\n\t\tfiles = append(files, v)\n\t}\n\tfor _, v := range commonfiles {\n\t\t_, err := os.Stat(v)\n\t\tif os.IsNotExist(err) {\n\t\t\tlager.Logger.Infof(\"[%s] not exist\", v)\n\t\t\tcontinue\n\t\t}\n\t\tif err := fileSource.AddFileSource(v, filesource.DefaultFilePriority); err != nil {\n\t\t\tlager.Logger.Infof(\"%v\", err)\n\t\t\treturn nil, err\n\t\t}\n\t\tfiles = append(files, v)\n\t}\n\n\terr = factory.AddSource(fileSource)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\tconf := &Config{\n\t\tConfigFiles: files,\n\t\tConfigFactory: factory,\n\t}\n\n\teventHandler := EventListener{\n\t\tName: \"EventHandler\",\n\t\tFactory: factory,\n\t}\n\n\tfactory.RegisterListener(eventHandler, \"a*\")\n\n\tlager.Logger.Infof(\"Configuration files: %s\", strings.Join(files, \", \"))\n\treturn conf, nil\n}", "func initConfig() *domain.ApplicationConfig {\n\tconfigPath := util.GetEnv(envConfigPath, defaultConfigPath)\n\tprefix := util.GetEnv(envConfigPrefix, defaultConfigPrefix)\n\tcfg, err := util.ReadConfig(configPath, prefix)\n\tif err != nil {\n\t\tlog.Fatal(err)\n\t}\n\treturn cfg\n}", "func init() 
{\n\tcfg, err := config.LoadDefaultConfig(context.TODO(), config.WithSharedConfigProfile(\"tavern-automation\"))\n\tif err != nil {\n\t\tlog.Fatal(\"ERROR: Unable to resolve credentials for tavern-automation: \", err)\n\t}\n\n\tstsc = sts.NewFromConfig(cfg)\n\torgc = organizations.NewFromConfig(cfg)\n\tec2c = ec2.NewFromConfig(cfg)\n\n\t// NOTE: By default, only describes regions that are enabled in the root org account, not all Regions\n\tresp, err := ec2c.DescribeRegions(context.TODO(), &ec2.DescribeRegionsInput{})\n\tif err != nil {\n\t\tlog.Fatal(\"ERROR: Unable to describe regions\", err)\n\t}\n\n\tfor _, region := range resp.Regions {\n\t\tregions = append(regions, *region.RegionName)\n\t}\n\tfmt.Println(\"INFO: Listing all enabled regions:\")\n\tfmt.Println(regions)\n}", "func Init(ac *atmi.ATMICtx) int {\n\n\tac.TpLogWarn(\"Doing server init...\")\n\tif err := ac.TpInit(); err != nil {\n\t\treturn FAIL\n\t}\n\n\t//Get the configuration\n\n\t//Allocate configuration buffer\n\tbuf, err := ac.NewUBF(16 * 1024)\n\tif nil != err {\n\t\tac.TpLogError(\"Failed to allocate buffer: [%s]\", err.Error())\n\t\treturn FAIL\n\t}\n\n\tbuf.BChg(u.EX_CC_CMD, 0, \"g\")\n\tbuf.BChg(u.EX_CC_LOOKUPSECTION, 0, fmt.Sprintf(\"%s/%s\", PROGSECTION, os.Getenv(\"NDRX_CCTAG\")))\n\n\tif _, err := ac.TpCall(\"@CCONF\", buf, 0); nil != err {\n\t\tac.TpLogError(\"ATMI Error %d:[%s]\\n\", err.Code(), err.Message())\n\t\treturn FAIL\n\t}\n\n\t//Dump to log the config read\n\tbuf.TpLogPrintUBF(atmi.LOG_DEBUG, \"Got configuration.\")\n\n\toccs, _ := buf.BOccur(u.EX_CC_KEY)\n\n\t// Load in the config...\n\tfor occ := 0; occ < occs; occ++ {\n\t\tac.TpLogDebug(\"occ %d\", occ)\n\t\tfldName, err := buf.BGetString(u.EX_CC_KEY, occ)\n\n\t\tif nil != err {\n\t\t\tac.TpLogError(\"Failed to get field \"+\n\t\t\t\t\"%d occ %d\", u.EX_CC_KEY, occ)\n\t\t\treturn FAIL\n\t\t}\n\n\t\tac.TpLogDebug(\"Got config field [%s]\", fldName)\n\n\t\tswitch fldName {\n\n\t\tcase \"mykey1\":\n\t\t\tmyval, _ := buf.BGetString(u.EX_CC_VALUE, occ)\n\t\t\tac.TpLogDebug(\"Got [%s] = [%s] \", fldName, myval)\n\t\t\tbreak\n\n\t\tdefault:\n\n\t\t\tbreak\n\t\t}\n\t}\n\t//Advertize TESTSVC\n\tif err := ac.TpAdvertise(\"TESTSVC\", \"TESTSVC\", TESTSVC); err != nil {\n\t\tac.TpLogError(\"Failed to Advertise: ATMI Error %d:[%s]\\n\",\n\t\t\terr.Code(), err.Message())\n\t\treturn atmi.FAIL\n\t}\n\n\tif err := ac.TpAdvertise(\"CORSVC\", \"CORSVC\", CORSVC); err != nil {\n\t\tac.TpLogError(\"Failed to Advertise: ATMI Error %d:[%s]\\n\",\n\t\t\terr.Code(), err.Message())\n\t\treturn atmi.FAIL\n\t}\n\n\tif err := ac.TpAdvertise(\"CONSTAT\", \"CONSTAT\", CONSTAT); err != nil {\n\t\tac.TpLogError(\"Failed to Advertise: ATMI Error %d:[%s]\\n\",\n\t\t\terr.Code(), err.Message())\n\t\treturn atmi.FAIL\n\t}\n\n\treturn SUCCEED\n}", "func initConfig() {\n\tviper.AutomaticEnv()\n}", "func init() {\n\tconfig := domain.LoadCfg(\"manga\")\n\tif config == nil {\n\t\tlog.Info(\"用户未设置漫画相关的配置\")\n\t\treturn\n\t}\n\tmangaClockIn.config = config\n\ttaskList = append(taskList, mangaClockIn)\n}", "func (c *configData) init() {\n\tconst filename = \".workflow.yml\"\n\n\tc.Global = viper.New()\n\tc.Local = viper.New()\n\n\t// c.Local.SetConfigFile(\n\t// \tpath.Join(git.RootDir(), filename),\n\t// )\n\n\tc.Global.SetConfigFile(\n\t\tpath.Join(currentUser.HomeDir, filename),\n\t)\n\n\t// TODO: configs := []*viper.Viper{c.Local, c.Global}\n\tconfigs := []*viper.Viper{c.Global}\n\tfor _, v := range configs {\n\t\t_, _ = 
file.Touch(v.ConfigFileUsed())\n\t\tfailIfError(v.ReadInConfig())\n\t}\n\n\tfailIfError(c.validate())\n\tfailIfError(c.update())\n\tc.initJira()\n}", "func Init(options *types.Options) error {\n\tuarand.Default = uarand.NewWithCustomList(userAgents)\n\n\tif err := protocolstate.Init(options); err != nil {\n\t\treturn err\n\t}\n\tif err := dnsclientpool.Init(options); err != nil {\n\t\treturn err\n\t}\n\tif err := httpclientpool.Init(options); err != nil {\n\t\treturn err\n\t}\n\tif err := signerpool.Init(options); err != nil {\n\t\treturn err\n\t}\n\tif err := networkclientpool.Init(options); err != nil {\n\t\treturn err\n\t}\n\tif err := rdapclientpool.Init(options); err != nil {\n\t\treturn err\n\t}\n\treturn nil\n}", "func initConfig() {\n\tif cfgFile != \"\" {\n\t\t// Use config file from the flag.\n\t\tviper.SetConfigFile(cfgFile)\n\t} else {\n\t\t// Find home directory.\n\t\thome, err := homedir.Dir()\n\t\tif err != nil {\n\t\t\tfmt.Println(err)\n\t\t\tos.Exit(1)\n\t\t}\n\n\t\t// Search config in home directory with name \".coscli\" (without extension).\n\t\tviper.AddConfigPath(home)\n\t\tviper.SetConfigName(\".coscli\")\n\t}\n\n\tviper.AutomaticEnv() // read in environment variables that match\n\n\t// If a config file is found, read it in.\n\tif err := viper.ReadInConfig(); err == nil {\n\t\t//fmt.Println(\"Using config file:\", viper.ConfigFileUsed())\n\t}\n}", "func init() {\n\truntime.GOMAXPROCS(runtime.NumCPU())\n\tappAddr = os.Getenv(\"APP_ADDR\") // e.g. \"0.0.0.0:8080\" or \"\"\n\n\tconf = new(app.ConfigConode)\n\tif err := app.ReadTomlConfig(conf, defaultConfigFile); err != nil {\n\t\tfmt.Printf(\"Couldn't read configuration file: %v\", err)\n\t\tos.Exit(1)\n\t}\n\n\tsuite = app.GetSuite(conf.Suite)\n\tpub, _ := base64.StdEncoding.DecodeString(conf.AggPubKey)\n\tsuite.Read(bytes.NewReader(pub), &public_X0)\n}", "func initConfig() {\n\t// default delimiter is \".\" and emails contain these\n\tv := viper.NewWithOptions(viper.KeyDelimiter(\"::\"))\n\tv.SetConfigType(\"json\")\n\n\tif cfgFile != \"\" {\n\t\t// Use config file from the flag.\n\t\tv.SetConfigFile(cfgFile)\n\t} else {\n\t\t// Find home directory.\n\t\thome, err := homedir.Dir()\n\t\tif err != nil {\n\t\t\tfmt.Println(err)\n\t\t\tos.Exit(1)\n\t\t}\n\n\t\t// Search config in home directory with name \".config/jira\" (without extension).\n\t\tv.AddConfigPath(home + \"/.config\")\n\t\tv.SetConfigName(\"jira\")\n\t\tcfgFile = home + \"/.config/jira\"\n\t}\n\n\t// If a config file is found, read it in.\n\tif err := v.ReadInConfig(); err != nil {\n\t\tfmt.Println(\"Unable to read config using config file:\", v.ConfigFileUsed())\n\t\treturn\n\t}\n\n\tjiraConfig = &atlassian.Config{\n\t\tToken: getEnv(\"ATLASSIAN_API_TOKEN\", v.GetString(\"token\")),\n\t\tUser: getEnv(\"ATLASSIAN_API_USER\", v.GetString(\"user\")),\n\t\tHost: getEnv(\"ATLASSIAN_HOST\", v.GetString(\"host\")),\n\t}\n\tjiraClient = atlassian.GetJIRAClient(jiraConfig)\n}", "func (c *Objs) Initialize(i util.XapiClient) {\n c.Address = &addr.Addr{}\n c.Address.Initialize(i)\n\n c.AddressGroup = &addrgrp.AddrGrp{}\n c.AddressGroup.Initialize(i)\n\n c.Services = &srvc.Srvc{}\n c.Services.Initialize(i)\n}", "func (dc *Client) Init() error {\n\tdc.GenericServicePool = make(map[string]*dg.GenericService, 4)\n\n\tcls := config.GetBootstrap().StaticResources.Clusters\n\n\t// dubbogo comsumer config\n\tdgCfg = dg.ConsumerConfig{\n\t\tCheck: new(bool),\n\t\tRegistries: make(map[string]*dg.RegistryConfig, 4),\n\t}\n\tdgCfg.ApplicationConfig = defaultApplication\n\tfor i 
:= range cls {\n\t\tc := cls[i]\n\t\tdgCfg.Request_Timeout = c.RequestTimeoutStr\n\t\tdgCfg.Connect_Timeout = c.ConnectTimeoutStr\n\t\tfor k, v := range c.Registries {\n\t\t\tif len(v.Protocol) == 0 {\n\t\t\t\tlogger.Warnf(\"can not find registry protocol config, use default type 'zookeeper'\")\n\t\t\t\tv.Protocol = defaultDubboProtocol\n\t\t\t}\n\t\t\tdgCfg.Registries[k] = &dg.RegistryConfig{\n\t\t\t\tProtocol: v.Protocol,\n\t\t\t\tAddress: v.Address,\n\t\t\t\tTimeoutStr: v.Timeout,\n\t\t\t\tUsername: v.Username,\n\t\t\t\tPassword: v.Password,\n\t\t\t}\n\t\t}\n\t}\n\n\tinitDubbogo()\n\n\treturn nil\n}", "func InitializeConfig(config *Config) {\n\tconfig.CSRInitialRetrialInterval = defaultCSRInitialRetrialInterval\n\tconfig.CSRMaxRetries = defaultCSRMaxRetries\n\tconfig.CSRGracePeriodPercentage = defaultCSRGracePeriodPercentage\n\tconfig.PlatformConfig = platform.ClientConfig{}\n}", "func (b *BotConfig) Init() error {\n\tif b.ASSET_CODE_A == b.ASSET_CODE_B && b.ISSUER_A == b.ISSUER_B {\n\t\treturn fmt.Errorf(\"error: both assets cannot be the same '%s:%s'\", b.ASSET_CODE_A, b.ISSUER_A)\n\t}\n\n\tasset, e := parseAsset(b.ASSET_CODE_A, b.ISSUER_A, \"A\")\n\tif e != nil {\n\t\treturn e\n\t}\n\tb.assetBase = *asset\n\n\tasset, e = parseAsset(b.ASSET_CODE_B, b.ISSUER_B, \"B\")\n\tif e != nil {\n\t\treturn e\n\t}\n\tb.assetQuote = *asset\n\n\tb.tradingAccount, e = utils.ParseSecret(b.TRADING_SECRET_SEED)\n\tif e != nil {\n\t\treturn e\n\t}\n\tif b.tradingAccount == nil {\n\t\treturn fmt.Errorf(\"no trading account specified\")\n\t}\n\n\tb.sourceAccount, e = utils.ParseSecret(b.SOURCE_SECRET_SEED)\n\treturn e\n}", "func initializeAntreaIPAM(t *testing.T, data *TestData) {\n\tp80 = 80\n\tp81 = 81\n\tp8080 = 8080\n\tp8081 = 8081\n\tp8082 = 8082\n\tp8085 = 8085\n\tpods = []string{\"a\", \"b\", \"c\"}\n\tnamespaces = make(map[string]string)\n\tregularNamespaces := make(map[string]string)\n\tsuffix := randName(\"\")\n\tnamespaces[\"x\"] = \"antrea-x-\" + suffix\n\tregularNamespaces[\"x\"] = namespaces[\"x\"]\n\t// This function \"initializeAntreaIPAM\" will be used more than once, and variable \"allPods\" is global.\n\t// It should be empty every time when \"initializeAntreaIPAM\" is performed, otherwise there will be unexpected\n\t// results.\n\tallPods = []Pod{}\n\tpodsByNamespace = make(map[string][]Pod)\n\n\tfor _, ns := range antreaIPAMNamespaces {\n\t\tnamespaces[ns] = ns\n\t}\n\n\tfor _, podName := range pods {\n\t\tfor _, ns := range namespaces {\n\t\t\tallPods = append(allPods, NewPod(ns, podName))\n\t\t\tpodsByNamespace[ns] = append(podsByNamespace[ns], NewPod(ns, podName))\n\t\t}\n\t}\n\n\tvar err error\n\t// k8sUtils is a global var\n\tk8sUtils, err = NewKubernetesUtils(data)\n\tfailOnError(err, t)\n\t_, err = k8sUtils.Bootstrap(regularNamespaces, pods, true)\n\tfailOnError(err, t)\n\tips, err := k8sUtils.Bootstrap(namespaces, pods, false)\n\tfailOnError(err, t)\n\tpodIPs = *ips\n}", "func initConfig() {\n\tif cfgFile != \"\" {\n\t\t// Use config file from the flag.\n\t\tviper.SetConfigFile(cfgFile)\n\t} else {\n\t\t// Find home directory.\n\t\thome, err := homedir.Dir()\n\t\tif err != nil {\n\t\t\tfmt.Println(err)\n\t\t\tos.Exit(1)\n\t\t}\n\n\t\t// Search config in home directory with name \".btrfaasctl\" (without extension).\n\t\tviper.AddConfigPath(home)\n\t\tviper.SetConfigName(\".btrfaasctl\")\n\t}\n\n\tviper.SetEnvPrefix(\"btrfaas\")\n\tviper.AutomaticEnv() // read in environment variables that match\n\n\t// If a config file is found, read it in.\n\tif err := viper.ReadInConfig(); 
err == nil {\n\t\tfmt.Println(\"Using config file:\", viper.ConfigFileUsed())\n\t}\n}", "func initConfig() {\n\t// System Config File\n\tif cfgFile != \"\" {\n\t\t// Use config file from the flag.\n\t\tviper.SetConfigFile(cfgFile)\n\t} else {\n\t\tviper.SetConfigName(\"config.yaml\")\n\n\t}\n\t// Get Env\n\tviper.AutomaticEnv()\n\n\t// If a config file is found, read it in.\n\tif err := viper.ReadInConfig(); err == nil {\n\t\tlog.Log(\"msg\", \"Using config file:\"+viper.ConfigFileUsed())\n\t}\n\n\t// Offerings file\n\toffers = viper.New()\n\tif err := checkOfferFile(offerFile, offers); err != nil {\n\t\tlog.Fatal(err)\n\t}\n}", "func initConfig() {\n\tif cfgFile != \"\" {\n\t\t// Use configs file from the flag.\n\t\tviper.SetConfigFile(cfgFile)\n\t} else {\n\t\t// Find working directory.\n\t\twd, err := os.Getwd()\n\t\tif err != nil {\n\t\t\tfmt.Println(err)\n\t\t}\n\n\t\t// Search configs in working directory with name \".bao\" (without extension).\n\t\tviper.AddConfigPath(wd)\n\t\tviper.SetConfigName(\".bao\")\n\t}\n\n\t//viper.AutomaticEnv() // read in environment variables that match\n\n\t// If a configs file is found, read it in.\n\tif err := viper.ReadInConfig(); err == nil {\n\t\tfmt.Println(\"Using configs file:\", viper.ConfigFileUsed())\n\t} else {\n\t\tfmt.Println(\"Could not find .bao.yml in project root. Default build options will be used\")\n\t}\n}", "func initConfig() {\n\t// Override our config with any matching environment variables\n\tviper.AutomaticEnv()\n\n\t// Set environment variable prefix, eg: VAULT_UTIL_AWS_REGION\n\tviper.SetEnvPrefix(\"vault_util\")\n\n\t// Load the config file\n\tsettingsFile := ciutils.SettingsPath(\"vault-util.toml\")\n\tsettingsFileExists, err := fs.Exists(settingsFile)\n\tif err != nil {\n\t\tlog.Fatal(err)\n\t}\n\n\tif settingsFileExists {\n\t\tviper.SetConfigFile(settingsFile)\n\n\t\tif err := viper.ReadInConfig(); err != nil {\n\t\t\tlog.Fatal(err)\n\t\t}\n\t}\n\n\t// Override our config with any matching environment variables\n\tviper.AutomaticEnv()\n}", "func Initialize(cfg Config) {\n\tvar err error\n\tif cfg.UseKms {\n\t\t// FIXME(xnum): set at cmd.\n\t\tif utils.FullnodeCluster != utils.Environment() {\n\t\t\tif err = initKmsClient(); err != nil {\n\t\t\t\tpanic(err.Error())\n\t\t\t}\n\t\t}\n\t}\n\n\tswitch cfg.Source {\n\tcase None:\n\t\tgetters = []Getter{noneGetter}\n\tcase K8S:\n\t\tgetters = []Getter{k8sGetter}\n\tcase File:\n\t\tgetters = []Getter{staticGetter}\n\t\tif err = initDataFromFile(); err != nil {\n\t\t\tpanic(err)\n\t\t}\n\t// FIXME(xnum): not encourge to use. 
It depends on env.\n\tcase Auto:\n\t\tif utils.Environment() == utils.LocalDevelopment ||\n\t\t\tutils.Environment() == utils.CI {\n\t\t\tgetters = []Getter{staticGetter}\n\t\t\terr := initDataFromFile()\n\t\t\tif err != nil {\n\t\t\t\tlog.Panicln(err)\n\t\t\t}\n\t\t\treturn\n\t\t}\n\t\tgetters = []Getter{k8sGetter}\n\t}\n}", "func initRunConfig() (*runConfig, error) {\n\t// Find the server binary for each phase\n\tpCmd := flagServerCmd\n\tif flagParseServerCmd != \"\" {\n\t\tpCmd = flagParseServerCmd\n\t}\n\tif pCmd == \"\" {\n\t\treturn nil, fmt.Errorf(\"no parse server defined\")\n\t}\n\n\tcCmd := flagServerCmd\n\tif flagCheckServerCmd != \"\" {\n\t\tcCmd = flagCheckServerCmd\n\t}\n\tif cCmd == \"\" {\n\t\treturn nil, fmt.Errorf(\"no check server defined\")\n\t}\n\n\teCmd := flagServerCmd\n\tif flagEvalServerCmd != \"\" {\n\t\teCmd = flagEvalServerCmd\n\t}\n\tif eCmd == \"\" {\n\t\treturn nil, fmt.Errorf(\"no eval server defined\")\n\t}\n\n\t// Only launch each required binary once\n\tservers := make(map[string]celrpc.ConfClient)\n\tservers[pCmd] = nil\n\tservers[cCmd] = nil\n\tservers[eCmd] = nil\n\tfor cmd := range servers {\n\t\tvar cli celrpc.ConfClient\n\t\tvar err error\n\t\tif flagPipe {\n\t\t\tcli, err = celrpc.NewPipeClient(cmd, flagPipeBase64)\n\t\t} else {\n\t\t\tcli, err = celrpc.NewGrpcClient(cmd)\n\t\t}\n\t\tif err != nil {\n\t\t\treturn nil, err\n\t\t}\n\t\tservers[cmd] = cli\n\t}\n\n\tvar rc runConfig\n\trc.parseClient = servers[pCmd]\n\trc.checkClient = servers[cCmd]\n\trc.evalClient = servers[eCmd]\n\trc.checkedOnly = flagCheckedOnly\n\trc.skipCheck = flagSkipCheck\n\treturn &rc, nil\n}", "func initConfig() {\n\tviper.AutomaticEnv() // read in environment variables that match\n}", "func (c *Config) init() {\n\n\tc.logger = logrus.New()\n\n\t// Connect slots\n\tc.ConnectStringSet(func(key string, val string) {\n\t\tc.SetString(key, val)\n\t})\n\tc.ConnectBoolSet(func(key string, val bool) {\n\t\tc.SetBool(key, val)\n\t})\n\tc.ConnectStringValue(func(key string) string {\n\t\treturn c.GetString(key)\n\t})\n\tc.ConnectBoolValue(func(key string) bool {\n\t\treturn c.GetBool(key)\n\t})\n\tc.ConnectSave(func() {\n\t\tc.save()\n\t})\n\tc.ConnectDefaults(func() {\n\t\tc.SetDefaults()\n\t})\n}", "func (baseCfg *BaseConfig) InitConfig(configPath string) {\n\tconfig, err := LoadConfig(configPath)\n\tif err != nil {\n\t\tlog.Fatal(err)\n\t}\n\n\tbaseCfg.PN_GLOBAL_ROUTER, err = jsonparser.GetString(config, \"PN_GLOBAL_ROUTER\")\n\tif err != nil {\n\t\tlog.Fatalf(\"PN_GLOBAL_ROUTER %v:\", err)\n\t}\n\n\tbaseCfg.PN_GLOBAL_PORTAL, err = jsonparser.GetString(config, \"PN_GLOBAL_PORTAL\")\n\tif err != nil {\n\t\tlog.Fatalf(\"PN_GLOBAL_PORTAL %v:\", err)\n\t}\n\n\tbaseCfg.PN_GLOBAL_JWT_PASSPHRASE, err = jsonparser.GetString(config, \"PN_GLOBAL_JWT_PASSPHRASE\")\n\tif err != nil {\n\t\tlog.Fatalf(\"PN_GLOBAL_JWT_PASSPHRASE %v:\", err)\n\t}\n\n\tbaseCfg.MY_POD_NAMESPACE = os.Getenv(\"MY_POD_NAMESPACE\")\n\n\tbaseCfg.DB_PATH = os.Getenv(\"DB_PATH\")\n\tif baseCfg.DB_PATH == \"\" {\n\t\tbaseCfg.DB_PATH = \"./marvin-connector.db\"\n\t}\n\n\tbaseCfg.CONNECTOR_ADDRESS = os.Getenv(\"CONNECTOR_ADDRESS\")\n\n\tbaseCfg.NEW_PUBLISHED_POLLING_INTERVAL = os.Getenv(\"NEW_PUBLISHED_POLLING_INTERVAL\")\n\tif baseCfg.NEW_PUBLISHED_POLLING_INTERVAL == \"\" {\n\t\tbaseCfg.NEW_PUBLISHED_POLLING_INTERVAL = \"2h\"\n\t}\n\n\tb, err := strconv.ParseBool(os.Getenv(\"CHECK_UPGRADE_STATUS_ENABLED\"))\n\tif err != nil {\n\t\tlog.Printf(\"[Warning] parse bool CHECK_UPGRADE_STATUS_ENABLED failed. 
Not a boolean\")\n\t\tbaseCfg.CHECK_UPGRADE_STATUS_ENABLED = false\n\t} else {\n\t\tbaseCfg.CHECK_UPGRADE_STATUS_ENABLED = b\n\t}\n\n\tbaseCfg.UPGRADE_STATUS_POLLING_INTERVAL = os.Getenv(\"UPGRADE_STATUS_POLLING_INTERVAL\")\n\tif baseCfg.UPGRADE_STATUS_POLLING_INTERVAL == \"\" {\n\t\tbaseCfg.UPGRADE_STATUS_POLLING_INTERVAL = \"1m\"\n\t}\n\n\tbaseCfg.JOB_TIMEOUT = os.Getenv(\"JOB_TIMEOUT\")\n\tif baseCfg.JOB_TIMEOUT == \"\" {\n\t\tbaseCfg.JOB_TIMEOUT = \"2m\"\n\t}\n\n\tlog.Printf(\"SECRET.PN_GLOBAL_ROUTER %v\", baseCfg.PN_GLOBAL_ROUTER)\n\tlog.Printf(\"SECRET.PN_GLOBAL_PORTAL %v\", baseCfg.PN_GLOBAL_PORTAL)\n\tlog.Printf(\"ENV.MY_POD_NAMESPACE %v\", baseCfg.MY_POD_NAMESPACE)\n\tlog.Printf(\"ENV.DB_PATH %v\", baseCfg.DB_PATH)\n\tlog.Printf(\"ENV.CONNECTOR_ADDRESS %v\", baseCfg.CONNECTOR_ADDRESS)\n\tlog.Printf(\"ENV.NEW_PUBLISHED_POLLING_INTERVAL %v\", baseCfg.NEW_PUBLISHED_POLLING_INTERVAL)\n\tlog.Printf(\"ENV.UPGRADE_STATUS_POLLING_INTERVAL %v\", baseCfg.UPGRADE_STATUS_POLLING_INTERVAL)\n\tlog.Printf(\"ENV.JOB_TIMEOUT %v\", baseCfg.JOB_TIMEOUT)\n\n}", "func (c *criService) initPlatform() error {\n\tpluginDirs := map[string]string{\n\t\tdefaultNetworkPlugin: c.config.NetworkPluginConfDir,\n\t}\n\tfor name, conf := range c.config.Runtimes {\n\t\tif conf.NetworkPluginConfDir != \"\" {\n\t\t\tpluginDirs[name] = conf.NetworkPluginConfDir\n\t\t}\n\t}\n\n\tc.netPlugin = make(map[string]cni.CNI)\n\tfor name, dir := range pluginDirs {\n\t\tmax := c.config.NetworkPluginMaxConfNum\n\t\tif name != defaultNetworkPlugin {\n\t\t\tif m := c.config.Runtimes[name].NetworkPluginMaxConfNum; m != 0 {\n\t\t\t\tmax = m\n\t\t\t}\n\t\t}\n\t\t// For windows, the loopback network is added as default.\n\t\t// There is no need to explicitly add one hence networkAttachCount is 1.\n\t\t// If there are more network configs the pod will be attached to all the\n\t\t// networks but we will only use the ip of the default network interface\n\t\t// as the pod IP.\n\t\ti, err := cni.New(cni.WithMinNetworkCount(windowsNetworkAttachCount),\n\t\t\tcni.WithPluginConfDir(dir),\n\t\t\tcni.WithPluginMaxConfNum(max),\n\t\t\tcni.WithPluginDir([]string{c.config.NetworkPluginBinDir}))\n\t\tif err != nil {\n\t\t\treturn fmt.Errorf(\"failed to initialize cni: %w\", err)\n\t\t}\n\t\tc.netPlugin[name] = i\n\t}\n\n\treturn nil\n}", "func init() {\n\tcli.InitConfig(configName)\n}", "func init() {\n\tcli.InitConfig(configName)\n}", "func Init(c Config) error {\n\terr := c.validate()\n\tif err != nil {\n\t\treturn err\n\t}\n\n\tapiKEY = c.APIKey\n\tlog = c.Logger\n\tlistMapper = c.ListMapper\n\n\treturn nil\n}", "func configureCAVolumes(d *appsv1.Deployment, tenants *lokiv1.TenantsSpec) error {\n\tif tenants.Authentication == nil {\n\t\treturn nil // nothing to do\n\t}\n\n\tmountCAConfigMap := func(container *corev1.Container, volumes *[]corev1.Volume, tennantName, configmapName string) {\n\t\tcontainer.VolumeMounts = append(container.VolumeMounts, corev1.VolumeMount{\n\t\t\tName: tenantCAVolumeName(tennantName),\n\t\t\tMountPath: tenantCADir(tennantName),\n\t\t})\n\t\t*volumes = append(*volumes, corev1.Volume{\n\t\t\tName: tenantCAVolumeName(tennantName),\n\t\t\tVolumeSource: corev1.VolumeSource{\n\t\t\t\tConfigMap: &corev1.ConfigMapVolumeSource{\n\t\t\t\t\tLocalObjectReference: corev1.LocalObjectReference{\n\t\t\t\t\t\tName: configmapName,\n\t\t\t\t\t},\n\t\t\t\t},\n\t\t\t},\n\t\t})\n\t}\n\n\tvar gwIndex int\n\tfor i, c := range d.Spec.Template.Spec.Containers {\n\t\tif c.Name == gatewayContainerName {\n\t\t\tgwIndex = 
i\n\t\t\tbreak\n\t\t}\n\t}\n\n\tgwContainer := d.Spec.Template.Spec.Containers[gwIndex].DeepCopy()\n\tgwArgs := gwContainer.Args\n\tgwVolumes := d.Spec.Template.Spec.Volumes\n\n\tmTLS := false\n\tfor _, tenant := range tenants.Authentication {\n\t\tswitch {\n\t\tcase tenant.OIDC != nil:\n\t\t\tif tenant.OIDC.IssuerCA != nil {\n\t\t\t\tmountCAConfigMap(gwContainer, &gwVolumes, tenant.TenantName, tenant.OIDC.IssuerCA.CA)\n\t\t\t}\n\t\tcase tenant.MTLS != nil:\n\t\t\tmountCAConfigMap(gwContainer, &gwVolumes, tenant.TenantName, tenant.MTLS.CA.CA)\n\t\t\tmTLS = true\n\t\t}\n\t}\n\n\tif mTLS {\n\t\t// Remove old tls.client-auth-type\n\t\tfor i, arg := range gwArgs {\n\t\t\tif strings.HasPrefix(arg, \"--tls.client-auth-type=\") {\n\t\t\t\tgwArgs = append(gwArgs[:i], gwArgs[i+1:]...)\n\t\t\t\tbreak\n\t\t\t}\n\t\t}\n\t\tgwArgs = append(gwArgs, \"--tls.client-auth-type=RequestClientCert\")\n\t\tgwContainer.Args = gwArgs\n\t}\n\n\tp := corev1.PodSpec{\n\t\tContainers: []corev1.Container{\n\t\t\t*gwContainer,\n\t\t},\n\t\tVolumes: gwVolumes,\n\t}\n\tif err := mergo.Merge(&d.Spec.Template.Spec, p, mergo.WithOverride); err != nil {\n\t\treturn kverrors.Wrap(err, \"failed to merge server pki into container spec \")\n\t}\n\treturn nil\n}", "func config() *clientcmdapi.Config {\n\treturn &clientcmdapi.Config{\n\t\tContexts: map[string]*clientcmdapi.Context{\n\t\t\t\"foo\": {\n\t\t\t\tCluster: \"foocluster\",\n\t\t\t\tAuthInfo: \"fooauthinfo\",\n\t\t\t\tNamespace: \"foonamespace\",\n\t\t\t},\n\t\t\t\"bar\": {\n\t\t\t\tCluster: \"barcluster\",\n\t\t\t\tNamespace: \"barnamespace\",\n\t\t\t},\n\t\t},\n\t\tCurrentContext: \"foo\",\n\t\tClusters: map[string]*clientcmdapi.Cluster{\n\t\t\t\"foocluster\": {\n\t\t\t\tServer: \"http://foo.io\",\n\t\t\t},\n\t\t\t\"barcluster\": {\n\t\t\t\tServer: \"http://bar.io\",\n\t\t\t},\n\t\t},\n\t\tAuthInfos: map[string]*clientcmdapi.AuthInfo{\n\t\t\t\"fooauthinfo\": {\n\t\t\t\tUsername: \"foouser\",\n\t\t\t},\n\t\t},\n\t}\n}", "func InitializeConfig() {\n\t//\tviper.SetConfigFile(CfgFile)\n\n\tviper.RegisterAlias(\"indexes\", \"taxonomies\")\n\n\tLoadDefaultSettings()\n\n\tif ipvanishCmdV.PersistentFlags().Lookup(\"sort\").Changed {\n\t\tviper.Set(\"sort\", Sort)\n\t}\n\n\t//log.Debugf(\"Using config file: %s\", viper.ConfigFileUsed())\n\n}", "func configInit() {\n\tLoadConfig(configPath, \"config\")\n\tConfig().SetDefault(\"log-level\", \"debug\")\n\tConfig().SetDefault(\"addr\", \"localhost:8081\")\n}", "func (a *App) Init(configPath string, site string) error {\n\tif a.isInitialized {\n\t\treturn ErrAlreadyInitialized\n\t}\n\n\t// load config from file\n\tconfig, err := NewConfig(configPath)\n\tif err != nil {\n\t\treturn err\n\t}\n\ta.config = config\n\n\ta.site = site\n\ta.bitly = NewBitlyClient(a.config.Bitly.AccessToken)\n\ta.twilio = NewTwilioClient(a.config.Twilio.AccountSID, a.config.Twilio.AuthToken)\n\n\t// initialize the collector\n\tc := colly.NewCollector(\n\t\tcolly.AllowURLRevisit(),\n\t)\n\tc.OnRequest(func(_ *colly.Request) {\n\t\ta.reset()\n\t})\n\tc.OnHTML(\"ul.rows\", func(e *colly.HTMLElement) {\n\t\te.ForEachWithBreak(\"li.result-row,h4.nearby\", a.handleRow)\n\t})\n\ta.collector = c\n\n\treturn nil\n}", "func (cfg *Config) init() {\n\tcfg.Version = viper.GetString(\"version\")\n\tcfg.setLogLevel(viper.GetString(\"log_level\"))\n\tcfg.AppName = viper.GetString(\"app_name\")\n\tcfg.AppShortName = viper.GetString(\"app_short_name\")\n\n\tcfg.API.UsingHttps = viper.GetBool(\"api.usingHttps\")\n\tcfg.API.Port = 
viper.GetInt(\"api.port\")\n\tcfg.API.AllowedMethods = viper.GetStringSlice(\"api.allowed_methods\")\n\tcfg.API.AllowedHeaders = viper.GetStringSlice(\"api.allowed_headers\")\n\tcfg.API.AllowedOrigins = viper.GetStringSlice(\"api.allowed_origins\")\n\n\tcfg.Database.Host = viper.GetString(\"database.host\")\n\tcfg.Database.Port = viper.GetInt(\"database.port\")\n\tcfg.Database.Db = viper.GetString(\"database.database\")\n\tcfg.Database.User = viper.GetString(\"database.user\")\n\tcfg.Database.Password = viper.GetString(\"database.password\")\n\tcfg.Database.SSLMode = viper.GetString(\"database.sslmode\")\n\n\tcfg.Keys.CSRFKey = viper.GetString(\"secrets.csrf\")\n\tcfg.Keys.JWTSecret = viper.GetString(\"secrets.jwtsecret\")\n\tcfg.Keys.ApiLogin = viper.GetString(\"secrets.api_login\")\n}", "func (c *ClusterManager) Init(zl instances.ZoneLister, pp backends.ProbeProvider) {\n\tc.instancePool.Init(zl)\n\tc.backendPool.Init(pp)\n\t// TODO: Initialize other members as needed.\n}", "func (list *APTAuditList) initClients() {\n\tif list.listClient == nil {\n\t\tmaxKeys := int64(util.Min(list.limit, ITEMS_PER_REQUEST))\n\t\tlist.listClient = network.NewS3ObjectList(\n\t\t\tos.Getenv(\"AWS_ACCESS_KEY_ID\"),\n\t\t\tos.Getenv(\"AWS_SECRET_ACCESS_KEY\"),\n\t\t\tlist.region, list.bucket, maxKeys)\n\t\tlist.headClients = make([]*network.S3Head, list.concurrency)\n\t\tfor i := 0; i < list.concurrency; i++ {\n\t\t\tlist.headClients[i] = network.NewS3Head(\n\t\t\t\tos.Getenv(\"AWS_ACCESS_KEY_ID\"),\n\t\t\t\tos.Getenv(\"AWS_SECRET_ACCESS_KEY\"),\n\t\t\t\tlist.region, list.bucket)\n\t\t}\n\t}\n}", "func initConfig() {\n\tif !debug {\n\t\tlog.AllowLevel(\"debug\")\n\t}\n\n\tconfig = &registrar.Config{}\n\tconfigPath, err := os.UserConfigDir()\n\tif err != nil {\n\t\t// TODO handle this\n\t\tpanic(\"cannot retrieve the user configuration directory\")\n\t}\n\tdefaultCfgPath := path.Join(configPath, \"cosmos\", \"registry\")\n\tafero.NewOsFs().MkdirAll(defaultCfgPath, 0700)\n\n\tviper.SetConfigType(\"yaml\")\n\n\tif cfgFile != \"\" {\n\t\t// Use config file from the flag.\n\t\tviper.SetConfigFile(cfgFile)\n\t} else {\n\t\tviper.AddConfigPath(defaultCfgPath)\n\t\tviper.SetConfigName(\"config\")\n\t}\n\n\t// set the workspace folder\n\tcfgFilePath := viper.ConfigFileUsed()\n\tif cfgFilePath == \"\" {\n\t\tcfgFilePath = path.Join(defaultCfgPath, \"config.yaml\")\n\t}\n\t//now read in the config\n\tif err := viper.ReadInConfig(); err == nil {\n\t\tviper.Unmarshal(config)\n\t\tlogger.Debug(\"Using config file at \", \"config\", viper.ConfigFileUsed())\n\t} else {\n\t\tswitch err.(type) {\n\t\tcase viper.ConfigFileNotFoundError:\n\t\t\tif noInteraction {\n\t\t\t\tprintln(\"config file not found\")\n\t\t\t\tos.Exit(1)\n\t\t\t}\n\t\t\tif err = interactiveSetup(); err != nil {\n\t\t\t\tprintln(\"unexpected error \", err.Error())\n\t\t\t\tos.Exit(1)\n\t\t\t}\n\t\t\tviper.Unmarshal(config)\n\t\t\tprintln(\"\\nThe configuration is:\")\n\t\t\tprompts.PrettyMap(viper.AllSettings())\n\t\t\tprintln()\n\t\t\tif ok := prompts.Confirm(false, \"save the configuration?\"); !ok {\n\t\t\t\tprintln(\"aborting, run the command again to change the configuration\")\n\t\t\t\tos.Exit(0)\n\t\t\t}\n\n\t\t\tif err = viper.WriteConfigAs(cfgFilePath); err != nil {\n\t\t\t\tprintln(\"aborting, error writing the configuration file:\", err.Error())\n\t\t\t\tos.Exit(1)\n\t\t\t}\n\t\t\tprintln(\"config file saved in \", cfgFilePath)\n\n\t\tdefault:\n\t\t\tprintln(\"the configuration file appers corrupted: \", 
err.Error())\n\t\t\tos.Exit(1)\n\t\t}\n\t}\n\t// set the config workspace folder\n\tconfig.Workspace = path.Dir(cfgFilePath)\n}", "func initializeConfig() (*viper.Viper, error) {\n\tv = viper.New()\n\n\tv.SetEnvPrefix(\"DCC\")\n\tv.AutomaticEnv()\n\n\tif cfgFile != \"\" {\n\t\tv.SetConfigFile(cfgFile)\n\t} else {\n\t\tif cfgPath == \"\" {\n\t\t\tv.AddConfigPath(\".\")\n\t\t} else {\n\t\t\tv.AddConfigPath(cfgPath)\n\t\t}\n\t}\n\n\terr := v.ReadInConfig()\n\tif err != nil {\n\t\tif _, ok := err.(viper.ConfigParseError); !ok {\n\t\t\treturn v, fmt.Errorf(\"unable to parse Config file : %v\", err)\n\t\t}\n\t}\n\tm := v.GetStringMap(\"agent\")\n\n\tif level := os.Getenv(\"LOG_LEVEL\"); level != \"\" {\n\t\tm[\"loglevel\"] = level\n\t}\n\n\tif EnvUUID := os.Getenv(\"UUID\"); EnvUUID != \"\" {\n\t\tm[\"uuid\"] = EnvUUID\n\t}\n\tif env := os.Getenv(\"ENV\"); env != \"\" {\n\t\tm[\"env\"] = env\n\t}\n\n\tif pwd := os.Getenv(\"PASSWORD\"); pwd != \"\" {\n\t\tm[\"password\"] = pwd\n\t}\n\tif port := v.GetInt(\"agent.healthport\"); port != 0 {\n\t\tm[\"healthport\"] = port\n\t} else {\n\t\tm[\"healthport\"] = 8080\n\t}\n\n\thostname, err := os.Hostname()\n\tif err != nil {\n\t\treturn v, fmt.Errorf(\"unable to get hostname : %v\", err)\n\t}\n\tm[\"hostname\"] = hostname\n\tm[\"version\"] = version + \".\" + githash\n\tm[\"date\"] = date\n\n\tu1, ok := m[\"uuid\"]\n\tif ok {\n\t\tif _, err = uuid.Parse(u1.(string)); err != nil {\n\t\t\treturn v, fmt.Errorf(\"unable to Parse uuid : %v\", err)\n\t\t}\n\t} else {\n\t\tm[\"uuid\"] = uuid.New()\n\t}\n\n\tv.Set(\"agent\", m)\n\n\tc := v.GetStringMap(\"controller\")\n\tif ctrlWorker := os.Getenv(\"CTRL_WORKER\"); ctrlWorker != \"\" {\n\t\tc[\"worker\"], err = strconv.Atoi(ctrlWorker)\n\t\tif err != nil {\n\t\t\tc[\"worker\"] = 1\n\t\t}\n\t}\n\n\tv.Set(\"controller\", c)\n\n\treturn v, nil\n}", "func initConfig() {\n\tif verbose {\n\t\tutils.EnableVerboseMode()\n\t\tt := time.Now()\n\t\tutils.Logf(\"Executed ImportExportCLI (%s) on %v\\n\", utils.MICmd, t.Format(time.RFC1123))\n\t}\n\n\tutils.Logln(utils.LogPrefixInfo+\"Insecure:\", insecure)\n\tif insecure {\n\t\tutils.Insecure = true\n\t}\n}", "func (s *Server) createDefaultCAConfigs(cacount int) error {\n\tlog.Debugf(\"Creating %d default CA configuration files\", cacount)\n\n\tcashome, err := util.MakeFileAbs(\"ca\", s.HomeDir)\n\tif err != nil {\n\t\treturn err\n\t}\n\n\tos.Mkdir(cashome, 0755)\n\n\tfor i := 1; i <= cacount; i++ {\n\t\tcahome := fmt.Sprintf(cashome+\"/ca%d\", i)\n\t\tcfgFileName := filepath.Join(cahome, \"fabric-ca-config.yaml\")\n\n\t\tcaName := fmt.Sprintf(\"ca%d\", i)\n\t\tcfg := strings.Replace(defaultCACfgTemplate, \"<<<CANAME>>>\", caName, 1)\n\n\t\tcn := fmt.Sprintf(\"fabric-ca-server-ca%d\", i)\n\t\tcfg = strings.Replace(cfg, \"<<<COMMONNAME>>>\", cn, 1)\n\n\t\tdatasource := dbutil.GetCADataSource(s.CA.Config.DB.Type, s.CA.Config.DB.Datasource, i)\n\t\tcfg = strings.Replace(cfg, \"<<<DATASOURCE>>>\", datasource, 1)\n\n\t\ts.Config.CAfiles = append(s.Config.CAfiles, cfgFileName)\n\n\t\t// Now write the file\n\t\terr := os.MkdirAll(filepath.Dir(cfgFileName), 0755)\n\t\tif err != nil {\n\t\t\treturn err\n\t\t}\n\n\t\terr = ioutil.WriteFile(cfgFileName, []byte(cfg), 0644)\n\t\tif err != nil {\n\t\t\treturn err\n\t\t}\n\n\t}\n\treturn nil\n}", "func initConfig() {\n\tif cfgFile != \"\" {\n\t\t// Use config file from the flag.\n\t\tviper.SetConfigFile(cfgFile)\n\t} else {\n\t\t// Find home directory.\n\t\thome, err := homedir.Dir()\n\t\tif err != nil 
{\n\t\t\tfmt.Println(err)\n\t\t\tos.Exit(1)\n\t\t}\n\n\t\t// Search config in home directory with name \".go-airshare\" (without extension).\n\t\tviper.AddConfigPath(home)\n\t\tviper.SetConfigName(\".go-airshare\")\n\t}\n\n\tviper.AutomaticEnv() // read in environment variables that match\n\n\t// If a config file is found, read it in.\n\tif err := viper.ReadInConfig(); err == nil {\n\t\tfmt.Println(\"Using config file:\", viper.ConfigFileUsed())\n\t}\n}", "func init() {\n\tvar err error\n\n\tnodeUrl, found := os.LookupEnv(EnvNameSerNetworkFullNodes)\n\tif found {\n\t\tblockChainMonitorUrl = strings.Split(nodeUrl, \",\")\n\t}\n\n\tif v, found := os.LookupEnv(EnvNameWorkerNumExecuteTask); found {\n\t\tworkerNumExecuteTask, err = strconv.Atoi(v)\n\t\tif err != nil {\n\t\t\tlogger.Fatal(\"Can't convert str to int\", logger.String(EnvNameWorkerNumExecuteTask, v))\n\t\t}\n\t}\n\n\tif v, found := os.LookupEnv(EnvNameWorkerMaxSleepTime); found {\n\t\tworkerMaxSleepTime, err = strconv.Atoi(v)\n\t\tif err != nil {\n\t\t\tlogger.Fatal(\"Can't convert str to int\", logger.String(EnvNameWorkerMaxSleepTime, v))\n\t\t}\n\t}\n\tif v, ok := os.LookupEnv(EnvNameBech32ChainPrefix); ok {\n\t\tbech32ChainPrefix = v\n\t}\n\tif v, found := os.LookupEnv(EnvNameBlockNumPerWorkerHandle); found {\n\t\tblockNumPerWorkerHandle, err = strconv.Atoi(v)\n\t\tif err != nil {\n\t\t\tlogger.Fatal(\"Can't convert str to int\", logger.String(EnvNameBlockNumPerWorkerHandle, v))\n\t\t}\n\t}\n\tif v, ok := os.LookupEnv(EnvNameBehindBlockNum); ok {\n\t\tif n, err := strconv.Atoi(v); err != nil {\n\t\t\tlogger.Fatal(\"convert str to int fail\", logger.String(EnvNameBehindBlockNum, v))\n\t\t} else {\n\t\t\tbehindBlockNum = n\n\t\t}\n\t}\n\tif v, ok := os.LookupEnv(EnvNamePromethousPort); ok {\n\t\tif n, err := strconv.Atoi(v); err != nil {\n\t\t\tlogger.Fatal(\"convert str to int fail\", logger.String(EnvNamePromethousPort, v))\n\t\t} else {\n\t\t\tpromethousPort = n\n\t\t}\n\t}\n\tSvrConf = &ServerConf{\n\t\tNodeUrls: blockChainMonitorUrl,\n\t\tWorkerNumCreateTask: workerNumCreateTask,\n\t\tWorkerNumExecuteTask: workerNumExecuteTask,\n\t\tWorkerMaxSleepTime: workerMaxSleepTime,\n\t\tBlockNumPerWorkerHandle: blockNumPerWorkerHandle,\n\n\t\tMaxConnectionNum: maxConnectionNum,\n\t\tInitConnectionNum: initConnectionNum,\n\t\tBehindBlockNum: behindBlockNum,\n\t\tBech32ChainPrefix: bech32ChainPrefix,\n\t\tPromethousPort: promethousPort,\n\t}\n\tlogger.Debug(\"print server config\", logger.String(\"serverConf\", utils.MarshalJsonIgnoreErr(SvrConf)))\n}", "func Initialize(centralCfg config.CentralConfig) error {\n\tagent.cfg = centralCfg.(*config.CentralConfiguration)\n\tagent.apiMap = cache.New()\n\n\t// validate the central config\n\terr := config.ValidateConfig(centralCfg)\n\tif err != nil {\n\t\treturn err\n\t}\n\n\t// Init apic client\n\tagent.apicClient = apic.New(centralCfg)\n\tinitializeTokenRequester(centralCfg)\n\n\tif getAgentResourceType() != \"\" {\n\t\t// Get Agent Resources\n\t\terr = RefreshResources()\n\t\tif err != nil {\n\t\t\treturn err\n\t\t}\n\n\t\t// merge agent resource config with central config\n\t\tmergeResourceWithConfig()\n\t\t// Do we still want to validate central config after merge???\n\n\t\tupdateAgentStatus(AgentRunning, \"\")\n\t} else if agent.cfg.AgentName != \"\" {\n\t\treturn errors.Wrap(apic.ErrCentralConfig, \"Agent name cannot be set. 
Config is used only for agents with API server resource definition\")\n\t}\n\n\tsetupSignalProcessor()\n\t// only do the periodic healthcheck stuff if NOT in unit tests, or the tests will fail\n\tif flag.Lookup(\"test.v\") == nil {\n\t\t// only do continuous healthchecking in binary agents\n\t\tif !isRunningInDockerContainer() {\n\t\t\tgo runPeriodicHealthChecks()\n\t\t}\n\t}\n\n\tstartAPIServiceCache()\n\n\treturn nil\n}", "func (b *Bootstrapper) Configure(cs ...Configurator) {\n\tfor _, c := range cs {\n\t\tc(b)\n\t}\n}", "func (b *Bootstrapper) Configure(cs ...Configurator) {\n\tfor _, c := range cs {\n\t\tc(b)\n\t}\n}", "func initConfig() {\n\tviper.SetDefault(\"metadata\", \"datamon-meta-data\")\n\tviper.SetDefault(\"blob\", \"datamon-blob-data\")\n\tviper.SetDefault(\"label\", \"datamon-label-data\")\n\tif os.Getenv(\"DATAMON_CONFIG\") != \"\" {\n\t\t// Use config file from the flag.\n\t\tviper.SetConfigFile(os.Getenv(\"DATAMON_CONFIG\"))\n\t} else {\n\t\tviper.AddConfigPath(\".\")\n\t\tviper.AddConfigPath(\"$HOME/.datamon\")\n\t\tviper.AddConfigPath(\"/etc/datamon\")\n\t\tviper.SetConfigName(\"datamon\")\n\t}\n\n\tviper.AutomaticEnv() // read in environment variables that match\n\t// If a config file is found, read it in.\n\tviper.ReadInConfig() // nolint:errcheck\n\t// `viper.ConfigFileUsed()` returns path to config file if error is nil\n\tvar err error\n\tconfig, err = newConfig()\n\tif err != nil {\n\t\tlogFatalln(err)\n\t}\n\tconfig.setRepoParams(&params)\n\tif config.Credential != \"\" {\n\t\t// Always pick the config file. There can be a duplicate bucket name in a different project, avoid wrong environment\n\t\t// variable from dev testing from screwing things up..\n\t\t_ = os.Setenv(\"GOOGLE_APPLICATION_CREDENTIALS\", config.Credential)\n\t}\n}", "func (c *Config) InitializeApp() error {\n\tc.sh = core.NewScheduler(c.logger)\n\tc.buildSchedulerMiddlewares(c.sh)\n\n\tvar err error\n\tc.dockerHandler, err = NewDockerHandler(c, c.logger)\n\tif err != nil {\n\t\treturn err\n\t}\n\n\tfor name, j := range c.ExecJobs {\n\t\tdefaults.SetDefaults(j)\n\t\tj.Client = c.dockerHandler.GetInternalDockerClient()\n\t\tj.Name = name\n\t\tj.buildMiddlewares()\n\t\tc.sh.AddJob(j)\n\t}\n\n\tfor name, j := range c.RunJobs {\n\t\tdefaults.SetDefaults(j)\n\t\tj.Client = c.dockerHandler.GetInternalDockerClient()\n\t\tj.Name = name\n\t\tj.buildMiddlewares()\n\t\tc.sh.AddJob(j)\n\t}\n\n\tfor name, j := range c.LocalJobs {\n\t\tdefaults.SetDefaults(j)\n\t\tj.Name = name\n\t\tj.buildMiddlewares()\n\t\tc.sh.AddJob(j)\n\t}\n\n\tfor name, j := range c.ServiceJobs {\n\t\tdefaults.SetDefaults(j)\n\t\tj.Name = name\n\t\tj.Client = c.dockerHandler.GetInternalDockerClient()\n\t\tj.buildMiddlewares()\n\t\tc.sh.AddJob(j)\n\t}\n\n\treturn nil\n}", "func init() {\n\t// ensure we log all shell execs\n\tlog.SetLevel(log.DebugLevel)\n\t// set-up variables\n\tconfig := getKubeConfig(\"\", clientcmd.ConfigOverrides{})\n\tAppClientset = appclientset.NewForConfigOrDie(config)\n\tKubeClientset = kubernetes.NewForConfigOrDie(config)\n\tapiServerAddress = os.Getenv(argocdclient.EnvArgoCDServer)\n\tif apiServerAddress == \"\" {\n\t\tapiServerAddress = defaultApiServer\n\t}\n\n\ttlsTestResult, err := grpcutil.TestTLS(apiServerAddress)\n\tCheckError(err)\n\n\tArgoCDClientset, err = argocdclient.NewClient(&argocdclient.ClientOptions{Insecure: true, ServerAddr: apiServerAddress, PlainText: !tlsTestResult.TLS})\n\tCheckError(err)\n\n\tcloser, client, err := ArgoCDClientset.NewSessionClient()\n\tCheckError(err)\n\tdefer 
util.Close(closer)\n\n\tsessionResponse, err := client.Create(context.Background(), &sessionpkg.SessionCreateRequest{Username: \"admin\", Password: adminPassword})\n\tCheckError(err)\n\n\tArgoCDClientset, err = argocdclient.NewClient(&argocdclient.ClientOptions{\n\t\tInsecure: true,\n\t\tServerAddr: apiServerAddress,\n\t\tAuthToken: sessionResponse.Token,\n\t\tPlainText: !tlsTestResult.TLS,\n\t})\n\tCheckError(err)\n\n\tsettingsManager = settings.NewSettingsManager(context.Background(), KubeClientset, \"argocd-e2e\")\n\ttoken = sessionResponse.Token\n\tplainText = !tlsTestResult.TLS\n\n\tlog.WithFields(log.Fields{\"apiServerAddress\": apiServerAddress}).Info(\"initialized\")\n}", "func initApplicationConfiguration() {\n var emptyConfigParam string = \"\"\n\n config.InitApp(emptyConfigParam)\n config.InitDatabase(emptyConfigParam)\n config.InitRoutes(emptyConfigParam)\n}" ]
[ "0.6121663", "0.6120701", "0.61140627", "0.61080307", "0.60688233", "0.6064249", "0.6061599", "0.60382515", "0.6005175", "0.6001494", "0.5947007", "0.5939059", "0.5821225", "0.57854235", "0.5785131", "0.5784421", "0.57815313", "0.57142043", "0.5688978", "0.56886387", "0.56671745", "0.5634552", "0.5631826", "0.56283605", "0.56248814", "0.56046176", "0.55989766", "0.5590329", "0.5572384", "0.5557916", "0.554743", "0.55409336", "0.5532108", "0.5528957", "0.5516706", "0.549796", "0.5482841", "0.54783523", "0.5463302", "0.5459094", "0.5444085", "0.54403466", "0.54202896", "0.5406599", "0.5405646", "0.54043573", "0.5393779", "0.5392054", "0.5389386", "0.53889775", "0.5384157", "0.5379609", "0.5378773", "0.53768605", "0.53767306", "0.5371679", "0.5369321", "0.53654385", "0.5357247", "0.53475624", "0.5342304", "0.5341343", "0.53405434", "0.5338097", "0.5336052", "0.53344315", "0.53333825", "0.5330261", "0.5318122", "0.53088844", "0.53039765", "0.5303307", "0.5303067", "0.5301759", "0.53004", "0.5290165", "0.5289776", "0.5289776", "0.5289626", "0.5282659", "0.527881", "0.5278416", "0.5275708", "0.5273243", "0.5271493", "0.5271304", "0.5262948", "0.5262156", "0.5257911", "0.5255439", "0.5254094", "0.52515227", "0.5250571", "0.5250501", "0.52446836", "0.52446836", "0.5238124", "0.52363926", "0.5234704", "0.52327526" ]
0.7031829
0
loadCAConfig loads up a CA's configuration from the specified CA configuration file
func (s *Server) loadCA(caFile string, renew bool) error {
	log.Infof("Loading CA from %s", caFile)
	var err error

	if !util.FileExists(caFile) {
		return errors.Errorf("%s file does not exist", caFile)
	}

	// Creating new Viper instance, to prevent any server level environment variables or
	// flags from overridding the configuration options specified in the
	// CA config file
	cfg := &CAConfig{}
	caViper := viper.New()
	err = UnmarshalConfig(cfg, caViper, caFile, false)
	if err != nil {
		return err
	}

	// Need to error if no CA name provided in config file, we cannot revert to using
	// the name of default CA cause CA names must be unique
	caName := cfg.CA.Name
	if caName == "" {
		return errors.Errorf("No CA name provided in CA configuration file. CA name is required in %s", caFile)
	}

	// Replace missing values in CA configuration values with values from the
	// default CA configuration
	util.CopyMissingValues(s.CA.Config, cfg)

	// Integers and boolean values are handled outside the util.CopyMissingValues
	// because there is no way through reflect to detect if a value was explicitly
	// set to 0 or false, or it is using the default value for its type. Viper is
	// employed here to help detect.
	if !caViper.IsSet("registry.maxenrollments") {
		cfg.Registry.MaxEnrollments = s.CA.Config.Registry.MaxEnrollments
	}
	if !caViper.IsSet("db.tls.enabled") {
		cfg.DB.TLS.Enabled = s.CA.Config.DB.TLS.Enabled
	}

	log.Debugf("CA configuration after checking for missing values: %+v", cfg)

	ca, err := newCA(caFile, cfg, s, renew)
	if err != nil {
		return err
	}
	err = s.addCA(ca)
	if err != nil {
		err2 := ca.closeDB()
		if err2 != nil {
			log.Errorf("Close DB failed: %s", err2)
		}
	}
	return err
}
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "func loadCA(caFile string) *x509.CertPool {\n\tpool := x509.NewCertPool()\n\n\tif ca, err := ioutil.ReadFile(caFile); err != nil {\n\t\tlog.Fatal(\"Fatal Error at Certification ReadFile: \", err)\n\t} else {\n\t\tpool.AppendCertsFromPEM(ca)\n\t}\n\treturn pool\n}", "func loadCA(caFile string) *x509.CertPool {\n\tpool := x509.NewCertPool()\n\n\tif ca, e := ioutil.ReadFile(caFile); e != nil {\n\t\tlog.Fatal(\"ReadFile: \", e)\n\t} else {\n\t\tpool.AppendCertsFromPEM(ca)\n\t}\n\treturn pool\n}", "func LoadConfig(path string) (*Config, error) {\n\tif path == \"\" {\n\t\treturn nil, errors.New(\"invalid path\")\n\t}\n\n\tbody, err := ioutil.ReadFile(path)\n\tif err != nil {\n\t\treturn nil, errors.WithMessage(err, \"unable to read configuration file\")\n\t}\n\n\tvar cfg = new(Config)\n\tif strings.HasSuffix(path, \".json\") {\n\t\terr = json.Unmarshal(body, cfg)\n\t} else {\n\t\terr = yaml.Unmarshal(body, cfg)\n\t}\n\n\tif err != nil {\n\t\treturn nil, errors.WithMessage(err, \"failed to unmarshal configuration\")\n\t}\n\n\tif len(cfg.Profiles) == 0 {\n\t\treturn nil, errors.New(\"no \\\"profiles\\\" configuration present\")\n\t}\n\n\tif cfg.Profiles[\"default\"] == nil {\n\t\tlogger.Infof(\"reason=no_default_profile\")\n\t\tcfg.Profiles[\"default\"] = DefaultCertProfile()\n\t}\n\n\tif cfg.Authority != nil && cfg.Authority.DefaultAIA != nil {\n\t\tfor i := range cfg.Authority.Issuers {\n\t\t\tiss := &cfg.Authority.Issuers[i]\n\t\t\tif iss.AIA == nil {\n\t\t\t\tiss.AIA = cfg.Authority.DefaultAIA.Copy()\n\t\t\t} else {\n\t\t\t\tif iss.AIA.AiaURL == \"\" {\n\t\t\t\t\tiss.AIA.AiaURL = cfg.Authority.DefaultAIA.AiaURL\n\t\t\t\t}\n\t\t\t\tif iss.AIA.CrlURL == \"\" {\n\t\t\t\t\tiss.AIA.CrlURL = cfg.Authority.DefaultAIA.CrlURL\n\t\t\t\t}\n\t\t\t\tif iss.AIA.OcspURL == \"\" {\n\t\t\t\t\tiss.AIA.OcspURL = cfg.Authority.DefaultAIA.OcspURL\n\t\t\t\t}\n\t\t\t\tif iss.AIA.CRLExpiry == 0 {\n\t\t\t\t\tiss.AIA.CRLExpiry = cfg.Authority.DefaultAIA.GetCRLExpiry()\n\t\t\t\t}\n\t\t\t\tif iss.AIA.CRLRenewal == 0 {\n\t\t\t\t\tiss.AIA.CRLRenewal = cfg.Authority.DefaultAIA.GetCRLRenewal()\n\t\t\t\t}\n\t\t\t\tif iss.AIA.OCSPExpiry == 0 {\n\t\t\t\t\tiss.AIA.OCSPExpiry = cfg.Authority.DefaultAIA.GetOCSPExpiry()\n\t\t\t\t}\n\t\t\t}\n\n\t\t\tiss.Profiles = make(map[string]*CertProfile)\n\t\t\tfor name, profile := range cfg.Profiles {\n\t\t\t\tif profile.IssuerLabel == iss.Label ||\n\t\t\t\t\t(profile.IssuerLabel == \"\" && len(cfg.Authority.Issuers) == 1) {\n\t\t\t\t\tiss.Profiles[name] = profile\n\t\t\t\t}\n\t\t\t}\n\t\t}\n\t}\n\n\tif err = cfg.Validate(); err != nil {\n\t\treturn nil, errors.WithMessage(err, \"invalid configuration\")\n\t}\n\n\treturn cfg, nil\n}", "func (c *IdentityConfig) loadCATLSConfig(configEntity *identityConfigEntity) error {\n\t//CA Config\n\tfor ca, caConfig := range configEntity.CertificateAuthorities {\n\t\t//resolve paths\n\t\tcaConfig.TLSCACerts.Path = pathvar.Subst(caConfig.TLSCACerts.Path)\n\t\tcaConfig.TLSCACerts.Client.Key.Path = pathvar.Subst(caConfig.TLSCACerts.Client.Key.Path)\n\t\tcaConfig.TLSCACerts.Client.Cert.Path = pathvar.Subst(caConfig.TLSCACerts.Client.Cert.Path)\n\t\t//pre load key and cert bytes\n\t\terr := caConfig.TLSCACerts.Client.Key.LoadBytes()\n\t\tif err != nil {\n\t\t\treturn errors.WithMessage(err, \"failed to load ca key\")\n\t\t}\n\n\t\terr = caConfig.TLSCACerts.Client.Cert.LoadBytes()\n\t\tif err != nil {\n\t\t\treturn errors.WithMessage(err, \"failed to load ca cert\")\n\t\t}\n\t\tconfigEntity.CertificateAuthorities[ca] = caConfig\n\t}\n\n\treturn nil\n}", 
"func loadConfig() (*provider.Config, error) {\n\t// Gets the config file path.\n\tpath := os.Getenv(configFile)\n\tif path == \"\" {\n\t\tpath = defaultConfigFilePath\n\t}\n\n\tlogger.WithField(\"filename\", path).Info(\"Parsing config file\")\n\n\t// Reads config file.\n\tdata, err := ioutil.ReadFile(path)\n\tif err != nil {\n\t\treturn nil, errors.Annotate(err, \"cannot read config file\")\n\t}\n\n\tvar c *provider.Config\n\n\t// Unmarshals the read bytes.\n\tif err = yaml.Unmarshal(data, &c); err != nil {\n\t\treturn nil, errors.Annotate(err, \"cannot unmarshal config file\")\n\t}\n\n\tc.Label = strings.ToLower(c.Label)\n\n\treturn c, nil\n}", "func (c *CaddyController) loadConfigFromFile(cfg io.Reader) error {\n\tbuf := new(bytes.Buffer)\n\tbuf.ReadFrom(cfg)\n\n\terr := caddy.Load(buf.Bytes(), true)\n\tif err != nil {\n\t\treturn fmt.Errorf(\"could not load caddy config %v\", err.Error())\n\t}\n\n\treturn nil\n}", "func loadConfig(c *cli.Context, file string, kmsEncryptionContext map[string]*string) (*config.Config, error) {\n\tvar err error\n\tvar configPath string\n\tif c.String(\"config\") != \"\" {\n\t\tconfigPath = c.String(\"config\")\n\t} else {\n\t\t// Ignore config not found errors returned from FindConfigFile since the config file is not mandatory\n\t\tconfigPath, err = config.FindConfigFile(\".\")\n\t\tif err != nil {\n\t\t\t// If we can't find a config file, but we were not explicitly requested to, assume it does not exist\n\t\t\treturn nil, nil\n\t\t}\n\t}\n\tconf, err := config.LoadForFile(configPath, file, kmsEncryptionContext)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\treturn conf, nil\n}", "func configLoad(cfgfile string) (*ini.File, error) {\n\tcfg, err := ini.Load(cfgfile)\n\treturn cfg, err\n}", "func LoadClientCAFile(path string) (*x509.CertPool, error) {\n\troots := x509.NewCertPool()\n\tdata, err := ioutil.ReadFile(path)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\troots.AppendCertsFromPEM(data)\n\treturn roots, nil\n}", "func loadConfig() (*Config, error) {\n\tlogDebug(\"Loading cxmate.json configuration file\")\n\tfile, err := os.Open(confLocation)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\treturn loadFrom(file)\n}", "func LoadConfig(cf io.ReadCloser) (*Config, error) {\n\tdefer cf.Close()\n\n\tconfig, err := ParseConfig(cf)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\n\tglog.V(1).Info(\"Configuration loaded\")\n\treturn config, nil\n}", "func (ca *CA) Load(dir string) error {\n\tcertPath := filepath.Join(dir, ca.CertFileName)\n\tkeyPath := filepath.Join(dir, ca.KeyFileName)\n\treturn ca.Pair.LoadFiles(certPath, keyPath)\n}", "func LoadConfig(args []string) (configData *ConfigData) {\n\t//default\n\tdefaultConfig := GetDefaultConfig()\n\t_configData = defaultConfig\n\t//http.conf\n\tfileConfig, _ := ParseConfigFromFile(CONFIGFILE)\n\n\t//-c,not use http.conf\n\n\t//Print config\n}", "func (c *Container) LoadConfig() error {\n\tfilename := filepath.Join(containerPath, c.Digest, containerConfigFile)\n\tfile, err := os.Open(filename)\n\tif err != nil {\n\t\treturn err\n\t}\n\tdefer file.Close()\n\tconfigFile, err := v1.ParseConfigFile(file)\n\tif err != nil {\n\t\treturn err\n\t}\n\tc.Config = configFile.Config.DeepCopy()\n\treturn nil\n}", "func LoadConfig() {\n\n\terr := envconfig.Process(\"cosr\", &Config)\n\n\tif err != nil {\n\t\tlog.Fatal(err.Error())\n\t}\n}", "func (cf *Config) LoadConfig() {\n\t_, err := os.Stat(\"configs/config.yaml\")\n\tif os.IsNotExist(err) {\n\t\tlog.Panic(\"No config.yaml file found.\")\n\t}\n\tyamlFile, err := 
ioutil.ReadFile(\"configs/config.yaml\")\n\tif err != nil {\n\t\tlog.Println(err)\n\t}\n\terr = yaml.Unmarshal(yamlFile, &cf)\n\tif err != nil {\n\t\tlog.Println(err)\n\t}\n}", "func loadConfig(configName string) (map[string]string, error) {\n\tglog.V(4).Info(log(\"loading config file %s\", configName))\n\tfile, err := os.Open(configName)\n\tif err != nil {\n\t\tglog.Error(log(\"failed to open config file %s: %v\", configName, err))\n\t\treturn nil, err\n\t}\n\tdefer file.Close()\n\tdata := map[string]string{}\n\tif err := gob.NewDecoder(file).Decode(&data); err != nil {\n\t\tglog.Error(log(\"failed to parse config data %s: %v\", configName, err))\n\t\treturn nil, err\n\t}\n\tapplyConfigDefaults(data)\n\tif err := validateConfigs(data); err != nil {\n\t\tglog.Error(log(\"failed to load ConfigMap %s: %v\", err))\n\t\treturn nil, err\n\t}\n\n\treturn data, nil\n}", "func LoadCfgFromFile(name string) (*Cfg, error) {\n\tif name == \"\" {\n\t\tname = os.Getenv(\"AZURE_AUTH_LOCATION\")\n\t}\n\taf, err := loadAuthFile(name)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\tc := &Cfg{\n\t\tSrc: name,\n\t\tEnvironment: *EnvForActiveDirectory(af.ActiveDirectoryEndpointURL),\n\t\tTenantID: af.TenantID,\n\t\tSubscriptionID: af.SubscriptionID,\n\t}\n\taf.updateEnv(&c.Environment)\n\tif err = c.useClientSecret(af.ClientID, af.ClientSecret); err != nil {\n\t\tc = nil\n\t}\n\treturn c, err\n}", "func LoadConfig(filename string) (*Config, error) {\n\tfilename_ := C.CString(filename)\n\tdefer freeString(filename_)\n\tcfg := C.al_load_config_file(filename_)\n\tif cfg == nil {\n\t\treturn nil, fmt.Errorf(\"failed to load config file '%s'\", filename)\n\t}\n\treturn (*Config)(cfg), nil\n}", "func loadConfig(file string) error {\n\tf, err := os.Open(file) // Open file\n\n\t// Error checking, returns error\n\tif err != nil {\n\t\treturn err\n\t}\n\tdefer f.Close() // Defer file closing to end of function\n\n\tdecoder := json.NewDecoder(f) // Create json decoder\n\terr = decoder.Decode(&cfg) // Decode the json into the config struct\n\n\t// Error checking\n\tif err != nil {\n\t\treturn err\n\t}\n\n\treturn nil\n}", "func ConfigLoadFile(filename string) (*Config, error) {\n\t_, err := os.Stat(filename)\n\tif err == nil {\n\t\tfile, err := os.Open(filename)\n\t\tif err != nil {\n\t\t\treturn nil, err\n\t\t}\n\t\tdefer file.Close()\n\t\tc, err := ConfigLoadReader(file)\n\t\tif err != nil {\n\t\t\treturn nil, err\n\t\t}\n\t\tc.Filename = filename\n\t\treturn c, nil\n\t}\n\treturn nil, err\n}", "func loadConfig(file string) (*Config, error) {\n\n\t// Try to get config file, fallback to alternatives\n\tfile, err := getConfigFile(file)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\n\tparsedConfig, err := ini.LooseLoad(file)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\n\t// Map sections\n\tserver := ServerConfig{}\n\tparsedConfig.Section(\"server\").MapTo(&server)\n\n\t// Get all sources\n\tsources, err := getSources(parsedConfig)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\n\t// Get UI configurations\n\tui, err := getUiConfig(parsedConfig)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\n\tconfig := &Config{\n\t\tServer: server,\n\t\tUi: ui,\n\t\tSources: sources,\n\t\tFile: file,\n\t}\n\n\treturn config, nil\n}", "func (ch *CertHelper) loadCACert(caCertPath string, caPrivateKeyPath string) derrors.Error {\n\tca, err := tls.LoadX509KeyPair(caCertPath, caPrivateKeyPath)\n\tif err != nil {\n\t\treturn derrors.AsError(err, \"cannot load CA certificate an Private Key\")\n\t}\n\tif len(ca.Certificate) == 0 {\n\t\treturn 
derrors.NewNotFoundError(\"CA certificate not found in path\")\n\t}\n\tcaCert, err := x509.ParseCertificate(ca.Certificate[0])\n\tif err != nil {\n\t\treturn derrors.AsError(err, \"cannot parse CA certificate\")\n\t}\n\tch.CACert = caCert\n\tch.PrivateKey = ca.PrivateKey\n\tlog.Info().Str(\"dnsNames\", strings.Join(ch.CACert.DNSNames, \", \")).Msg(\"CA cert has been loaded\")\n\treturn nil\n}", "func LoadConfig(configFile *string, appConfig *AppConfig) {\n\tlog.Info(\"Initialize configuration\")\n\n\tstream, err := ioutil.ReadFile(*configFile)\n\tif err != nil {\n\t\tlog.WithFields(logrus.Fields{\n\t\t\t\"error\": err,\n\t\t}).Fatal(\"Failed to open configuration file\")\n\t}\n\n\tif err := yaml.Unmarshal(stream, &appConfig); err != nil {\n\t\tlog.WithFields(logrus.Fields{\n\t\t\t\"error\": err,\n\t\t}).Fatal(\"Failed to unmarshal stream\")\n\t}\n\tlog.Info(\"Successfully initialize configuration\")\n}", "func loadConfig() {\n\tpath, err := tcpauseCmd.PersistentFlags().GetString(\"config\")\n\tif err != nil {\n\t\tlogger.WithError(err).Fatal(\"Could not get config path\")\n\t}\n\n\t// configure viper\n\tif path != \"\" {\n\t\tv.SetConfigFile(path)\n\t} else {\n\t\tv.SetConfigName(\"config\")\n\t\tv.AddConfigPath(\"/etc/puppetlabs/puppet-proxy\")\n\t\tv.AddConfigPath(\".\")\n\t}\n\n\t// read config\n\terr = v.ReadInConfig()\n\tif err != nil {\n\t\tlogger.WithError(err).Fatal(\"Could not read config\")\n\t}\n\n\t// parse config\n\terr = v.Unmarshal(&cfg)\n\tif err != nil {\n\t\tlogger.WithError(err).Fatal(\"Could not parse config\")\n\t}\n}", "func LoadConfig(path string) (Config, error) {\n\tcfgBody, err := ioutil.ReadFile(path)\n\tif err != nil {\n\t\treturn Config{}, err\n\t}\n\treturn ParseConfig(string(cfgBody))\n}", "func LoadConfig(file string) (*Config, error) {\n\tvar conf Config\n\tif _, err := toml.DecodeFile(file, &conf); err != nil {\n\t\treturn nil, fmt.Errorf(\"Failed to load config: %v\", err)\n\t}\n\n\t// load signing key\n\tsignPath := utils.ResolvePath(conf.SignPubkeyPath, file)\n\tsignPubKey, err := ioutil.ReadFile(signPath)\n\tif err != nil {\n\t\treturn nil, fmt.Errorf(\"Cannot read signing key: %v\", err)\n\t}\n\tif len(signPubKey) != sign.PublicKeySize {\n\t\treturn nil, fmt.Errorf(\"Signing public-key must be 32 bytes (got %d)\", len(signPubKey))\n\t}\n\n\tconf.SigningPubKey = signPubKey\n\n\treturn &conf, nil\n}", "func (c *RunCommand) loadConfig() (*config.Config, error) {\n\tconf := config.NewConfig()\n\n\tdata, err := c.readConfigFile()\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\n\terr = conf.LoadYAML(data)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\n\treturn conf, err\n}", "func loadConfig() ([]*ProviderConfig, error) {\n\t// Gets the config file path.\n\tpath := os.Getenv(configFile)\n\tif path == \"\" {\n\t\tpath = defaultConfigFilePath\n\t}\n\n\tlogger.WithField(\"filename\", path).Info(\"Parsing config file\")\n\n\t// Reads config file.\n\tdata, err := ioutil.ReadFile(path)\n\tif err != nil {\n\t\treturn nil, errors.Annotate(err, \"cannot read config file\")\n\t}\n\n\tvar c []*ProviderConfig\n\n\t// Unmarshals the read bytes.\n\tif err = yaml.Unmarshal(data, &c); err != nil {\n\t\treturn nil, errors.Annotate(err, \"cannot unmarshal config file\")\n\t}\n\n\t// Formats label\n\tfor _, provider := range c {\n\t\tprovider.Label = strings.ToLower(provider.Label)\n\t}\n\n\treturn c, nil\n}", "func loadConfig(configFile string, config *Config) error {\n\t// Load Conifg\n\tbuf, err := ioutil.ReadFile(configFile)\n\tif err != nil {\n\t\treturn 
fmt.Errorf(\"load config file error: %v\", err)\n\n\t}\n\n\tif err = json.Unmarshal(buf, config); err != nil {\n\t\treturn fmt.Errorf(\"parse config err: %v\", err)\n\t}\n\n\treturn nil\n}", "func loadConfig(configFile string, config *Config) error {\n\t// Load Conifg\n\tbuf, err := ioutil.ReadFile(configFile)\n\tif err != nil {\n\t\treturn fmt.Errorf(\"load config file error: %v\", err)\n\n\t}\n\n\tif err = json.Unmarshal(buf, config); err != nil {\n\t\treturn fmt.Errorf(\"parse config err: %v\", err)\n\t}\n\n\treturn nil\n}", "func loadConfig(configFile string) *utils.Config {\n\tconf := &utils.Config{}\n\tif _, err := toml.DecodeFile(configFile, &conf); err != nil {\n\t\tfmt.Println(err)\n\t}\n\treturn conf\n}", "func (c *IdentityConfig) CAConfig(caID string) (*msp.CAConfig, bool) {\n\tcfg, ok := c.caConfigs[strings.ToLower(caID)]\n\treturn cfg, ok\n}", "func load() (*cfgfile.ConfigFile, error) {\n\tif changeConfigDir {\n\t\tlogrus.Infoln(\"use config:\", filepath.Join(configDir, ConfigFileName))\n\t}\n\tconf := newcfgfile(filepath.Join(configDir, ConfigFileName))\n\terr := conf.Load()\n\tif err != nil && err != cfgfile.ErrConfigFileMiss {\n\t\treturn nil, err\n\t}\n\tprgpath, _ := utils.GetProcAbsDir()\n\ttmp, err := ioutil.ReadFile(filepath.Join(prgpath, DefaultUniqueAgentIdFile))\n\tif err != nil {\n\t\tconf.Agentid = ZERO_AGENT_ID\n\t} else {\n\t\tconf.Agentid = strings.TrimSpace(string(tmp))\n\t}\n\n\tConf = conf\n\n\treturn conf, nil\n}", "func (cfg *Config) loadTLSConfig() error {\n\tif cfg.TLS.Enable {\n\t\tif cfg.TLS.CA == \"\" {\n\t\t\treturn errors.New(\"The CA can not be empty\")\n\t\t}\n\t\tif cfg.TLS.RawCert == \"\" {\n\t\t\treturn errors.New(\"The cert can not be empty\")\n\t\t}\n\t\tif cfg.TLS.Key == \"\" {\n\t\t\treturn errors.New(\"The key can not be empty\")\n\t\t}\n\t\t// load pool\n\t\tpool := x509.NewCertPool()\n\t\tca, err := ioutil.ReadFile(cfg.TLS.CA)\n\t\tif err != nil {\n\t\t\treturn err\n\t\t}\n\t\tok := pool.AppendCertsFromPEM(ca)\n\t\tif !ok {\n\t\t\treturn fmt.Errorf(\"Failed to load ca file: %s\", cfg.TLS.CA)\n\t\t}\n\t\t// load cert\n\t\tcert, err := tls.LoadX509KeyPair(cfg.TLS.RawCert, cfg.TLS.Key)\n\t\tif err != nil {\n\t\t\treturn err\n\t\t}\n\t\tcfg.TLS.Pool = pool\n\t\tcfg.TLS.Cert = &cert\n\t}\n\treturn nil\n}", "func LoadConfig(tls *config.TLS, scram *config.SCRAM) (*sarama.Config, error) {\n\tvar err error\n\tc := DefaultConfig()\n\n\tif tls != nil {\n\t\tc, err = configureTLS(c, tls)\n\t\tif err != nil {\n\t\t\treturn nil, err\n\t\t}\n\t}\n\n\tif scram != nil {\n\t\treturn ConfigureSASL(c, scram)\n\t}\n\treturn c, nil\n}", "func (s *AuthConfigService) LoadConfig() (*AuthConfig, error) {\n\tconfig := &s.config\n\tfileName := s.FileName\n\tif fileName != \"\" {\n\t\texists, err := util.FileExists(fileName)\n\t\tif err != nil {\n\t\t\treturn config, fmt.Errorf(\"Could not check if file exists %s due to %s\", fileName, err)\n\t\t}\n\t\tif exists {\n\t\t\tdata, err := ioutil.ReadFile(fileName)\n\t\t\tif err != nil {\n\t\t\t\treturn config, fmt.Errorf(\"Failed to load file %s due to %s\", fileName, err)\n\t\t\t}\n\t\t\terr = yaml.Unmarshal(data, &config)\n\t\t\tif err != nil {\n\t\t\t\treturn config, fmt.Errorf(\"Failed to unmarshal YAML file %s due to %s\", fileName, err)\n\t\t\t}\n\t\t}\n\t}\n\treturn config, nil\n}", "func loadCaCertAndKey(dir, name string) (*x509.Certificate, *rsa.PrivateKey) {\n\tutils.Assert(certs.CertOrKeyExist(dir, name), \"couldn't load %s/%s\", dir, name)\n\treturn certs.TryLoadCertAndKeyFromDisk(dir, name)\n}", "func 
(openControl *OpenControl) LoadCertification(certificationFile string) error {\n\tvar certification Certification\n\tcertificationData, err := ioutil.ReadFile(certificationFile)\n\tif err != nil {\n\t\treturn ErrReadFile\n\t}\n\terr = yaml.Unmarshal(certificationData, &certification)\n\tif err != nil {\n\t\treturn ErrCertificationSchema\n\t}\n\topenControl.Certification = &certification\n\treturn nil\n}", "func (f *File) LoadConfig() (*Config, error) {\n\tcfg := C.al_load_config_file_f((*C.ALLEGRO_FILE)(f))\n\tif cfg == nil {\n\t\treturn nil, errors.New(\"failed to load config from file\")\n\t}\n\treturn (*Config)(cfg), nil\n}", "func (c *Config) LoadConfig(path string) {\n\n\tf, err := ioutil.ReadFile(path)\n\tif err != nil {\n\t\tlog.Panicln(err)\n\t}\n\n\tif err := json.Unmarshal(f, &c); err != nil {\n\t\tlog.Panicln(err)\n\t}\n\n}", "func LoadConfig(file string) (c *Config, err error) {\n\tc = new(Config)\n\n\t// Check for tag\n\tsFile := checkName(file)\n\tif sFile == \"\" {\n\t\treturn c, fmt.Errorf(\"Wrong format for %s\", file)\n\t}\n\n\t// Check if there is any config file\n\tif _, err := os.Stat(sFile); err != nil {\n\t\t// No config file is no error\n\t\treturn c, nil\n\t}\n\n\t// Read it\n\tbuf, err := ioutil.ReadFile(sFile)\n\tif err != nil {\n\t\treturn c, fmt.Errorf(\"Can not read %s\", sFile)\n\t}\n\n\terr = toml.Unmarshal(buf, &c)\n\tif err != nil {\n\t\treturn c, fmt.Errorf(\"Error parsing toml %s: %v\",\n\t\t\tsFile, err)\n\t}\n\n\treturn c, nil\n}", "func (o *Options) loadConfigFromFile(file string) (*componentconfig.CoordinatorConfiguration, error) {\n\tdata, err := ioutil.ReadFile(file)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\treturn o.decodeConfig(data)\n}", "func LoadConfig(fname string, ctx *interfaces.Context) (err error) {\n\tdoc, err := ioutil.ReadFile(fname)\n\tif err != nil {\n\t\tctx.Logger.Error().Msgf(\"Could not read config file: %s\", err.Error())\n\t\treturn\n\t}\n\n\ttoml.Unmarshal(doc, &ctx.Config)\n\tctx.Logger.Debug().Msg(spew.Sprint(ctx.Config))\n\n\treturn nil\n}", "func readCAFile(f string) ([]byte, error) {\n\tdata, err := ioutil.ReadFile(f)\n\tif err != nil {\n\t\treturn nil, fmt.Errorf(\"unable to load specified CA cert %s: %s\", f, err)\n\t}\n\treturn data, nil\n}", "func loadCaCertPem(in io.Reader) ([]byte, error) {\n\tcaCertPemBytes, err := ioutil.ReadAll(in)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\tblock, _ := pem.Decode(caCertPemBytes)\n\tif block == nil {\n\t\treturn nil, errors.New(\"could not decode pem\")\n\t}\n\tif block.Type != \"CERTIFICATE\" {\n\t\treturn nil, fmt.Errorf(\"ca bundle contains wrong pem type: %q\", block.Type)\n\t}\n\tif _, err := x509.ParseCertificate(block.Bytes); err != nil {\n\t\treturn nil, fmt.Errorf(\"ca bundle contains invalid x509 certificate: %v\", err)\n\t}\n\treturn caCertPemBytes, nil\n}", "func loadConfig(configName string, configType string) error {\n\tviper.SetConfigName(configName)\n\tviper.SetConfigType(configType)\n\n\terr := viper.ReadInConfig()\n\tif err != nil {\n\t\treturn fmt.Errorf(\"cannot read configuration file: %s\\n\", err)\n\t}\n\n\treturn nil\n}", "func loadConfig(configFile string) service.ServiceConfig {\n conf := service.ServiceConfig{}\n\n if err := loader.FromFile(&conf, configFile); err != nil {\n StdErr.Write([]byte(err.Error()))\n os.Exit(ERR_CONFIG)\n }\n\n return conf\n}", "func (c *IdentityConfig) CAConfig(org string) (*msp.CAConfig, error) {\n\tnetworkConfig, err := c.networkConfig()\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\n\treturn 
c.getCAConfig(networkConfig, org)\n}", "func LoadConfig(fileLoc string) (string, error) {\n\tcontent, err := ioutil.ReadFile(config)\n\treturn string(content), err\n}", "func LoadClientTLSConfig(sslCA, sslCert, sslCertKey string) (*tls.Config, error) {\n\tcertPEM, err := assetLoaderImpl.ReadFile(sslCert)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\tkeyPEM, err := assetLoaderImpl.ReadFile(sslCertKey)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\tcaPEM, err := assetLoaderImpl.ReadFile(sslCA)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\n\treturn newClientTLSConfig(certPEM, keyPEM, caPEM)\n}", "func LoadConfig(path string) (*Config, error) {\n\tconfigData := &Config{}\n\n\tif path == \"\" {\n\t\treturn nil, ErrInvalidPath\n\t}\n\n\t// Check the beacon rc file exists; if not, create it.\n\t_, err := os.Stat(\"./.beaconrc\")\n\tif err != nil {\n\t\tInitConfig()\n\t}\n\n\t// Read the config file from disk\n\tconfigFile, err := ioutil.ReadFile(path)\n\tif err != nil {\n\t\treturn nil, Wrap(\"Error reading the `.beaconrc` file.\", err)\n\t}\n\n\tif err := json.Unmarshal(configFile, &configData); err != nil {\n\t\treturn nil, Wrap(\"error parsing JSON\", err)\n\t}\n\n\tif configData.Author == \"\" {\n\t\treturn nil, ErrAuthorRequired\n\t}\n\n\tif configData.Email == \"\" {\n\t\treturn nil, ErrEmailRequired\n\t}\n\n\treturn configData, nil\n\n}", "func LoadConfig() (Config, error) {\n\tvar c Config\n\n\tctx := context.Background()\n\n\terr := envconfig.Process(ctx, &c)\n\treturn c, err\n}", "func LoadConfig(file string) (AppConfig, error) {\n\tvar conf AppConfig\n\tif _, err := toml.DecodeFile(file, &conf); err != nil {\n\t\treturn nil, fmt.Errorf(\"Failed to load config: %v\", err)\n\t}\n\treturn conf, nil\n}", "func LoadConfigFile(filename string) (cfg Config, err error) {\n\tf, err := os.Open(filename)\n\tif err != nil {\n\t\treturn cfg, err\n\t}\n\tdefer f.Close()\n\n\treturn cfg, toml.NewDecoder(f).Decode(&cfg)\n}", "func loadConfig(filename string) (Config, error) {\n\tvar cfg Config\n\tfile, err := os.ReadFile(filename)\n\tif err != nil {\n\t\treturn cfg, err\n\t}\n\terr = json.Unmarshal(file, &cfg)\n\tif err != nil {\n\t\treturn cfg, err\n\t}\n\treturn cfg, nil\n}", "func loadConfigFromFile(path string) (*configuration, error) {\n\t// open config file\n\tf, err := os.Open(path)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\tdefer f.Close()\n\t// read config file\n\tbuf, err := ioutil.ReadAll(f)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\treturn unmarshalConfig(buf)\n}", "func loadConfig(path string) (*Config, error) {\n\n\tif _, err := os.Stat(path); err != nil {\n\t\treturn nil, errors.New(\"config file does not exist\")\n\t}\n\n\tinput, err := os.Open(path)\n\tif err != nil {\n\t\treturn nil, fmt.Errorf(\"cannot open config file: %v\", err)\n\t}\n\tdefer input.Close()\n\n\ttemp, err := ioutil.ReadAll(input)\n\tif err != nil {\n\t\treturn nil, fmt.Errorf(\"cannot read config file: %v\", err)\n\t}\n\n\tvar c *Config\n\terr = yaml.Unmarshal([]byte(temp), &c)\n\tif err != nil {\n\t\treturn nil, fmt.Errorf(\"cannot unmarshall config file: %v\", err)\n\t}\n\n\treturn c, nil\n}", "func load_config(configPath string) (*Config, error) {\n\tconfig := &Config{}\n\n\t// if no path is supplied look in the current dir\n\tif configPath == \"\" {\n\t\tconfigPath, _ = filepath.Abs(filepath.Dir(os.Args[0]))\n\t\tconfigPath += \"/service.conf\"\n\t}\n\n\tcfile, _ := os.Open(configPath)\n\tif err := json.NewDecoder(cfile).Decode(&config); err != nil {\n\t\treturn config, err\n\t}\n\n\tif 
metadata.Description != \"\" {\n\t\tif data, err := ioutil.ReadFile(string(metadata.Description)); err == nil {\n\t\t\tmetadata.Description = strings.Replace(string(data), \"\\n\", \"<br>\", -1)\n\t\t}\n\t}\n\n\tif metadata.License != \"\" {\n\t\tif data, err := ioutil.ReadFile(string(metadata.License)); err == nil {\n\t\t\tmetadata.License = strings.Replace(string(data), \"\\n\", \"<br>\", -1)\n\t\t}\n\t}\n\n\treturn config, nil\n}", "func LoadCertificatesFrom(pemFile string) (*x509.CertPool, error) {\n\tcaCert, err := ioutil.ReadFile(pemFile)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\tcertificates := x509.NewCertPool()\n\tcertificates.AppendCertsFromPEM(caCert)\n\treturn certificates, nil\n}", "func LoadConfig(fileName string) error {\n\n\tfile, err := os.Open(fileName)\n\tif err != nil {\n\t\tappError := models.NewAppError(\"LoadConfig\", \"utils.config.load_conifg.open_file: \", err.Error(), 500)\n\t\treturn appError\n\t}\n\n\tdecoder := json.NewDecoder(file)\n\tconfig := models.Config{}\n\terr = decoder.Decode(&config)\n\tif err != nil {\n\t\tappError := models.NewAppError(\"LoadConfig\", \"utils.config.load_config.decode_json: \", err.Error(), 500)\n\t\treturn appError\n\t}\n\n\tconfig.SetDefaults()\n\n\tCfg = &config\n\n\treturn nil\n}", "func loadConfig() {\n\tr, err := os.Open(configPath)\n\tif err != nil {\n\t\tlog.Fatalf(\"Can't open config file!\")\n\t}\n\tdefer r.Close()\n\tdec := json.NewDecoder(r)\n\tdec.Decode(&config)\n\n\tlog.Printf(\"Config.DBAddress: %s\\n\", config.DBAddress)\n\tlog.Printf(\"Config.WorkersNo: %d\\n\", config.WorkersNo)\n\tlog.Printf(\"Config.ClearDb: %t\\n\", config.ClearDb)\n\tlog.Printf(\"Config.From: %s\\n\", config.From)\n\tlog.Printf(\"Config.To: %s\\n\", config.To)\n\tlog.Printf(\"Config.Currency: %s\\n\", config.Currency)\n\tlog.Printf(\"Config.SupportedCurrencies: %v\\n\", config.Currencies)\n}", "func loadConfig() Config {\n\tvar args struct {\n\t\tConfigfile string `arg:\"positional\" help:\"the name of the .toml config file to load\"`\n\t\tNoCheck bool `help:\"set this to disable checking that envvar substitutions are fully resolved\"`\n\t}\n\targ.MustParse(&args)\n\n\tvar cfg Config\n\tvar err error\n\tif args.Configfile != \"\" {\n\t\tcfg, err = Load(args.Configfile, args.NoCheck)\n\t\tif err != nil {\n\t\t\tfmt.Printf(\"%s\\n\", err)\n\t\t\tos.Exit(1)\n\t\t}\n\t\treturn cfg\n\t}\n\tfmt.Println(\"a config file name is required!\")\n\tos.Exit(1)\n\treturn cfg\n}", "func LoadCACert(path string) (*x509.CertPool, error) {\n\tcerts, err := loadCertFromPEM(path)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\n\tresult := x509.NewCertPool()\n\tfor _, cert := range certs {\n\t\tresult.AddCert(cert)\n\t}\n\n\treturn result, nil\n}", "func LoadCACert(path string) (*x509.CertPool, error) {\n\tcerts, err := loadCertFromPEM(path)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\n\tresult := x509.NewCertPool()\n\tfor _, cert := range certs {\n\t\tresult.AddCert(cert)\n\t}\n\n\treturn result, nil\n}", "func LoadConfig() (ClientConfig, error) {\n conf := ClientConfig{}\n configFilePath, err := getConfigFilePath()\n if err != nil {\n return conf, err\n }\n\n data, err := os.ReadFile(configFilePath)\n if os.IsNotExist(err) {\n err = initConfig()\n if err != nil {\n return conf, err\n }\n data, err = os.ReadFile(configFilePath)\n }\n if err != nil {\n return conf, err\n }\n\n err = yaml.Unmarshal(data, &conf)\n if err != nil {\n return conf, err\n }\n\n return conf, nil\n}", "func LoadConfig(configFile string) *gnatsd.Options {\n\topts, err := 
gnatsd.ProcessConfigFile(configFile)\n\tif err != nil {\n\t\tpanic(fmt.Sprintf(\"Error processing configuration file: %v\", err))\n\t}\n\treturn opts\n}", "func (config *Config) readConcertoConfig(c *cli.Context) error {\n\tlog.Debug(\"Reading Concerto Configuration\")\n\tif FileExists(config.ConfFile) {\n\t\t// file exists, read it's contents\n\n\t\txmlFile, err := os.Open(config.ConfFile)\n\t\tif err != nil {\n\t\t\treturn err\n\t\t}\n\t\tdefer xmlFile.Close()\n\t\tb, err := ioutil.ReadAll(xmlFile)\n\t\tif err != nil {\n\t\t\treturn fmt.Errorf(\"configuration File %s couldn't be read\", config.ConfFile)\n\t\t}\n\n\t\tif err = xml.Unmarshal(b, &config); err != nil {\n\t\t\treturn fmt.Errorf(\"configuration File %s does not have valid XML format\", config.ConfFile)\n\t\t}\n\n\t} else {\n\t\tlog.Debugf(\"Configuration File %s does not exist. Reading environment variables\", config.ConfFile)\n\t}\n\n\t// overwrite with environment/arguments vars\n\tif overwEP := c.String(\"concerto-endpoint\"); overwEP != \"\" {\n\t\tlog.Debug(\"Concerto APIEndpoint taken from env/args\")\n\t\tconfig.APIEndpoint = overwEP\n\t}\n\n\tif overwCert := c.String(\"client-cert\"); overwCert != \"\" {\n\t\tlog.Debug(\"Certificate path taken from env/args\")\n\t\tconfig.Certificate.Cert = overwCert\n\t}\n\n\tif overwKey := c.String(\"client-key\"); overwKey != \"\" {\n\t\tlog.Debug(\"Certificate key path taken from env/args\")\n\t\tconfig.Certificate.Key = overwKey\n\t}\n\n\tif overwCa := c.String(\"ca-cert\"); overwCa != \"\" {\n\t\tlog.Debug(\"CA certificate path taken from env/args\")\n\t\tconfig.Certificate.Ca = overwCa\n\t}\n\n\t// if endpoint empty set default\n\t// we can't set the default from flags, because it would overwrite config file\n\tif config.APIEndpoint == \"\" {\n\t\tconfig.APIEndpoint = defaultConcertoEndpoint\n\t}\n\n\treturn nil\n}", "func (c *CA) ParseConfig() (*admin.CertificateAuthorityConfig, error) {\n\tmsg := &admin.CertificateAuthorityConfig{}\n\tif err := proto.Unmarshal(c.Config, msg); err != nil {\n\t\treturn nil, err\n\t}\n\treturn msg, nil\n}", "func LoadConfig(configFile string) (*Config, error) {\n\tvar data []byte\n\tvar err error\n\tif data, err = ioutil.ReadFile(configFile); err != nil {\n\t\treturn nil, err\n\t}\n\n\t// Expand envirovment variables defined in the config\n\tdata = []byte(os.ExpandEnv(string(data)))\n\n\tvar c Config\n\tif err := yaml.Unmarshal([]byte(data), &c); err != nil {\n\t\treturn nil, err\n\t}\n\n\treturn &c, nil\n}", "func LoadConfig(file string) (*Config, error) {\n\tb, err := ioutil.ReadFile(file)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\n\tif ext := path.Ext(file); ext == \".yaml\" || ext == \".yml\" {\n\t\tb, err = yaml.YAMLToJSON(b)\n\t\tif err != nil {\n\t\t\treturn nil, err\n\t\t}\n\t}\n\n\tconfig := &Config{}\n\tif err = json.Unmarshal(b, config); err != nil {\n\t\treturn nil, err\n\t}\n\n\tfixMbeanNames(config)\n\treturn config, nil\n}", "func (cfg *Config) Load() error {\n\tcfg.loadCLIConfigEnv()\n\n\tif err := cfg.parseConfig(); err != nil {\n\t\treturn fmt.Errorf(\"failed to read config file : %s\", err.Error())\n\t}\n\n\tif err := cfg.parseBPF(); err != nil {\n\t\treturn fmt.Errorf(\"failed to read BPF file : %s\", err.Error())\n\t}\n\n\tcfg.loadCLIOverrides()\n\treturn nil\n}", "func LoadConfig(configPath string) ([]byte, error) {\n\tconfig, err := ioutil.ReadFile(configPath)\n\treturn config, err\n}", "func addCACertFromFile(cfg *tls.Config, file string) error {\n\tdata, err := ioutil.ReadFile(file)\n\tif err != nil {\n\t\treturn 
err\n\t}\n\n\tif cfg.RootCAs == nil {\n\t\tcfg.RootCAs = x509.NewCertPool()\n\t}\n\tif !cfg.RootCAs.AppendCertsFromPEM(data) {\n\t\treturn errors.New(\"the specified CA file does not contain any valid certificates\")\n\t}\n\n\treturn nil\n}", "func LoadConfig(configFile string) {\n\tif _, err := os.Stat(configFile); os.IsNotExist(err) {\n\t\tfmt.Println(\"Config file \\\"\" + configFile + \"\\\" does not exist.\")\n\t\treturn\n\t} else if err != nil {\n\t\tfmt.Println(err.Error())\n\t\treturn\n\t}\n\n\tif _, err := toml.DecodeFile(configFile, &Config); err != nil {\n\t\tfmt.Println(err.Error())\n\t\treturn\n\t}\n}", "func LoadSecurityConfig(ctx context.Context, rootCA RootCA, krw *KeyReadWriter, allowExpired bool) (*SecurityConfig, func() error, error) {\n\tctx = log.WithModule(ctx, \"tls\")\n\n\t// At this point we've successfully loaded the CA details from disk, or\n\t// successfully downloaded them remotely. The next step is to try to\n\t// load our certificates.\n\n\t// Read both the Cert and Key from disk\n\tcert, key, err := krw.Read()\n\tif err != nil {\n\t\treturn nil, nil, err\n\t}\n\n\t// Check to see if this certificate was signed by our CA, and isn't expired\n\t_, chains, err := ValidateCertChain(rootCA.Pool, cert, allowExpired)\n\tif err != nil {\n\t\treturn nil, nil, err\n\t}\n\t// ValidateChain, if successful, will always return at least 1 chain containing\n\t// at least 2 certificates: the leaf and the root.\n\tissuer := chains[0][1]\n\n\t// Now that we know this certificate is valid, create a TLS Certificate for our\n\t// credentials\n\tkeyPair, err := tls.X509KeyPair(cert, key)\n\tif err != nil {\n\t\treturn nil, nil, err\n\t}\n\n\tsecConfig, cleanup, err := NewSecurityConfig(&rootCA, krw, &keyPair, &IssuerInfo{\n\t\tSubject: issuer.RawSubject,\n\t\tPublicKey: issuer.RawSubjectPublicKeyInfo,\n\t})\n\tif err == nil {\n\t\tlog.G(ctx).WithFields(log.Fields{\n\t\t\t\"node.id\": secConfig.ClientTLSCreds.NodeID(),\n\t\t\t\"node.role\": secConfig.ClientTLSCreds.Role(),\n\t\t}).Debug(\"loaded node credentials\")\n\t}\n\treturn secConfig, cleanup, err\n}", "func LoadConfig(configFile string) ([]byte, error) {\n\tif configFile == \"\" {\n\t\tconfigFile = os.Getenv(\"CONFIG_PATH\")\n\t\tif configFile == \"\" {\n\t\t\tconfigFile = DefaultConfigFile\n\t\t}\n\t}\n\n\tlog.Printf(\"Using config file: %s\\n\", configFile)\n\n\tdata, err := ioutil.ReadFile(configFile)\n\tif err != nil {\n\t\tlog.Printf(\"unable to read application config file %s, error: %s\\n\", configFile, err.Error())\n\t\treturn nil, err\n\t}\n\treturn data, err\n}", "func LoadConfig() {\n\tvar configFile string\n\t//todo the default config Path\n\tflag.IntVar(&Config.HttpPort, \"httpport\", 13000, \"the http port\")\n\tflag.IntVar(&Config.MsgPort, \"port\", 13001, \"the msg port\")\n\tflag.IntVar(&Config.Retry, \"r\", 3, \"the retry times\")\n\tflag.StringVar(&Config.Aof, \"aof\", \"msgo.aof\", \"the aof file path\")\n\tflag.IntVar(&Config.SyncType, \"sync\", 0, \"the default sync type of aof\")\n\tflag.IntVar(&Config.Threshold, \"rewrite-threshold\", 10000, \"the default threshold of deleteOps that triggers rewrite operation\")\n\tflag.StringVar(&configFile, \"c\", \"\", \"the config file path\")\n\tflag.Parse()\n\tif configFile != \"\" {\n\t\tBytes, err := ioutil.ReadFile(configFile)\n\t\tif err != nil {\n\t\t\tpanic(fmt.Sprintf(\"reading config file error %s: %v\", configFile, err))\n\t\t}\n\t\tif _, err := toml.Decode(string(Bytes), Config); err != nil {\n\t\t\tpanic(fmt.Sprintf(\"parse config file error %s: %v\", 
configFile, err))\n\t\t}\n\t}\n\tArbitrateConfigs(Config)\n}", "func loadConfig() error {\n\t//if path != \"\" {\n\tif defaultConfig.IsSet(\"cfgFile\") && defaultConfig.GetString(\"cfgFile\") != \"\" {\n\t\tpath := defaultConfig.GetString(\"cfgFile\")\n\t\t_, err := os.Stat(path)\n\t\tif err != nil {\n\t\t\treturn err\n\t\t}\n\n\t\tfilename := filepath.Base(path)\n\t\tdefaultConfig.SetConfigName(filename[:len(filename)-len(filepath.Ext(filename))])\n\t\tdefaultConfig.AddConfigPath(filepath.Dir(path))\n\n\t\tif err := defaultConfig.ReadInConfig(); err != nil {\n\t\t\treturn fmt.Errorf(\"Failed to read config file (%s): %s\\n\", path, err.Error())\n\t\t}\n\t}\n\treturn nil\n}", "func (a *appState) loadConfigFile(ctx context.Context) error {\n\tcfgPath := a.configPath()\n\n\tif _, err := os.Stat(cfgPath); err != nil {\n\t\t// don't return error if file doesn't exist\n\t\treturn nil\n\t}\n\n\t// read the config file bytes\n\tfile, err := os.ReadFile(cfgPath)\n\tif err != nil {\n\t\treturn fmt.Errorf(\"error reading file: %w\", err)\n\t}\n\n\t// unmarshall them into the wrapper struct\n\tcfgWrapper := &ConfigInputWrapper{}\n\terr = yaml.Unmarshal(file, cfgWrapper)\n\tif err != nil {\n\t\treturn fmt.Errorf(\"error unmarshalling config: %w\", err)\n\t}\n\n\t// retrieve the runtime configuration from the disk configuration.\n\tnewCfg, err := cfgWrapper.RuntimeConfig(ctx, a)\n\tif err != nil {\n\t\treturn err\n\t}\n\n\t// validate runtime configuration\n\tif err := newCfg.validateConfig(); err != nil {\n\t\treturn fmt.Errorf(\"error parsing chain config: %w\", err)\n\t}\n\n\t// save runtime configuration in app state\n\ta.config = newCfg\n\n\treturn nil\n}", "func loadConfigFile() (err error) {\n\tviper.AddConfigPath(\"./config\")\n\tviper.SetConfigName(\"mru\")\n\tviper.SetConfigType(\"yml\")\n\n\tif err = viper.ReadInConfig(); err != nil {\n\t\tError.Println(\"Unable to read config file. \", err)\n\t\treturn\n\t}\n\n\tif err = viper.Unmarshal(&config); err != nil {\n\t\tError.Println(\"Unable to decode configuration into struct. 
\", err)\n\t\treturn\n\t}\n\n\treturn\n}", "func (manager *Manager) LoadConfigurationFile(configFile string) config.Config {\n\tconfiguration, err := config.LoadFromFile(configFile)\n\n\tif err != nil {\n\t\tlog.Fatal(err)\n\t}\n\n\treturn configuration\n}", "func LoadConfig(source string) (*Config, error) {\n\tjsonData, err := ioutil.ReadFile(source)\n\n\tif err != nil {\n\t\treturn nil, ErrorConfigNotFound\n\t}\n\n\tvar config Config\n\n\terr = json.Unmarshal(jsonData, &config)\n\n\tif err != nil {\n\t\treturn nil, ErrorFormatConfigNotValid\n\t}\n\n\treturn &config, nil\n}", "func loadConfig() error {\n\t// load template config\n\tif err := config.LoadConfig(\"assets/config.tpl.json\"); err != nil {\n\t\treturn err\n\t}\n\n\t// load custom config\n\tif err := config.LoadConfig(\"config.json\"); err != nil {\n\t\treturn err\n\t}\n\n\treturn nil\n}", "func LoadConfigFile(configFile string) (*Config, error) {\n\tfileContent, err := ioutil.ReadFile(configFile)\n\tif err != nil {\n\t\treturn nil, fmt.Errorf(\"Unable to load config file '%s', error: %s\", configFile, err.Error())\n\t}\n\n\tc := &Config{}\n\tif err = yaml.Unmarshal(fileContent, c); err != nil {\n\t\treturn nil, fmt.Errorf(\"Unable to parse config file '%s' as yaml, error: %s\", configFile, err.Error())\n\t}\n\n\tfor _, proj := range c.Projects {\n\t\tproj.YamlFile = os.ExpandEnv(proj.YamlFile)\n\t}\n\n\tif err = c.validate(); err != nil {\n\t\treturn nil, err\n\t}\n\n\treturn c, nil\n}", "func loadConfig(path string) (Config, error) {\n\tcfg := Config{}\n\tfilename, err := filepath.Abs(path)\n\tif err != nil {\n\t\treturn cfg, err\n\t}\n\tcontent, err := ioutil.ReadFile(filename)\n\tif err != nil {\n\t\treturn cfg, err\n\t}\n\n\terr = yaml.Unmarshal(content, &cfg)\n\tif err != nil {\n\t\treturn cfg, err\n\t}\n\n\treturn cfg, err\n}", "func (ts *Tester) LoadConfig() (eksconfig.Config, error) {\n\treturn *ts.cfg, nil\n}", "func (c *component) LoadConfig(configBody *hcl.Body, evalContext *hcl.EvalContext) hcl.Diagnostics {\n\tif configBody == nil {\n\t\treturn hcl.Diagnostics{}\n\t}\n\n\treturn gohcl.DecodeBody(*configBody, evalContext, c)\n}", "func (c *component) LoadConfig(configBody *hcl.Body, evalContext *hcl.EvalContext) hcl.Diagnostics {\n\tif configBody == nil {\n\t\treturn hcl.Diagnostics{}\n\t}\n\n\treturn gohcl.DecodeBody(*configBody, evalContext, c)\n}", "func Load(filename string) (c *Config, err error) {\n\tcontent, err := ioutil.ReadFile(filename)\n\tif err != nil {\n\t\treturn nil, fmt.Errorf(\"config load: %w\", err)\n\t}\n\n\treturn Parse(content, filename)\n}", "func (a *AuthContainer) LoadFiles() (ca *x509.Certificate, cert *x509.Certificate, key *rsa.PrivateKey, err error) {\n\tca, err = GetCertificate(viper.GetString(\"file_ca\"))\n\tif err != nil {\n\t\treturn\n\t}\n\tcert, err = GetCertificate(viper.GetString(\"file_cert\"))\n\tif err != nil {\n\t\treturn\n\t}\n\tkey, err = GetPrivateKey(viper.GetString(\"file_key\"), a.Passphrase)\n\n\ta.CA = ca\n\ta.Cert = cert\n\ta.Key = key\n\n\treturn\n}", "func LoadConfigFile(filename string) (*Config, []byte, error) {\n\tlog.V(1).Infof(\"Loading configuration from %q\", filename)\n\tcontent, err := ioutil.ReadFile(filename)\n\tif err != nil {\n\t\treturn nil, nil, err\n\t}\n\tcfg, err := LoadConfig(string(content))\n\tif err != nil {\n\t\treturn nil, nil, err\n\t}\n\n\t//resolveFilepaths(filepath.Dir(filename), cfg)\n\treturn cfg, content, nil\n}", "func loadConfig(path string) Config {\n\tvar config 
Config\n\tviper.SetConfigName(\"config\")\n\tviper.AddConfigPath(path)\n\terr := viper.ReadInConfig() // Find and read the config file\n\tif err != nil { // Handle errors reading the config file\n\t\tpanic(fmt.Errorf(\"Fatal error config file: %s\", err))\n\t}\n\terr = viper.Unmarshal(&config)\n\thandleErr(err)\n\treturn config\n}", "func (a *AuthConfig) Load(fileName string) error {\n\tf, err := os.Open(fileName)\n\tif err != nil {\n\t\tlog.Println(err)\n\t\treturn err\n\t}\n\tb, err := ioutil.ReadAll(f)\n\tif err != nil {\n\t\tlog.Println(err)\n\t\treturn err\n\t}\n\treturn json.Unmarshal(b, a)\n\n}", "func (c *Config) Load(filename string) error {\n\tif filename == \"\" {\n\t\treturn errors.New(\"no configuration file was specified\")\n\t}\n\tconfig.filename = filename\n\n\tdata, err := os.ReadFile(config.filename)\n\tif err != nil {\n\t\treturn err\n\t}\n\n\treturn config.populateMap(data, filepath.Ext(config.filename))\n}", "func (c *Configuration) LoadConfigFile(fs afero.Fs, config *Configuration) error {\n\tif _, err := fs.Stat(ConfigFileName); os.IsNotExist(err) {\n\t\tcolor.Yellow(\"The file %s does not exist. Using default config.\", ConfigFileName)\n\t\treturn nil\n\t}\n\n\tyamlFile, err := afero.ReadFile(fs, ConfigFileName)\n\tif err != nil {\n\t\treturn err\n\t}\n\n\terr = yaml.Unmarshal(yamlFile, &config)\n\tif err != nil {\n\t\treturn err\n\t}\n\n\treturn nil\n}", "func LoadConfig(fname string) (m *Config) {\n\tm = &Config{}\n\tif err := LoadYaml(m, fname); err != nil {\n\t\tif isV {\n\t\t\tfmt.Println(\" File can not unmarshal.:\", fname)\n\t\t}\n\t\treturn nil\n\t}\n\treturn\n}", "func LoadConfig(file string) error {\n\n\tdata, err := ioutil.ReadFile(file)\n\tif err != nil {\n\t\treturn fmt.Errorf(\"config: unable to read config file (%s) due to error: %v\", file, err)\n\t}\n\n\terr = json.Unmarshal(data, &cfg)\n\tif err != nil {\n\t\treturn fmt.Errorf(\"config: unable to parse config file (%s) as JSON due to error: %v\", file, err)\n\t}\n\n\tif !strings.HasSuffix(cfg.ContentFolder, \"/\") {\n\t\tcfg.ContentFolder = cfg.ContentFolder + \"/\"\n\t}\n\tif !strings.HasSuffix(cfg.TemplateFolder, \"/\") {\n\t\tcfg.TemplateFolder = cfg.TemplateFolder + \"/\"\n\t}\n\tif !strings.HasSuffix(cfg.PublicFolder, \"/\") {\n\t\tcfg.PublicFolder = cfg.PublicFolder + \"/\"\n\t}\n\n\treturn nil\n}", "func LoadFromFile(path string) (*Config, error) {\n\tif len(path) == 0 {\n\t\tbinDir, err := filepath.Abs(filepath.Dir(os.Args[0]))\n\t\tif err != nil {\n\t\t\treturn nil, err\n\t\t}\n\t\tpath = filepath.Join(binDir, \"cf-config-broker.json\")\n\t}\n\tjsonConf, err := ioutil.ReadFile(path)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\treturn parseJSON(jsonConf)\n}", "func (f *Frontend) Loadconfig() error {\n\tcfgFile := configFile()\n\tcfgExists, err := exists(cfgFile)\n\tif err != nil {\n\t\treturn err\n\t}\n\tif !cfgExists {\n\t\treturn fmt.Errorf(configNotFound)\n\t}\n\tbs, err := ioutil.ReadFile(cfgFile)\n\tif err != nil {\n\t\treturn err\n\t}\n\terr = json.Unmarshal(bs, f.Config)\n\tif err != nil {\n\t\treturn err\n\t}\n\tf.storage.Config = f.Config\n\tf.loader.Config = f.Config\n\treturn err\n}" ]
[ "0.7195883", "0.71864015", "0.68143547", "0.67846006", "0.65441793", "0.65345585", "0.6530978", "0.6429465", "0.6428769", "0.635782", "0.63450205", "0.62620205", "0.625691", "0.6199508", "0.61908334", "0.61713904", "0.61611664", "0.6110361", "0.6077055", "0.6054326", "0.6053029", "0.60479033", "0.60383046", "0.59897643", "0.59809804", "0.59488577", "0.59420705", "0.59304905", "0.59280145", "0.5924543", "0.5924543", "0.59191245", "0.590923", "0.5908682", "0.5906692", "0.5898973", "0.5898967", "0.58925927", "0.5891095", "0.58900565", "0.58895457", "0.5884739", "0.58713454", "0.5868065", "0.58637154", "0.58636564", "0.5856016", "0.58486986", "0.5842252", "0.5837334", "0.58314955", "0.5803576", "0.57937443", "0.57777846", "0.57763517", "0.5775618", "0.5773756", "0.5758848", "0.5755832", "0.57537955", "0.57515424", "0.574892", "0.5746687", "0.5742807", "0.5742807", "0.57410324", "0.57404447", "0.57391274", "0.57364404", "0.5734401", "0.57340974", "0.5731252", "0.572755", "0.5721911", "0.57098156", "0.5706962", "0.57036394", "0.5699402", "0.5696951", "0.5693763", "0.5689716", "0.5688819", "0.5688567", "0.5686286", "0.5685413", "0.5685152", "0.56829965", "0.56759155", "0.56759155", "0.5671679", "0.5660218", "0.56598073", "0.5657497", "0.565706", "0.56553006", "0.565146", "0.56492186", "0.5645581", "0.56340826", "0.5631544" ]
0.777478
0
addCA adds a CA to the server if there are no conflicts
func (s *Server) addCA(ca *CA) error { // check for conflicts caName := ca.Config.CA.Name for _, c := range s.caMap { if c.Config.CA.Name == caName { return errors.Errorf("CA name '%s' is used in '%s' and '%s'", caName, ca.ConfigFilePath, c.ConfigFilePath) } err := s.compareDN(c.Config.CA.Certfile, ca.Config.CA.Certfile) if err != nil { return err } } // no conflicts, so add it s.caMap[caName] = ca return nil }
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "func (s *Server) loadCA(caFile string, renew bool) error {\n\tlog.Infof(\"Loading CA from %s\", caFile)\n\tvar err error\n\n\tif !util.FileExists(caFile) {\n\t\treturn errors.Errorf(\"%s file does not exist\", caFile)\n\t}\n\n\t// Creating new Viper instance, to prevent any server level environment variables or\n\t// flags from overridding the configuration options specified in the\n\t// CA config file\n\tcfg := &CAConfig{}\n\tcaViper := viper.New()\n\terr = UnmarshalConfig(cfg, caViper, caFile, false)\n\tif err != nil {\n\t\treturn err\n\t}\n\n\t// Need to error if no CA name provided in config file, we cannot revert to using\n\t// the name of default CA cause CA names must be unique\n\tcaName := cfg.CA.Name\n\tif caName == \"\" {\n\t\treturn errors.Errorf(\"No CA name provided in CA configuration file. CA name is required in %s\", caFile)\n\t}\n\n\t// Replace missing values in CA configuration values with values from the\n\t// default CA configuration\n\tutil.CopyMissingValues(s.CA.Config, cfg)\n\n\t// Integers and boolean values are handled outside the util.CopyMissingValues\n\t// because there is no way through reflect to detect if a value was explicitly\n\t// set to 0 or false, or it is using the default value for its type. Viper is\n\t// employed here to help detect.\n\tif !caViper.IsSet(\"registry.maxenrollments\") {\n\t\tcfg.Registry.MaxEnrollments = s.CA.Config.Registry.MaxEnrollments\n\t}\n\n\tif !caViper.IsSet(\"db.tls.enabled\") {\n\t\tcfg.DB.TLS.Enabled = s.CA.Config.DB.TLS.Enabled\n\t}\n\n\tlog.Debugf(\"CA configuration after checking for missing values: %+v\", cfg)\n\n\tca, err := newCA(caFile, cfg, s, renew)\n\tif err != nil {\n\t\treturn err\n\t}\n\terr = s.addCA(ca)\n\tif err != nil {\n\t\terr2 := ca.closeDB()\n\t\tif err2 != nil {\n\t\t\tlog.Errorf(\"Close DB failed: %s\", err2)\n\t\t}\n\t}\n\treturn err\n}", "func (c controller) addRouterCAToClusterCA() bool {\n\tctx := utils.GenerateRequestContext()\n\tlog := utils.RequestIDLogger(ctx, c.log)\n\tlog.Infof(\"Start adding ingress ca to cluster\")\n\tcaConfigMap, err := c.kc.GetConfigMap(ingressConfigMapNamespace, ingressConfigMapName)\n\tif err != nil {\n\t\tlog.WithError(err).Warnf(\"fetching %s configmap from %s namespace\", ingressConfigMapName, ingressConfigMapNamespace)\n\t\treturn KeepWaiting\n\t}\n\tlog.Infof(\"Sending ingress certificate to inventory service. 
Certificate data %s\", caConfigMap.Data[\"ca-bundle.crt\"])\n\terr = c.ic.UploadIngressCa(ctx, caConfigMap.Data[\"ca-bundle.crt\"], c.ClusterID)\n\tif err != nil {\n\t\tlog.WithError(err).Errorf(\"Failed to upload ingress ca to assisted-service\")\n\t\treturn KeepWaiting\n\t}\n\tlog.Infof(\"Ingress ca successfully sent to inventory\")\n\treturn ExitWaiting\n\n}", "func addCACertFromFile(cfg *tls.Config, file string) error {\n\tdata, err := ioutil.ReadFile(file)\n\tif err != nil {\n\t\treturn err\n\t}\n\n\tif cfg.RootCAs == nil {\n\t\tcfg.RootCAs = x509.NewCertPool()\n\t}\n\tif !cfg.RootCAs.AppendCertsFromPEM(data) {\n\t\treturn errors.New(\"the specified CA file does not contain any valid certificates\")\n\t}\n\n\treturn nil\n}", "func WithAddlCA(ca *x509.Certificate) ServiceOption {\n\treturn func(s *service) error {\n\t\ts.addlCa = append(s.addlCa, ca)\n\t\treturn nil\n\t}\n}", "func AddCertAuth(name string, ca, crl []byte) (*ingress.SSLCert, error) {\n\tcaName := fmt.Sprintf(\"ca_%v.pem\", name)\n\tcaFileName := fmt.Sprintf(\"%v/%v\", ingress.DefaultCACertsDirectory, caName)\n\n\tpemCABlock, _ := pem.Decode(ca)\n\tif pemCABlock == nil {\n\t\treturn nil, fmt.Errorf(\"no valid PEM formatted block found\")\n\t}\n\t// If the first certificate does not start with 'BEGIN CERTIFICATE' it's invalid and must not be used.\n\tif pemCABlock.Type != \"CERTIFICATE\" {\n\t\treturn nil, fmt.Errorf(\"CA file %v contains invalid data, and must be created only with PEM formated certificates\", name)\n\t}\n\n\t_, err := x509.ParseCertificate(pemCABlock.Bytes)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\n\terr = ioutil.WriteFile(caFileName, ca, 0644)\n\tif err != nil {\n\t\treturn nil, fmt.Errorf(\"could not write CA file %v: %v\", caFileName, err)\n\t}\n\n\tvar crlFileName string\n\tvar PemSHA string\n\n\tif len(crl) > 0 {\n\t\tcrlName := fmt.Sprintf(\"ca_%v_crl.pem\", name)\n\t\tcrlFileName = fmt.Sprintf(\"%v/%v\", ingress.DefaultCrlDirectory, crlName)\n\n\t\tpemCrlBlock, _ := pem.Decode(crl)\n\t\tif pemCrlBlock == nil {\n\t\t\treturn nil, fmt.Errorf(\"CRL file %v provided contains invalid data, and must be created only with PEM formatted CRL\", name)\n\t\t}\n\n\t\t_, err := x509.ParseCRL(pemCrlBlock.Bytes)\n\t\tif err != nil {\n\t\t\treturn nil, err\n\t\t}\n\n\t\terr = ioutil.WriteFile(crlFileName, crl, 0644)\n\t\tif err != nil {\n\t\t\treturn nil, fmt.Errorf(\"could not write CRL file: %v: %v\", crlFileName, err)\n\t\t}\n\n\t\t// Concatenate the CA and CRL file SHAs together for the PemSHA\n\t\tfilenameSHAs := []string{\n\t\t\tfile.SHA1(caFileName),\n\t\t\tfile.SHA1(crlFileName),\n\t\t}\n\t\tPemSHA = strings.Join(filenameSHAs, \"\")\n\t} else {\n\t\t// Only use the CA filename for a PemSHA\n\t\tPemSHA = file.SHA1(caFileName)\n\t}\n\n\tklog.V(3).Infof(\"Created CA Certificate for Authentication: %v\", caFileName)\n\treturn &ingress.SSLCert{\n\t\tCAFileName: caFileName,\n\t\tCRLFileName: crlFileName,\n\t\tPemFileName: caFileName,\n\t\tPemSHA: PemSHA,\n\t}, nil\n}", "func (t *SimpleChaincode) addCert(APIstub shim.ChaincodeStubInterface, args []string) pb.Response {\n\n\tif len(args) != 6 {\n\t\treturn shim.Error(\"Incorrect number of arguments. 
Expecting 5\")\n\t}\n\tif len(args[0]) <= 0 {\n\t\treturn shim.Error(\"1 argument must be a non-empty string\")\n\t}\n\tif len(args[1]) <= 0 {\n\t\treturn shim.Error(\"2 argument must be a non-empty string\")\n\t}\n\tif len(args[2]) <= 0 {\n\t\treturn shim.Error(\"3 argument must be a non-empty string\")\n\t}\n\tif len(args[3]) <= 0 {\n\t\treturn shim.Error(\"4 argument must be a non-empty string\")\n\t}\n\tif len(args[4]) <= 0 {\n\t\treturn shim.Error(\"5 argument must be a non-empty string\")\n\t}\n\tif len(args[5]) <= 0 {\n\t\treturn shim.Error(\"6 argument must be a non-empty string\")\n\t}\n\n\tPRno := args[0]\n\tCName := args[1]\n\tSeatno := args[2]\n\texamination := args[3]\n\tYOP := args[4]\n\tsub := args[5]\n\n\t// ==== Check if certificate already exists ====\n\tcertAsBytes, err := APIstub.GetState(Seatno)\n\tif err != nil {\n\t\treturn shim.Error(\"Failed to get certificate: \" + err.Error())\n\t} else if certAsBytes != nil {\n\t\treturn shim.Error(\"This certificate already exists: \" + PRno)\n\t}\n\n\t// ==== Create certificate object and marshal to JSON ====\n\tcert := &cert{PRno, BU, CName, Seatno, examination, YOP, sub}\n\n\tcertJSONasBytes, err := json.Marshal(cert)\n\terr = APIstub.PutState(Seatno, certJSONasBytes)\n\tif err != nil {\n\t\treturn shim.Error(fmt.Sprintf(\"Failed to record Cert: %s\", Seatno))\n\t}\n\n\treturn shim.Success(nil)\n}", "func (m *MockManager) AddCA(arg0 string, arg1 util.CAType) error {\n\tret := m.ctrl.Call(m, \"AddCA\", arg0, arg1)\n\tret0, _ := ret[0].(error)\n\treturn ret0\n}", "func updateRootCA(cfg *tls.Config, b []byte) bool {\n\tcaCertPool := x509.NewCertPool()\n\tif !caCertPool.AppendCertsFromPEM(b) {\n\t\treturn false\n\t}\n\tcfg.RootCAs = caCertPool\n\treturn true\n}", "func appendCAInfoToCSR(reqConf *csr.CAConfig, csreq *x509.CertificateRequest) error {\n\tpathlen := reqConf.PathLength\n\tif pathlen == 0 && !reqConf.PathLenZero {\n\t\tpathlen = -1\n\t}\n\tval, err := asn1.Marshal(csr.BasicConstraints{true, pathlen})\n\n\tif err != nil {\n\t\treturn err\n\t}\n\n\tcsreq.ExtraExtensions = []pkix.Extension{\n\t\t{\n\t\t\tId: asn1.ObjectIdentifier{2, 5, 29, 19},\n\t\t\tValue: val,\n\t\t\tCritical: true,\n\t\t},\n\t}\n\treturn nil\n}", "func Add(mgr manager.Manager) error {\n\tvar err error\n\n\tcaBundle, err = ioutil.ReadFile(filepath.Join(common.CertDir, common.CertPem))\n\tif err != nil {\n\t\treturn err\n\t}\n\n\treturn add(mgr, newReconciler(mgr))\n}", "func loadCA(caFile string) *x509.CertPool {\n\tpool := x509.NewCertPool()\n\n\tif ca, e := ioutil.ReadFile(caFile); e != nil {\n\t\tlog.Fatal(\"ReadFile: \", e)\n\t} else {\n\t\tpool.AppendCertsFromPEM(ca)\n\t}\n\treturn pool\n}", "func (k *CACertificatesCollection) Add(caCert CACertificate) error {\n\ttxn := k.db.Txn(true)\n\tdefer txn.Abort()\n\terr := txn.Insert(caCertTableName, &caCert)\n\tif err != nil {\n\t\treturn errors.Wrap(err, \"insert failed\")\n\t}\n\ttxn.Commit()\n\treturn nil\n}", "func appendCAInfoToCSR(reqConf *CAConfig, csr *x509.CertificateRequest) error {\n\tpathlen := reqConf.PathLength\n\tif pathlen == 0 && !reqConf.PathLenZero {\n\t\tpathlen = -1\n\t}\n\tval, err := asn1.Marshal(BasicConstraints{true, pathlen})\n\n\tif err != nil {\n\t\treturn err\n\t}\n\n\tcsr.ExtraExtensions = append(csr.ExtraExtensions, pkix.Extension{\n\t\tId: asn1.ObjectIdentifier{2, 5, 29, 19},\n\t\tValue: val,\n\t\tCritical: true,\n\t})\n\n\treturn nil\n}", "func (s *Server) initializeCA() error {\n\t// Bail if connect isn't enabled.\n\tif !s.config.ConnectEnabled {\n\t\treturn 
nil\n\t}\n\n\tconf, err := s.initializeCAConfig()\n\tif err != nil {\n\t\treturn err\n\t}\n\n\t// Initialize the provider based on the current config.\n\tprovider, err := s.createCAProvider(conf)\n\tif err != nil {\n\t\treturn err\n\t}\n\n\treturn s.initializeRootCA(provider, conf)\n}", "func loadCA(caFile string) *x509.CertPool {\n\tpool := x509.NewCertPool()\n\n\tif ca, err := ioutil.ReadFile(caFile); err != nil {\n\t\tlog.Fatal(\"Fatal Error at Certification ReadFile: \", err)\n\t} else {\n\t\tpool.AppendCertsFromPEM(ca)\n\t}\n\treturn pool\n}", "func GenerateCA(c *cli.Context) error {\n\thost := c.String(\"host\")\n\n\trsaBits := c.Int(\"rsa-bits\")\n\tecdsaCurve := c.String(\"ecdsa-curve\")\n\n\tvalidFrom := c.String(\"start-date\")\n\n\tvalidFor := c.Duration(\"duration\")\n\tcert, key, err := Ca(host, rsaBits, ecdsaCurve, validFrom, validFor)\n\tif err != nil {\n\t\tlog.Fatalf(\"failed to create certificate: %s\", err)\n\t}\n\tvar certname = \"0.cert\"\n\tvar keyname = \"0.key\"\n\n\tcertout, err := os.Create(certname)\n\tif err != nil {\n\t\tlog.Fatalf(\"failed to open \"+certname+\" for writing: %s\", err)\n\t}\n\tpem.Encode(certout, &cert)\n\tcertout.Close()\n\tlog.Print(\"written \" + certname + \"\\n\")\n\n\tkeyout, err := os.OpenFile(keyname, os.O_WRONLY|os.O_CREATE|os.O_TRUNC, 0600)\n\tif err != nil {\n\t\tlog.Print(\"failed to open \"+keyname+\" for writing:\", err)\n\t\treturn nil\n\t}\n\tpem.Encode(keyout, &key)\n\tkeyout.Close()\n\tlog.Print(\"written \" + keyname + \"\\n\")\n\treturn nil\n}", "func (r *TrustedCABundleReconciler) addCloudConfigCABundle(ctx context.Context, proxyCABundle []byte, originalCABundle []byte) ([]byte, []byte, error) {\n\t// Due to installer implementation nuances, 'additionalTrustBundle' does not always end up in Proxy object.\n\t// For handling this situation we have to check synced cloud-config for additional CA bundle presence.\n\t// See https://github.com/openshift/installer/pull/5251#issuecomment-932622321 and\n\t// https://github.com/openshift/installer/pull/5248 for additional context.\n\t// However, some platforms might not have cloud-config at all (AWS), so missed cloud config is not an error.\n\tccmSyncedCloudConfig := &corev1.ConfigMap{}\n\tsyncedCloudConfigObjectKey := types.NamespacedName{Name: syncedCloudConfigMapName, Namespace: r.ManagedNamespace}\n\tif err := r.Get(ctx, syncedCloudConfigObjectKey, ccmSyncedCloudConfig); err != nil {\n\t\tklog.Infof(\"cloud-config was not found: %v\", err)\n\t\treturn nil, originalCABundle, nil\n\t}\n\n\t_, found := ccmSyncedCloudConfig.Data[cloudProviderConfigCABundleConfigMapKey]\n\tif found {\n\t\tklog.Infof(\"additional CA bundle key found in cloud-config\")\n\t\t_, cloudConfigCABundle, err := r.getCABundleConfigMapData(ccmSyncedCloudConfig, cloudProviderConfigCABundleConfigMapKey)\n\t\tif err != nil {\n\t\t\tklog.Warningf(\"failed to parse additional CA bundle from cloud-config, system and proxy CAs will be used: %v\", err)\n\t\t\treturn nil, originalCABundle, nil\n\t\t}\n\t\tif bytes.Equal(proxyCABundle, cloudConfigCABundle) {\n\t\t\tklog.Infof(\"proxy CA and cloud-config CA bundles are equal, no need to merge\")\n\t\t\treturn nil, originalCABundle, nil\n\t\t}\n\t\tklog.Infof(\"proxy CA and cloud-config CA bundles are not equal, merging\")\n\t\tmergedCABundle, err := r.mergeCABundles(cloudConfigCABundle, originalCABundle)\n\t\tif err != nil {\n\t\t\treturn cloudConfigCABundle, nil, fmt.Errorf(\"can not merge system and user trust bundle from cloud-config: %v\", err)\n\t\t}\n\t\treturn 
cloudConfigCABundle, mergedCABundle, nil\n\t}\n\treturn nil, originalCABundle, nil\n}", "func AddCustomCACert() {\n\tcaCertPath, err := filepath.Abs(\"../fixture/certs/argocd-test-ca.crt\")\n\terrors.CheckError(err)\n\t// We need to setup TLS certs according to whether we are running tests\n\t// against a local workload (repositories available as localhost) and\n\t// against remote workloads (repositories available as argocd-e2e-server)\n\tif fixture.IsLocal() {\n\t\targs := []string{\"cert\", \"add-tls\", \"localhost\", \"--from\", caCertPath}\n\t\terrors.FailOnErr(fixture.RunCli(args...))\n\t\targs = []string{\"cert\", \"add-tls\", \"127.0.0.1\", \"--from\", caCertPath}\n\t\terrors.FailOnErr(fixture.RunCli(args...))\n\t\tcertData, err := os.ReadFile(caCertPath)\n\t\terrors.CheckError(err)\n\t\terr = os.WriteFile(fixture.TmpDir+\"/app/config/tls/localhost\", certData, 0644)\n\t\terrors.CheckError(err)\n\t\terr = os.WriteFile(fixture.TmpDir+\"/app/config/tls/127.0.0.1\", certData, 0644)\n\t\terrors.CheckError(err)\n\t} else {\n\t\targs := []string{\"cert\", \"add-tls\", \"argocd-e2e-server\", \"--from\", caCertPath}\n\t\terrors.FailOnErr(fixture.RunCli(args...))\n\t\tfixture.RestartAPIServer()\n\t\tfixture.RestartRepoServer()\n\t}\n\n}", "func newCA() {\n\terr := os.MkdirAll(shrubCA, 0700)\n\n\tpriv, err := ecdsa.GenerateKey(elliptic.P256(), rand.Reader)\n\tfatalIfErr(err, \"failed to generate the CA key\")\n\tpub := priv.PublicKey\n\n\tserialNumberLimit := new(big.Int).Lsh(big.NewInt(1), 128)\n\tserialNumber, err := rand.Int(rand.Reader, serialNumberLimit)\n\tfatalIfErr(err, \"failed to generate serial number\")\n\n\tspkiASN1, err := x509.MarshalPKIXPublicKey(&pub)\n\tfatalIfErr(err, \"failed to encode public key\")\n\n\tvar spki struct {\n\t\tAlgorithm pkix.AlgorithmIdentifier\n\t\tSubjectPublicKey asn1.BitString\n\t}\n\t_, err = asn1.Unmarshal(spkiASN1, &spki)\n\tfatalIfErr(err, \"failed to decode public key\")\n\n\tskid := sha1.Sum(spki.SubjectPublicKey.Bytes)\n\n\ttpl := &x509.Certificate{\n\t\tSerialNumber: serialNumber,\n\t\tSubject: pkix.Name{\n\t\t\tOrganization: []string{\"shrubgateway local CA\"},\n\t\t\tOrganizationalUnit: []string{userAndHostname},\n\n\t\t\t// The CommonName is required by iOS to show the certificate in the\n\t\t\t// \"Certificate Trust Settings\" menu.\n\t\t\t// https://github.com/FiloSottile/mkcert/issues/47\n\t\t\tCommonName: \"shrubgateway \" + userAndHostname,\n\t\t},\n\t\tSubjectKeyId: skid[:],\n\n\t\tNotAfter: time.Now().AddDate(10, 0, 0),\n\t\tNotBefore: time.Now().AddDate(0, 0, -1),\n\n\t\tKeyUsage: x509.KeyUsageCertSign,\n\n\t\tBasicConstraintsValid: true,\n\t\tIsCA: true,\n\t\tMaxPathLenZero: true,\n\n\t\t// for security reasons the local CA generated is restricted\n\t\t// to subdomains of \".localhost\"\n\t\tPermittedDNSDomains: []string{\".localhost\"},\n\t}\n\n\tcert, err := x509.CreateCertificate(rand.Reader, tpl, tpl, &pub, priv)\n\tfatalIfErr(err, \"failed to generate CA certificate\")\n\n\tprivDER, err := x509.MarshalPKCS8PrivateKey(priv)\n\tfatalIfErr(err, \"failed to encode CA key\")\n\terr = ioutil.WriteFile(filepath.Join(shrubCA, rootKeyName), pem.EncodeToMemory(\n\t\t&pem.Block{Type: \"PRIVATE KEY\", Bytes: privDER}), 0400)\n\tfatalIfErr(err, \"failed to save CA key\")\n\n\terr = ioutil.WriteFile(filepath.Join(shrubCA, rootName), pem.EncodeToMemory(\n\t\t&pem.Block{Type: \"CERTIFICATE\", Bytes: cert}), 0644)\n\tfatalIfErr(err, \"failed to save CA key\")\n\n\tlog.Printf(\"Created a new local CA at \\\"%s\\\"\\n\", shrubCA)\n}", "func 
storeCAChain(config *lib.ClientConfig, si *lib.GetCAInfoResponse) error {\n\tmspDir := config.MSPDir\n\t// Get a unique name to use for filenames\n\tserverURL, err := url.Parse(config.URL)\n\tif err != nil {\n\t\treturn err\n\t}\n\tfname := serverURL.Host\n\tif config.CAName != \"\" {\n\t\tfname = fmt.Sprintf(\"%s-%s\", fname, config.CAName)\n\t}\n\tfname = strings.Replace(fname, \":\", \"-\", -1)\n\tfname = strings.Replace(fname, \".\", \"-\", -1) + \".pem\"\n\ttlsfname := fmt.Sprintf(\"tls-%s\", fname)\n\n\trootCACertsDir := path.Join(mspDir, \"cacerts\")\n\tintCACertsDir := path.Join(mspDir, \"intermediatecerts\")\n\ttlsRootCACertsDir := path.Join(mspDir, \"tlscacerts\")\n\ttlsIntCACertsDir := path.Join(mspDir, \"tlsintermediatecerts\")\n\n\tvar rootBlks [][]byte\n\tvar intBlks [][]byte\n\tchain := si.CAChain\n\tfor len(chain) > 0 {\n\t\tvar block *pem.Block\n\t\tblock, chain = pem.Decode(chain)\n\t\tif block == nil {\n\t\t\tbreak\n\t\t}\n\n\t\tcert, err := x509.ParseCertificate(block.Bytes)\n\t\tif err != nil {\n\t\t\treturn errors.Wrap(err, \"Failed to parse certificate in the CA chain\")\n\t\t}\n\n\t\tif !cert.IsCA {\n\t\t\treturn errors.New(\"A certificate in the CA chain is not a CA certificate\")\n\t\t}\n\n\t\t// If authority key id is not present or if it is present and equal to subject key id,\n\t\t// then it is a root certificate\n\t\tif len(cert.AuthorityKeyId) == 0 || bytes.Equal(cert.AuthorityKeyId, cert.SubjectKeyId) {\n\t\t\trootBlks = append(rootBlks, pem.EncodeToMemory(block))\n\t\t} else {\n\t\t\tintBlks = append(intBlks, pem.EncodeToMemory(block))\n\t\t}\n\t}\n\n\t// Store the root certificates in the \"cacerts\" msp folder\n\tcertBytes := bytes.Join(rootBlks, []byte(\"\"))\n\tif len(certBytes) > 0 {\n\t\tif config.Enrollment.Profile == \"tls\" {\n\t\t\terr := storeToFile(\"TLS root CA certificate\", tlsRootCACertsDir, tlsfname, certBytes)\n\t\t\tif err != nil {\n\t\t\t\treturn err\n\t\t\t}\n\t\t} else {\n\t\t\terr = storeToFile(\"root CA certificate\", rootCACertsDir, fname, certBytes)\n\t\t\tif err != nil {\n\t\t\t\treturn err\n\t\t\t}\n\t\t}\n\t}\n\n\t// Store the intermediate certificates in the \"intermediatecerts\" msp folder\n\tcertBytes = bytes.Join(intBlks, []byte(\"\"))\n\tif len(certBytes) > 0 {\n\t\tif config.Enrollment.Profile == \"tls\" {\n\t\t\terr = storeToFile(\"TLS intermediate certificates\", tlsIntCACertsDir, tlsfname, certBytes)\n\t\t\tif err != nil {\n\t\t\t\treturn err\n\t\t\t}\n\t\t} else {\n\t\t\terr = storeToFile(\"intermediate CA certificates\", intCACertsDir, fname, certBytes)\n\t\t\tif err != nil {\n\t\t\t\treturn err\n\t\t\t}\n\t\t}\n\t}\n\treturn nil\n}", "func (mr *MockManagerMockRecorder) AddCA(arg0, arg1 interface{}) *gomock.Call {\n\treturn mr.mock.ctrl.RecordCallWithMethodType(mr.mock, \"AddCA\", reflect.TypeOf((*MockManager)(nil).AddCA), arg0, arg1)\n}", "func (h *Handler) createCASecret(config *gkev1.GKEClusterConfig, cluster *gkeapi.Cluster) error {\n\tvar err error\n\tendpoint := cluster.Endpoint\n\tvar ca []byte\n\tif cluster.MasterAuth != nil {\n\t\tca = []byte(cluster.MasterAuth.ClusterCaCertificate)\n\t}\n\n\t_, err = h.secrets.Create(\n\t\t&corev1.Secret{\n\t\t\tObjectMeta: metav1.ObjectMeta{\n\t\t\t\tName: config.Name,\n\t\t\t\tNamespace: config.Namespace,\n\t\t\t\tOwnerReferences: []metav1.OwnerReference{\n\t\t\t\t\t{\n\t\t\t\t\t\tAPIVersion: gkev1.SchemeGroupVersion.String(),\n\t\t\t\t\t\tKind: gkeClusterConfigKind,\n\t\t\t\t\t\tUID: config.UID,\n\t\t\t\t\t\tName: 
config.Name,\n\t\t\t\t\t},\n\t\t\t\t},\n\t\t\t},\n\t\t\tData: map[string][]byte{\n\t\t\t\t\"endpoint\": []byte(endpoint),\n\t\t\t\t\"ca\": ca,\n\t\t\t},\n\t\t})\n\tif errors.IsAlreadyExists(err) {\n\t\tlogrus.Debugf(\"CA secret [%s] already exists, ignoring\", config.Name)\n\t\treturn nil\n\t}\n\treturn err\n}", "func AppendCertificate(config *tls.Config, crt, key string) error {\n\tfmt.Println(\"TLS: Loading certificates (crt, key):\", crt, key)\n\tcer, err := tls.LoadX509KeyPair(crt, key)\n\tif err != nil {\n\t\tlog.Println(err)\n\t\treturn err\n\t}\n\tconfig.Certificates = append(config.Certificates, cer)\n\treturn nil\n}", "func (c *certManager) addCertificate(domain string, sans []string, renew bool) (*store.Certificate, error) {\n\t// see if the domain already has a certificate\n\tstoreCert, err := c.store.GetCertificate(domain)\n\tif err != nil {\n\t\treturn nil, logger.Errore(err)\n\t}\n\n\tif storeCert != nil {\n\t\tnewSans := diffStrings(storeCert.AlternativeNames, sans)\n\n\t\tif !renew && len(newSans) == 0 {\n\t\t\tlogger.Debug(\"already have certificate\",\n\t\t\t\tgolog.String(\"domain\", domain),\n\t\t\t\tgolog.Strings(\"sans\", sans),\n\t\t\t)\n\t\t\treturn storeCert, nil\n\t\t}\n\n\t\t// add new sans to the existing list for renewal\n\t\tsans = append(sans, newSans...)\n\t}\n\n\t// creating new cert, so authorize all the domains first\n\tauthList := make([]string, len(sans)+1)\n\tcopy(authList, sans)\n\tauthList[len(sans)] = domain\n\n\terr = c.AuthorizeAll(authList)\n\tif err != nil {\n\t\treturn nil, logger.Errore(err)\n\t}\n\n\t// create the cert\n\tcert, err := c.client.CreateCertificate(context.Background(), domain, sans)\n\tif err != nil {\n\t\treturn nil, logger.Errore(err)\n\t}\n\n\t// encrypt private key\n\tkey, err := c.secretbox.Seal(cert.PrivateKeyPEM())\n\tif err != nil {\n\t\treturn nil, logger.Errore(err)\n\t}\n\n\tstoreCert = &store.Certificate{\n\t\tSubject: domain,\n\t\tAlternativeNames: sans,\n\t\tCertificateChain: cert.CertificatesPEM(),\n\t\tPrivateKey: key,\n\t\tExpires: cert.Certificates[0].NotAfter,\n\t}\n\n\terr = c.store.PutCertificate(storeCert)\n\tif err != nil {\n\t\treturn nil, logger.Errore(err)\n\t}\n\n\treturn storeCert, nil\n}", "func (rc *ResourceCommand) createCertAuthority(ctx context.Context, client auth.ClientI, raw services.UnknownResource) error {\n\tcertAuthority, err := services.UnmarshalCertAuthority(raw.Raw)\n\tif err != nil {\n\t\treturn trace.Wrap(err)\n\t}\n\tif err := client.UpsertCertAuthority(ctx, certAuthority); err != nil {\n\t\treturn trace.Wrap(err)\n\t}\n\tfmt.Printf(\"certificate authority '%s' has been updated\\n\", certAuthority.GetName())\n\treturn nil\n}", "func caBundle(opts *git.AuthOptions) []byte {\n\tif opts == nil {\n\t\treturn nil\n\t}\n\treturn opts.CAFile\n}", "func (r *reconciler) ensureRouterCAConfigMap(secret *corev1.Secret, ingresses []ingressv1alpha1.ClusterIngress) error {\n\tdesired, err := desiredRouterCAConfigMap(secret, ingresses)\n\tif err != nil {\n\t\treturn err\n\t}\n\tcurrent, err := r.currentRouterCAConfigMap()\n\tif err != nil {\n\t\treturn err\n\t}\n\tswitch {\n\tcase desired == nil && current == nil:\n\t\t// Nothing to do.\n\tcase desired == nil && current != nil:\n\t\tif err := r.deleteRouterCAConfigMap(current); err != nil {\n\t\t\treturn fmt.Errorf(\"failed to ensure router CA was unpublished: %v\", err)\n\t\t}\n\tcase desired != nil && current == nil:\n\t\tif err := r.createRouterCAConfigMap(desired); err != nil {\n\t\t\treturn fmt.Errorf(\"failed to ensure router CA was published: 
%v\", err)\n\t\t}\n\tcase desired != nil && current != nil:\n\t\tif err := r.updateRouterCAConfigMap(current, desired); err != nil {\n\t\t\treturn fmt.Errorf(\"failed to update published router CA: %v\", err)\n\t\t}\n\t}\n\treturn nil\n}", "func cacerts(w http.ResponseWriter, r *http.Request) {\n\tctx := r.Context()\n\taps := chi.URLParam(r, apsParamName)\n\n\tcerts, err := caFromContext(ctx).CACerts(ctx, aps, r)\n\tif writeOnError(ctx, w, logMsgCACertsFailed, err) {\n\t\treturn\n\t}\n\n\t// Update CA certificates cache with each explicit call to /cacerts.\n\tcertCacheFromContext(ctx).Add(aps, certs)\n\n\twriteResponse(w, mimeTypePKCS7, true, certs)\n}", "func (a *Server) activateCertAuthority(t types.TrustedCluster) error {\n\terr := a.ActivateCertAuthority(types.CertAuthID{Type: types.UserCA, DomainName: t.GetName()})\n\tif err != nil {\n\t\treturn trace.Wrap(err)\n\t}\n\n\treturn trace.Wrap(a.ActivateCertAuthority(types.CertAuthID{Type: types.HostCA, DomainName: t.GetName()}))\n}", "func (s *Server) initMultiCAConfig() (err error) {\n\tcfg := s.Config\n\tif cfg.CAcount != 0 && len(cfg.CAfiles) > 0 {\n\t\treturn errors.New(\"The --cacount and --cafiles options are mutually exclusive\")\n\t}\n\tif cfg.CAcfg.Intermediate.ParentServer.URL != \"\" && cfg.CAcount > 0 {\n\t\treturn errors.New(\"The --cacount option is not permissible for an intermediate server; use the --cafiles option instead\")\n\t}\n\tcfg.CAfiles, err = util.NormalizeFileList(cfg.CAfiles, s.HomeDir)\n\tif err != nil {\n\t\treturn err\n\t}\n\t// Multi-CA related configuration initialization\n\ts.caMap = make(map[string]*CA)\n\tif cfg.CAcount >= 1 {\n\t\ts.createDefaultCAConfigs(cfg.CAcount)\n\t}\n\tif len(cfg.CAfiles) != 0 {\n\t\tlog.Debugf(\"Default CA configuration, if necessary, will be used to replace missing values for additional CAs: %+v\", s.Config.CAcfg)\n\t\tlog.Debugf(\"Additional CAs to be started: %s\", cfg.CAfiles)\n\t\tcaFiles := util.NormalizeStringSlice(cfg.CAfiles)\n\t\tfor _, caFile := range caFiles {\n\t\t\terr = s.loadCA(caFile, false)\n\t\t\tif err != nil {\n\t\t\t\treturn err\n\t\t\t}\n\t\t}\n\t}\n\treturn nil\n}", "func (r *ReconcileRethinkDBCluster) reconcileCAConfigMap(cr *rethinkdbv1alpha1.RethinkDBCluster, caSecret *corev1.Secret) error {\n\tname := fmt.Sprintf(\"%s-ca\", cr.Name)\n\tfound := &corev1.ConfigMap{}\n\n\terr := r.client.Get(context.TODO(), types.NamespacedName{Name: name, Namespace: cr.Namespace}, found)\n\tif err != nil && errors.IsNotFound(err) {\n\t\tlog.Info(\"creating new configmap\", \"configmap\", name)\n\t\tcm, err := newCAConfigMap(cr, caSecret)\n\t\tif err != nil {\n\t\t\treturn err\n\t\t}\n\n\t\t// Set RethinkDBCluster instance as the owner and controller\n\t\tif err = controllerutil.SetControllerReference(cr, cm, r.scheme); err != nil {\n\t\t\treturn err\n\t\t}\n\n\t\treturn r.client.Create(context.TODO(), cm)\n\t} else if err != nil {\n\t\treturn err\n\t}\n\n\tlog.Info(\"configmap exists\", \"configmap\", found.Name)\n\treturn nil\n}", "func (ctx *serverRequestContextImpl) getCA() (*CA, error) {\n\tif ctx.ca == nil {\n\t\t// Get the CA name\n\t\tname, err := ctx.getCAName()\n\t\tif err != nil {\n\t\t\treturn nil, err\n\t\t}\n\t\t// Get the CA by its name\n\t\tctx.ca, err = ctx.endpoint.Server.GetCA(name)\n\t\tif err != nil {\n\t\t\treturn nil, err\n\t\t}\n\t}\n\treturn ctx.ca, nil\n}", "func getCaFileAndDownloadIfRequired(c *cli.Context) (string, error) {\n\t// have we done this already?\n\tif len(caFile) > 0 {\n\t\treturn caFile, nil\n\t}\n\tca := c.String(FlagCa)\n\t// 
Detect if using a URL scheme\n\tif uri, _ := url.ParseRequestURI(ca); uri != nil {\n\t\tif uri.Scheme == \"\" {\n\t\t\t// Not a URL, get out of here\n\t\t\treturn ca, nil\n\t\t}\n\t}\n\t// Where should we save the ca?\n\tif c.IsSet(FlagCaFile) {\n\t\tcaFile = c.String(FlagCaFile)\n\t} else {\n\t\t// This is used by cleanup\n\t\tcaFile = filepath.Join(getKdTempDir(), \"kube-ca.pem\")\n\t}\n\n\t// skip download if ca file already exists\n\tif found, err := FilesExists(caFile); err != nil {\n\t\treturn \"\", err\n\t} else if found {\n\t\tlogDebug.Printf(\"ca file (%s) already exists, skipping download from: %s\", caFile, ca)\n\t\treturn caFile, nil\n\t}\n\n\tlogDebug.Printf(\"ca file specified as %s, to download from %s\", caFile, ca)\n\t// download the ca...\n\tresp, err := grab.Get(caFile, ca)\n\tif err != nil {\n\t\treturn \"\", fmt.Errorf(\n\t\t\t\"problem downloading ca from %s:%s\", resp.Filename, err)\n\t}\n\treturn caFile, nil\n}", "func createCertificateAuthority(path, content string) error {\n\t// This hardcoded certificate authority\n\tif found, err := FilesExists(path); err != nil {\n\t\treturn err\n\t} else if found {\n\t\treturn nil\n\t}\n\n\t// Write the file to disk\n\tif err := ioutil.WriteFile(path, []byte(content), 0444); err != nil {\n\t\treturn err\n\t}\n\n\treturn nil\n}", "func (f5 *f5LTM) AddCert(routename, hostname, cert, privkey,\n\tdestCACert string) error {\n\tif f5.privkey == \"\" {\n\t\treturn fmt.Errorf(\"Cannot configure TLS for route %s\"+\n\t\t\t\" because router was not provided an SSH private key\",\n\t\t\troutename)\n\t}\n\n\tvar deleteServerSslProfile,\n\t\tdeleteClientSslProfileFromVserver, deleteClientSslProfile,\n\t\tdeletePrivateKey, deleteCert, deleteCACert bool\n\n\tsuccess := false\n\n\tdefer func() {\n\t\tif success != true {\n\t\t\tf5.deleteCertParts(routename, false, deleteServerSslProfile,\n\t\t\t\tdeleteClientSslProfileFromVserver, deleteClientSslProfile,\n\t\t\t\tdeletePrivateKey, deleteCert, deleteCACert)\n\t\t}\n\t}()\n\n\tvar err error\n\n\tcertname := fmt.Sprintf(\"%s-https-cert\", routename)\n\terr = f5.uploadCert(cert, certname)\n\tif err != nil {\n\t\treturn err\n\t}\n\tdeleteCert = true\n\n\tkeyname := fmt.Sprintf(\"%s-https-key\", routename)\n\terr = f5.uploadKey(privkey, keyname)\n\tif err != nil {\n\t\treturn err\n\t}\n\tdeletePrivateKey = true\n\n\tclientSslProfileName := fmt.Sprintf(\"%s-client-ssl-profile\", routename)\n\terr = f5.createClientSslProfile(clientSslProfileName,\n\t\thostname, certname, keyname)\n\tif err != nil {\n\t\treturn err\n\t}\n\tdeleteClientSslProfile = true\n\n\terr = f5.associateClientSslProfileWithVserver(clientSslProfileName,\n\t\tf5.httpsVserver)\n\tif err != nil {\n\t\treturn err\n\t}\n\tdeleteClientSslProfileFromVserver = true\n\n\tif destCACert != \"\" {\n\t\tcacertname := fmt.Sprintf(\"%s-https-chain\", routename)\n\t\terr = f5.uploadCert(destCACert, cacertname)\n\t\tif err != nil {\n\t\t\treturn err\n\t\t}\n\t\tdeleteCACert = true\n\n\t\tserverSslProfileName := fmt.Sprintf(\"%s-server-ssl-profile\", routename)\n\t\terr = f5.createServerSslProfile(serverSslProfileName,\n\t\t\thostname, cacertname)\n\t\tif err != nil {\n\t\t\treturn err\n\t\t}\n\t\tdeleteServerSslProfile = true\n\n\t\terr = f5.associateServerSslProfileWithVserver(serverSslProfileName,\n\t\t\tf5.httpsVserver)\n\t\tif err != nil {\n\t\t\treturn err\n\t\t}\n\t}\n\n\tsuccess = true\n\n\treturn nil\n}", "func CAFile(caFile string) Option {\n\treturn func(opts *options) {\n\t\topts.caFile = caFile\n\t}\n}", "func (c EasyCert) 
generateCA(caFile string) (*x509.Certificate, crypto.PrivateKey, error) {\n\ttemplate := c.newCertificate()\n\ttemplate.IsCA = true\n\ttemplate.KeyUsage |= x509.KeyUsageCertSign\n\ttemplate.Subject.CommonName = c.org\n\n\tpriv, err := c.newPrivateKey()\n\tif err != nil {\n\t\treturn nil, nil, err\n\t}\n\n\tderBytes, err := x509.CreateCertificate(rand.Reader, template, template, priv.(crypto.Signer).Public(), priv)\n\tif err != nil {\n\t\treturn nil, nil, err\n\t}\n\n\tca, err := x509.ParseCertificate(derBytes)\n\tif err != nil {\n\t\treturn nil, nil, err\n\t}\n\n\tcertOut, err := os.Create(caFile)\n\tif err != nil {\n\t\treturn nil, nil, err\n\t}\n\tdefer certOut.Close()\n\tif err := pem.Encode(certOut, &pem.Block{Type: \"CERTIFICATE\", Bytes: derBytes}); err != nil {\n\t\treturn nil, nil, err\n\t}\n\n\treturn ca, priv, nil\n}", "func Add(mgr manager.Manager) error {\n\turl := os.Getenv(\"CONCOURSECI_URL\")\n\ttarget := os.Getenv(\"CONCOURSECI_TARGET\")\n\tteam := os.Getenv(\"CONCOURSECI_TEAM\")\n\tusername := os.Getenv(\"CONCOURSECI_USERNAME\")\n\tpassword := os.Getenv(\"CONCOURSECI_PASSWORD\")\n\treturn add(mgr, newReconciler(mgr, concourseci.NewClient(url, target, team, username, password)))\n}", "func (r *TrustedCABundleReconciler) addProxyCABundle(ctx context.Context, proxyConfig *configv1.Proxy, originalCABundle []byte) ([]byte, []byte, error) {\n\tif isSpecTrustedCASet(&proxyConfig.Spec) {\n\t\tuserProxyCABundle, err := r.getUserProxyCABundle(ctx, proxyConfig.Spec.TrustedCA.Name)\n\t\tif err != nil {\n\t\t\tklog.Warningf(\"failed to get user defined proxy trust bundle, system CA will be used: %v\", err)\n\t\t\treturn nil, originalCABundle, nil\n\t\t}\n\t\tresultCABundle, err := r.mergeCABundles(userProxyCABundle, originalCABundle)\n\t\tif err != nil {\n\t\t\treturn userProxyCABundle, nil, fmt.Errorf(\"can not merge system and user trust bundles: %v\", err)\n\t\t}\n\t\treturn userProxyCABundle, resultCABundle, nil\n\t}\n\treturn nil, originalCABundle, nil\n}", "func writeCA(source io.Reader) (string, error) {\n\tf, err := ioutil.TempFile(\"\", \"ovirt-ca-bundle\")\n\tif err != nil {\n\t\treturn \"\", err\n\t}\n\tdefer f.Close()\n\tcontent, err := ioutil.ReadAll(source)\n\tif err != nil {\n\t\treturn \"\", err\n\t}\n\t_, err = f.Write(content)\n\tif err != nil {\n\t\treturn \"\", err\n\t}\n\treturn f.Name(), nil\n}", "func reconcileCABundle(ctx context.Context, client client.Client, config *runtimev1.ExtensionConfig) error {\n\tlog := ctrl.LoggerFrom(ctx)\n\n\tsecretNameRaw, ok := config.Annotations[runtimev1.InjectCAFromSecretAnnotation]\n\tif !ok {\n\t\treturn nil\n\t}\n\tsecretName := splitNamespacedName(secretNameRaw)\n\n\tlog.Info(fmt.Sprintf(\"Injecting CA Bundle into ExtensionConfig from secret %q\", secretNameRaw))\n\n\tif secretName.Namespace == \"\" || secretName.Name == \"\" {\n\t\treturn errors.Errorf(\"failed to reconcile caBundle: secret name %q must be in the form <namespace>/<name>\", secretNameRaw)\n\t}\n\n\tvar secret corev1.Secret\n\t// Note: this is an expensive API call because secrets are explicitly not cached.\n\tif err := client.Get(ctx, secretName, &secret); err != nil {\n\t\treturn errors.Wrapf(err, \"failed to reconcile caBundle: failed to get secret %q\", secretNameRaw)\n\t}\n\n\tcaData, hasCAData := secret.Data[tlsCAKey]\n\tif !hasCAData {\n\t\treturn errors.Errorf(\"failed to reconcile caBundle: secret %s does not contain a %q entry\", secretNameRaw, tlsCAKey)\n\t}\n\n\tconfig.Spec.ClientConfig.CABundle = caData\n\treturn nil\n}", "func (c *Certinator) 
CreateCA(name string) (err error) {\n\tconfig := map[string]interface{}{\n\t\t\"options\": nil,\n\t\t\"default_lease_ttl\": \"0s\",\n\t\t\"max_lease_ttl\": DEFAULT_CA_MAX_LEASE,\n\t\t\"force_no_cache\": false,\n\t}\n\n\tdata := map[string]interface{}{\n\t\t\"type\": \"pki\",\n\t\t\"description\": fmt.Sprintf(\"%s certificate authority\", name),\n\t\t\"config\": config,\n\t}\n\n\tpath := fmt.Sprintf(\"sys/mounts/%s\", name)\n\n\t_, err = c.Client.Logical().Write(path, data)\n\tif err != nil {\n\t\terr = errors.Wrapf(err, \"failed creating %s CA\", name)\n\t\treturn err\n\t}\n\n\treturn err\n}", "func (c *Certinator) TuneCA(name string) (err error) {\n\tdata := map[string]interface{}{\n\t\t\"options\": nil,\n\t\t\"default_lease_ttl\": \"\",\n\t\t\"max_lease_ttl\": DEFAULT_CA_MAX_LEASE,\n\t\t\"force_no_cache\": false,\n\t}\n\n\tpath := fmt.Sprintf(\"sys/mounts/%s/tune\", name)\n\n\t_, err = c.Client.Logical().Write(path, data)\n\tif err != nil {\n\t\terr = errors.Wrapf(err, \"failed creating %s CA\", name)\n\t\treturn err\n\t}\n\n\treturn err\n}", "func CA(cn string) x509.Certificate {\n\treturn x509.Certificate{\n\t\tSubject: pkix.Name{\n\t\t\tCommonName: cn,\n\t\t},\n\t\tIsCA: true,\n\t\tKeyUsage: x509.KeyUsageCertSign | x509.KeyUsageCRLSign | x509.KeyUsageDigitalSignature,\n\t\tExtKeyUsage: []x509.ExtKeyUsage{x509.ExtKeyUsageClientAuth, x509.ExtKeyUsageServerAuth, x509.ExtKeyUsageOCSPSigning},\n\t}\n}", "func GenCerts(hosts []string, outname string, isCA bool) (err error) {\n\tpriv, err := ecdsa.GenerateKey(elliptic.P256(), rand.Reader)\n\tif err != nil {\n\t\treturn fmt.Errorf(\"GenerateKey: %v\", err)\n\t}\n\ttemplate := x509.Certificate{\n\t\tSerialNumber: big.NewInt(1),\n\t\tSubject: pkix.Name{\n\t\t\tOrganization: []string{\"Acme Co\"},\n\t\t},\n\t\tNotBefore: time.Now(),\n\t\tNotAfter: time.Now().Add(time.Hour * 24 * 3650),\n\n\t\tKeyUsage: x509.KeyUsageKeyEncipherment | x509.KeyUsageDigitalSignature,\n\t\tExtKeyUsage: []x509.ExtKeyUsage{x509.ExtKeyUsageServerAuth},\n\t\tBasicConstraintsValid: true,\n\t}\n\n\tvar (\n\t\tcakey *ecdsa.PrivateKey\n\t\tcacrt *x509.Certificate\n\t\tderBytes []byte\n\t)\n\n\t// valid for these names\n\tif isCA {\n\t\ttemplate.IsCA = true\n\t\ttemplate.KeyUsage |= x509.KeyUsageCertSign\n\t\toutname = \"ca\"\n\t\tderBytes, err = x509.CreateCertificate(rand.Reader, &template, &template, publicKey(priv), priv)\n\t\tif err != nil {\n\t\t\treturn fmt.Errorf(\"Failed to create certificate: %v\", err)\n\t\t}\n\t} else {\n\t\tfor _, h := range hosts {\n\t\t\tif ip := net.ParseIP(h); ip != nil {\n\t\t\t\ttemplate.IPAddresses = append(template.IPAddresses, ip)\n\t\t\t} else {\n\t\t\t\ttemplate.DNSNames = append(template.DNSNames, h)\n\t\t\t}\n\t\t}\n\n\t\t// ca key file\n\t\tca_data, err := os.ReadFile(\"ca-key.pem\")\n\t\tif err != nil {\n\t\t\treturn fmt.Errorf(\"Read ca-key.pem: %v\", err)\n\t\t}\n\t\tblock, _ := pem.Decode(ca_data)\n\t\tcakey, _ = x509.ParseECPrivateKey(block.Bytes)\n\n\t\t// ca cert file\n\t\tca_data, err = os.ReadFile(\"ca-cert.pem\")\n\t\tif err != nil {\n\t\t\treturn fmt.Errorf(\"Read ca-cert.pem: %v\", err)\n\t\t}\n\t\tblock, _ = pem.Decode(ca_data)\n\t\tcacrt, _ = x509.ParseCertificate(block.Bytes)\n\n\t\t// generate C2 server certificate, signed by our CA\n\t\tderBytes, err = x509.CreateCertificate(rand.Reader, &template, cacrt, publicKey(priv), cakey)\n\t\tif err != nil {\n\t\t\treturn fmt.Errorf(\"Failed to create certificate: %v\", err)\n\t\t}\n\t}\n\n\t// output to pem files\n\tout := &bytes.Buffer{}\n\toutcert := 
fmt.Sprintf(\"%s-cert.pem\", outname)\n\toutkey := fmt.Sprintf(\"%s-key.pem\", outname)\n\t// cert\n\tpem.Encode(out, &pem.Block{Type: \"CERTIFICATE\", Bytes: derBytes})\n\terr = os.WriteFile(outcert, out.Bytes(), 0600)\n\tif err != nil {\n\t\treturn fmt.Errorf(\"Write %s: %v\", outcert, err)\n\t}\n\tout.Reset()\n\n\t// key\n\tpem.Encode(out, pemBlockForKey(priv))\n\terr = os.WriteFile(outkey, out.Bytes(), 0600)\n\tif err != nil {\n\t\treturn fmt.Errorf(\"Write %s: %v\", outkey, err)\n\t}\n\n\treturn\n}", "func (r *reconciler) createRouterCAConfigMap(cm *corev1.ConfigMap) error {\n\tif err := r.Client.Create(context.TODO(), cm); err != nil {\n\t\tif errors.IsAlreadyExists(err) {\n\t\t\treturn nil\n\t\t}\n\t\treturn err\n\t}\n\tlogrus.Infof(\"created configmap %s/%s\", cm.Namespace, cm.Name)\n\treturn nil\n}", "func (o *LDAPIdentityProvider) CA() string {\n\tif o != nil && o.bitmap_&1 != 0 {\n\t\treturn o.ca\n\t}\n\treturn \"\"\n}", "func caUpdateCmd(cmd *cli.Cmd) {\n\tcmd.Spec = \"NAME [OPTIONS]\"\n\n\tparams := controller.NewCAParams()\n\tparams.Name = cmd.StringArg(\"NAME\", \"\", \"name of CA\")\n\n\tparams.CertFile = cmd.StringOpt(\"cert\", \"\", \"certificate PEM file\")\n\tparams.KeyFile = cmd.StringOpt(\"key\", \"\", \"key PEM file\")\n\tparams.Tags = cmd.StringOpt(\"tags\", \"\", \"comma separated list of tags\")\n\tparams.CaExpiry = cmd.IntOpt(\"ca-expiry\", 0, \"CA expiry period in days\")\n\tparams.CertExpiry = cmd.IntOpt(\"cert-expiry\", 0, \"Certificate expiry period in days\")\n\tparams.DnLocality = cmd.StringOpt(\"dn-l\", \"\", \"Locality for DN scope\")\n\tparams.DnState = cmd.StringOpt(\"dn-st\", \"\", \"State/province for DN scope\")\n\tparams.DnOrg = cmd.StringOpt(\"dn-o\", \"\", \"Organization for DN scope\")\n\tparams.DnOrgUnit = cmd.StringOpt(\"dn-ou\", \"\", \"Organizational unit for DN scope\")\n\tparams.DnCountry = cmd.StringOpt(\"dn-c\", \"\", \"Country for DN scope\")\n\tparams.DnStreet = cmd.StringOpt(\"dn-street\", \"\", \"Street for DN scope\")\n\tparams.DnPostal = cmd.StringOpt(\"dn-postal\", \"\", \"PostalCode for DN scope\")\n\n\tcmd.Action = func() {\n\t\tapp := NewAdminApp()\n\t\tlogger.Info(\"updating CA\")\n\n\t\tcont, err := controller.NewCA(app.env)\n\t\tif err != nil {\n\t\t\tapp.Fatal(err)\n\t\t}\n\n\t\tif err := cont.Update(params); err != nil {\n\t\t\tapp.Fatal(err)\n\t\t}\n\t}\n}", "func ValidateCA(attribute string, pemdata string) error {\n\n\tif pemdata == \"\" {\n\t\treturn nil\n\t}\n\n\tvar i int\n\tvar block *pem.Block\n\trest := []byte(pemdata)\n\n\tfor {\n\t\tblock, rest = pem.Decode(rest)\n\n\t\tif block == nil {\n\t\t\treturn makeValidationError(attribute, \"Unable to decode PEM\")\n\t\t}\n\n\t\tif len(rest) == 0 {\n\t\t\tbreak\n\t\t}\n\t\ti++\n\t}\n\n\tcacert, err := x509.ParseCertificate(block.Bytes)\n\tif err != nil {\n\t\treturn makeValidationError(attribute, fmt.Sprintf(\"Unable to parse x509 certificate: %s\", err))\n\t}\n\n\tif !cacert.IsCA {\n\t\treturn makeValidationError(attribute, \"Given x509 certificate is not a CA\")\n\t}\n\n\treturn nil\n}", "func TestConnectCALeaf_changingRoots(t *testing.T) {\n\tt.Parallel()\n\n\trequire := require.New(t)\n\trpc := TestRPC(t)\n\tdefer rpc.AssertExpectations(t)\n\n\ttyp, rootsCh := testCALeafType(t, rpc)\n\tdefer close(rootsCh)\n\n\tcaRoot := connect.TestCA(t, nil)\n\tcaRoot.Active = true\n\trootsCh <- structs.IndexedCARoots{\n\t\tActiveRootID: caRoot.ID,\n\t\tTrustDomain: \"fake-trust-domain.consul\",\n\t\tRoots: []*structs.CARoot{\n\t\t\tcaRoot,\n\t\t},\n\t\tQueryMeta: 
structs.QueryMeta{Index: 1},\n\t}\n\n\t// We need this later but needs to be defined so we sign second CSR with it\n\t// otherwise we break the cert root checking.\n\tcaRoot2 := connect.TestCA(t, nil)\n\n\t// Instrument ConnectCA.Sign to return signed cert\n\tvar resp *structs.IssuedCert\n\tvar idx uint64\n\n\trpc.On(\"RPC\", \"ConnectCA.Sign\", mock.Anything, mock.Anything).Return(nil).\n\t\tRun(func(args mock.Arguments) {\n\t\t\tca := caRoot\n\t\t\tcIdx := atomic.AddUint64(&idx, 1)\n\t\t\tif cIdx > 1 {\n\t\t\t\t// Second time round use the new CA\n\t\t\t\tca = caRoot2\n\t\t\t}\n\t\t\treply := args.Get(2).(*structs.IssuedCert)\n\t\t\tleaf, _ := connect.TestLeaf(t, \"web\", ca)\n\t\t\treply.CertPEM = leaf\n\t\t\treply.ValidAfter = time.Now().Add(-1 * time.Hour)\n\t\t\treply.ValidBefore = time.Now().Add(11 * time.Hour)\n\t\t\treply.CreateIndex = cIdx\n\t\t\treply.ModifyIndex = reply.CreateIndex\n\t\t\tresp = reply\n\t\t})\n\n\t// We'll reuse the fetch options and request\n\topts := cache.FetchOptions{MinIndex: 0, Timeout: 10 * time.Second}\n\treq := &ConnectCALeafRequest{Datacenter: \"dc1\", Service: \"web\"}\n\n\t// First fetch should return immediately\n\tfetchCh := TestFetchCh(t, typ, opts, req)\n\tselect {\n\tcase <-time.After(100 * time.Millisecond):\n\t\tt.Fatal(\"shouldn't block waiting for fetch\")\n\tcase result := <-fetchCh:\n\t\tv := mustFetchResult(t, result)\n\t\trequire.Equal(resp, v.Value)\n\t\trequire.Equal(uint64(1), v.Index)\n\t\t// Set the LastResult for subsequent fetches\n\t\topts.LastResult = &v\n\t}\n\n\t// Second fetch should block with set index\n\topts.MinIndex = 1\n\tfetchCh = TestFetchCh(t, typ, opts, req)\n\tselect {\n\tcase result := <-fetchCh:\n\t\tt.Fatalf(\"should not return: %#v\", result)\n\tcase <-time.After(100 * time.Millisecond):\n\t}\n\n\t// Let's send in new roots, which should trigger the sign req. 
We need to take\n\t// care to set the new root as active\n\tcaRoot2.Active = true\n\tcaRoot.Active = false\n\trootsCh <- structs.IndexedCARoots{\n\t\tActiveRootID: caRoot2.ID,\n\t\tTrustDomain: \"fake-trust-domain.consul\",\n\t\tRoots: []*structs.CARoot{\n\t\t\tcaRoot2,\n\t\t\tcaRoot,\n\t\t},\n\t\tQueryMeta: structs.QueryMeta{Index: atomic.AddUint64(&idx, 1)},\n\t}\n\tselect {\n\tcase <-time.After(100 * time.Millisecond):\n\t\tt.Fatal(\"shouldn't block waiting for fetch\")\n\tcase result := <-fetchCh:\n\t\tv := mustFetchResult(t, result)\n\t\trequire.Equal(resp, v.Value)\n\t\t// 3 since the second CA \"update\" used up 2\n\t\trequire.Equal(uint64(3), v.Index)\n\t\t// Set the LastResult for subsequent fetches\n\t\topts.LastResult = &v\n\t\topts.MinIndex = 3\n\t}\n\n\t// Third fetch should block\n\tfetchCh = TestFetchCh(t, typ, opts, req)\n\tselect {\n\tcase result := <-fetchCh:\n\t\tt.Fatalf(\"should not return: %#v\", result)\n\tcase <-time.After(100 * time.Millisecond):\n\t}\n}", "func TemplateCA(cn string) *x509.Certificate {\n\tca := Template(cn)\n\tca.KeyUsage = x509.KeyUsageDigitalSignature | x509.KeyUsageCertSign | x509.KeyUsageCRLSign\n\tca.IsCA = true\n\tca.MaxPathLen = certMaxPathLen\n\tca.MaxPathLenZero = true\n\treturn ca\n}", "func RenewCA(ca CAType) (CAType, error) {\n\t// regenerate the CA cert *with the same key*\n\tparsedCert, err := helpers.ParseCertificatePEM([]byte(ca.Cert))\n\tif err != nil {\n\t\treturn CAType{}, errors.Wrap(err, \"parse ca certificate\")\n\t}\n\n\tparsedKey, err := helpers.ParsePrivateKeyPEM([]byte(ca.Key))\n\tif err != nil {\n\t\treturn CAType{}, errors.Wrap(err, \"parse ca key\")\n\t}\n\n\tnewCABytes, err := initca.RenewFromSigner(parsedCert, parsedKey)\n\tif err != nil {\n\t\treturn CAType{}, errors.Wrap(err, \"renew ca certificate\")\n\t}\n\n\tca.Cert = string(newCABytes)\n\treturn ca, nil\n}", "func (a *Client) ContractAdd(params *ContractAddParams, authInfo runtime.ClientAuthInfoWriter, opts ...ClientOption) (*ContractAddOK, error) {\n\t// TODO: Validate the params before sending\n\tif params == nil {\n\t\tparams = NewContractAddParams()\n\t}\n\top := &runtime.ClientOperation{\n\t\tID: \"contractAdd\",\n\t\tMethod: \"POST\",\n\t\tPathPattern: \"/contract\",\n\t\tProducesMediaTypes: []string{\"application/json\"},\n\t\tConsumesMediaTypes: []string{\"application/json\"},\n\t\tSchemes: []string{\"https\"},\n\t\tParams: params,\n\t\tReader: &ContractAddReader{formats: a.formats},\n\t\tAuthInfo: authInfo,\n\t\tContext: params.Context,\n\t\tClient: params.HTTPClient,\n\t}\n\tfor _, opt := range opts {\n\t\topt(op)\n\t}\n\n\tresult, err := a.transport.Submit(op)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\tsuccess, ok := result.(*ContractAddOK)\n\tif ok {\n\t\treturn success, nil\n\t}\n\t// unexpected success response\n\t// safeguard: normally, absent a default response, unknown success responses return an error above: so this is a codegen issue\n\tmsg := fmt.Sprintf(\"unexpected success response for contractAdd: API contract not enforced by server. 
Client expected to get an error, but got: %T\", result)\n\tpanic(msg)\n}", "func (r *OrganizationCertificateBasedAuthConfigurationCollectionRequest) Add(ctx context.Context, reqObj *CertificateBasedAuthConfiguration) (resObj *CertificateBasedAuthConfiguration, err error) {\n\terr = r.JSONRequest(ctx, \"POST\", \"\", reqObj, &resObj)\n\treturn\n}", "func (m *NetManager) serverAdd(key string, con cm.Connection) error {\n\n\tm.keyMarker[key] = key_marker_server\n\treturn m.serverConsManager.Add(key, con)\n}", "func caNewCmd(cmd *cli.Cmd) {\n\tcmd.Spec = \"NAME [OPTIONS]\"\n\n\tparams := controller.NewCAParams()\n\tparams.Name = cmd.StringArg(\"NAME\", \"\", \"name of CA\")\n\n\tparams.CertFile = cmd.StringOpt(\"cert\", \"\", \"certificate PEM file\")\n\tparams.KeyFile = cmd.StringOpt(\"key\", \"\", \"key PEM file\")\n\tparams.Tags = cmd.StringOpt(\"tags\", \"NAME\", \"comma separated list of tags\")\n\tparams.CaExpiry = cmd.IntOpt(\"ca-expiry\", 365, \"CA expiry period in days\")\n\tparams.CertExpiry = cmd.IntOpt(\"cert-expiry\", 90, \"Certificate expiry period in days\")\n\tparams.KeyType = cmd.StringOpt(\"key-type\", \"ec\", \"Key type (ec or rsa)\")\n\tparams.DnLocality = cmd.StringOpt(\"dn-l\", \"\", \"Locality for DN scope\")\n\tparams.DnState = cmd.StringOpt(\"dn-st\", \"\", \"State/province for DN scope\")\n\tparams.DnOrg = cmd.StringOpt(\"dn-o\", \"\", \"Organization for DN scope\")\n\tparams.DnOrgUnit = cmd.StringOpt(\"dn-ou\", \"\", \"Organizational unit for DN scope\")\n\tparams.DnCountry = cmd.StringOpt(\"dn-c\", \"\", \"Country for DN scope\")\n\tparams.DnStreet = cmd.StringOpt(\"dn-street\", \"\", \"Street for DN scope\")\n\tparams.DnPostal = cmd.StringOpt(\"dn-postal\", \"\", \"PostalCode for DN scope\")\n\n\tcmd.Action = func() {\n\t\tapp := NewAdminApp()\n\t\tlogger.Info(\"creating new CA\")\n\n\t\tcont, err := controller.NewCA(app.env)\n\t\tif err != nil {\n\t\t\tapp.Fatal(err)\n\t\t}\n\n\t\tca, err := cont.New(params)\n\t\tif err != nil {\n\t\t\tapp.Fatal(err)\n\t\t}\n\n\t\tif ca != nil {\n\t\t\ttable := app.NewTable()\n\n\t\t\tcaData := [][]string{\n\t\t\t\t[]string{\"Id\", ca.Id()},\n\t\t\t\t[]string{\"Name\", ca.Name()},\n\t\t\t}\n\n\t\t\ttable.AppendBulk(caData)\n\n\t\t\tapp.RenderTable(table)\n\t\t}\n\n\t}\n}", "func writeCApem(t *testing.T, srv *httptest.Server, tmpDir string, certName string) *os.File {\n\tcaPEM := new(bytes.Buffer)\n\terr := pem.Encode(caPEM, &pem.Block{\n\t\tType: \"CERTIFICATE\",\n\t\tBytes: srv.Certificate().Raw,\n\t})\n\trequire.NoError(t, err)\n\n\t// Then write the ca.pem to disk\n\tcaPem, err := os.Create(filepath.Join(tmpDir, certName))\n\trequire.NoError(t, err)\n\t_, err = caPem.Write(caPEM.Bytes())\n\trequire.NoError(t, err)\n\treturn caPem\n}", "func (b *GitHubIdentityProviderApplyConfiguration) WithCA(value *ConfigMapNameReferenceApplyConfiguration) *GitHubIdentityProviderApplyConfiguration {\n\tb.CA = value\n\treturn b\n}", "func (c MTLSCerts) appendSANs(cert *x509.Certificate, dnsNames []string, ips []string) error {\n\tcert.DNSNames = dnsNames\n\n\tif len(ips) == 0 {\n\t\tfor _, name := range dnsNames {\n\t\t\tips, err := net.LookupIP(name)\n\t\t\tif err != nil {\n\t\t\t\treturn trace.Wrap(err)\n\t\t\t}\n\n\t\t\tif ips != nil {\n\t\t\t\tcert.IPAddresses = append(cert.IPAddresses, ips...)\n\t\t\t}\n\t\t}\n\t} else {\n\t\tfor _, ip := range ips {\n\t\t\tcert.IPAddresses = append(cert.IPAddresses, net.ParseIP(ip))\n\t\t}\n\t}\n\n\treturn nil\n}", "func (o GithubEnterpriseConfigOutput) SslCa() pulumi.StringOutput {\n\treturn 
o.ApplyT(func(v *GithubEnterpriseConfig) pulumi.StringOutput { return v.SslCa }).(pulumi.StringOutput)\n}", "func (acm *AcmeFS) CreateCA(acct tlsfs.NewDomain, tos tlsfs.TOSAction) (tlsfs.TLSDomainCertificate, tlsfs.Status, error) {\n\treturn tlsfs.TLSDomainCertificate{}, tlsfs.WithStatus(tlsfs.OPFailed, tlsfs.ErrNotSupported), tlsfs.ErrNotSupported\n}", "func (o ArgoCDSpecServerRouteTlsOutput) CaCertificate() pulumi.StringPtrOutput {\n\treturn o.ApplyT(func(v ArgoCDSpecServerRouteTls) *string { return v.CaCertificate }).(pulumi.StringPtrOutput)\n}", "func (t *trustedBundle) AddCertificates(certificates ...CertificateInterface) {\n\tfor _, cert := range certificates {\n\t\t// Check if we already trust an issuer of this cert. In practice, this will be 0 or 1 iteration,\n\t\t// because the issuer is only set when the tigera-ca-private is the issuer.\n\t\tcur := cert\n\t\tvar skip bool\n\t\tfor cur != nil && !skip {\n\t\t\thash := rmeta.AnnotationHash(cur.GetCertificatePEM())\n\t\t\tcur = cur.GetIssuer()\n\t\t\tif _, found := t.certificates[hash]; found {\n\t\t\t\tskip = true\n\t\t\t}\n\t\t}\n\t\tif cert != nil && !skip {\n\t\t\t// Add the leaf certificate\n\t\t\thash := rmeta.AnnotationHash(cert.GetCertificatePEM())\n\t\t\tt.certificates[hash] = cert\n\t\t}\n\t}\n}", "func pathFetchCA(b *backend) *framework.Path {\n\treturn &framework.Path{\n\t\tPattern: `ca(/pem)?`,\n\n\t\tCallbacks: map[logical.Operation]framework.OperationFunc{\n\t\t\tlogical.ReadOperation: b.pathFetchRead,\n\t\t},\n\n\t\tHelpSynopsis: pathFetchHelpSyn,\n\t\tHelpDescription: pathFetchHelpDesc,\n\t}\n}", "func NewCA(config *NewCertConfig, names ...string) (*Certificate, error) {\n\tconfig.IsCA = true\n\n\tcert, err := newCert(config, names...)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\n\tcertPool := x509.NewCertPool()\n\tcertPool.AddCert(cert.Cert)\n\tcert.CertPool = certPool\n\n\treturn cert, nil\n}", "func Issue(hosts []string) (caCert []byte, tlsCert []byte, tlsKey []byte, err error) {\n\trsaBits := 2048\n\tpriv, err := rsa.GenerateKey(rand.Reader, rsaBits)\n\tif err != nil {\n\t\treturn nil, nil, nil, fmt.Errorf(\"generate rsa key: %v\", err)\n\t}\n\tserialNumberLimit := new(big.Int).Lsh(big.NewInt(1), 128)\n\tserialNumber, err := rand.Int(rand.Reader, serialNumberLimit)\n\tif err != nil {\n\t\treturn nil, nil, nil, fmt.Errorf(\"generate serial number for root: %v\", err)\n\t}\n\tsubject := pkix.Name{\n\t\tOrganization: []string{\"Root CA\"},\n\t}\n\tissuer := pkix.Name{\n\t\tOrganization: []string{\"Root CA\"},\n\t}\n\tcaTemplate := x509.Certificate{\n\t\tSerialNumber: serialNumber,\n\t\tSubject: subject,\n\t\tNotBefore: time.Now(),\n\t\tNotAfter: validityNotAfter,\n\t\tKeyUsage: x509.KeyUsageCertSign,\n\t\tExtKeyUsage: []x509.ExtKeyUsage{x509.ExtKeyUsageServerAuth, x509.ExtKeyUsageClientAuth},\n\t\tBasicConstraintsValid: true,\n\t\tIsCA: true,\n\t}\n\n\tderBytes, err := x509.CreateCertificate(rand.Reader, &caTemplate, &caTemplate, &priv.PublicKey, priv)\n\tif err != nil {\n\t\treturn nil, nil, nil, fmt.Errorf(\"generate CA certificate: %v\", err)\n\t}\n\tcertOut := &bytes.Buffer{}\n\terr = pem.Encode(certOut, &pem.Block{Type: \"CERTIFICATE\", Bytes: derBytes})\n\tif err != nil {\n\t\treturn nil, nil, nil, fmt.Errorf(\"encode CA certificate: %v\", err)\n\t}\n\tcert := certOut.Bytes()\n\n\tserialNumber, err = rand.Int(rand.Reader, serialNumberLimit)\n\tif err != nil {\n\t\treturn nil, nil, nil, fmt.Errorf(\"generate serial number for client: %v\", err)\n\t}\n\tsubject = pkix.Name{\n\t\tOrganization: 
[]string{\"PXC\"},\n\t}\n\ttlsTemplate := x509.Certificate{\n\t\tSerialNumber: serialNumber,\n\t\tSubject: subject,\n\t\tIssuer: issuer,\n\t\tNotBefore: time.Now(),\n\t\tNotAfter: validityNotAfter,\n\t\tDNSNames: hosts,\n\t\tKeyUsage: x509.KeyUsageDigitalSignature | x509.KeyUsageKeyEncipherment,\n\t\tExtKeyUsage: []x509.ExtKeyUsage{x509.ExtKeyUsageServerAuth, x509.ExtKeyUsageClientAuth},\n\t\tBasicConstraintsValid: true,\n\t\tIsCA: false,\n\t}\n\tclientKey, err := rsa.GenerateKey(rand.Reader, 2048)\n\tif err != nil {\n\t\treturn nil, nil, nil, fmt.Errorf(\"generate client key: %v\", err)\n\t}\n\ttlsDerBytes, err := x509.CreateCertificate(rand.Reader, &tlsTemplate, &caTemplate, &clientKey.PublicKey, priv)\n\tif err != nil {\n\t\treturn nil, nil, nil, err\n\t}\n\ttlsCertOut := &bytes.Buffer{}\n\terr = pem.Encode(tlsCertOut, &pem.Block{Type: \"CERTIFICATE\", Bytes: tlsDerBytes})\n\tif err != nil {\n\t\treturn nil, nil, nil, fmt.Errorf(\"encode TLS certificate: %v\", err)\n\t}\n\ttlsCert = tlsCertOut.Bytes()\n\n\tkeyOut := &bytes.Buffer{}\n\tblock := &pem.Block{Type: \"RSA PRIVATE KEY\", Bytes: x509.MarshalPKCS1PrivateKey(clientKey)}\n\terr = pem.Encode(keyOut, block)\n\tif err != nil {\n\t\treturn nil, nil, nil, fmt.Errorf(\"encode RSA private key: %v\", err)\n\t}\n\tprivKey := keyOut.Bytes()\n\n\treturn cert, tlsCert, privKey, nil\n}", "func (s *Basememcached_protocolListener) EnterCas_unique(ctx *Cas_uniqueContext) {}", "func Add(mgr manager.Manager) error {\n\tcerts, err := certs.LoadCerts(\".\")\n\tif err != nil {\n\t\treturn err\n\t}\n\n\treturn add(mgr, newReconciler(mgr, certs))\n}", "func (b *Builder) WithCA(caCert []byte) *Builder {\n\tif b.r.Spec.TLS != nil {\n\t\tb.r.Spec.TLS.CACertificate = string(caCert)\n\t\tb.r.Spec.TLS.DestinationCACertificate = string(caCert)\n\t}\n\treturn b\n}", "func (s *Server) createDefaultCAConfigs(cacount int) error {\n\tlog.Debugf(\"Creating %d default CA configuration files\", cacount)\n\n\tcashome, err := util.MakeFileAbs(\"ca\", s.HomeDir)\n\tif err != nil {\n\t\treturn err\n\t}\n\n\tos.Mkdir(cashome, 0755)\n\n\tfor i := 1; i <= cacount; i++ {\n\t\tcahome := fmt.Sprintf(cashome+\"/ca%d\", i)\n\t\tcfgFileName := filepath.Join(cahome, \"fabric-ca-config.yaml\")\n\n\t\tcaName := fmt.Sprintf(\"ca%d\", i)\n\t\tcfg := strings.Replace(defaultCACfgTemplate, \"<<<CANAME>>>\", caName, 1)\n\n\t\tcn := fmt.Sprintf(\"fabric-ca-server-ca%d\", i)\n\t\tcfg = strings.Replace(cfg, \"<<<COMMONNAME>>>\", cn, 1)\n\n\t\tdatasource := dbutil.GetCADataSource(s.CA.Config.DB.Type, s.CA.Config.DB.Datasource, i)\n\t\tcfg = strings.Replace(cfg, \"<<<DATASOURCE>>>\", datasource, 1)\n\n\t\ts.Config.CAfiles = append(s.Config.CAfiles, cfgFileName)\n\n\t\t// Now write the file\n\t\terr := os.MkdirAll(filepath.Dir(cfgFileName), 0755)\n\t\tif err != nil {\n\t\t\treturn err\n\t\t}\n\n\t\terr = ioutil.WriteFile(cfgFileName, []byte(cfg), 0644)\n\t\tif err != nil {\n\t\t\treturn err\n\t\t}\n\n\t}\n\treturn nil\n}", "func reloadCACert(wc *WebhookController) (bool, error) {\n\tcertChanged := false\n\twc.certMutex.Lock()\n\tdefer wc.certMutex.Unlock()\n\tcaCert, err := readCACert(wc.k8sCaCertFile)\n\tif err != nil {\n\t\treturn certChanged, err\n\t}\n\tif !bytes.Equal(caCert, wc.CACert) {\n\t\twc.CACert = append([]byte(nil), caCert...)\n\t\tcertChanged = true\n\t}\n\treturn certChanged, nil\n}", "func GenerateCA(commonName, organizationalUnit string) (*CA, error) {\n\tcfg := &CertCfg{\n\t\tSubject: pkix.Name{CommonName: commonName, OrganizationalUnit: 
[]string{organizationalUnit}},\n\t\tKeyUsages: x509.KeyUsageKeyEncipherment | x509.KeyUsageDigitalSignature | x509.KeyUsageCertSign,\n\t\tExtKeyUsages: []x509.ExtKeyUsage{x509.ExtKeyUsageClientAuth, x509.ExtKeyUsageServerAuth},\n\t\tValidity: ValidityTenYears,\n\t\tIsCA: true,\n\t}\n\n\tkey, crt, err := GenerateSelfSignedCertificate(cfg)\n\tif err != nil {\n\t\treturn nil, errors.Wrapf(err, \"failed to generate CA with cn=%s,ou=%s\", commonName, organizationalUnit)\n\t}\n\treturn &CA{Key: key, Cert: crt}, nil\n}", "func (s *HTTPServer) AgentConnectCALeafCert(resp http.ResponseWriter, req *http.Request) (interface{}, error) {\n\t// Get the service name. Note that this is the name of the service,\n\t// not the ID of the service instance.\n\tserviceName := strings.TrimPrefix(req.URL.Path, \"/v1/agent/connect/ca/leaf/\")\n\n\targs := cachetype.ConnectCALeafRequest{\n\t\tService: serviceName, // Need name not ID\n\t}\n\tvar qOpts structs.QueryOptions\n\n\t// Store DC in the ConnectCALeafRequest but query opts separately\n\t// Don't resolve a proxy token to a real token that will be\n\t// done with a call to verifyProxyToken later along with\n\t// other security relevant checks.\n\tif done := s.parseWithoutResolvingProxyToken(resp, req, &args.Datacenter, &qOpts); done {\n\t\treturn nil, nil\n\t}\n\targs.MinQueryIndex = qOpts.MinQueryIndex\n\targs.MaxQueryTime = qOpts.MaxQueryTime\n\n\t// Verify the proxy token. This will check both the local proxy token\n\t// as well as the ACL if the token isn't local. The checks done in\n\t// verifyProxyToken are still relevant because a leaf cert can be cached\n\t// verifying the proxy token matches the service id or that a real\n\t// acl token still is valid and has ServiceWrite is necessary or\n\t// that cached cert is potentially unprotected.\n\teffectiveToken, _, err := s.agent.verifyProxyToken(qOpts.Token, serviceName, \"\")\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\targs.Token = effectiveToken\n\n\traw, m, err := s.agent.cache.Get(cachetype.ConnectCALeafName, &args)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\tdefer setCacheMeta(resp, &m)\n\n\treply, ok := raw.(*structs.IssuedCert)\n\tif !ok {\n\t\t// This should never happen, but we want to protect against panics\n\t\treturn nil, fmt.Errorf(\"internal error: response type not correct\")\n\t}\n\tsetIndex(resp, reply.ModifyIndex)\n\n\treturn reply, nil\n}", "func (ch *CertHelper) loadCACert(caCertPath string, caPrivateKeyPath string) derrors.Error {\n\tca, err := tls.LoadX509KeyPair(caCertPath, caPrivateKeyPath)\n\tif err != nil {\n\t\treturn derrors.AsError(err, \"cannot load CA certificate an Private Key\")\n\t}\n\tif len(ca.Certificate) == 0 {\n\t\treturn derrors.NewNotFoundError(\"CA certificate not found in path\")\n\t}\n\tcaCert, err := x509.ParseCertificate(ca.Certificate[0])\n\tif err != nil {\n\t\treturn derrors.AsError(err, \"cannot parse CA certificate\")\n\t}\n\tch.CACert = caCert\n\tch.PrivateKey = ca.PrivateKey\n\tlog.Info().Str(\"dnsNames\", strings.Join(ch.CACert.DNSNames, \", \")).Msg(\"CA cert has been loaded\")\n\treturn nil\n}", "func (o ArgoCDSpecGrafanaRouteTlsOutput) CaCertificate() pulumi.StringPtrOutput {\n\treturn o.ApplyT(func(v ArgoCDSpecGrafanaRouteTls) *string { return v.CaCertificate }).(pulumi.StringPtrOutput)\n}", "func (mc *MoacChain) addContractAddress(contractAddr string) (err error) {\n\n\terr = mc.getContractInfo(contractAddr)\n\tif err == nil {\n\t\tvar cm database.CurrencyManagement\n\t\tflag2, err2 := database.Engine.Where(\"currency_contract_address = 
?\", contractAddr).Get(&cm)\n\t\tif err2 == nil {\n\t\t\tif flag2 {\n\t\t\t\tvar countId int64 = 1\n\t\t\t\tvar counter int = 0\n\t\t\t\tfor {\n\t\t\t\t\tflag, err1 := database.Engine.Where(\"id = ?\", countId).Exist(&database.UserInfo{})\n\t\t\t\t\tif err1 == nil {\n\t\t\t\t\t\tif flag {\n\t\t\t\t\t\t\tcounter = 0\n\n\t\t\t\t\t\t\tvar ua database.UserAssets\n\t\t\t\t\t\t\tua.UserId = countId\n\t\t\t\t\t\t\tua.CurrencyNumber = 0\n\t\t\t\t\t\t\tua.CurrencyId = cm.CurrencyId\n\t\t\t\t\t\t\terr = database.SessionSubmit(func(session *xorm.Session) (err1 error) {\n\t\t\t\t\t\t\t\t_, err1 = session.Insert(ua)\n\t\t\t\t\t\t\t\treturn err1\n\t\t\t\t\t\t\t})\n\t\t\t\t\t\t\tif err != nil {\n\t\t\t\t\t\t\t\tcontinue\n\t\t\t\t\t\t\t}\n\t\t\t\t\t\t} else {\n\t\t\t\t\t\t\tcounter++\n\t\t\t\t\t\t}\n\t\t\t\t\t\tif counter == 11 {\n\t\t\t\t\t\t\tbreak\n\t\t\t\t\t\t}\n\t\t\t\t\t\tcountId++\n\t\t\t\t\t}\n\t\t\t\t}\n\t\t\t}\n\t\t}\n\t}\n\n\treturn err\n}", "func (c CertAuthType) NewlyAdded() bool {\n\treturn c.addedInMajorVer() >= semver.New(api.Version).Major\n}", "func (t *TnT) add_ecert(stub shim.ChaincodeStubInterface, name string, ecert string) ([]byte, error) {\n\n\terr := stub.PutState(name, []byte(ecert))\n\n\tif err == nil {\n\t\treturn nil, errors.New(\"Error storing eCert for user \" + name + \" identity: \" + ecert)\n\t}\n\n\treturn nil, nil\n\n}", "func (c *CertGenerator) GetCA() *CA { return c.CA }", "func (s *Service) addCoinCheck(c context.Context, mid, aid, tp, multiply, maxCoin, upmid int64) (err error) {\n\tvar (\n\t\tadded int64\n\t\texist bool\n\t)\n\tif _, ok := s.businesses[tp]; !ok {\n\t\terr = ecode.RequestErr\n\t\treturn\n\t}\n\tif upmid == mid {\n\t\tlog.Errorv(c, log.KV(\"log\", \"user can not add coin to self archive\"), log.KV(\"mid\", mid))\n\t\terr = ecode.CoinCannotAddToSelf\n\t\treturn\n\t}\n\tif multiply > maxCoin {\n\t\tlog.Errorv(c, log.KV(\"log\", fmt.Sprintf(\"multiply(%d) can not bigger than maxCoin(%d)\", multiply, maxCoin)), log.KV(\"mid\", mid))\n\t\terr = ecode.CoinIllegaMultiply\n\t\treturn\n\t}\n\tif err = s.checkUser(c, mid); err != nil {\n\t\tlog.Errorv(c, log.KV(\"log\", \"checkUser error\"), log.KV(\"mid\", mid), log.KV(\"err\", err))\n\t\treturn\n\t}\n\tif exist, err = s.coinDao.ExpireCoinAdded(c, mid); err == nil && exist {\n\t\tadded, _ = s.coinDao.CoinsAddedCache(c, mid, aid, tp)\n\t}\n\tif !exist || (added == 0) {\n\t\tif added, err = s.coinDao.CoinsAddedByMid(c, mid, aid, tp); err != nil {\n\t\t\treturn\n\t\t}\n\t\ts.cache.Do(c, func(c context.Context) {\n\t\t\ts.coinDao.SetCoinAddedCache(c, mid, aid, tp, added)\n\t\t\tif !exist {\n\t\t\t\ts.loadUserCoinAddedCache(c, mid)\n\t\t\t}\n\t\t})\n\t}\n\tif added+multiply > maxCoin {\n\t\tlog.Errorv(c, log.KV(\"log\", \"add too much coins\"), log.KV(\"mid\", mid), log.KV(\"err\", err))\n\t\terr = ecode.CoinOverMax\n\t\treturn\n\t}\n\tvar coins *pb.UserCoinsReply\n\tif coins, err = s.UserCoins(c, &pb.UserCoinsReq{Mid: mid}); err != nil {\n\t\treturn\n\t}\n\tif coins.Count < (float64)(multiply) {\n\t\tlog.Errorv(c, log.KV(\"log\", \"have not enough money\"), log.KV(\"mid\", mid), log.KV(\"coins\", coins.Count))\n\t\terr = ecode.LackOfCoins\n\t}\n\treturn\n}", "func (h *Handler) createCASecret(config *eksv1.EKSClusterConfig, clusterState *eks.DescribeClusterOutput) error {\n\tendpoint := aws.StringValue(clusterState.Cluster.Endpoint)\n\tca := aws.StringValue(clusterState.Cluster.CertificateAuthority.Data)\n\n\t_, err := h.secrets.Create(\n\t\t&corev1.Secret{\n\t\t\tObjectMeta: metav1.ObjectMeta{\n\t\t\t\tName: 
config.Name,\n\t\t\t\tNamespace: config.Namespace,\n\t\t\t\tOwnerReferences: []metav1.OwnerReference{\n\t\t\t\t\t{\n\t\t\t\t\t\tAPIVersion: eksv1.SchemeGroupVersion.String(),\n\t\t\t\t\t\tKind: eksClusterConfigKind,\n\t\t\t\t\t\tUID: config.UID,\n\t\t\t\t\t\tName: config.Name,\n\t\t\t\t\t},\n\t\t\t\t},\n\t\t\t},\n\t\t\tData: map[string][]byte{\n\t\t\t\t\"endpoint\": []byte(endpoint),\n\t\t\t\t\"ca\": []byte(ca),\n\t\t\t},\n\t\t})\n\treturn err\n}", "func (nc *NamespaceController) namespaceAdded(obj interface{}) {\n\tns, ok := obj.(*v1.Namespace)\n\n\tif ok {\n\t\trootCert := nc.ca.GetCAKeyCertBundle().GetRootCertPem()\n\t\terr := certutil.InsertDataToConfigMapWithRetry(nc.core, ns.GetName(), string(rootCert), CACertNamespaceConfigMap,\n\t\t\tCACertNamespaceConfigMapDataName, CACertNamespaceInsertInterval, CACertNamespaceInsertTimeout)\n\t\tif err != nil {\n\t\t\tlog.Errorf(\"error when inserting CA cert to configmap: %v\", err)\n\t\t} else {\n\t\t\tlog.Debugf(\"inserted CA cert to configmap %v in ns %v\",\n\t\t\t\tCACertNamespaceConfigMap, ns.GetName())\n\t\t}\n\t}\n}", "func (auth *AuthManager) addClient(client *clientInfo) {\n\tauth.connMtx.Lock()\n\tdefer auth.connMtx.Unlock()\n\tauth.users[client.acct.ID] = client\n\tauth.conns[client.conn.ID()] = client\n}", "func (p *clientConnPool) addConnIfNeeded(key string, t *Transport, c *tls.Conn) (used bool, err error) {\n\tp.mu.Lock()\n\tfor _, cc := range p.conns[key] {\n\t\tif cc.CanTakeNewRequest() {\n\t\t\tp.mu.Unlock()\n\t\t\treturn false, nil\n\t\t}\n\t}\n\tcall, dup := p.addConnCalls[key]\n\tif !dup {\n\t\tif p.addConnCalls == nil {\n\t\t\tp.addConnCalls = make(map[string]*addConnCall)\n\t\t}\n\t\tcall = &addConnCall{\n\t\t\tp: p,\n\t\t\tdone: make(chan struct{}),\n\t\t}\n\t\tp.addConnCalls[key] = call\n\t\tgo call.run(t, key, c)\n\t}\n\tp.mu.Unlock()\n\n\t<-call.done\n\tif call.err != nil {\n\t\treturn false, call.err\n\t}\n\treturn !dup, nil\n}", "func caInfo(orgName string, sdk *fabsdk.FabricSDK) (*msp.GetCAInfoResponse, error) {\n\tmspClient, err := msp.New(sdk.Context(), msp.WithOrg(orgName))\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\treturn mspClient.GetCAInfo()\n}", "func AddSSHCAToClient(publicKey ssh.PublicKey, hosts []string, sshDir string) error {\n\tcaPublic := string(ssh.MarshalAuthorizedKey(publicKey))\n\tknownHosts := filepath.Join(sshDir, KnownHosts)\n\tknownHostsFile, err := os.OpenFile(knownHosts, os.O_RDWR|os.O_CREATE|os.O_APPEND, 0644)\n\tif err != nil {\n\t\tif os.IsNotExist(err) {\n\t\t\treturn fmt.Errorf(\"could not create known_hosts file: %w\", err)\n\t\t}\n\t\treturn err\n\t}\n\tdefer knownHostsFile.Close()\n\texistingKnownHostsContent, err := ioutil.ReadAll(knownHostsFile)\n\tif err != nil {\n\t\treturn fmt.Errorf(\"error reading %s: %w\", knownHosts, err)\n\t}\n\tfor _, dns := range hosts {\n\t\t// caPublic terminates with a '\\n', added by ssh.MarshalAuthorizedKey\n\t\tpublicFormat := fmt.Sprintf(\"%s %s %s\", CAPrefix, dns, caPublic)\n\t\tif strings.Contains(string(existingKnownHostsContent), publicFormat) {\n\t\t\tcontinue\n\t\t}\n\t\t_, err = knownHostsFile.WriteString(publicFormat)\n\t\tif err != nil {\n\t\t\treturn fmt.Errorf(\"could not add key %s to known_hosts file: %w\", publicFormat, err)\n\t\t}\n\t}\n\treturn nil\n}", "func (r *ReconcileRethinkDBCluster) reconcileCASecret(cr *rethinkdbv1alpha1.RethinkDBCluster) (*corev1.Secret, error) {\n\tname := fmt.Sprintf(\"%s-ca\", cr.Name)\n\tfound := &corev1.Secret{}\n\n\terr := r.client.Get(context.TODO(), types.NamespacedName{Name: name, Namespace: 
cr.Namespace}, found)\n\tif err != nil && errors.IsNotFound(err) {\n\t\tlog.Info(\"creating new ca secret\", \"secret\", name)\n\n\t\tsecret, err := newCASecret(cr, name)\n\t\tif err != nil {\n\t\t\treturn nil, err\n\t\t}\n\n\t\t// Set RethinkDB instance as the owner and controller\n\t\tif err = controllerutil.SetControllerReference(cr, secret, r.scheme); err != nil {\n\t\t\treturn nil, err\n\t\t}\n\n\t\terr = r.client.Create(context.TODO(), secret)\n\t\tif err != nil {\n\t\t\treturn nil, err\n\t\t}\n\n\t\treturn secret, nil\n\t} else if err != nil {\n\t\treturn nil, err\n\t}\n\n\tlog.Info(\"ca secret exists\", \"secret\", found.Name)\n\treturn found, nil\n}", "func (_SmartTgStats *SmartTgStatsTransactor) CEOAddResponse(opts *bind.TransactOpts, _requestID *big.Int, _willUpdate bool, _subscribers uint32, _postViews uint32, _postTime *big.Int, _error uint8) (*types.Transaction, error) {\n\treturn _SmartTgStats.contract.Transact(opts, \"CEOAddResponse\", _requestID, _willUpdate, _subscribers, _postViews, _postTime, _error)\n}", "func SupportCAS(flagSet *pflag.FlagSet, p *string) func() bool {\n\tconst casName = \"cas\"\n\n\tflagSet.StringVar(p, casName, \"\", \"make changes to a resource conditional upon matching the provided version\")\n\n\treturn func() bool {\n\t\treturn flagSet.Changed(casName)\n\t}\n}", "func addToGRPCSSLServerList(server *grpc.Server) {\n\tgrpcServersMutex.Lock()\n\tgrpcServers = append(grpcServers, server)\n\tgrpcServersMutex.Unlock()\n}", "func (s *Server) Add(ctx context.Context, message *calcpb.AddRequest) (*calcpb.AddResponse, error) {\n\tctx = context.WithValue(ctx, goa.MethodKey, \"add\")\n\tctx = context.WithValue(ctx, goa.ServiceKey, \"calc\")\n\tresp, err := s.AddH.Handle(ctx, message)\n\tif err != nil {\n\t\treturn nil, goagrpc.EncodeError(err)\n\t}\n\treturn resp.(*calcpb.AddResponse), nil\n}", "func (client Client) AddAccessControlSender(req *http.Request) (*http.Response, error) {\n\treturn client.Send(req, autorest.DoRetryForStatusCodes(client.RetryAttempts, client.RetryDuration, autorest.StatusCodesForRetry...))\n}", "func NewCAServer(port int) (*CAServer, error) {\n\t// Create root cert and private key.\n\toptions := util.CertOptions{\n\t\tTTL: 3650 * 24 * time.Hour,\n\t\tOrg: spiffe.GetTrustDomain(),\n\t\tIsCA: true,\n\t\tIsSelfSigned: true,\n\t\tRSAKeySize: 2048,\n\t\tIsDualUse: true,\n\t}\n\tcert, key, err := util.GenCertKeyFromOptions(options)\n\tif err != nil {\n\t\tcaServerLog.Errorf(\"cannot create CA cert and private key: %+v\", err)\n\t\treturn nil, err\n\t}\n\tkeyCertBundle, err := util.NewVerifiedKeyCertBundleFromPem(cert, key, nil, cert)\n\tif err != nil {\n\t\tcaServerLog.Errorf(\"failed to create CA KeyCertBundle: %+v\", err)\n\t\treturn nil, err\n\t}\n\n\tserver := &CAServer{\n\t\tcertPem: cert,\n\t\tkeyPem: key,\n\t\tcertLifetime: 24 * time.Hour,\n\t\tkeyCertBundle: keyCertBundle,\n\t\tGRPCServer: grpc.NewServer(),\n\t\tfaultInjectLock: &sync.Mutex{},\n\t}\n\t// Register CA service at gRPC server.\n\tpb.RegisterIstioCertificateServiceServer(server.GRPCServer, server)\n\tghc.RegisterHealthServer(server.GRPCServer, server)\n\treturn server, server.start(port)\n}", "func registerFlags() {\n\tcfsslFlagSet.StringVar(&Config.hostname, \"hostname\", \"\", \"Hostname for the cert\")\n\tcfsslFlagSet.StringVar(&Config.certFile, \"cert\", \"\", \"Client certificate that contains the public key\")\n\tcfsslFlagSet.StringVar(&Config.csrFile, \"csr\", \"\", \"Certificate signature request file for new public 
key\")\n\tcfsslFlagSet.StringVar(&Config.caFile, \"ca\", \"ca.pem\", \"CA used to sign the new certificate\")\n\tcfsslFlagSet.StringVar(&Config.caKeyFile, \"ca-key\", \"ca-key.pem\", \"CA private key\")\n\tcfsslFlagSet.StringVar(&Config.keyFile, \"key\", \"\", \"private key for the certificate\")\n\tcfsslFlagSet.StringVar(&Config.intermediatesFile, \"intermediates\", \"\", \"intermediate certs\")\n\tcfsslFlagSet.StringVar(&Config.caBundleFile, \"ca-bundle\", \"/etc/cfssl/ca-bundle.crt\", \"Bundle to be used for root certificates pool\")\n\tcfsslFlagSet.StringVar(&Config.intBundleFile, \"int-bundle\", \"/etc/cfssl/int-bundle.crt\", \"Bundle to be used for intermediate certificates pool\")\n\tcfsslFlagSet.StringVar(&Config.address, \"address\", \"127.0.0.1\", \"Address to bind\")\n\tcfsslFlagSet.IntVar(&Config.port, \"port\", 8888, \"Port to bind\")\n\tcfsslFlagSet.StringVar(&Config.configFile, \"f\", \"\", \"path to configuration file\")\n\tcfsslFlagSet.StringVar(&Config.profile, \"profile\", \"\", \"signing profile to use\")\n\tcfsslFlagSet.BoolVar(&Config.isCA, \"initca\", false, \"initialise new CA\")\n\tcfsslFlagSet.StringVar(&Config.intDir, \"int-dir\", \"/etc/cfssl/intermediates\", \"specify intermediates directory\")\n\tcfsslFlagSet.StringVar(&Config.flavor, \"flavor\", \"ubiquitous\", \"Bundle Flavor: ubiquitous, optimal and force.\")\n\tcfsslFlagSet.StringVar(&Config.metadata, \"metadata\", \"/etc/cfssl/ca-bundle.crt.metadata\", \"Metadata file for root certificate presence. The content of the file is a json dictionary (k,v): each key k is SHA-1 digest of a root certificate while value v is a list of key store filenames.\")\n\tcfsslFlagSet.StringVar(&Config.domain, \"domain\", \"\", \"remote server domain name\")\n\tcfsslFlagSet.StringVar(&Config.ip, \"ip\", \"\", \"remote server ip\")\n\tcfsslFlagSet.StringVar(&Config.remote, \"remote\", \"\", \"remote CFSSL server\")\n}", "func (m HostCertificateManager) ReplaceCACertificatesAndCRLs(ctx context.Context, caCert []string, caCrl []string) error {\n\treq := types.ReplaceCACertificatesAndCRLs{\n\t\tThis: m.Reference(),\n\t\tCaCert: caCert,\n\t\tCaCrl: caCrl,\n\t}\n\n\t_, err := methods.ReplaceCACertificatesAndCRLs(ctx, m.Client(), &req)\n\treturn err\n}", "func (client *Client) CAS(vb uint16, k string, f CasFunc,\n\tinitexp int) (rv *gomemcached.MCResponse, err error) {\n\n\tflags := 0\n\texp := 0\n\n\tfor {\n\t\torig, err := client.Get(vb, k)\n\t\tif err != nil && orig != nil && orig.Status != gomemcached.KEY_ENOENT {\n\t\t\treturn rv, err\n\t\t}\n\n\t\tif orig.Status == gomemcached.KEY_ENOENT {\n\t\t\tinit := f([]byte{})\n\t\t\t// If it doesn't exist, add it\n\t\t\tresp, err := client.Add(vb, k, 0, initexp, init)\n\t\t\tif err == nil && resp.Status != gomemcached.KEY_EEXISTS {\n\t\t\t\treturn rv, err\n\t\t\t}\n\t\t\t// Copy the body into this response.\n\t\t\tresp.Body = init\n\t\t\treturn resp, err\n\t\t} else {\n\t\t\treq := &gomemcached.MCRequest{\n\t\t\t\tOpcode: gomemcached.SET,\n\t\t\t\tVBucket: vb,\n\t\t\t\tKey: []byte(k),\n\t\t\t\tCas: orig.Cas,\n\t\t\t\tOpaque: 0,\n\t\t\t\tExtras: []byte{0, 0, 0, 0, 0, 0, 0, 0},\n\t\t\t\tBody: f(orig.Body)}\n\n\t\t\tbinary.BigEndian.PutUint64(req.Extras, uint64(flags)<<32|uint64(exp))\n\t\t\tresp, err := client.Send(req)\n\t\t\tif err == nil {\n\t\t\t\treturn resp, nil\n\t\t\t}\n\t\t}\n\t}\n\tpanic(\"Unreachable\")\n}", "func GenerateCA(commonName string,\n serialNumber int64,\n countryCode string,\n organizationalUnit string,\n algo string,\n ecCurve string) (rootCADER []byte, 
rootPrivateKeyDER []byte, err error) {\n\n notBefore := time.Now().UTC()\n notAfter := notBefore.AddDate(CAValidity, 0, 0) // (years, months. days)\n\n // Hashing algorithm should match the private key type that signs the certificate.\n // In this case we are self-signing so the key generation algorithm and signature hashing algorithm are both of the same type\n hashingAlgorithm := x509.SHA256WithRSA\n switch strings.ToUpper(algo) {\n case \"RSA\":\n // pass\n case \"ECDSA\":\n hashingAlgorithm = x509.ECDSAWithSHA256\n default:\n return nil, nil, errors.New(\"Unrecognized algorithm, valid options are RSA and ECDSA\")\n }\n\n // https://golang.org/pkg/crypto/x509/#Certificate\n myCACertTemplate := x509.Certificate{\n\n // https://golang.org/pkg/crypto/x509/pkix/#Name\n Subject: pkix.Name{\n CommonName: commonName,\n Country: []string{countryCode},\n Organization: []string{organizationalUnit},\n },\n\n NotBefore: notBefore,\n NotAfter: notAfter,\n SerialNumber: big.NewInt(serialNumber), // returns *big.Int\n KeyUsage: RootCAKeyUsage,\n\n // For CAs we at least want []x509.ExtKeyUsage{x509.ExtKeyUsageAny | x509.KeyUsageCertSign}\n // More info: https://golang.org/pkg/crypto/x509/#ExtKeyUsage\n ExtKeyUsage: []x509.ExtKeyUsage{x509.ExtKeyUsageAny}, // this should work\n BasicConstraintsValid: true,\n IsCA: true,\n MaxPathLen: 3, // 1 is enough for our purpose\n SignatureAlgorithm: hashingAlgorithm, // other options are at https://golang.org/pkg/crypto/x509/#SignatureAlgorithm\n }\n\n privKey, pubKey, err := generateKeyPair(algo, ecCurve)\n if err != nil {\n return nil, nil, err\n }\n\n // https://golang.org/pkg/crypto/x509/#CreateCertificate\n // Both the signee and singer are the same template because rootCAs are always self-signed\n rootCADER, err = x509.CreateCertificate(rand.Reader, &myCACertTemplate, &myCACertTemplate, pubKey, privKey)\n if err != nil {\n return nil, nil, err\n }\n\n rootPrivateKeyDER, err = MarshalPrivateKey(privKey)\n\n return rootCADER, rootPrivateKeyDER, err\n}", "func (c *IdentityConfig) loadCATLSConfig(configEntity *identityConfigEntity) error {\n\t//CA Config\n\tfor ca, caConfig := range configEntity.CertificateAuthorities {\n\t\t//resolve paths\n\t\tcaConfig.TLSCACerts.Path = pathvar.Subst(caConfig.TLSCACerts.Path)\n\t\tcaConfig.TLSCACerts.Client.Key.Path = pathvar.Subst(caConfig.TLSCACerts.Client.Key.Path)\n\t\tcaConfig.TLSCACerts.Client.Cert.Path = pathvar.Subst(caConfig.TLSCACerts.Client.Cert.Path)\n\t\t//pre load key and cert bytes\n\t\terr := caConfig.TLSCACerts.Client.Key.LoadBytes()\n\t\tif err != nil {\n\t\t\treturn errors.WithMessage(err, \"failed to load ca key\")\n\t\t}\n\n\t\terr = caConfig.TLSCACerts.Client.Cert.LoadBytes()\n\t\tif err != nil {\n\t\t\treturn errors.WithMessage(err, \"failed to load ca cert\")\n\t\t}\n\t\tconfigEntity.CertificateAuthorities[ca] = caConfig\n\t}\n\n\treturn nil\n}", "func new(master_key string) Ca {\n\tcatls, err := tls.LoadX509KeyPair(\"../storage/root-certificate/ca_cert.pem\", \"../storage/root-certificate/ca_key.pem\")\n\tcheck(err)\n\tfirst_start_time := time.Date(2021, 1, 1, 0, 0, 0, 0, time.UTC).AddDate(0, 0, 0)\n\treturn Ca{catls, master_key, first_start_time}\n}", "func (o LookupDatabaseMysqlResultOutput) CaCert() pulumi.StringOutput {\n\treturn o.ApplyT(func(v LookupDatabaseMysqlResult) string { return v.CaCert }).(pulumi.StringOutput)\n}" ]
[ "0.6581283", "0.6518734", "0.6132201", "0.60468733", "0.5989953", "0.5984867", "0.5929675", "0.58918387", "0.58866376", "0.5865378", "0.5753565", "0.57387745", "0.5730206", "0.5713935", "0.5605988", "0.5556817", "0.55458033", "0.55412275", "0.5537695", "0.551355", "0.5486503", "0.5468679", "0.5424088", "0.5415645", "0.54014313", "0.5392198", "0.536475", "0.53438795", "0.5320477", "0.5263539", "0.5252348", "0.523924", "0.5231629", "0.5230847", "0.52043486", "0.5174359", "0.51735675", "0.5168841", "0.51651114", "0.5148772", "0.51418877", "0.5132897", "0.5115048", "0.5107266", "0.5104309", "0.51021487", "0.5100435", "0.50920844", "0.508139", "0.5070702", "0.5061821", "0.5061606", "0.5057543", "0.5044167", "0.5040759", "0.50392354", "0.50357294", "0.50337803", "0.5021323", "0.50203675", "0.4999252", "0.49940297", "0.49804473", "0.49743816", "0.49734148", "0.49679756", "0.4957829", "0.49536636", "0.49462503", "0.4938742", "0.4937694", "0.49334508", "0.49226135", "0.49147752", "0.49019602", "0.49016482", "0.4889388", "0.48733267", "0.48645076", "0.48629865", "0.4847666", "0.48411548", "0.48367482", "0.48256236", "0.48234984", "0.48218915", "0.48184457", "0.48167893", "0.4802178", "0.48015934", "0.4795454", "0.47922558", "0.47889534", "0.47830376", "0.47817957", "0.47802952", "0.4778067", "0.47400102", "0.47363466", "0.4735783" ]
0.830354
0
closeDB closes all CA databases
func (s *Server) closeDB() error {
	log.Debugf("Closing server DBs")
	// close default CA DB
	err := s.CA.closeDB()
	if err != nil {
		return err
	}
	// close other CAs DB
	for _, c := range s.caMap {
		err = c.closeDB()
		if err != nil {
			return err
		}
	}
	return nil
}
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "func CloseDBs() {\n\tfor k := range allDB {\n\t\tallDB[k].Close()\n\t\tdelete(allDB, k)\n\t}\n\n\tlog.Infoln(\"All databases were closed\")\n}", "func closeDB(db *bbolt.DB) error {\n\treturn db.Close()\n}", "func (dbm *DBManager) closeDB() {\n\tif !dbm.batchMode && dbm.db != nil {\n\t\tdbm.db.Close()\n\t\tdbm.db = nil\n\t}\n\n\treturn\n}", "func CloseDatabase() {\n\tdatabaseAccess.close()\n}", "func CloseAllDatabase() {\n\tmysql.CloseAllDb()\n}", "func (db *DB) Close() error {\n\treturn scatter(len(db.cpdbs), func(i int) error {\n\t\treturn db.cpdbs[i].db.Close()\n\t})\n}", "func (p *DBPool) Close() {\n\tfor _, db := range p.dbs {\n\t\tdb.Close()\n\t}\n}", "func (dbInst *DB) Close() {\n\tdbInst.mux.Lock()\n\tif dbInst.dbState == closed {\n\t\tdbInst.mux.Unlock()\n\t\treturn\n\t}\n\n\tdefer dbInst.mux.Unlock()\n\tfor _, cfHandler := range dbInst.cfHandlesMap {\n\t\tcfHandler.Destroy()\n\t}\n\tdbInst.rocksDB.Close()\n\tdbInst.dbState = closed\n}", "func DBClose(db *sql.DB) {\n\tdb.Close()\n}", "func CloseDB() {\n\tdb.Close()\n\tdb = nil\n}", "func CloseDB() {\n\tdb.Close()\n}", "func CloseDB() {\n\tdb.Close()\n}", "func CloseDB() {\n\tif db != nil {\n\t\tdb.Close()\n\t}\n}", "func CloseDB() {\n\tDb.Close()\n}", "func (db *DB) Close() {\n\tif db == nil {\n\t\treturn\n\t}\n\n\tif db.roIt != nil {\n\t\tdb.roIt.Close()\n\t}\n\n\tif db.ro != nil {\n\t\tdb.ro.Close()\n\t}\n\n\tif db.wo != nil {\n\t\tdb.wo.Close()\n\t}\n\n\tif db.LevigoDB != nil {\n\t\tdb.LevigoDB.Close()\n\t}\n\t// delete cache AFTER close leveldb or it will hang.\n\t// See cache in http://leveldb.googlecode.com/svn/trunk/doc/index.html\n\tif db.cache != nil {\n\t\tdb.cache.Close()\n\t}\n}", "func CloseDB() {\n\tdbService.CloseDB()\n}", "func (m MySQL) Close() error {\n\tfor name, db := range m.dbCons {\n\t\tm.logger.WithFields(logrus.Fields{\"filepath\": name}).Debug(\"closing db connection\")\n\t\tif err := db.Close(); err != nil {\n\t\t\tm.logger.WithFields(logrus.Fields{\"filepath\": name}).Errorf(\"error closing db connection. 
%+v\", err)\n\t\t}\n\t}\n\n\treturn nil\n}", "func CloseDB() {\n\tdefer db.Close()\n}", "func CloseDB() {\n\tdefer db.Close()\n}", "func CloseDB() error {\n\tif adb == nil {\n\t\treturn nil\n\t}\n\tvar err error\n\tif adb.db != nil {\n\t\terr = adb.db.Close()\n\t}\n\tif adb.db != nil {\n\t\tclose(adb.queue)\n\t}\n\tadb = nil\n\treturn err\n}", "func CloseDB(chain *blockchain.BlockChain) {\n\td := death.NewDeath(syscall.SIGINT, syscall.SIGTERM, os.Interrupt)\n\n\td.WaitForDeathWithFunc(func() {\n\t\tdefer os.Exit(1)\n\t\tdefer runtime.Goexit()\n\t\tchain.Database.Close()\n\t})\n}", "func (r *TableEventHandler) closeDb() {\n\tdb := r.DayDb\n\tif db != nil {\n\t\tdb.Close()\n\t}\n\n\tdb = r.HourDb\n\tif db != nil {\n\t\tdb.Close()\n\t}\n\n\tdb = r.MinDb\n\tif db != nil {\n\t\tdb.Close()\n\t}\n\n\tdb = r.SecDb\n\tif db != nil {\n\t\tdb.Close()\n\t}\n}", "func CloseDB() error {\n\tif db == nil {\n\t\treturn errors.New(\"database not initialized\")\n\t}\n\treturn db.Close()\n}", "func (db *DB) Close() error {\n\tif db.cancelBgWorker != nil {\n\t\tdb.cancelBgWorker()\n\t}\n\tdb.closeWg.Wait()\n\tdb.mu.Lock()\n\tdefer db.mu.Unlock()\n\tif err := db.writeMeta(); err != nil {\n\t\treturn err\n\t}\n\tif err := db.datalog.close(); err != nil {\n\t\treturn err\n\t}\n\tif err := db.index.close(); err != nil {\n\t\treturn err\n\t}\n\tif err := db.lock.Unlock(); err != nil {\n\t\treturn err\n\t}\n\treturn nil\n}", "func CloseDB() {\n\tdb, err := db.DB()\n\tif err != nil {\n\t\tdefer db.Close()\n\t}\n}", "func (uc *UserCheckpoint) CloseDB() {\n\tif uc.Ucdb == nil {\n\t\treturn\n\t}\n\tuc.Ucdb.Close()\n\tuc.Ucdb = nil\n}", "func (c *Client) CloseDB(addr string) {\n\tif db, found := c.dbs[addr]; found {\n\t\tdb.Close()\n\t\tdelete(c.dbs, addr)\n\t}\n}", "func DBClose() error {\n\treturn db.Close()\n}", "func (l *LogDB) Close() (err error) {\n\tfunc() {\n\t\tl.mu.Lock()\n\t\tdefer l.mu.Unlock()\n\t\terr = firstError(err, l.collection.iterate(func(db *db) error {\n\t\t\treturn db.close()\n\t\t}))\n\t}()\n\terr = firstError(err, l.bsDir.Close())\n\terr = firstError(err, l.dir.Close())\n\treturn firstError(err, l.fileLock.Close())\n}", "func CloseDB() error {\n\treturn db.Close()\n}", "func CloseDB(dbPath string) (code int, err error) {\n\tif _, ok := allDB[dbPath]; !ok {\n\t\treturn http.StatusBadRequest, errors.New(\"There's no any db with such path (\" + dbPath + \")\")\n\t}\n\n\tdbName := allDB[dbPath].Name\n\tallDB[dbPath].Close()\n\tdelete(allDB, dbPath)\n\n\tlog.Infof(\"DB \\\"%s\\\" (%s) was closed\\n\", dbName, dbPath)\n\treturn http.StatusOK, nil\n}", "func (c *client) close() {\n\tc.user.Close()\n\tc.conn.Close()\n\n\tc.sysConn.Exec(\"DROP DATABASE IF EXISTS \" + c.connConfig.Database)\n\tc.sysConn.Close()\n}", "func (db *DB) Close() error {\n\tif db == nil {\n\t\treturn nil\n\t}\n\n\tif db.master != nil {\n\t\tif err := db.master.Close(); err != nil {\n\t\t\treturn err\n\t\t}\n\t}\n\n\tdb.RLock()\n\tdefer db.RUnlock()\n\tfor _, readReplica := range db.readReplicas {\n\t\tif readReplica != nil {\n\t\t\tif err := readReplica.Close(); err != nil {\n\t\t\t\treturn err\n\t\t\t}\n\t\t}\n\t}\n\n\tselect {\n\tcase db.killHealthCheck <- struct{}{}:\n\tdefault:\n\t}\n\n\treturn nil\n}", "func (db *EdDb) Close() {\n\t// close prepared statements\n\tfor title := range db.preparedStatements() {\n\t\tdb.statements[title].Close()\n\t}\n\n\t//close databases\n\tdb.dbConn.Close()\n\treturn\n}", "func (m *MultiDB) Close() (ErrCode, error) {\n\terrcount := 0\n\ts := \"\"\n\tfor _, v := range m.userdbs {\n\t\tif v != 
nil {\n\t\t\terr := v.Close()\n\t\t\tif err != nil {\n\t\t\t\ts = fmt.Sprintf(\"%s, %s\", s, err.Error())\n\t\t\t\terrcount++\n\t\t\t}\n\t\t}\n\t}\n\tfor k := range m.userdbs {\n\t\tdelete(m.userdbs, k)\n\t}\n\tif err := m.system.Close(); err != nil {\n\t\ts = fmt.Sprintf(\"%s, %s\", s, err.Error())\n\t\terrcount++\n\t}\n\tif errcount > 0 {\n\t\treturn ErrCloseFailed, Fail(`errors closing multi user DB: %s`, s)\n\t}\n\treturn OK, nil\n}", "func cleanup(c *context.Context, t *testing.T) {\n\tdbPath := c.DB.Path()\n\n\terr := c.DB.Close()\n\tif err != nil {\n\t\tt.Logf(\"Could not close db: %v\", err)\n\t}\n\n\terr = os.Remove(dbPath)\n\tif err != nil {\n\t\tt.Logf(\"Could not remove db (%v): %v\", dbPath, err)\n\t}\n}", "func (gdb *Gdb) CloseGdb() error {\n\t// block util all history data in memory sync to disk\n\tt := time.NewTicker(time.Second)\n\tfor {\n\t\tselect {\n\t\tcase <-t.C:\n\t\t\tif !gdb.syncStatus {\n\t\t\t\t// finish syncing\n\t\t\t\tt.Stop()\n\t\t\t\tgoto next\n\t\t\t}\n\t\t}\n\t}\nnext:\n\teg := sync.WaitGroup{}\n\teg.Add(2)\n\tgo func() {\n\t\t// syncRealTimeData\n\t\tdefer eg.Done()\n\t\t_ = gdb.rtDb.Sync()\n\t}()\n\tgo func() {\n\t\t// sync history data\n\t\tdefer eg.Done()\n\t\t_ = gdb.innerSync(time.Now())\n\t}()\n\teg.Wait()\n\t// close leveldb\n\tfor _, dataType := range dataTypes {\n\t\tfor _, db := range gdb.hisDb[dataType] {\n\t\t\t_ = db.Close()\n\t\t}\n\t}\n\treturn nil\n}", "func (c *Config) CloseDB() error {\n\treturn c.db.Close()\n}", "func (s *SyncStorage) Close() error {\n\tvar ret error\n\tfor _, db := range s.db.instances {\n\t\tif err := db.CloseDB(); err != nil {\n\t\t\tret = err\n\t\t}\n\t}\n\treturn ret\n}", "func (db TestDB) Close() error {\n\treturn nil\n}", "func (db *Database) Close() error {\n\tdb.lock.Lock()\n\tdefer db.lock.Unlock()\n\n\tif db.db == nil {\n\t\treturn database.ErrClosed\n\t}\n\n\tdb.readOptions.Destroy()\n\tdb.iteratorOptions.Destroy()\n\tdb.writeOptions.Destroy()\n\tdb.db.Close()\n\n\tdb.db = nil\n\treturn nil\n}", "func (kb *Keybase) CloseDB() {\n\tkb.mx.Lock()\n\tdefer kb.mx.Unlock()\n\tkb.kb.CloseDB()\n}", "func (db *DB) Close() {\n\tif err := db.Client.Disconnect(context.TODO()); err != nil {\n\t\tlog.Printf(\"Error closing mongo storage: %s\\n\", err)\n\t}\n}", "func Close() {\n\t_db.Close()\n\t_db = nil\n}", "func CleanupDB(t *testing.T, fileName string, db *storm.DB) {\n\terr := db.Close()\n\tif err != nil {\n\t\tt.Logf(\"Could not close db %v\\n\", err)\n\t}\n\tCleanupTempFile(t, fileName)\n}", "func (db *DB) Close() {\n\tdb.boltDB.Close()\n}", "func Close(db *sql.DB) {\n\tdb.Close()\n}", "func CloseDB() error {\n\tsugar.Debug(\"CloseDB\")\n\treturn DB.Close()\n}", "func (db *sliceDB) close() {\r\n\tclose(db.quit)\r\n\tdb.lvl.Close()\r\n}", "func destroy() {\n\tif unique.natsconn != nil {\n\t\tunique.natsconn.Close()\n\t}\n\tfor _, dict := range unique.mdb {\n\t\tdict.Close()\n\t}\n\tunique = nil\n}", "func (db *DB) Close() {\n\tif db.closed {\n\t\treturn\n\t}\n\n\tdb.closed = true\n\tC.leveldb_close(db.Ldb)\n}", "func Close() {\n\tdbcontext.Close()\n}", "func (db *DB) Close() error {\n\terr := db.levelJobsStmt.Close()\n\tif err != nil {\n\t\treturn err\n\t}\n\n\terr = db.clientsStmt.Close()\n\tif err != nil {\n\t\treturn err\n\t}\n\n\treturn db.DB.Close()\n}", "func (db *DB) Close() error {\n\terr := db.DB.Close()\n\tdb.metrics.close()\n\treturn err\n}", "func Close() (err error) {\n\tif readOnlyDB != nil {\n\t\tif err = readOnlyDB.Close(); err != nil {\n\t\t\treturn\n\t\t}\n\t}\n\n\tif readWriteDB != nil {\n\t\tif err 
= readWriteDB.Close(); err != nil {\n\t\t\treturn\n\t\t}\n\t}\n\n\treturn\n}", "func (databaseConfig *DatabaseConfig) Close() error {\n\tvar oldDb = databaseConfig.db\n\tdatabaseConfig.db = nil\n\n\treturn oldDb.Close()\n}", "func TestDBFree(test_db *Sqlite) {\n\ttest_db.CloseDB()\n}", "func (d *DB) Close() error { return d.lab.Close() }", "func (db *DB) Close() error {\n\terrList := []string{}\n\tif err := db.master.Close(); err != nil {\n\t\terrList = append(errList, err.Error())\n\t}\n\n\tfor _, r := range db.readreplicas {\n\t\tif rerr := r.Close(); rerr != nil {\n\t\t\terrList = append(errList, rerr.Error())\n\t\t}\n\t}\n\tif len(errList) > 0 {\n\t\tstr := strings.Join(errList, \",\")\n\t\treturn errors.New(str)\n\t}\n\treturn nil\n}", "func (db *DB) Close() error {\n\tif err := db.leader.Close(); err != nil {\n\t\treturn err\n\t}\n\tif err := db.follower.Close(); err != nil {\n\t\treturn err\n\t}\n\treturn nil\n}", "func (db *WorkDB) Close() {\n\tdb.db.Close()\n}", "func (db *DB) Close() {\n\t// Possible Deaollocate errors ignored, we are going to close the connnection anyway.\n\tdb.ClearMap()\n\tdb.Pool.Close()\n}", "func (d *Dao) Close() {\n\td.redis.Close()\n\td.mallDB.Close()\n\td.ugcDB.Close()\n\td.ticketDB.Close()\n}", "func Close() {\n\tdb.Close()\n}", "func Close() {\n\tdb.Close()\n}", "func Close() {\n\tdb.Close()\n}", "func Close() {\n\tdefaultDB.Close()\n}", "func CloseDbConnection(db *pg.DB) {\n\tdb.Close()\n}", "func (db *BotDB) Close() {\n\tif db.db != nil {\n\t\tdb.db.Close()\n\t\tdb.db = nil\n\t}\n}", "func (blockchain *Blockchain) Close() error {\n\tif err := blockchain.appDB.Close(); err != nil {\n\t\treturn err\n\t}\n\tif err := blockchain.storages.StateDB().Close(); err != nil {\n\t\treturn err\n\t}\n\tif err := blockchain.storages.EventDB().Close(); err != nil {\n\t\treturn err\n\t}\n\treturn nil\n}", "func CloseDatabase(db *gorm.DB) error {\n\treturn database.Close(db)\n}", "func (d *Driver) Close() error { return d.DB().Close() }", "func (p *Provider) Close() error {\n\tp.mux.Lock()\n\tdefer p.mux.Unlock()\n\n\tfor _, store := range p.dbs {\n\t\terr := store.db.Close(context.Background())\n\t\tif err != nil {\n\t\t\treturn fmt.Errorf(failureDuringCouchDBCloseCall, err)\n\t\t}\n\t}\n\n\terr := p.couchDBClient.Close(context.Background())\n\tif err != nil {\n\t\treturn fmt.Errorf(failureWhileClosingKivikClient, err)\n\t}\n\n\treturn nil\n}", "func (db Database) Close() {\n\tdb.DB.Close()\n}", "func Close() {\n\tdatabase.db.Close()\n}", "func Cleanup(t *testing.T, db db.DB) {\n\tif db != nil {\n\t\tassert.NoError(t, db.Close())\n\t}\n}", "func (ds *MockDatasource) CloseDatabase(log *logrus.Entry, admin bool, nodb bool) error {\n\treturn ds.ErrorClose\n}", "func Cleanup() {\n\tif db := DB(); db != nil {\n\t\tdb.Close()\n\t}\n}", "func (sdb *SolarDb) Close() error {\n\tif sdb.dbMap != nil {\n\t\treturn sdb.dbMap.Db.Close()\n\t}\n\treturn nil\n}", "func (db *DB) Close() {\n\tdefer os.Remove(db.Path())\n\tdb.DB.Close()\n}", "func (db *DB) Close() {\n\tdb.db.Close()\n}", "func EnsureDBClosed() {\n\tif db != nil {\n\t\terr := db.Close()\n\t\tif err != nil {\n\t\t\tlog.Error(\"Failed closing DB connection gracefully: \", err.Error())\n\t\t}\n\t}\n}", "func (d *database) Close() (err error) {\n\t// close filelock\n\terr = d.fileLock.Unlock()\n\tif err != nil {\n\t\tlog.Error(err)\n\t} else {\n\t\tos.Remove(d.name + \".lock\")\n\t}\n\t// close database\n\terr2 := d.db.Close()\n\tif err2 != nil {\n\t\terr = err2\n\t\tlog.Error(err)\n\t}\n\treturn\n}", "func (db *geoDB) 
close() error {\n\tdb.geodb.Close()\n\treturn nil\n}", "func (ash Handler) Cleanup() error {\n\tkey := ash.getDatabaseKey()\n\tdeleted, err := databasePool.Delete(key)\n\tif deleted {\n\t\tash.logger.Debug(\"unloading unused CA database\", zap.String(\"db_key\", key))\n\t}\n\tif err != nil {\n\t\tash.logger.Error(\"closing CA database\", zap.String(\"db_key\", key), zap.Error(err))\n\t}\n\treturn err\n}", "func CloseDB(taskList []tasks.Task) {\n\tfile, _ := json.MarshalIndent(taskList, \"\", \" \")\n\t_ = ioutil.WriteFile(\"test.json\", file, 0644)\n\tos.Exit(1)\n}", "func (db *DB) Close() {\n\tdb.mu.Lock()\n\tdefer db.mu.Unlock()\n\tif !db.closed {\n\t\tdb.closed = true\n\t\tclose(db.notifyQuit)\n\t\tclose(db.notifyOpen)\n\t\tclose(db.notifyError)\n\t\tclose(db.notifyInfo)\n\t}\n\tif db.reader != nil {\n\t\tdb.reader.Close()\n\t\tdb.reader = nil\n\t}\n}", "func (e *engine) Close() {\n\tif e.dataFlushChecker != nil {\n\t\te.dataFlushChecker.Stop()\n\t}\n\n\te.databases.Range(func(key, value interface{}) bool {\n\t\tdb := value.(Database)\n\t\tif err := db.Close(); err != nil {\n\t\t\tengineLogger.Error(\"close database\", logger.Error(err))\n\t\t}\n\t\treturn true\n\t})\n}", "func (gdb *generalDatabase) Close() error {\n\treturn gdb.db.Close()\n}", "func (db *Database) Close() { db.pg.Close() }", "func (db *CDB) Close() error {\n\tvar err error\n\tif db.data != nil {\n\t\terr = syscall.Munmap(db.data)\n\t}\n\tdb.data = nil\n\treturn err\n}", "func (h *Harness) Close() error {\n\th.t.Helper()\n\tif recErr := recover(); recErr != nil {\n\t\tdefer panic(recErr)\n\t}\n\th.dumpDB() // early as possible\n\n\th.tw.WaitAndAssert(h.t)\n\th.slack.WaitAndAssert()\n\th.email.WaitAndAssert()\n\n\th.mx.Lock()\n\th.closing = true\n\th.mx.Unlock()\n\n\tctx, cancel := context.WithTimeout(context.Background(), 30*time.Second)\n\tdefer cancel()\n\terr := h.backend.Shutdown(ctx)\n\tif err != nil {\n\t\th.t.Error(\"failed to shutdown backend cleanly:\", err)\n\t}\n\th.backendLogs.Close()\n\n\th.slackS.Close()\n\th.twS.Close()\n\n\th.tw.Close()\n\n\th.pgTime.Close()\n\n\tconn, err := pgx.Connect(ctx, DBURL(\"\"))\n\tif err != nil {\n\t\th.t.Error(\"failed to connect to DB:\", err)\n\t}\n\tdefer conn.Close(ctx)\n\t_, err = conn.Exec(ctx, \"drop database \"+sqlutil.QuoteID(h.dbName))\n\tif err != nil {\n\t\th.t.Errorf(\"failed to drop database '%s': %v\", h.dbName, err)\n\t}\n\n\treturn nil\n}", "func (db *DB) Close() error {\n\tdb.rwlock.Lock()\n\tdefer db.rwlock.Unlock()\n\n\tdb.metalock.Lock()\n\tdefer db.metalock.Unlock()\n\n\tdb.mmaplock.Lock()\n\tdefer db.mmaplock.Unlock()\n\n\treturn db.close()\n}", "func Close() {\n\tdb.Close()\n\n\treturn\n}", "func (db *DB) Close() error {\n\tif db.db != nil {\n\t\tdb.db.Close()\n\t}\n\treturn nil\n}", "func (rd *RemoteDB) Close() {\n}", "func (db *StakeDatabase) Close() error {\n\tdb.nodeMtx.Lock()\n\tdefer db.nodeMtx.Unlock()\n\terr1 := db.PoolDB.Close()\n\terr2 := db.StakeDB.Close()\n\tif err1 == nil {\n\t\treturn err2\n\t}\n\tif err2 == nil {\n\t\treturn err1\n\t}\n\treturn fmt.Errorf(\"%v + %v\", err1, err2)\n}", "func (tdb *testDatabase) Close() error {\n\ttdb.Database.Close()\n\treturn os.RemoveAll(testDataPath)\n}", "func (d *dbs) Close(reference interface{}) {\n\td.mutex.Lock()\n\tdefer d.mutex.Unlock()\n\n\tif r, ok := d.Connections[reference]; ok {\n\t\tfor _, d := range r {\n\t\t\tfor _, c := range d {\n\t\t\t\tc.Close()\n\t\t\t}\n\t\t}\n\t}\n}", "func teardownBoltDB(db *BoltDB, dbPath string) error {\n\terr := db.Close()\n\tif err != nil {\n\t\treturn 
err\n\t}\n\treturn os.Remove(dbPath)\n}" ]
[ "0.7827408", "0.7401343", "0.737271", "0.73240227", "0.73131734", "0.73106015", "0.727682", "0.7264356", "0.7263661", "0.72356874", "0.72314644", "0.72314644", "0.7228164", "0.7188035", "0.7165286", "0.7058918", "0.69860405", "0.6942995", "0.6942995", "0.6940979", "0.6908177", "0.68881536", "0.6886568", "0.68696386", "0.68615365", "0.68076575", "0.6806629", "0.6802665", "0.6799022", "0.67842126", "0.6781972", "0.6779299", "0.67454183", "0.67429096", "0.67423445", "0.67357206", "0.6718115", "0.67178845", "0.67140836", "0.669666", "0.66862607", "0.667653", "0.6655625", "0.66467524", "0.6593216", "0.65866435", "0.6579375", "0.6578649", "0.6553509", "0.6549316", "0.65461797", "0.65428436", "0.6523367", "0.6520252", "0.65196925", "0.6514187", "0.64930874", "0.6489863", "0.64700884", "0.6469424", "0.64607304", "0.64575875", "0.64509314", "0.64444155", "0.64444155", "0.64444155", "0.64360785", "0.64358115", "0.64323336", "0.6432239", "0.6430831", "0.64039046", "0.6387285", "0.6382541", "0.6379416", "0.6378678", "0.63781524", "0.6374446", "0.6372384", "0.6364577", "0.6358258", "0.63534683", "0.6353234", "0.63426375", "0.6338199", "0.63300246", "0.6319776", "0.6312417", "0.6304878", "0.6304579", "0.63039476", "0.6301112", "0.629464", "0.6291604", "0.6289915", "0.62886304", "0.6285019", "0.6279864", "0.62723815", "0.6262995" ]
0.8528829
0
createDefaultCAConfigs creates specified number of default CA configuration files
func (s *Server) createDefaultCAConfigs(cacount int) error {
	log.Debugf("Creating %d default CA configuration files", cacount)
	cashome, err := util.MakeFileAbs("ca", s.HomeDir)
	if err != nil {
		return err
	}
	os.Mkdir(cashome, 0755)
	for i := 1; i <= cacount; i++ {
		cahome := fmt.Sprintf(cashome+"/ca%d", i)
		cfgFileName := filepath.Join(cahome, "fabric-ca-config.yaml")
		caName := fmt.Sprintf("ca%d", i)
		cfg := strings.Replace(defaultCACfgTemplate, "<<<CANAME>>>", caName, 1)
		cn := fmt.Sprintf("fabric-ca-server-ca%d", i)
		cfg = strings.Replace(cfg, "<<<COMMONNAME>>>", cn, 1)
		datasource := dbutil.GetCADataSource(s.CA.Config.DB.Type, s.CA.Config.DB.Datasource, i)
		cfg = strings.Replace(cfg, "<<<DATASOURCE>>>", datasource, 1)
		s.Config.CAfiles = append(s.Config.CAfiles, cfgFileName)
		// Now write the file
		err := os.MkdirAll(filepath.Dir(cfgFileName), 0755)
		if err != nil {
			return err
		}
		err = ioutil.WriteFile(cfgFileName, []byte(cfg), 0644)
		if err != nil {
			return err
		}
	}
	return nil
}
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "func (s *Server) initMultiCAConfig() (err error) {\n\tcfg := s.Config\n\tif cfg.CAcount != 0 && len(cfg.CAfiles) > 0 {\n\t\treturn errors.New(\"The --cacount and --cafiles options are mutually exclusive\")\n\t}\n\tif cfg.CAcfg.Intermediate.ParentServer.URL != \"\" && cfg.CAcount > 0 {\n\t\treturn errors.New(\"The --cacount option is not permissible for an intermediate server; use the --cafiles option instead\")\n\t}\n\tcfg.CAfiles, err = util.NormalizeFileList(cfg.CAfiles, s.HomeDir)\n\tif err != nil {\n\t\treturn err\n\t}\n\t// Multi-CA related configuration initialization\n\ts.caMap = make(map[string]*CA)\n\tif cfg.CAcount >= 1 {\n\t\ts.createDefaultCAConfigs(cfg.CAcount)\n\t}\n\tif len(cfg.CAfiles) != 0 {\n\t\tlog.Debugf(\"Default CA configuration, if necessary, will be used to replace missing values for additional CAs: %+v\", s.Config.CAcfg)\n\t\tlog.Debugf(\"Additional CAs to be started: %s\", cfg.CAfiles)\n\t\tcaFiles := util.NormalizeStringSlice(cfg.CAfiles)\n\t\tfor _, caFile := range caFiles {\n\t\t\terr = s.loadCA(caFile, false)\n\t\t\tif err != nil {\n\t\t\t\treturn err\n\t\t\t}\n\t\t}\n\t}\n\treturn nil\n}", "func CreateDefaultPlans() error {\n\tfor _, plan := range plans {\n\t\tif err := EnsurePlan(plan); err != nil {\n\t\t\treturn err\n\t\t}\n\t}\n\treturn nil\n}", "func (k *K8sutil) generateConfig(configDir, certsDir, namespace, clusterName string) error {\n\tcaConfig := caconfig{\n\t\tSigning: configSigning{\n\t\t\tDefault: configDefault{\n\t\t\t\tUsages: []string{\n\t\t\t\t\t\"signing\",\n\t\t\t\t\t\"key encipherment\",\n\t\t\t\t\t\"server auth\",\n\t\t\t\t\t\"client auth\",\n\t\t\t\t},\n\t\t\t\tExpiry: \"8760h\",\n\t\t\t},\n\t\t},\n\t}\n\n\tcaCSR := csr{\n\t\tHosts: []string{\n\t\t\t\"localhost\",\n\t\t\tfmt.Sprintf(\"elasticsearch-%s\", clusterName),\n\t\t\tfmt.Sprintf(\"%s.%s\", fmt.Sprintf(\"elasticsearch-%s\", clusterName), namespace),\n\t\t\tfmt.Sprintf(\"%s.%s.svc.cluster.local\", fmt.Sprintf(\"elasticsearch-%s\", clusterName), namespace),\n\t\t},\n\t\tKey: key{\n\t\t\tAlgo: \"rsa\",\n\t\t\tSize: 2048,\n\t\t},\n\t\tNames: []names{\n\t\t\tnames{\n\t\t\t\tC: \"US\",\n\t\t\t\tL: \"Pittsburgh\",\n\t\t\t\tO: \"elasticsearch-operator\",\n\t\t\t\tOU: \"k8s\",\n\t\t\t\tST: \"Pennsylvania\",\n\t\t\t},\n\t\t},\n\t}\n\n\tcaConfigJSON, err := json.Marshal(caConfig)\n\tif err != nil {\n\t\tlogrus.Error(\"json Marshal error : \", err)\n\t\treturn err\n\t}\n\tf, err := os.Create(fmt.Sprintf(\"%s/ca-config.json\", configDir))\n\t_, err = f.Write(caConfigJSON)\n\tif err != nil {\n\t\tlogrus.Error(\"Error creating ca-config.json: \", err)\n\t\treturn err\n\t}\n\n\treqCACSRJSON, _ := json.Marshal(caCSR)\n\tf, err = os.Create(fmt.Sprintf(\"%s/ca-csr.json\", configDir))\n\t_, err = f.Write(reqCACSRJSON)\n\tif err != nil {\n\t\tlogrus.Error(\"Error creating ca-csr.json: \", err)\n\t\treturn err\n\t}\n\n\tfor k, v := range map[string]string{\n\t\t\"node\": \"req-node-csr.json\",\n\t\t\"sgadmin\": \"req-sgadmin-csr.json\",\n\t\t\"kibana\": \"req-kibana-csr.json\",\n\t\t\"cerebro\": \"req-cerebro-csr.json\",\n\t} {\n\n\t\treq := csr{\n\t\t\tCN: k,\n\t\t\tHosts: []string{\n\t\t\t\t\"localhost\",\n\t\t\t\tfmt.Sprintf(\"%s-%s\", k, clusterName),\n\t\t\t\tfmt.Sprintf(\"%s.%s\", fmt.Sprintf(\"%s-%s\", k, clusterName), namespace),\n\t\t\t\tfmt.Sprintf(\"%s.%s.svc.cluster.local\", fmt.Sprintf(\"%s-%s\", k, clusterName), namespace),\n\t\t\t\tfmt.Sprintf(\"elasticsearch-%s\", clusterName),\n\t\t\t},\n\t\t\tKey: key{\n\t\t\t\tAlgo: \"rsa\",\n\t\t\t\tSize: 2048,\n\t\t\t},\n\t\t\tNames: 
[]names{\n\t\t\t\tnames{\n\t\t\t\t\tO: \"autogenerated\",\n\t\t\t\t\tOU: \"elasticsearch cluster\",\n\t\t\t\t\tL: \"operator\",\n\t\t\t\t},\n\t\t\t},\n\t\t}\n\n\t\tconfigJSON, _ := json.Marshal(req)\n\t\tf, err := os.Create(fmt.Sprintf(\"%s/%s\", configDir, v))\n\t\t_, err = f.Write(configJSON)\n\t\tif err != nil {\n\t\t\tlogrus.Error(err)\n\t\t\treturn err\n\t\t}\n\t}\n\n\treturn nil\n}", "func createDefaultConfig() component.Config {\n\treturn &Config{}\n}", "func createDefaultConfig() component.Config {\n\treturn &Config{}\n}", "func GenerateDefaultConfig(c *Config) {\n\tpk, sk := cipher.GenerateKeyPair()\n\n\tc.Path = DefaultConfigPath\n\tc.PubKey = pk\n\tc.SecKey = sk\n\tc.Discovery = defaultDiscoveryURL\n\tc.PublicAddress = defaultPublicAddress\n\tc.LocalAddress = defaultLocalAddress\n\tc.HTTPAddress = defaultHTTPAddress\n\tc.LogLevel = \"info\"\n\tc.MaxSessions = 2048\n}", "func createCertPool(rootCAs [][]byte) (*x509.CertPool, error) {\n\n\tcertPool := x509.NewCertPool()\n\tfor _, rootCA := range rootCAs {\n\t\tif !certPool.AppendCertsFromPEM(rootCA) {\n\t\t\treturn nil, errors.New(\"Failed to load root certificates\")\n\t\t}\n\t}\n\treturn certPool, nil\n}", "func createDefaultConfig() component.Config {\n\treturn &Config{\n\t\tScraperControllerSettings: scraperhelper.ScraperControllerSettings{\n\t\t\tCollectionInterval: defaultCollectionInterval,\n\t\t\tTimeout: defaultTimeout,\n\t\t},\n\t\tEndpoint: defaultEndpoint,\n\t\tVersion: defaultVersion,\n\t\tCommunity: defaultCommunity,\n\t\tSecurityLevel: defaultSecurityLevel,\n\t\tAuthType: defaultAuthType,\n\t\tPrivacyType: defaultPrivacyType,\n\t}\n}", "func createDefaultTestConfig() *common.Config {\n\tc := common.NewConfig()\n\tc.SetString(\"project_id\", -1, \"a-project\")\n\tc.SetString(\"topic\", -1, \"a-topic\")\n\n\tsConfig := common.NewConfig()\n\tsConfig.SetString(\"name\", -1, \"a-subscription\")\n\tsConfig.SetBool(\"retain_acked_messages\", -1, false)\n\tsConfig.SetString(\"retention_duration\", -1, \"10m\")\n\tc.SetChild(\"subscription\", -1, sConfig)\n\n\treturn c\n}", "func createCalibrationFiles(ctx context.Context, d *rpcdut.RPCDUT, soundCardID string, count uint) error {\n\tfs := dutfs.NewClient(d.RPC().Conn)\n\tfor i := 0; i < int(count); i++ {\n\t\tf := fmt.Sprintf(calibrationFiles, soundCardID, i)\n\t\texists, err := fs.Exists(ctx, f)\n\t\tif err != nil {\n\t\t\treturn errors.Wrapf(err, \"failed to stat %s\", f)\n\t\t}\n\t\tif !exists {\n\t\t\tif err := fs.WriteFile(ctx, f, []byte(calibYAMLContent), 0644); err != nil {\n\t\t\t\treturn errors.Wrapf(err, \"failed to create %s\", f)\n\t\t\t}\n\t\t}\n\t}\n\treturn nil\n}", "func createMainConfig(file *string, paths *string, basename *string) *viper.Viper {\n\tconfig := viper.New()\n\n\tconfig.SetEnvPrefix(utils.ConfigPrefix) // Env vars start with 'SLA_'\n\tconfig.AutomaticEnv()\n\tconfig.SetDefault(utils.CheckPeriodPropertyName, utils.DefaultCheckPeriod)\n\tconfig.SetDefault(utils.RepositoryTypePropertyName, utils.DefaultRepositoryType)\n\tconfig.SetDefault(utils.ExternalIDsPropertyName, utils.DefaultExternalIDs)\n\n\tif *file != \"\" {\n\t\tconfig.SetConfigFile(*file)\n\t} else {\n\t\tconfig.SetConfigName(*basename)\n\t\tfor _, path := range strings.Split(*paths, \":\") {\n\t\t\tconfig.AddConfigPath(path)\n\t\t}\n\t}\n\n\terrConfig := config.ReadInConfig()\n\tif errConfig != nil {\n\t\tlog.Println(\"Can't find configuration file: \" + errConfig.Error())\n\t\tlog.Println(\"Using defaults\")\n\t}\n\treturn config\n}", "func createDefaultConfig() component.Config 
{\n\treturn &Config{\n\t\tProtocols: Protocols{\n\t\t\tGRPC: &configgrpc.GRPCServerSettings{\n\t\t\t\tNetAddr: confignet.NetAddr{\n\t\t\t\t\tEndpoint: defaultGRPCEndpoint,\n\t\t\t\t\tTransport: \"tcp\",\n\t\t\t\t},\n\t\t\t\t// We almost write 0 bytes, so no need to tune WriteBufferSize.\n\t\t\t\tReadBufferSize: 512 * 1024,\n\t\t\t},\n\t\t\tHTTP: &HTTPConfig{\n\t\t\t\tHTTPServerSettings: &confighttp.HTTPServerSettings{\n\t\t\t\t\tEndpoint: defaultHTTPEndpoint,\n\t\t\t\t},\n\t\t\t\tTracesURLPath: defaultTracesURLPath,\n\t\t\t\tMetricsURLPath: defaultMetricsURLPath,\n\t\t\t\tLogsURLPath: defaultLogsURLPath,\n\t\t\t},\n\t\t},\n\t}\n}", "func DefaultClientCertPool(client AccessCache, clusterName string) (*x509.CertPool, int64, error) {\n\treturn ClientCertPool(client, clusterName, types.HostCA, types.UserCA)\n}", "func CreateDefaultConfigFile(filename string) {\n\tif err := ioutil.WriteFile(filename, []byte(defaultConfig()), 0644); err != nil {\n\t\tlogging.VLog().WithFields(logrus.Fields{\n\t\t\t\"err\": err,\n\t\t}).Fatal(\"Failed to create default config file.\")\n\t}\n}", "func ensureDefaultConfig(ctx context.Context, cfg *apiconfig.CalicoAPIConfig, c client.Interface, node *libapi.Node, osType string, kubeadmConfig, rancherState *v1.ConfigMap) error {\n\t// Ensure the ClusterInformation is populated.\n\t// Get the ClusterType from ENV var. This is set from the manifest.\n\tclusterType := os.Getenv(\"CLUSTER_TYPE\")\n\n\tif kubeadmConfig != nil {\n\t\tif len(clusterType) == 0 {\n\t\t\tclusterType = \"kubeadm\"\n\t\t} else {\n\t\t\tclusterType += \",kubeadm\"\n\t\t}\n\t}\n\n\tif rancherState != nil {\n\t\tif len(clusterType) == 0 {\n\t\t\tclusterType = \"rancher\"\n\t\t} else {\n\t\t\tclusterType += \",rancher\"\n\t\t}\n\t}\n\n\tif osType != OSTypeLinux {\n\t\tif len(clusterType) == 0 {\n\t\t\tclusterType = osType\n\t\t} else {\n\t\t\tclusterType += \",\" + osType\n\t\t}\n\t}\n\n\tif err := c.EnsureInitialized(ctx, VERSION, clusterType); err != nil {\n\t\treturn nil\n\t}\n\n\t// By default we set the global reporting interval to 0 - this is\n\t// different from the defaults defined in Felix.\n\t//\n\t// Logging to file is disabled in the felix.cfg config file. This\n\t// should always be disabled for calico/node. 
By default we log to\n\t// screen - set the default logging value that we desire.\n\tfelixConf, err := c.FelixConfigurations().Get(ctx, globalFelixConfigName, options.GetOptions{})\n\tif err != nil {\n\t\t// Create the default config if it doesn't already exist.\n\t\tif _, ok := err.(cerrors.ErrorResourceDoesNotExist); ok {\n\t\t\tnewFelixConf := api.NewFelixConfiguration()\n\t\t\tnewFelixConf.Name = globalFelixConfigName\n\t\t\tnewFelixConf.Spec.ReportingInterval = &metav1.Duration{Duration: 0}\n\t\t\tnewFelixConf.Spec.LogSeverityScreen = defaultLogSeverity\n\t\t\t_, err = c.FelixConfigurations().Create(ctx, newFelixConf, options.SetOptions{})\n\t\t\tif err != nil {\n\t\t\t\tif conflict, ok := err.(cerrors.ErrorResourceAlreadyExists); ok {\n\t\t\t\t\tlog.Infof(\"Ignoring conflict when setting value %s\", conflict.Identifier)\n\t\t\t\t} else {\n\t\t\t\t\tlog.WithError(err).WithField(\"FelixConfig\", newFelixConf).Errorf(\"Error creating Felix global config\")\n\t\t\t\t\treturn err\n\t\t\t\t}\n\t\t\t}\n\t\t} else {\n\t\t\tlog.WithError(err).WithField(\"FelixConfig\", globalFelixConfigName).Errorf(\"Error getting Felix global config\")\n\t\t\treturn err\n\t\t}\n\t} else {\n\t\tupdateNeeded := false\n\t\tif felixConf.Spec.ReportingInterval == nil {\n\t\t\tfelixConf.Spec.ReportingInterval = &metav1.Duration{Duration: 0}\n\t\t\tupdateNeeded = true\n\t\t} else {\n\t\t\tlog.WithField(\"ReportingInterval\", felixConf.Spec.ReportingInterval).Debug(\"Global Felix value already assigned\")\n\t\t}\n\n\t\tif felixConf.Spec.LogSeverityScreen == \"\" {\n\t\t\tfelixConf.Spec.LogSeverityScreen = defaultLogSeverity\n\t\t\tupdateNeeded = true\n\t\t} else {\n\t\t\tlog.WithField(\"LogSeverityScreen\", felixConf.Spec.LogSeverityScreen).Debug(\"Global Felix value already assigned\")\n\t\t}\n\n\t\tif updateNeeded {\n\t\t\t_, err = c.FelixConfigurations().Update(ctx, felixConf, options.SetOptions{})\n\t\t\tif err != nil {\n\t\t\t\tif conflict, ok := err.(cerrors.ErrorResourceUpdateConflict); ok {\n\t\t\t\t\tlog.Infof(\"Ignoring conflict when setting value %s\", conflict.Identifier)\n\t\t\t\t} else {\n\t\t\t\t\tlog.WithError(err).WithField(\"FelixConfig\", felixConf).Errorf(\"Error updating Felix global config\")\n\t\t\t\t\treturn err\n\t\t\t\t}\n\t\t\t}\n\t\t}\n\t}\n\n\t// Configure Felix to allow traffic from the containers to the host (if\n\t// not otherwise firewalled by the host administrator or profiles).\n\t// This is important for container deployments, where it is common\n\t// for containers to speak to services running on the host (e.g. k8s\n\t// pods speaking to k8s api-server, and mesos tasks registering with agent\n\t// on startup). 
Note: KDD does not yet support per-node felix config.\n\tif cfg.Spec.DatastoreType != apiconfig.Kubernetes {\n\t\tfelixNodeCfg, err := c.FelixConfigurations().Get(ctx, fmt.Sprintf(\"%s%s\", felixNodeConfigNamePrefix, node.Name), options.GetOptions{})\n\t\tif err != nil {\n\t\t\t// Create the default config if it doesn't already exist.\n\t\t\tif _, ok := err.(cerrors.ErrorResourceDoesNotExist); ok {\n\t\t\t\tnewFelixNodeCfg := api.NewFelixConfiguration()\n\t\t\t\tnewFelixNodeCfg.Name = fmt.Sprintf(\"%s%s\", felixNodeConfigNamePrefix, node.Name)\n\t\t\t\tnewFelixNodeCfg.Spec.DefaultEndpointToHostAction = \"Return\"\n\t\t\t\t_, err = c.FelixConfigurations().Create(ctx, newFelixNodeCfg, options.SetOptions{})\n\t\t\t\tif err != nil {\n\t\t\t\t\tif exists, ok := err.(cerrors.ErrorResourceAlreadyExists); ok {\n\t\t\t\t\t\tlog.Infof(\"Ignoring resource exists error when setting value %s\", exists.Identifier)\n\t\t\t\t\t} else {\n\t\t\t\t\t\tlog.WithError(err).WithField(\"FelixConfig\", newFelixNodeCfg).Errorf(\"Error creating Felix node config\")\n\t\t\t\t\t\treturn err\n\t\t\t\t\t}\n\t\t\t\t}\n\t\t\t} else {\n\t\t\t\tlog.WithError(err).WithField(\"FelixConfig\", felixNodeConfigNamePrefix).Errorf(\"Error getting Felix node config\")\n\t\t\t\treturn err\n\t\t\t}\n\t\t} else {\n\t\t\tif felixNodeCfg.Spec.DefaultEndpointToHostAction == \"\" {\n\t\t\t\tfelixNodeCfg.Spec.DefaultEndpointToHostAction = \"Return\"\n\t\t\t\t_, err = c.FelixConfigurations().Update(ctx, felixNodeCfg, options.SetOptions{})\n\t\t\t\tif err != nil {\n\t\t\t\t\tif conflict, ok := err.(cerrors.ErrorResourceUpdateConflict); ok {\n\t\t\t\t\t\tlog.Infof(\"Ignoring conflict when setting value %s\", conflict.Identifier)\n\t\t\t\t\t} else {\n\t\t\t\t\t\tlog.WithError(err).WithField(\"FelixConfig\", felixNodeCfg).Errorf(\"Error updating Felix node config\")\n\t\t\t\t\t\treturn err\n\t\t\t\t\t}\n\t\t\t\t}\n\t\t\t} else {\n\t\t\t\tlog.WithField(\"DefaultEndpointToHostAction\", felixNodeCfg.Spec.DefaultEndpointToHostAction).Debug(\"Host Felix value already assigned\")\n\t\t\t}\n\t\t}\n\t}\n\n\treturn nil\n}", "func (c *Client) CreateAutosalesConfigs(acs []*AutosalesConfig) ([]int64, error) {\n\tvar vv []interface{}\n\tfor _, v := range acs {\n\t\tvv = append(vv, v)\n\t}\n\treturn c.Create(AutosalesConfigModel, vv)\n}", "func CreateNATSServerConfigsNoTLS(count int) []*server.NATSServerConfig {\n\tconfigs := []*server.NATSServerConfig{}\n\tfor i := 0; i < count; i++ {\n\t\tconfig := &server.NATSServerConfig{\n\t\t\tCluster: messaging.ClusterName(\"osyterpack-test\"),\n\t\t\tServerPort: server.DEFAULT_SERVER_PORT + i,\n\t\t\tMonitorPort: server.DEFAULT_MONITOR_PORT + i,\n\t\t\tClusterPort: server.DEFAULT_CLUSTER_PORT + i,\n\t\t\tMetricsExporterPort: server.DEFAULT_PROMETHEUS_EXPORTER_HTTP_PORT + i,\n\n\t\t\tRoutes: defaultRoutesWithSeed(),\n\t\t\tLogLevel: server.DEBUG,\n\t\t}\n\t\tconfigs = append(configs, config)\n\t}\n\treturn configs\n}", "func (c *Catalog) DefaultCA(name string, ca credsgen.Certificate) corev1.Secret {\n\treturn corev1.Secret{\n\t\tObjectMeta: metav1.ObjectMeta{Name: name},\n\t\tData: map[string][]byte{\n\t\t\t\"ca\": ca.Certificate,\n\t\t\t\"ca_key\": ca.PrivateKey,\n\t\t},\n\t}\n}", "func CreateBasic(serverURL, clusterName, userName string, caCert []byte) *clientcmdapi.Config {\n\t// Use the cluster and the username as the context name\n\tcontextName := fmt.Sprintf(\"%s@%s\", userName, clusterName)\n\n\treturn &clientcmdapi.Config{\n\t\tClusters: map[string]*clientcmdapi.Cluster{\n\t\t\tclusterName: {\n\t\t\t\tServer: 
serverURL,\n\t\t\t\tCertificateAuthorityData: caCert,\n\t\t\t},\n\t\t},\n\t\tContexts: map[string]*clientcmdapi.Context{\n\t\t\tcontextName: {\n\t\t\t\tCluster: clusterName,\n\t\t\t\tAuthInfo: userName,\n\t\t\t},\n\t\t},\n\t\tAuthInfos: map[string]*clientcmdapi.AuthInfo{},\n\t\tCurrentContext: contextName,\n\t}\n}", "func GenCerts(hosts []string, outname string, isCA bool) (err error) {\n\tpriv, err := ecdsa.GenerateKey(elliptic.P256(), rand.Reader)\n\tif err != nil {\n\t\treturn fmt.Errorf(\"GenerateKey: %v\", err)\n\t}\n\ttemplate := x509.Certificate{\n\t\tSerialNumber: big.NewInt(1),\n\t\tSubject: pkix.Name{\n\t\t\tOrganization: []string{\"Acme Co\"},\n\t\t},\n\t\tNotBefore: time.Now(),\n\t\tNotAfter: time.Now().Add(time.Hour * 24 * 3650),\n\n\t\tKeyUsage: x509.KeyUsageKeyEncipherment | x509.KeyUsageDigitalSignature,\n\t\tExtKeyUsage: []x509.ExtKeyUsage{x509.ExtKeyUsageServerAuth},\n\t\tBasicConstraintsValid: true,\n\t}\n\n\tvar (\n\t\tcakey *ecdsa.PrivateKey\n\t\tcacrt *x509.Certificate\n\t\tderBytes []byte\n\t)\n\n\t// valid for these names\n\tif isCA {\n\t\ttemplate.IsCA = true\n\t\ttemplate.KeyUsage |= x509.KeyUsageCertSign\n\t\toutname = \"ca\"\n\t\tderBytes, err = x509.CreateCertificate(rand.Reader, &template, &template, publicKey(priv), priv)\n\t\tif err != nil {\n\t\t\treturn fmt.Errorf(\"Failed to create certificate: %v\", err)\n\t\t}\n\t} else {\n\t\tfor _, h := range hosts {\n\t\t\tif ip := net.ParseIP(h); ip != nil {\n\t\t\t\ttemplate.IPAddresses = append(template.IPAddresses, ip)\n\t\t\t} else {\n\t\t\t\ttemplate.DNSNames = append(template.DNSNames, h)\n\t\t\t}\n\t\t}\n\n\t\t// ca key file\n\t\tca_data, err := os.ReadFile(\"ca-key.pem\")\n\t\tif err != nil {\n\t\t\treturn fmt.Errorf(\"Read ca-key.pem: %v\", err)\n\t\t}\n\t\tblock, _ := pem.Decode(ca_data)\n\t\tcakey, _ = x509.ParseECPrivateKey(block.Bytes)\n\n\t\t// ca cert file\n\t\tca_data, err = os.ReadFile(\"ca-cert.pem\")\n\t\tif err != nil {\n\t\t\treturn fmt.Errorf(\"Read ca-cert.pem: %v\", err)\n\t\t}\n\t\tblock, _ = pem.Decode(ca_data)\n\t\tcacrt, _ = x509.ParseCertificate(block.Bytes)\n\n\t\t// generate C2 server certificate, signed by our CA\n\t\tderBytes, err = x509.CreateCertificate(rand.Reader, &template, cacrt, publicKey(priv), cakey)\n\t\tif err != nil {\n\t\t\treturn fmt.Errorf(\"Failed to create certificate: %v\", err)\n\t\t}\n\t}\n\n\t// output to pem files\n\tout := &bytes.Buffer{}\n\toutcert := fmt.Sprintf(\"%s-cert.pem\", outname)\n\toutkey := fmt.Sprintf(\"%s-key.pem\", outname)\n\t// cert\n\tpem.Encode(out, &pem.Block{Type: \"CERTIFICATE\", Bytes: derBytes})\n\terr = os.WriteFile(outcert, out.Bytes(), 0600)\n\tif err != nil {\n\t\treturn fmt.Errorf(\"Write %s: %v\", outcert, err)\n\t}\n\tout.Reset()\n\n\t// key\n\tpem.Encode(out, pemBlockForKey(priv))\n\terr = os.WriteFile(outkey, out.Bytes(), 0600)\n\tif err != nil {\n\t\treturn fmt.Errorf(\"Write %s: %v\", outkey, err)\n\t}\n\n\treturn\n}", "func NewDefaultSources(filePaths []string, prefix, delimiter string, additionalSources ...Source) (sources []Source) {\n\tfileSources := NewYAMLFileSources(filePaths)\n\tfor _, source := range fileSources {\n\t\tsources = append(sources, source)\n\t}\n\n\tsources = append(sources, NewEnvironmentSource(prefix, delimiter))\n\tsources = append(sources, NewSecretsSource(prefix, delimiter))\n\n\tif len(additionalSources) != 0 {\n\t\tsources = append(sources, additionalSources...)\n\t}\n\n\treturn sources\n}", "func CreateCryptoConfig(recipients []string, keys []string) (encconfig.CryptoConfig, error) {\n\tvar 
decryptCc *encconfig.CryptoConfig\n\tccs := []encconfig.CryptoConfig{}\n\tif len(keys) > 0 {\n\t\tdcc, err := CreateDecryptCryptoConfig(keys, []string{})\n\t\tif err != nil {\n\t\t\treturn encconfig.CryptoConfig{}, err\n\t\t}\n\t\tdecryptCc = &dcc\n\t\tccs = append(ccs, dcc)\n\t}\n\n\tif len(recipients) > 0 {\n\t\tgpgRecipients, pubKeys, x509s, pkcs11Pubkeys, pkcs11Yamls, keyProvider, err := processRecipientKeys(recipients)\n\t\tif err != nil {\n\t\t\treturn encconfig.CryptoConfig{}, err\n\t\t}\n\t\tencryptCcs := []encconfig.CryptoConfig{}\n\n\t\t// Create GPG client with guessed GPG version and default homedir\n\t\tgpgClient, err := ocicrypt.NewGPGClient(\"\", \"\")\n\t\tgpgInstalled := err == nil\n\t\tif len(gpgRecipients) > 0 && gpgInstalled {\n\t\t\tgpgPubRingFile, err := gpgClient.ReadGPGPubRingFile()\n\t\t\tif err != nil {\n\t\t\t\treturn encconfig.CryptoConfig{}, err\n\t\t\t}\n\n\t\t\tgpgCc, err := encconfig.EncryptWithGpg(gpgRecipients, gpgPubRingFile)\n\t\t\tif err != nil {\n\t\t\t\treturn encconfig.CryptoConfig{}, err\n\t\t\t}\n\t\t\tencryptCcs = append(encryptCcs, gpgCc)\n\t\t}\n\n\t\t// Create Encryption Crypto Config\n\t\tif len(x509s) > 0 {\n\t\t\tpkcs7Cc, err := encconfig.EncryptWithPkcs7(x509s)\n\t\t\tif err != nil {\n\t\t\t\treturn encconfig.CryptoConfig{}, err\n\t\t\t}\n\t\t\tencryptCcs = append(encryptCcs, pkcs7Cc)\n\t\t}\n\t\tif len(pubKeys) > 0 {\n\t\t\tjweCc, err := encconfig.EncryptWithJwe(pubKeys)\n\t\t\tif err != nil {\n\t\t\t\treturn encconfig.CryptoConfig{}, err\n\t\t\t}\n\t\t\tencryptCcs = append(encryptCcs, jweCc)\n\t\t}\n\t\tvar p11conf *pkcs11.Pkcs11Config\n\t\tif len(pkcs11Yamls) > 0 || len(pkcs11Pubkeys) > 0 {\n\t\t\tp11conf, err = pkcs11config.GetUserPkcs11Config()\n\t\t\tif err != nil {\n\t\t\t\treturn encconfig.CryptoConfig{}, err\n\t\t\t}\n\t\t\tpkcs11Cc, err := encconfig.EncryptWithPkcs11(p11conf, pkcs11Pubkeys, pkcs11Yamls)\n\t\t\tif err != nil {\n\t\t\t\treturn encconfig.CryptoConfig{}, err\n\t\t\t}\n\t\t\tencryptCcs = append(encryptCcs, pkcs11Cc)\n\t\t}\n\n\t\tif len(keyProvider) > 0 {\n\t\t\tkeyProviderCc, err := encconfig.EncryptWithKeyProvider(keyProvider)\n\t\t\tif err != nil {\n\t\t\t\treturn encconfig.CryptoConfig{}, err\n\t\t\t}\n\t\t\tencryptCcs = append(encryptCcs, keyProviderCc)\n\t\t}\n\t\tecc := encconfig.CombineCryptoConfigs(encryptCcs)\n\t\tif decryptCc != nil {\n\t\t\tecc.EncryptConfig.AttachDecryptConfig(decryptCc.DecryptConfig)\n\t\t}\n\t\tccs = append(ccs, ecc)\n\t}\n\n\tif len(ccs) > 0 {\n\t\treturn encconfig.CombineCryptoConfigs(ccs), nil\n\t}\n\treturn encconfig.CryptoConfig{}, nil\n}", "func GenerateCA(c *cli.Context) error {\n\thost := c.String(\"host\")\n\n\trsaBits := c.Int(\"rsa-bits\")\n\tecdsaCurve := c.String(\"ecdsa-curve\")\n\n\tvalidFrom := c.String(\"start-date\")\n\n\tvalidFor := c.Duration(\"duration\")\n\tcert, key, err := Ca(host, rsaBits, ecdsaCurve, validFrom, validFor)\n\tif err != nil {\n\t\tlog.Fatalf(\"failed to create certificate: %s\", err)\n\t}\n\tvar certname = \"0.cert\"\n\tvar keyname = \"0.key\"\n\n\tcertout, err := os.Create(certname)\n\tif err != nil {\n\t\tlog.Fatalf(\"failed to open \"+certname+\" for writing: %s\", err)\n\t}\n\tpem.Encode(certout, &cert)\n\tcertout.Close()\n\tlog.Print(\"written \" + certname + \"\\n\")\n\n\tkeyout, err := os.OpenFile(keyname, os.O_WRONLY|os.O_CREATE|os.O_TRUNC, 0600)\n\tif err != nil {\n\t\tlog.Print(\"failed to open \"+keyname+\" for writing:\", err)\n\t\treturn nil\n\t}\n\tpem.Encode(keyout, &key)\n\tkeyout.Close()\n\tlog.Print(\"written \" + keyname + 
\"\\n\")\n\treturn nil\n}", "func CreateDefaultExtensionConfig() []string {\n\treturn []string{\"uuid-ossp\", \"pg_trgm\"}\n}", "func TestNewConfigDefaultAndFile(t *testing.T) {\n\tconfig, err := NewConfig(\"configs/simple.yaml\")\n\trequire.NoError(t, err)\n\t// Ensure custom configs are loaded\n\trequire.Equal(t, true, config.LogRecovery)\n\trequire.Equal(t, int64(1024), config.Streams.RetentionMaxBytes)\n\n\t// Ensure also default values are loaded at the same time\n\trequire.Equal(t, 512, config.Clustering.RaftCacheSize)\n\trequire.Equal(t, \"liftbridge-default\", config.Clustering.Namespace)\n\trequire.Equal(t, 1024, config.BatchMaxMessages)\n}", "func (r *reconciler) createRouterCAConfigMap(cm *corev1.ConfigMap) error {\n\tif err := r.Client.Create(context.TODO(), cm); err != nil {\n\t\tif errors.IsAlreadyExists(err) {\n\t\t\treturn nil\n\t\t}\n\t\treturn err\n\t}\n\tlogrus.Infof(\"created configmap %s/%s\", cm.Namespace, cm.Name)\n\treturn nil\n}", "func CreateDefaultCephConfig(context *clusterd.Context, clusterInfo *ClusterInfo) (*CephConfig, error) {\n\n\tcephVersionEnv := os.Getenv(\"ROOK_CEPH_VERSION\")\n\tif cephVersionEnv != \"\" {\n\t\tv, err := cephver.ExtractCephVersion(cephVersionEnv)\n\t\tif err != nil {\n\t\t\treturn nil, errors.Wrap(err, \"failed to extract ceph version\")\n\t\t}\n\t\tclusterInfo.CephVersion = *v\n\t}\n\n\t// extract a list of just the monitor names, which will populate the \"mon initial members\"\n\t// and \"mon hosts\" global config field\n\tmonMembers, monHosts := PopulateMonHostMembers(clusterInfo)\n\n\tconf := &CephConfig{\n\t\tGlobalConfig: &GlobalConfig{\n\t\t\tFSID: clusterInfo.FSID,\n\t\t\tMonMembers: strings.Join(monMembers, \" \"),\n\t\t\tMonHost: strings.Join(monHosts, \",\"),\n\t\t},\n\t}\n\n\treturn conf, nil\n}", "func MakeDefaultConfig(clusterGRPCAddress string, oauthServerAddress string, insecure bool) Config {\n\treturn Config{\n\t\tBase: conf.Base{\n\t\t\tLog: conf.Log{\n\t\t\t\tFormat: \"console\",\n\t\t\t\tLevel: log.InfoLevel,\n\t\t\t},\n\t\t},\n\t\tInputFormat: \"json\",\n\t\tOutputFormat: \"json\",\n\t\tOAuthServerAddress: oauthServerAddress,\n\t\tIdentityServerGRPCAddress: clusterGRPCAddress,\n\t\tGatewayServerEnabled: true,\n\t\tGatewayServerGRPCAddress: clusterGRPCAddress,\n\t\tNetworkServerEnabled: true,\n\t\tNetworkServerGRPCAddress: clusterGRPCAddress,\n\t\tApplicationServerEnabled: true,\n\t\tApplicationServerGRPCAddress: clusterGRPCAddress,\n\t\tJoinServerEnabled: true,\n\t\tJoinServerGRPCAddress: clusterGRPCAddress,\n\t\tDeviceTemplateConverterGRPCAddress: clusterGRPCAddress,\n\t\tDeviceClaimingServerGRPCAddress: clusterGRPCAddress,\n\t\tQRCodeGeneratorGRPCAddress: clusterGRPCAddress,\n\t\tPacketBrokerAgentGRPCAddress: clusterGRPCAddress,\n\t\tInsecure: insecure,\n\t\tRetry: defaultRetryConfig,\n\t\tTelemetry: defaultTelemetryConfig,\n\t}\n}", "func createDefaultConfig() configmodels.Processor {\n\treturn &Config{\n\t\tProcessorSettings: configmodels.ProcessorSettings{\n\t\t\tTypeVal: typeStr,\n\t\t\tNameVal: typeStr,\n\t\t},\n\t}\n}", "func (c ComponentSettings) CreateDefaultConfig() (*configmodels.Config, error) {\n\texporters, err := createExporters(c.ComponentType, c.StorageType, c.Factories)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\treceivers := createReceivers(c.ComponentType, c.ZipkinHostPort, c.Factories)\n\tprocessors, processorNames := createProcessors(c.Factories)\n\thc := c.Factories.Extensions[\"health_check\"].CreateDefaultConfig()\n\treturn &configmodels.Config{\n\t\tReceivers: 
receivers,\n\t\tProcessors: processors,\n\t\tExporters: exporters,\n\t\tExtensions: configmodels.Extensions{hc.Name(): hc},\n\t\tService: configmodels.Service{\n\t\t\tExtensions: []string{hc.Name()},\n\t\t\tPipelines: configmodels.Pipelines{\n\t\t\t\tstring(configmodels.TracesDataType): {\n\t\t\t\t\tInputType: configmodels.TracesDataType,\n\t\t\t\t\tReceivers: receiverNames(receivers),\n\t\t\t\t\tProcessors: processorNames,\n\t\t\t\t\tExporters: exporterNames(exporters),\n\t\t\t\t},\n\t\t\t},\n\t\t},\n\t}, nil\n}", "func MakeCertificateConfig(name, country, state, locality, organization string,\n\thosts, emailAddresses []string, ca bool) BasicCertificateConfig {\n\n\tdn := pkix.Name{\n\t\tCommonName: name,\n\t\tCountry: []string{country},\n\t\tProvince: []string{state},\n\t\tLocality: []string{locality},\n\t\tOrganization: []string{organization},\n\t}\n\n\treturn BasicCertificateConfig{\n\t\tname: dn,\n\t\temailAddresses: emailAddresses,\n\t\tisCA: ca,\n\t\thosts: hosts,\n\t}\n\n}", "func CreateDefaultFlags(ctx *E2EContext) {\n\tflag.StringVar(&ctx.Settings.ConfigPath, \"config-path\", \"\", \"path to the e2e config file\")\n\tflag.StringVar(&ctx.Settings.ArtifactFolder, \"artifacts-folder\", \"\", \"folder where e2e test artifact should be stored\")\n\tflag.BoolVar(&ctx.Settings.UseCIArtifacts, \"kubetest.use-ci-artifacts\", false, \"use the latest build from the main branch of the Kubernetes repository\")\n\tflag.StringVar(&ctx.Settings.KubetestConfigFilePath, \"kubetest.config-file\", \"\", \"path to the kubetest configuration file\")\n\tflag.IntVar(&ctx.Settings.GinkgoNodes, \"kubetest.ginkgo-nodes\", 1, \"number of ginkgo nodes to use\")\n\tflag.IntVar(&ctx.Settings.GinkgoSlowSpecThreshold, \"kubetest.ginkgo-slowSpecThreshold\", 120, \"time in s before spec is marked as slow\")\n\tflag.BoolVar(&ctx.Settings.UseExistingCluster, \"use-existing-cluster\", false, \"if true, the test uses the current cluster instead of creating a new one (default discovery rules apply)\")\n\tflag.BoolVar(&ctx.Settings.SkipCleanup, \"skip-cleanup\", false, \"if true, the resource cleanup after tests will be skipped\")\n\tflag.StringVar(&ctx.Settings.DataFolder, \"data-folder\", \"\", \"path to the data folder\")\n\tflag.StringVar(&ctx.Settings.SourceTemplate, \"source-template\", \"./infrastructure-openstack/cluster-template.yaml\", \"path to the data folder\")\n}", "func (o *Organizations) CreateDefault(ctx context.Context) error {\n\treturn fmt.Errorf(\"unable to create default organizations on the filesystem\")\n}", "func setDefaultCredentials(t *testing.T, p *KeyMgrBasedProvider, caKey crypto.PrivateKey, caCert *x509.Certificate) (crypto.PrivateKey, *x509.Certificate) {\n\tkey, err := p.CreateDefaultKeyPair(keymgr.ECDSA256)\n\tAssertOk(t, err, \"Error creating default keypair\")\n\tcert := getCert(t, \"client\", key, caKey, caCert)\n\terr = p.SetDefaultCertificate(cert)\n\tAssertOk(t, err, \"Error setting default certificate\")\n\treturn key, cert\n}", "func createKubeconfig(clusterName, username, clusterControlPlaceAddress, caBasebase64, crtBase64, privateKeyBase64 string) (kubeconfigYAML string) {\n\tcertificate_tpl := `---\napiVersion: v1\nkind: Config\ncurrent-context: %s\nclusters:\n - name: %s\n cluster:\n server: %s\n certificate-authority-data: %s\ncontexts:\n - context:\n cluster: %s\n user: %s\n name: %s@%s\nusers:\n - name: %s\n user:\n client-certificate-data: %s\n client-key-data: %s`\n\n\treturn 
fmt.Sprintf(certificate_tpl,\n\t\tclusterName,\n\t\tclusterName,\n\t\tclusterControlPlaceAddress,\n\t\tcaBasebase64,\n\t\tclusterName,\n\t\tusername,\n\t\tusername,\n\t\tclusterName,\n\t\tusername,\n\t\tcrtBase64,\n\t\tprivateKeyBase64,\n\t)\n}", "func DefaultConfig() Config {\n\tencoding := cosmoscmd.MakeEncodingConfig(app.ModuleBasics)\n\n\treturn Config{\n\t\tCodec: encoding.Marshaler,\n\t\tTxConfig: encoding.TxConfig,\n\t\tLegacyAmino: encoding.Amino,\n\t\tInterfaceRegistry: encoding.InterfaceRegistry,\n\t\tAccountRetriever: dclauthtypes.AccountRetriever{},\n\t\tAppConstructor: func(val Validator) servertypes.Application {\n\t\t\treturn app.New(\n\t\t\t\tval.Ctx.Logger, tmdb.NewMemDB(), nil, true, map[int64]bool{}, val.Ctx.Config.RootDir, 0,\n\t\t\t\tencoding,\n\t\t\t\tsimapp.EmptyAppOptions{},\n\t\t\t\tbaseapp.SetPruning(storetypes.NewPruningOptionsFromString(val.AppConfig.Pruning)),\n\t\t\t\tbaseapp.SetMinGasPrices(val.AppConfig.MinGasPrices),\n\t\t\t)\n\t\t},\n\t\tGenesisState: app.ModuleBasics.DefaultGenesis(encoding.Marshaler),\n\t\tTimeoutCommit: 2 * time.Second,\n\t\tChainID: \"chain-\" + tmrand.NewRand().Str(6),\n\t\tNumValidators: 1,\n\t\tBondDenom: sdk.DefaultBondDenom,\n\t\tMinGasPrices: fmt.Sprintf(\"0.000006%s\", sdk.DefaultBondDenom),\n\t\tAccountTokens: sdk.TokensFromConsensusPower(1000, sdk.DefaultPowerReduction),\n\t\tStakingTokens: sdk.TokensFromConsensusPower(500, sdk.DefaultPowerReduction),\n\t\tBondedTokens: sdk.TokensFromConsensusPower(100, sdk.DefaultPowerReduction),\n\t\tPruningStrategy: storetypes.PruningOptionNothing,\n\t\tCleanupDir: true,\n\t\tSigningAlgo: string(hd.Secp256k1Type),\n\t\tKeyringOptions: []keyring.Option{},\n\t}\n}", "func (o *BundleDefinitionOptions) defaultBundleFiles(cxt *portercontext.Context) error {\n\tif o.File != \"\" { // --file\n\t\to.defaultCNABFile()\n\t} else if o.CNABFile != \"\" { // --cnab-file\n\t\t// Nothing to default\n\t} else {\n\t\tdefaultPath := filepath.Join(o.Dir, config.Name)\n\t\tmanifestExists, err := cxt.FileSystem.Exists(defaultPath)\n\t\tif err != nil {\n\t\t\treturn fmt.Errorf(\"could not find a porter manifest at %s: %w\", defaultPath, err)\n\t\t} else if !manifestExists {\n\t\t\treturn nil\n\t\t}\n\n\t\to.File = defaultPath\n\t\to.defaultCNABFile()\n\t}\n\n\treturn nil\n}", "func (conf *ThrapConfig) DefaultVCS() *VCSConfig {\n\tfor _, v := range conf.VCS {\n\t\treturn v\n\t}\n\treturn nil\n}", "func createDefaultConfig() config.Receiver {\n\treturn &Config{\n\t\tReceiverSettings: config.NewReceiverSettings(config.NewComponentID(typeStr)),\n\t}\n}", "func TestNewConfigDefault(t *testing.T) {\n\tconfig, err := NewConfig(\"\")\n\trequire.NoError(t, err)\n\trequire.Equal(t, 512, config.Clustering.RaftCacheSize)\n\trequire.Equal(t, \"liftbridge-default\", config.Clustering.Namespace)\n\trequire.Equal(t, 1024, config.BatchMaxMessages)\n}", "func DefaultConfig() (*generate.Generator, error) {\n\tvar err error\n\n\tconfig := specs.Spec{\n\t\tVersion: specs.Version,\n\t\tHostname: \"mrsdalloway\",\n\t}\n\n\tconfig.Root = &specs.Root{\n\t\tPath: \"rootfs\",\n\t\tReadonly: false,\n\t}\n\tconfig.Process = &specs.Process{\n\t\tTerminal: false,\n\t\tArgs: []string{\n\t\t\t\"sh\",\n\t\t},\n\t}\n\n\tconfig.Process.User = specs.User{}\n\tconfig.Process.Env = []string{\n\t\t\"PATH=/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin\",\n\t\t\"TERM=xterm\",\n\t}\n\tconfig.Process.Cwd = \"/\"\n\tconfig.Process.Rlimits = []specs.POSIXRlimit{\n\t\t{\n\t\t\tType: \"RLIMIT_NOFILE\",\n\t\t\tHard: uint64(1024),\n\t\t\tSoft: 
uint64(1024),\n\t\t},\n\t}\n\n\tconfig.Process.Capabilities = &specs.LinuxCapabilities{\n\t\tBounding: []string{\n\t\t\t\"CAP_CHOWN\",\n\t\t\t\"CAP_DAC_OVERRIDE\",\n\t\t\t\"CAP_FSETID\",\n\t\t\t\"CAP_FOWNER\",\n\t\t\t\"CAP_MKNOD\",\n\t\t\t\"CAP_NET_RAW\",\n\t\t\t\"CAP_SETGID\",\n\t\t\t\"CAP_SETUID\",\n\t\t\t\"CAP_SETFCAP\",\n\t\t\t\"CAP_SETPCAP\",\n\t\t\t\"CAP_NET_BIND_SERVICE\",\n\t\t\t\"CAP_SYS_CHROOT\",\n\t\t\t\"CAP_KILL\",\n\t\t\t\"CAP_AUDIT_WRITE\",\n\t\t},\n\t\tPermitted: []string{\n\t\t\t\"CAP_CHOWN\",\n\t\t\t\"CAP_DAC_OVERRIDE\",\n\t\t\t\"CAP_FSETID\",\n\t\t\t\"CAP_FOWNER\",\n\t\t\t\"CAP_MKNOD\",\n\t\t\t\"CAP_NET_RAW\",\n\t\t\t\"CAP_SETGID\",\n\t\t\t\"CAP_SETUID\",\n\t\t\t\"CAP_SETFCAP\",\n\t\t\t\"CAP_SETPCAP\",\n\t\t\t\"CAP_NET_BIND_SERVICE\",\n\t\t\t\"CAP_SYS_CHROOT\",\n\t\t\t\"CAP_KILL\",\n\t\t\t\"CAP_AUDIT_WRITE\",\n\t\t},\n\t\tInheritable: []string{\n\t\t\t\"CAP_CHOWN\",\n\t\t\t\"CAP_DAC_OVERRIDE\",\n\t\t\t\"CAP_FSETID\",\n\t\t\t\"CAP_FOWNER\",\n\t\t\t\"CAP_MKNOD\",\n\t\t\t\"CAP_NET_RAW\",\n\t\t\t\"CAP_SETGID\",\n\t\t\t\"CAP_SETUID\",\n\t\t\t\"CAP_SETFCAP\",\n\t\t\t\"CAP_SETPCAP\",\n\t\t\t\"CAP_NET_BIND_SERVICE\",\n\t\t\t\"CAP_SYS_CHROOT\",\n\t\t\t\"CAP_KILL\",\n\t\t\t\"CAP_AUDIT_WRITE\",\n\t\t},\n\t\tEffective: []string{\n\t\t\t\"CAP_CHOWN\",\n\t\t\t\"CAP_DAC_OVERRIDE\",\n\t\t\t\"CAP_FSETID\",\n\t\t\t\"CAP_FOWNER\",\n\t\t\t\"CAP_MKNOD\",\n\t\t\t\"CAP_NET_RAW\",\n\t\t\t\"CAP_SETGID\",\n\t\t\t\"CAP_SETUID\",\n\t\t\t\"CAP_SETFCAP\",\n\t\t\t\"CAP_SETPCAP\",\n\t\t\t\"CAP_NET_BIND_SERVICE\",\n\t\t\t\"CAP_SYS_CHROOT\",\n\t\t\t\"CAP_KILL\",\n\t\t\t\"CAP_AUDIT_WRITE\",\n\t\t},\n\t\tAmbient: []string{\n\t\t\t\"CAP_CHOWN\",\n\t\t\t\"CAP_DAC_OVERRIDE\",\n\t\t\t\"CAP_FSETID\",\n\t\t\t\"CAP_FOWNER\",\n\t\t\t\"CAP_MKNOD\",\n\t\t\t\"CAP_NET_RAW\",\n\t\t\t\"CAP_SETGID\",\n\t\t\t\"CAP_SETUID\",\n\t\t\t\"CAP_SETFCAP\",\n\t\t\t\"CAP_SETPCAP\",\n\t\t\t\"CAP_NET_BIND_SERVICE\",\n\t\t\t\"CAP_SYS_CHROOT\",\n\t\t\t\"CAP_KILL\",\n\t\t\t\"CAP_AUDIT_WRITE\",\n\t\t},\n\t}\n\tconfig.Mounts = []specs.Mount{\n\t\t{\n\t\t\tDestination: \"/proc\",\n\t\t\tType: \"proc\",\n\t\t\tSource: \"proc\",\n\t\t\tOptions: []string{\"nosuid\", \"noexec\", \"nodev\"},\n\t\t},\n\t\t{\n\t\t\tDestination: \"/dev\",\n\t\t\tType: \"tmpfs\",\n\t\t\tSource: \"tmpfs\",\n\t\t\tOptions: []string{\"nosuid\", \"strictatime\", \"mode=755\", \"size=65536k\"},\n\t\t},\n\t\t{\n\t\t\tDestination: \"/dev/pts\",\n\t\t\tType: \"devpts\",\n\t\t\tSource: \"devpts\",\n\t\t\tOptions: []string{\"nosuid\", \"noexec\", \"newinstance\", \"ptmxmode=0666\", \"mode=0620\", \"gid=5\"},\n\t\t},\n\t\t{\n\t\t\tDestination: \"/dev/shm\",\n\t\t\tType: \"tmpfs\",\n\t\t\tSource: \"shm\",\n\t\t\tOptions: []string{\"nosuid\", \"noexec\", \"nodev\", \"mode=1777\", \"size=65536k\"},\n\t\t},\n\t\t{\n\t\t\tDestination: \"/dev/mqueue\",\n\t\t\tType: \"mqueue\",\n\t\t\tSource: \"mqueue\",\n\t\t\tOptions: []string{\"nosuid\", \"noexec\", \"nodev\"},\n\t\t},\n\t\t{\n\t\t\tDestination: \"/sys\",\n\t\t\tType: \"sysfs\",\n\t\t\tSource: \"sysfs\",\n\t\t\tOptions: []string{\"nosuid\", \"noexec\", \"nodev\", \"ro\"},\n\t\t},\n\t}\n\tconfig.Linux = &specs.Linux{\n\t\tResources: &specs.LinuxResources{\n\t\t\tDevices: []specs.LinuxDeviceCgroup{\n\t\t\t\t{\n\t\t\t\t\tAllow: false,\n\t\t\t\t\tAccess: \"rwm\",\n\t\t\t\t},\n\t\t\t},\n\t\t},\n\t\tNamespaces: []specs.LinuxNamespace{\n\t\t\t{\n\t\t\t\tType: \"pid\",\n\t\t\t},\n\t\t\t{\n\t\t\t\tType: \"network\",\n\t\t\t},\n\t\t\t{\n\t\t\t\tType: \"ipc\",\n\t\t\t},\n\t\t\t{\n\t\t\t\tType: \"uts\",\n\t\t\t},\n\t\t\t{\n\t\t\t\tType: 
\"mount\",\n\t\t\t},\n\t\t},\n\t}\n\n\tif seccomp.Enabled() {\n\t\tconfig.Linux.Seccomp, err = cseccomp.GetDefaultProfile(&config)\n\t\tif err != nil {\n\t\t\treturn nil, fmt.Errorf(\"failed to get seccomp default profile: %s\", err)\n\t\t}\n\t}\n\n\treturn &generate.Generator{Config: &config}, nil\n}", "func (ts *tester) createConfigMap() error {\n\tts.cfg.Logger.Info(\"creating config map\")\n\n\tb, err := ioutil.ReadFile(ts.cfg.EKSConfig.KubeConfigPath)\n\tif err != nil {\n\t\treturn err\n\t}\n\tctx, cancel := context.WithTimeout(context.Background(), time.Minute)\n\t_, err = ts.cfg.K8SClient.KubernetesClientSet().\n\t\tCoreV1().\n\t\tConfigMaps(ts.cfg.EKSConfig.AddOnStresserRemote.Namespace).\n\t\tCreate(\n\t\t\tctx,\n\t\t\t&v1.ConfigMap{\n\t\t\t\tTypeMeta: metav1.TypeMeta{\n\t\t\t\t\tAPIVersion: \"v1\",\n\t\t\t\t\tKind: \"ConfigMap\",\n\t\t\t\t},\n\t\t\t\tObjectMeta: metav1.ObjectMeta{\n\t\t\t\t\tName: stresserKubeConfigConfigMapName,\n\t\t\t\t\tNamespace: ts.cfg.EKSConfig.AddOnStresserRemote.Namespace,\n\t\t\t\t\tLabels: map[string]string{\n\t\t\t\t\t\t\"name\": stresserKubeConfigConfigMapName,\n\t\t\t\t\t},\n\t\t\t\t},\n\t\t\t\tData: map[string]string{\n\t\t\t\t\tstresserKubeConfigConfigMapFileName: string(b),\n\t\t\t\t},\n\t\t\t},\n\t\t\tmetav1.CreateOptions{},\n\t\t)\n\tcancel()\n\tif err != nil {\n\t\treturn err\n\t}\n\n\tts.cfg.Logger.Info(\"created config map\")\n\tts.cfg.EKSConfig.Sync()\n\treturn nil\n}", "func TestDefaultConfigMap(t *testing.T) {\n\ttype args struct {\n\t\toperatorConfig *operatorv1.Console\n\t\tconsoleConfig *configv1.Console\n\t\tmanagedConfig *corev1.ConfigMap\n\t\tmonitoringSharedConfig *corev1.ConfigMap\n\t\tinfrastructureConfig *configv1.Infrastructure\n\t\trt *routev1.Route\n\t\tuseDefaultCAFile bool\n\t\tinactivityTimeoutSeconds int\n\t\tavailablePlugins []*v1.ConsolePlugin\n\t\tnodeArchitectures []string\n\t\tnodeOperatingSystems []string\n\t\tcopiedCSVsDisabled bool\n\t}\n\tt.Setenv(\"RELEASE_VERSION\", testReleaseVersion)\n\ttests := []struct {\n\t\tname string\n\t\targs args\n\t\twant *corev1.ConfigMap\n\t}{\n\t\t{\n\t\t\tname: \"Test default configmap, no customization\",\n\t\t\targs: args{\n\t\t\t\toperatorConfig: &operatorv1.Console{},\n\t\t\t\tconsoleConfig: &configv1.Console{},\n\t\t\t\tmanagedConfig: &corev1.ConfigMap{},\n\t\t\t\tinfrastructureConfig: &configv1.Infrastructure{\n\t\t\t\t\tStatus: configv1.InfrastructureStatus{\n\t\t\t\t\t\tAPIServerURL: mockAPIServer,\n\t\t\t\t\t\tControlPlaneTopology: configv1.HighlyAvailableTopologyMode,\n\t\t\t\t\t},\n\t\t\t\t},\n\t\t\t\trt: &routev1.Route{\n\t\t\t\t\tObjectMeta: metav1.ObjectMeta{\n\t\t\t\t\t\tName: api.OpenShiftConsoleName,\n\t\t\t\t\t},\n\t\t\t\t\tSpec: routev1.RouteSpec{\n\t\t\t\t\t\tHost: host,\n\t\t\t\t\t},\n\t\t\t\t},\n\t\t\t\tuseDefaultCAFile: true,\n\t\t\t\tinactivityTimeoutSeconds: 0,\n\t\t\t},\n\t\t\twant: &corev1.ConfigMap{\n\t\t\t\tTypeMeta: metav1.TypeMeta{\n\t\t\t\t\tKind: \"ConfigMap\",\n\t\t\t\t\tAPIVersion: \"v1\",\n\t\t\t\t},\n\t\t\t\tObjectMeta: metav1.ObjectMeta{\n\t\t\t\t\tName: api.OpenShiftConsoleConfigMapName,\n\t\t\t\t\tNamespace: api.OpenShiftConsoleNamespace,\n\t\t\t\t\tLabels: map[string]string{\"app\": api.OpenShiftConsoleName},\n\t\t\t\t\tAnnotations: map[string]string{},\n\t\t\t\t},\n\t\t\t\tData: map[string]string{configKey: `kind: ConsoleConfig\napiVersion: console.openshift.io/v1\nauth:\n clientID: console\n clientSecretFile: /var/oauth-config/clientSecret\n oauthEndpointCAFile: /var/run/secrets/kubernetes.io/serviceaccount/ca.crt\nclusterInfo:\n 
consoleBaseAddress: https://` + host + `\n masterPublicURL: ` + mockAPIServer + `\n controlPlaneTopology: HighlyAvailable\n releaseVersion: ` + testReleaseVersion + `\ncustomization:\n branding: ` + DEFAULT_BRAND + `\n documentationBaseURL: ` + DEFAULT_DOC_URL + `\nservingInfo:\n bindAddress: https://[::]:8443\n certFile: /var/serving-cert/tls.crt\n keyFile: /var/serving-cert/tls.key\nproviders: {}\n`,\n\t\t\t\t},\n\t\t\t},\n\t\t},\n\t\t{\n\t\t\tname: \"Test configmap with oauth-serving-cert\",\n\t\t\targs: args{\n\t\t\t\toperatorConfig: &operatorv1.Console{},\n\t\t\t\tconsoleConfig: &configv1.Console{},\n\t\t\t\tmanagedConfig: &corev1.ConfigMap{},\n\t\t\t\tinfrastructureConfig: &configv1.Infrastructure{\n\t\t\t\t\tStatus: configv1.InfrastructureStatus{\n\t\t\t\t\t\tAPIServerURL: mockAPIServer,\n\t\t\t\t\t},\n\t\t\t\t},\n\t\t\t\trt: &routev1.Route{\n\t\t\t\t\tObjectMeta: metav1.ObjectMeta{\n\t\t\t\t\t\tName: api.OpenShiftConsoleName,\n\t\t\t\t\t},\n\t\t\t\t\tSpec: routev1.RouteSpec{\n\t\t\t\t\t\tHost: host,\n\t\t\t\t\t},\n\t\t\t\t},\n\t\t\t\tuseDefaultCAFile: false,\n\t\t\t\tinactivityTimeoutSeconds: 0,\n\t\t\t},\n\t\t\twant: &corev1.ConfigMap{\n\t\t\t\tTypeMeta: metav1.TypeMeta{\n\t\t\t\t\tKind: \"ConfigMap\",\n\t\t\t\t\tAPIVersion: \"v1\",\n\t\t\t\t},\n\t\t\t\tObjectMeta: metav1.ObjectMeta{\n\t\t\t\t\tName: api.OpenShiftConsoleConfigMapName,\n\t\t\t\t\tNamespace: api.OpenShiftConsoleNamespace,\n\t\t\t\t\tLabels: map[string]string{\"app\": api.OpenShiftConsoleName},\n\t\t\t\t\tAnnotations: map[string]string{},\n\t\t\t\t},\n\t\t\t\tData: map[string]string{configKey: `kind: ConsoleConfig\napiVersion: console.openshift.io/v1\nauth:\n clientID: console\n clientSecretFile: /var/oauth-config/clientSecret\n oauthEndpointCAFile: /var/oauth-serving-cert/ca-bundle.crt\nclusterInfo:\n consoleBaseAddress: https://` + host + `\n masterPublicURL: ` + mockAPIServer + `\n releaseVersion: ` + testReleaseVersion + `\ncustomization:\n branding: ` + DEFAULT_BRAND + `\n documentationBaseURL: ` + DEFAULT_DOC_URL + `\nservingInfo:\n bindAddress: https://[::]:8443\n certFile: /var/serving-cert/tls.crt\n keyFile: /var/serving-cert/tls.key\nproviders: {}\n`,\n\t\t\t\t},\n\t\t\t},\n\t\t},\n\t\t{\n\t\t\tname: \"Test managed config to override default config\",\n\t\t\targs: args{\n\t\t\t\toperatorConfig: &operatorv1.Console{},\n\t\t\t\tconsoleConfig: &configv1.Console{},\n\t\t\t\tmanagedConfig: &corev1.ConfigMap{\n\t\t\t\t\tData: map[string]string{configKey: `kind: ConsoleConfig\napiVersion: console.openshift.io/v1\ncustomization:\n branding: online\n documentationBaseURL: https://docs.okd.io/4.4/\n`,\n\t\t\t\t\t},\n\t\t\t\t},\n\t\t\t\tinfrastructureConfig: &configv1.Infrastructure{\n\t\t\t\t\tStatus: configv1.InfrastructureStatus{\n\t\t\t\t\t\tAPIServerURL: mockAPIServer,\n\t\t\t\t\t},\n\t\t\t\t},\n\t\t\t\trt: &routev1.Route{\n\t\t\t\t\tObjectMeta: metav1.ObjectMeta{\n\t\t\t\t\t\tName: api.OpenShiftConsoleName,\n\t\t\t\t\t},\n\t\t\t\t\tSpec: routev1.RouteSpec{\n\t\t\t\t\t\tHost: host,\n\t\t\t\t\t},\n\t\t\t\t},\n\t\t\t\tuseDefaultCAFile: true,\n\t\t\t\tinactivityTimeoutSeconds: 0,\n\t\t\t\tnodeArchitectures: []string{\"amd64\", \"arm64\"},\n\t\t\t},\n\t\t\twant: &corev1.ConfigMap{\n\t\t\t\tTypeMeta: metav1.TypeMeta{\n\t\t\t\t\tKind: \"ConfigMap\",\n\t\t\t\t\tAPIVersion: \"v1\",\n\t\t\t\t},\n\t\t\t\tObjectMeta: metav1.ObjectMeta{\n\t\t\t\t\tName: api.OpenShiftConsoleConfigMapName,\n\t\t\t\t\tNamespace: api.OpenShiftConsoleNamespace,\n\t\t\t\t\tLabels: map[string]string{\"app\": 
api.OpenShiftConsoleName},\n\t\t\t\t\tAnnotations: map[string]string{},\n\t\t\t\t},\n\t\t\t\tData: map[string]string{configKey: `kind: ConsoleConfig\napiVersion: console.openshift.io/v1\nauth:\n clientID: console\n clientSecretFile: /var/oauth-config/clientSecret\n oauthEndpointCAFile: /var/run/secrets/kubernetes.io/serviceaccount/ca.crt\nclusterInfo:\n consoleBaseAddress: https://` + host + `\n masterPublicURL: ` + mockAPIServer + `\n releaseVersion: ` + testReleaseVersion + `\n nodeArchitectures:\n - amd64\n - arm64\ncustomization:\n branding: online\n documentationBaseURL: https://docs.okd.io/4.4/\nservingInfo:\n bindAddress: https://[::]:8443\n certFile: /var/serving-cert/tls.crt\n keyFile: /var/serving-cert/tls.key\nproviders: {}\n`,\n\t\t\t\t},\n\t\t\t},\n\t\t},\n\t\t{\n\t\t\tname: \"Test nodeOperatingSystems config\",\n\t\t\targs: args{\n\t\t\t\toperatorConfig: &operatorv1.Console{},\n\t\t\t\tconsoleConfig: &configv1.Console{},\n\t\t\t\tmanagedConfig: &corev1.ConfigMap{\n\t\t\t\t\tData: map[string]string{configKey: `kind: ConsoleConfig\napiVersion: console.openshift.io/v1\ncustomization:\n branding: online\n documentationBaseURL: https://docs.okd.io/4.4/\n`,\n\t\t\t\t\t},\n\t\t\t\t},\n\t\t\t\tinfrastructureConfig: &configv1.Infrastructure{\n\t\t\t\t\tStatus: configv1.InfrastructureStatus{\n\t\t\t\t\t\tAPIServerURL: mockAPIServer,\n\t\t\t\t\t},\n\t\t\t\t},\n\t\t\t\trt: &routev1.Route{\n\t\t\t\t\tObjectMeta: metav1.ObjectMeta{\n\t\t\t\t\t\tName: api.OpenShiftConsoleName,\n\t\t\t\t\t},\n\t\t\t\t\tSpec: routev1.RouteSpec{\n\t\t\t\t\t\tHost: host,\n\t\t\t\t\t},\n\t\t\t\t},\n\t\t\t\tuseDefaultCAFile: true,\n\t\t\t\tinactivityTimeoutSeconds: 0,\n\t\t\t\tnodeOperatingSystems: []string{\"foo\", \"bar\"},\n\t\t\t},\n\t\t\twant: &corev1.ConfigMap{\n\t\t\t\tTypeMeta: metav1.TypeMeta{\n\t\t\t\t\tKind: \"ConfigMap\",\n\t\t\t\t\tAPIVersion: \"v1\",\n\t\t\t\t},\n\t\t\t\tObjectMeta: metav1.ObjectMeta{\n\t\t\t\t\tName: api.OpenShiftConsoleConfigMapName,\n\t\t\t\t\tNamespace: api.OpenShiftConsoleNamespace,\n\t\t\t\t\tLabels: map[string]string{\"app\": api.OpenShiftConsoleName},\n\t\t\t\t\tAnnotations: map[string]string{},\n\t\t\t\t},\n\t\t\t\tData: map[string]string{configKey: `kind: ConsoleConfig\napiVersion: console.openshift.io/v1\nauth:\n clientID: console\n clientSecretFile: /var/oauth-config/clientSecret\n oauthEndpointCAFile: /var/run/secrets/kubernetes.io/serviceaccount/ca.crt\nclusterInfo:\n consoleBaseAddress: https://` + host + `\n masterPublicURL: ` + mockAPIServer + `\n releaseVersion: ` + testReleaseVersion + `\n nodeOperatingSystems:\n - foo\n - bar\ncustomization:\n branding: online\n documentationBaseURL: https://docs.okd.io/4.4/\nservingInfo:\n bindAddress: https://[::]:8443\n certFile: /var/serving-cert/tls.crt\n keyFile: /var/serving-cert/tls.key\nproviders: {}\n`,\n\t\t\t\t},\n\t\t\t},\n\t\t},\n\t\t{\n\t\t\tname: \"Test operator config overriding default config and managed config\",\n\t\t\targs: args{\n\t\t\t\toperatorConfig: &operatorv1.Console{\n\t\t\t\t\tSpec: operatorv1.ConsoleSpec{\n\t\t\t\t\t\tOperatorSpec: operatorv1.OperatorSpec{},\n\t\t\t\t\t\tCustomization: operatorv1.ConsoleCustomization{\n\t\t\t\t\t\t\tBrand: operatorv1.BrandDedicated,\n\t\t\t\t\t\t\tDocumentationBaseURL: mockOperatorDocURL,\n\t\t\t\t\t\t},\n\t\t\t\t\t},\n\t\t\t\t\tStatus: operatorv1.ConsoleStatus{},\n\t\t\t\t},\n\t\t\t\tconsoleConfig: &configv1.Console{},\n\t\t\t\tmanagedConfig: &corev1.ConfigMap{\n\t\t\t\t\tData: map[string]string{configKey: `kind: ConsoleConfig\napiVersion: 
console.openshift.io/v1\ncustomization:\n branding: online\n documentationBaseURL: https://docs.okd.io/4.4/\n`,\n\t\t\t\t\t},\n\t\t\t\t},\n\t\t\t\tinfrastructureConfig: &configv1.Infrastructure{\n\t\t\t\t\tStatus: configv1.InfrastructureStatus{\n\t\t\t\t\t\tAPIServerURL: mockAPIServer,\n\t\t\t\t\t},\n\t\t\t\t},\n\t\t\t\trt: &routev1.Route{\n\t\t\t\t\tObjectMeta: metav1.ObjectMeta{\n\t\t\t\t\t\tName: api.OpenShiftConsoleName,\n\t\t\t\t\t},\n\t\t\t\t\tSpec: routev1.RouteSpec{\n\t\t\t\t\t\tHost: host,\n\t\t\t\t\t},\n\t\t\t\t},\n\t\t\t\tuseDefaultCAFile: true,\n\t\t\t\tinactivityTimeoutSeconds: 0,\n\t\t\t},\n\t\t\twant: &corev1.ConfigMap{\n\t\t\t\tTypeMeta: metav1.TypeMeta{\n\t\t\t\t\tKind: \"ConfigMap\",\n\t\t\t\t\tAPIVersion: \"v1\",\n\t\t\t\t},\n\t\t\t\tObjectMeta: metav1.ObjectMeta{\n\t\t\t\t\tName: api.OpenShiftConsoleConfigMapName,\n\t\t\t\t\tNamespace: api.OpenShiftConsoleNamespace,\n\t\t\t\t\tLabels: map[string]string{\"app\": api.OpenShiftConsoleName},\n\t\t\t\t\tAnnotations: map[string]string{},\n\t\t\t\t},\n\t\t\t\tData: map[string]string{configKey: `kind: ConsoleConfig\napiVersion: console.openshift.io/v1\nauth:\n clientID: console\n clientSecretFile: /var/oauth-config/clientSecret\n oauthEndpointCAFile: /var/run/secrets/kubernetes.io/serviceaccount/ca.crt\nclusterInfo:\n consoleBaseAddress: https://` + host + `\n masterPublicURL: ` + mockAPIServer + `\n releaseVersion: ` + testReleaseVersion + `\ncustomization:\n branding: ` + string(operatorv1.BrandDedicatedLegacy) + `\n documentationBaseURL: ` + mockOperatorDocURL + `\nservingInfo:\n bindAddress: https://[::]:8443\n certFile: /var/serving-cert/tls.crt\n keyFile: /var/serving-cert/tls.key\nproviders: {}\n`,\n\t\t\t\t},\n\t\t\t},\n\t\t},\n\t\t{\n\t\t\tname: \"Test operator config with Custom Branding Values\",\n\t\t\targs: args{\n\t\t\t\toperatorConfig: &operatorv1.Console{\n\t\t\t\t\tSpec: operatorv1.ConsoleSpec{\n\t\t\t\t\t\tOperatorSpec: operatorv1.OperatorSpec{},\n\t\t\t\t\t\tCustomization: operatorv1.ConsoleCustomization{\n\t\t\t\t\t\t\tBrand: operatorv1.BrandDedicated,\n\t\t\t\t\t\t\tDocumentationBaseURL: mockOperatorDocURL,\n\t\t\t\t\t\t\tCustomProductName: \"custom-product-name\",\n\t\t\t\t\t\t\tCustomLogoFile: configv1.ConfigMapFileReference{\n\t\t\t\t\t\t\t\tName: \"custom-logo-file\",\n\t\t\t\t\t\t\t\tKey: \"logo.svg\",\n\t\t\t\t\t\t\t},\n\t\t\t\t\t\t},\n\t\t\t\t\t},\n\t\t\t\t\tStatus: operatorv1.ConsoleStatus{},\n\t\t\t\t},\n\t\t\t\tconsoleConfig: &configv1.Console{},\n\t\t\t\tmanagedConfig: &corev1.ConfigMap{\n\t\t\t\t\tData: map[string]string{configKey: `kind: ConsoleConfig\napiVersion: console.openshift.io/v1\ncustomization:\n branding: online\n documentationBaseURL: https://docs.okd.io/4.4/\n`,\n\t\t\t\t\t},\n\t\t\t\t},\n\t\t\t\tinfrastructureConfig: &configv1.Infrastructure{\n\t\t\t\t\tStatus: configv1.InfrastructureStatus{\n\t\t\t\t\t\tAPIServerURL: mockAPIServer,\n\t\t\t\t\t},\n\t\t\t\t},\n\t\t\t\trt: &routev1.Route{\n\t\t\t\t\tObjectMeta: metav1.ObjectMeta{\n\t\t\t\t\t\tName: api.OpenShiftConsoleName,\n\t\t\t\t\t},\n\t\t\t\t\tSpec: routev1.RouteSpec{\n\t\t\t\t\t\tHost: host,\n\t\t\t\t\t},\n\t\t\t\t},\n\t\t\t\tuseDefaultCAFile: true,\n\t\t\t\tinactivityTimeoutSeconds: 0,\n\t\t\t},\n\t\t\twant: &corev1.ConfigMap{\n\t\t\t\tTypeMeta: metav1.TypeMeta{\n\t\t\t\t\tKind: \"ConfigMap\",\n\t\t\t\t\tAPIVersion: \"v1\",\n\t\t\t\t},\n\t\t\t\tObjectMeta: metav1.ObjectMeta{\n\t\t\t\t\tName: api.OpenShiftConsoleConfigMapName,\n\t\t\t\t\tNamespace: api.OpenShiftConsoleNamespace,\n\t\t\t\t\tLabels: map[string]string{\"app\": 
api.OpenShiftConsoleName},\n\t\t\t\t\tAnnotations: map[string]string{},\n\t\t\t\t},\n\t\t\t\tData: map[string]string{configKey: `kind: ConsoleConfig\napiVersion: console.openshift.io/v1\nauth:\n clientID: console\n clientSecretFile: /var/oauth-config/clientSecret\n oauthEndpointCAFile: /var/run/secrets/kubernetes.io/serviceaccount/ca.crt\nclusterInfo:\n consoleBaseAddress: https://` + host + `\n masterPublicURL: ` + mockAPIServer + `\n releaseVersion: ` + testReleaseVersion + `\ncustomization:\n branding: ` + string(operatorv1.BrandDedicatedLegacy) + `\n documentationBaseURL: ` + mockOperatorDocURL + `\n customLogoFile: /var/logo/logo.svg\n customProductName: custom-product-name\nservingInfo:\n bindAddress: https://[::]:8443\n certFile: /var/serving-cert/tls.crt\n keyFile: /var/serving-cert/tls.key\nproviders: {}\n`,\n\t\t\t\t},\n\t\t\t},\n\t\t},\n\t\t{\n\t\t\tname: \"Test operator config with Statuspage pageID\",\n\t\t\targs: args{\n\t\t\t\toperatorConfig: &operatorv1.Console{\n\t\t\t\t\tSpec: operatorv1.ConsoleSpec{\n\t\t\t\t\t\tOperatorSpec: operatorv1.OperatorSpec{},\n\t\t\t\t\t\tCustomization: operatorv1.ConsoleCustomization{\n\t\t\t\t\t\t\tBrand: operatorv1.BrandDedicated,\n\t\t\t\t\t\t\tDocumentationBaseURL: mockOperatorDocURL,\n\t\t\t\t\t\t},\n\t\t\t\t\t\tProviders: operatorv1.ConsoleProviders{\n\t\t\t\t\t\t\tStatuspage: &operatorv1.StatuspageProvider{\n\t\t\t\t\t\t\t\tPageID: \"id-1234\",\n\t\t\t\t\t\t\t},\n\t\t\t\t\t\t},\n\t\t\t\t\t},\n\t\t\t\t\tStatus: operatorv1.ConsoleStatus{},\n\t\t\t\t},\n\t\t\t\tconsoleConfig: &configv1.Console{},\n\t\t\t\tmanagedConfig: &corev1.ConfigMap{\n\t\t\t\t\tData: map[string]string{configKey: `kind: ConsoleConfig\napiVersion: console.openshift.io/v1\ncustomization:\n branding: online\n documentationBaseURL: https://docs.okd.io/4.4/\n`,\n\t\t\t\t\t},\n\t\t\t\t},\n\t\t\t\tinfrastructureConfig: &configv1.Infrastructure{\n\t\t\t\t\tStatus: configv1.InfrastructureStatus{\n\t\t\t\t\t\tAPIServerURL: mockAPIServer,\n\t\t\t\t\t},\n\t\t\t\t},\n\t\t\t\trt: &routev1.Route{\n\t\t\t\t\tObjectMeta: metav1.ObjectMeta{\n\t\t\t\t\t\tName: api.OpenShiftConsoleName,\n\t\t\t\t\t},\n\t\t\t\t\tSpec: routev1.RouteSpec{\n\t\t\t\t\t\tHost: host,\n\t\t\t\t\t},\n\t\t\t\t},\n\t\t\t\tuseDefaultCAFile: true,\n\t\t\t\tinactivityTimeoutSeconds: 0,\n\t\t\t},\n\t\t\twant: &corev1.ConfigMap{\n\t\t\t\tTypeMeta: metav1.TypeMeta{\n\t\t\t\t\tKind: \"ConfigMap\",\n\t\t\t\t\tAPIVersion: \"v1\",\n\t\t\t\t},\n\t\t\t\tObjectMeta: metav1.ObjectMeta{\n\t\t\t\t\tName: api.OpenShiftConsoleConfigMapName,\n\t\t\t\t\tNamespace: api.OpenShiftConsoleNamespace,\n\t\t\t\t\tLabels: map[string]string{\"app\": api.OpenShiftConsoleName},\n\t\t\t\t\tAnnotations: map[string]string{},\n\t\t\t\t},\n\t\t\t\tData: map[string]string{configKey: `kind: ConsoleConfig\napiVersion: console.openshift.io/v1\nauth:\n clientID: console\n clientSecretFile: /var/oauth-config/clientSecret\n oauthEndpointCAFile: /var/run/secrets/kubernetes.io/serviceaccount/ca.crt\nclusterInfo:\n consoleBaseAddress: https://` + host + `\n masterPublicURL: ` + mockAPIServer + `\n releaseVersion: ` + testReleaseVersion + `\ncustomization:\n branding: ` + string(operatorv1.BrandDedicatedLegacy) + `\n documentationBaseURL: ` + mockOperatorDocURL + `\nservingInfo:\n bindAddress: https://[::]:8443\n certFile: /var/serving-cert/tls.crt\n keyFile: /var/serving-cert/tls.key\nproviders:\n statuspageID: id-1234\n`,\n\t\t\t\t},\n\t\t\t},\n\t\t},\n\t\t{\n\t\t\tname: \"Test operator config with custom route hostname\",\n\t\t\targs: 
args{\n\t\t\t\toperatorConfig: &operatorv1.Console{\n\t\t\t\t\tSpec: operatorv1.ConsoleSpec{\n\t\t\t\t\t\tRoute: operatorv1.ConsoleConfigRoute{\n\t\t\t\t\t\t\tHostname: customHostname,\n\t\t\t\t\t\t},\n\t\t\t\t\t},\n\t\t\t\t},\n\t\t\t\tconsoleConfig: &configv1.Console{},\n\t\t\t\tmanagedConfig: &corev1.ConfigMap{},\n\t\t\t\tinfrastructureConfig: &configv1.Infrastructure{\n\t\t\t\t\tStatus: configv1.InfrastructureStatus{\n\t\t\t\t\t\tAPIServerURL: mockAPIServer,\n\t\t\t\t\t},\n\t\t\t\t},\n\t\t\t\trt: &routev1.Route{\n\t\t\t\t\tObjectMeta: metav1.ObjectMeta{\n\t\t\t\t\t\tName: api.OpenshiftConsoleCustomRouteName,\n\t\t\t\t\t},\n\t\t\t\t\tSpec: routev1.RouteSpec{\n\t\t\t\t\t\tHost: customHostname,\n\t\t\t\t\t},\n\t\t\t\t},\n\t\t\t\tuseDefaultCAFile: false,\n\t\t\t\tinactivityTimeoutSeconds: 0,\n\t\t\t},\n\t\t\twant: &corev1.ConfigMap{\n\t\t\t\tTypeMeta: metav1.TypeMeta{\n\t\t\t\t\tKind: \"ConfigMap\",\n\t\t\t\t\tAPIVersion: \"v1\",\n\t\t\t\t},\n\t\t\t\tObjectMeta: metav1.ObjectMeta{\n\t\t\t\t\tName: api.OpenShiftConsoleConfigMapName,\n\t\t\t\t\tNamespace: api.OpenShiftConsoleNamespace,\n\t\t\t\t\tLabels: map[string]string{\"app\": api.OpenShiftConsoleName},\n\t\t\t\t\tAnnotations: map[string]string{},\n\t\t\t\t},\n\t\t\t\tData: map[string]string{configKey: `kind: ConsoleConfig\napiVersion: console.openshift.io/v1\nauth:\n clientID: console\n clientSecretFile: /var/oauth-config/clientSecret\n oauthEndpointCAFile: /var/oauth-serving-cert/ca-bundle.crt\nclusterInfo:\n consoleBaseAddress: https://` + customHostname + `\n masterPublicURL: ` + mockAPIServer + `\n releaseVersion: ` + testReleaseVersion + `\ncustomization:\n branding: ` + DEFAULT_BRAND + `\n documentationBaseURL: ` + DEFAULT_DOC_URL + `\nservingInfo:\n bindAddress: https://[::]:8443\n certFile: /var/serving-cert/tls.crt\n keyFile: /var/serving-cert/tls.key\n redirectPort: ` + strconv.Itoa(api.RedirectContainerPort) + `\nproviders: {}\n`,\n\t\t\t\t},\n\t\t\t},\n\t\t},\n\t\t{\n\t\t\tname: \"Test operator config, with inactivityTimeoutSeconds set\",\n\t\t\targs: args{\n\t\t\t\toperatorConfig: &operatorv1.Console{},\n\t\t\t\tconsoleConfig: &configv1.Console{},\n\t\t\t\tmanagedConfig: &corev1.ConfigMap{},\n\t\t\t\tinfrastructureConfig: &configv1.Infrastructure{\n\t\t\t\t\tStatus: configv1.InfrastructureStatus{\n\t\t\t\t\t\tAPIServerURL: mockAPIServer,\n\t\t\t\t\t},\n\t\t\t\t},\n\t\t\t\trt: &routev1.Route{\n\t\t\t\t\tObjectMeta: metav1.ObjectMeta{\n\t\t\t\t\t\tName: api.OpenShiftConsoleName,\n\t\t\t\t\t},\n\t\t\t\t\tSpec: routev1.RouteSpec{\n\t\t\t\t\t\tHost: host,\n\t\t\t\t\t},\n\t\t\t\t},\n\t\t\t\tuseDefaultCAFile: true,\n\t\t\t\tinactivityTimeoutSeconds: 60,\n\t\t\t},\n\t\t\twant: &corev1.ConfigMap{\n\t\t\t\tTypeMeta: metav1.TypeMeta{\n\t\t\t\t\tKind: \"ConfigMap\",\n\t\t\t\t\tAPIVersion: \"v1\",\n\t\t\t\t},\n\t\t\t\tObjectMeta: metav1.ObjectMeta{\n\t\t\t\t\tName: api.OpenShiftConsoleConfigMapName,\n\t\t\t\t\tNamespace: api.OpenShiftConsoleNamespace,\n\t\t\t\t\tLabels: map[string]string{\"app\": api.OpenShiftConsoleName},\n\t\t\t\t\tAnnotations: map[string]string{},\n\t\t\t\t},\n\t\t\t\tData: map[string]string{configKey: `kind: ConsoleConfig\napiVersion: console.openshift.io/v1\nauth:\n clientID: console\n clientSecretFile: /var/oauth-config/clientSecret\n inactivityTimeoutSeconds: 60\n oauthEndpointCAFile: /var/run/secrets/kubernetes.io/serviceaccount/ca.crt\nclusterInfo:\n consoleBaseAddress: https://` + host + `\n masterPublicURL: ` + mockAPIServer + `\n releaseVersion: ` + testReleaseVersion + `\ncustomization:\n branding: ` + 
DEFAULT_BRAND + `\n documentationBaseURL: ` + DEFAULT_DOC_URL + `\nservingInfo:\n bindAddress: https://[::]:8443\n certFile: /var/serving-cert/tls.crt\n keyFile: /var/serving-cert/tls.key\nproviders: {}\n`,\n\t\t\t\t},\n\t\t\t},\n\t\t},\n\t\t{\n\t\t\tname: \"Test operator config, with enabledPlugins set\",\n\t\t\targs: args{\n\t\t\t\toperatorConfig: &operatorv1.Console{},\n\t\t\t\tconsoleConfig: &configv1.Console{},\n\t\t\t\tmanagedConfig: &corev1.ConfigMap{},\n\t\t\t\tinfrastructureConfig: &configv1.Infrastructure{\n\t\t\t\t\tStatus: configv1.InfrastructureStatus{\n\t\t\t\t\t\tAPIServerURL: mockAPIServer,\n\t\t\t\t\t},\n\t\t\t\t},\n\t\t\t\trt: &routev1.Route{\n\t\t\t\t\tObjectMeta: metav1.ObjectMeta{\n\t\t\t\t\t\tName: api.OpenShiftConsoleName,\n\t\t\t\t\t},\n\t\t\t\t\tSpec: routev1.RouteSpec{\n\t\t\t\t\t\tHost: host,\n\t\t\t\t\t},\n\t\t\t\t},\n\t\t\t\tuseDefaultCAFile: true,\n\t\t\t\tinactivityTimeoutSeconds: 0,\n\t\t\t\tavailablePlugins: []*v1.ConsolePlugin{\n\t\t\t\t\ttestPluginsWithProxy(\"plugin1\", \"service1\", \"service-namespace1\"),\n\t\t\t\t\ttestPluginsWithProxy(\"plugin2\", \"service2\", \"service-namespace2\"),\n\t\t\t\t\ttestPluginsWithI18nPreloadType(\"plugin3\", \"service3\", \"service-namespace3\"),\n\t\t\t\t},\n\t\t\t},\n\t\t\twant: &corev1.ConfigMap{\n\t\t\t\tTypeMeta: metav1.TypeMeta{\n\t\t\t\t\tKind: \"ConfigMap\",\n\t\t\t\t\tAPIVersion: \"v1\",\n\t\t\t\t},\n\t\t\t\tObjectMeta: metav1.ObjectMeta{\n\t\t\t\t\tName: api.OpenShiftConsoleConfigMapName,\n\t\t\t\t\tNamespace: api.OpenShiftConsoleNamespace,\n\t\t\t\t\tLabels: map[string]string{\"app\": api.OpenShiftConsoleName},\n\t\t\t\t\tAnnotations: map[string]string{},\n\t\t\t\t},\n\t\t\t\tData: map[string]string{configKey: `kind: ConsoleConfig\napiVersion: console.openshift.io/v1\nauth:\n clientID: console\n clientSecretFile: /var/oauth-config/clientSecret\n oauthEndpointCAFile: /var/run/secrets/kubernetes.io/serviceaccount/ca.crt\nclusterInfo:\n consoleBaseAddress: https://` + host + `\n masterPublicURL: ` + mockAPIServer + `\n releaseVersion: ` + testReleaseVersion + `\ncustomization:\n branding: ` + DEFAULT_BRAND + `\n documentationBaseURL: ` + DEFAULT_DOC_URL + `\ni18nNamespaces:\n- plugin__plugin3\nservingInfo:\n bindAddress: https://[::]:8443\n certFile: /var/serving-cert/tls.crt\n keyFile: /var/serving-cert/tls.key\nproviders: {}\nplugins:\n plugin1: https://service1.service-namespace1.svc.cluster.local:8443/\n plugin2: https://service2.service-namespace2.svc.cluster.local:8443/\n plugin3: https://service3.service-namespace3.svc.cluster.local:8443/\nproxy:\n services:\n - authorize: true\n caCertificate: '-----BEGIN CERTIFICATE-----` + \"\\n\" + `\nMIICRzCCAfGgAwIBAgIJAIydTIADd+yqMA0GCSqGSIb3DQEBCwUAMH4xCzAJBgNV` + \"\\n\" + `\nBAYTAkdCMQ8wDQYDVQQIDAZMb25kb24xDzANBgNVBAcMBkxvbmRvbjEYMBYGA1UE` + \"\\n\" + `\nCgwPR2xvYmFsIFNlY3VyaXR5MRYwFAYDVQQLDA1JVCBEZXBhcnRtZW50MRswGQYD` + \"\\n\" + `\nVQQDDBJ0ZXN0LWNlcnRpZmljYXRlLTIwIBcNMTcwNDI2MjMyNDU4WhgPMjExNzA0` + \"\\n\" + `\nMDIyMzI0NThaMH4xCzAJBgNVBAYTAkdCMQ8wDQYDVQQIDAZMb25kb24xDzANBgNV` + \"\\n\" + `\nBAcMBkxvbmRvbjEYMBYGA1UECgwPR2xvYmFsIFNlY3VyaXR5MRYwFAYDVQQLDA1J` + \"\\n\" + `\nVCBEZXBhcnRtZW50MRswGQYDVQQDDBJ0ZXN0LWNlcnRpZmljYXRlLTIwXDANBgkq` + \"\\n\" + `\nhkiG9w0BAQEFAANLADBIAkEAuiRet28DV68Dk4A8eqCaqgXmymamUEjW/DxvIQqH` + \"\\n\" + `\n3lbhtm8BwSnS9wUAajSLSWiq3fci2RbRgaSPjUrnbOHCLQIDAQABo1AwTjAdBgNV` + \"\\n\" + `\nHQ4EFgQU0vhI4OPGEOqT+VAWwxdhVvcmgdIwHwYDVR0jBBgwFoAU0vhI4OPGEOqT` + \"\\n\" + `\n+VAWwxdhVvcmgdIwDAYDVR0TBAUwAwEB/zANBgkqhkiG9w0BAQsFAANBALNeJGDe` + 
\"\\n\" + `\nnV5cXbp9W1bC12Tc8nnNXn4ypLE2JTQAvyp51zoZ8hQoSnRVx/VCY55Yu+br8gQZ` + \"\\n\" + `\n+tW+O/PoE7B3tuY=` + \"\\n\" + `\n-----END CERTIFICATE-----'\n consoleAPIPath: /api/proxy/plugin/plugin1/plugin1-alias/\n endpoint: https://proxy-service1.proxy-service-namespace1.svc.cluster.local:9991\n - authorize: true\n caCertificate: '-----BEGIN CERTIFICATE-----` + \"\\n\" + `\nMIICRzCCAfGgAwIBAgIJAIydTIADd+yqMA0GCSqGSIb3DQEBCwUAMH4xCzAJBgNV` + \"\\n\" + `\nBAYTAkdCMQ8wDQYDVQQIDAZMb25kb24xDzANBgNVBAcMBkxvbmRvbjEYMBYGA1UE` + \"\\n\" + `\nCgwPR2xvYmFsIFNlY3VyaXR5MRYwFAYDVQQLDA1JVCBEZXBhcnRtZW50MRswGQYD` + \"\\n\" + `\nVQQDDBJ0ZXN0LWNlcnRpZmljYXRlLTIwIBcNMTcwNDI2MjMyNDU4WhgPMjExNzA0` + \"\\n\" + `\nMDIyMzI0NThaMH4xCzAJBgNVBAYTAkdCMQ8wDQYDVQQIDAZMb25kb24xDzANBgNV` + \"\\n\" + `\nBAcMBkxvbmRvbjEYMBYGA1UECgwPR2xvYmFsIFNlY3VyaXR5MRYwFAYDVQQLDA1J` + \"\\n\" + `\nVCBEZXBhcnRtZW50MRswGQYDVQQDDBJ0ZXN0LWNlcnRpZmljYXRlLTIwXDANBgkq` + \"\\n\" + `\nhkiG9w0BAQEFAANLADBIAkEAuiRet28DV68Dk4A8eqCaqgXmymamUEjW/DxvIQqH` + \"\\n\" + `\n3lbhtm8BwSnS9wUAajSLSWiq3fci2RbRgaSPjUrnbOHCLQIDAQABo1AwTjAdBgNV` + \"\\n\" + `\nHQ4EFgQU0vhI4OPGEOqT+VAWwxdhVvcmgdIwHwYDVR0jBBgwFoAU0vhI4OPGEOqT` + \"\\n\" + `\n+VAWwxdhVvcmgdIwDAYDVR0TBAUwAwEB/zANBgkqhkiG9w0BAQsFAANBALNeJGDe` + \"\\n\" + `\nnV5cXbp9W1bC12Tc8nnNXn4ypLE2JTQAvyp51zoZ8hQoSnRVx/VCY55Yu+br8gQZ` + \"\\n\" + `\n+tW+O/PoE7B3tuY=` + \"\\n\" + `\n-----END CERTIFICATE-----'\n consoleAPIPath: /api/proxy/plugin/plugin2/plugin2-alias/\n endpoint: https://proxy-service2.proxy-service-namespace2.svc.cluster.local:9991\n`,\n\t\t\t\t},\n\t\t\t},\n\t\t},\n\t\t{\n\t\t\tname: \"Test operator config, with 'External' ControlPlaneTopology\",\n\t\t\targs: args{\n\t\t\t\toperatorConfig: &operatorv1.Console{},\n\t\t\t\tconsoleConfig: &configv1.Console{},\n\t\t\t\tmanagedConfig: &corev1.ConfigMap{},\n\t\t\t\tinfrastructureConfig: &configv1.Infrastructure{\n\t\t\t\t\tStatus: configv1.InfrastructureStatus{\n\t\t\t\t\t\tAPIServerURL: mockAPIServer,\n\t\t\t\t\t\tControlPlaneTopology: configv1.ExternalTopologyMode,\n\t\t\t\t\t},\n\t\t\t\t},\n\t\t\t\trt: &routev1.Route{\n\t\t\t\t\tObjectMeta: metav1.ObjectMeta{\n\t\t\t\t\t\tName: api.OpenShiftConsoleName,\n\t\t\t\t\t},\n\t\t\t\t\tSpec: routev1.RouteSpec{\n\t\t\t\t\t\tHost: host,\n\t\t\t\t\t},\n\t\t\t\t},\n\t\t\t\tuseDefaultCAFile: true,\n\t\t\t\tinactivityTimeoutSeconds: 0,\n\t\t\t},\n\t\t\twant: &corev1.ConfigMap{\n\t\t\t\tTypeMeta: metav1.TypeMeta{\n\t\t\t\t\tKind: \"ConfigMap\",\n\t\t\t\t\tAPIVersion: \"v1\",\n\t\t\t\t},\n\t\t\t\tObjectMeta: metav1.ObjectMeta{\n\t\t\t\t\tName: api.OpenShiftConsoleConfigMapName,\n\t\t\t\t\tNamespace: api.OpenShiftConsoleNamespace,\n\t\t\t\t\tLabels: map[string]string{\"app\": api.OpenShiftConsoleName},\n\t\t\t\t\tAnnotations: map[string]string{},\n\t\t\t\t},\n\t\t\t\tData: map[string]string{configKey: `kind: ConsoleConfig\napiVersion: console.openshift.io/v1\nauth:\n clientID: console\n clientSecretFile: /var/oauth-config/clientSecret\n oauthEndpointCAFile: /var/run/secrets/kubernetes.io/serviceaccount/ca.crt\nclusterInfo:\n consoleBaseAddress: https://` + host + `\n masterPublicURL: ` + mockAPIServer + `\n controlPlaneTopology: External\n releaseVersion: ` + testReleaseVersion + `\ncustomization:\n branding: ` + DEFAULT_BRAND + `\n documentationBaseURL: ` + DEFAULT_DOC_URL + `\nservingInfo:\n bindAddress: https://[::]:8443\n certFile: /var/serving-cert/tls.crt\n keyFile: /var/serving-cert/tls.key\nproviders: {}\n`,\n\t\t\t\t},\n\t\t\t},\n\t\t},\n\t\t{\n\t\t\tname: \"Test operator config, with 
CopiedCSVsDisabled\",\n\t\t\targs: args{\n\t\t\t\toperatorConfig: &operatorv1.Console{},\n\t\t\t\tconsoleConfig: &configv1.Console{},\n\t\t\t\tmanagedConfig: &corev1.ConfigMap{},\n\t\t\t\tinfrastructureConfig: &configv1.Infrastructure{\n\t\t\t\t\tStatus: configv1.InfrastructureStatus{\n\t\t\t\t\t\tAPIServerURL: mockAPIServer,\n\t\t\t\t\t\tControlPlaneTopology: configv1.ExternalTopologyMode,\n\t\t\t\t\t},\n\t\t\t\t},\n\t\t\t\tcopiedCSVsDisabled: true,\n\t\t\t\trt: &routev1.Route{\n\t\t\t\t\tObjectMeta: metav1.ObjectMeta{\n\t\t\t\t\t\tName: api.OpenShiftConsoleName,\n\t\t\t\t\t},\n\t\t\t\t\tSpec: routev1.RouteSpec{\n\t\t\t\t\t\tHost: host,\n\t\t\t\t\t},\n\t\t\t\t},\n\t\t\t\tuseDefaultCAFile: true,\n\t\t\t\tinactivityTimeoutSeconds: 0,\n\t\t\t},\n\t\t\twant: &corev1.ConfigMap{\n\t\t\t\tTypeMeta: metav1.TypeMeta{\n\t\t\t\t\tKind: \"ConfigMap\",\n\t\t\t\t\tAPIVersion: \"v1\",\n\t\t\t\t},\n\t\t\t\tObjectMeta: metav1.ObjectMeta{\n\t\t\t\t\tName: api.OpenShiftConsoleConfigMapName,\n\t\t\t\t\tNamespace: api.OpenShiftConsoleNamespace,\n\t\t\t\t\tLabels: map[string]string{\"app\": api.OpenShiftConsoleName},\n\t\t\t\t\tAnnotations: map[string]string{},\n\t\t\t\t},\n\t\t\t\tData: map[string]string{configKey: `kind: ConsoleConfig\napiVersion: console.openshift.io/v1\nauth:\n clientID: console\n clientSecretFile: /var/oauth-config/clientSecret\n oauthEndpointCAFile: /var/run/secrets/kubernetes.io/serviceaccount/ca.crt\nclusterInfo:\n consoleBaseAddress: https://` + host + `\n masterPublicURL: ` + mockAPIServer + `\n controlPlaneTopology: External\n releaseVersion: ` + testReleaseVersion + `\n copiedCSVsDisabled: true\ncustomization:\n branding: ` + DEFAULT_BRAND + `\n documentationBaseURL: ` + DEFAULT_DOC_URL + `\nservingInfo:\n bindAddress: https://[::]:8443\n certFile: /var/serving-cert/tls.crt\n keyFile: /var/serving-cert/tls.key\nproviders: {}\n`,\n\t\t\t\t},\n\t\t\t},\n\t\t},\n\t\t{\n\t\t\tname: \"Test default configmap with monitoring config\",\n\t\t\targs: args{\n\t\t\t\toperatorConfig: &operatorv1.Console{},\n\t\t\t\tconsoleConfig: &configv1.Console{},\n\t\t\t\tmanagedConfig: &corev1.ConfigMap{},\n\t\t\t\tinfrastructureConfig: &configv1.Infrastructure{\n\t\t\t\t\tStatus: configv1.InfrastructureStatus{\n\t\t\t\t\t\tAPIServerURL: mockAPIServer,\n\t\t\t\t\t\tControlPlaneTopology: configv1.HighlyAvailableTopologyMode,\n\t\t\t\t\t},\n\t\t\t\t},\n\t\t\t\trt: &routev1.Route{\n\t\t\t\t\tObjectMeta: metav1.ObjectMeta{\n\t\t\t\t\t\tName: api.OpenShiftConsoleName,\n\t\t\t\t\t},\n\t\t\t\t\tSpec: routev1.RouteSpec{\n\t\t\t\t\t\tHost: host,\n\t\t\t\t\t},\n\t\t\t\t},\n\t\t\t\tuseDefaultCAFile: true,\n\t\t\t\tinactivityTimeoutSeconds: 0,\n\t\t\t\tmonitoringSharedConfig: &corev1.ConfigMap{\n\t\t\t\t\tData: map[string]string{\n\t\t\t\t\t\t\"alertmanagerUserWorkloadHost\": \"alertmanager-user-workload.openshift-user-workload-monitoring.svc:9094\",\n\t\t\t\t\t\t\"alertmanagerTenancyHost\": \"alertmanager-user-workload.openshift-user-workload-monitoring.svc:9092\",\n\t\t\t\t\t},\n\t\t\t\t},\n\t\t\t},\n\t\t\twant: &corev1.ConfigMap{\n\t\t\t\tTypeMeta: metav1.TypeMeta{\n\t\t\t\t\tKind: \"ConfigMap\",\n\t\t\t\t\tAPIVersion: \"v1\",\n\t\t\t\t},\n\t\t\t\tObjectMeta: metav1.ObjectMeta{\n\t\t\t\t\tName: api.OpenShiftConsoleConfigMapName,\n\t\t\t\t\tNamespace: api.OpenShiftConsoleNamespace,\n\t\t\t\t\tLabels: map[string]string{\"app\": api.OpenShiftConsoleName},\n\t\t\t\t\tAnnotations: map[string]string{},\n\t\t\t\t},\n\t\t\t\tData: map[string]string{configKey: `kind: ConsoleConfig\napiVersion: 
console.openshift.io/v1\nauth:\n clientID: console\n clientSecretFile: /var/oauth-config/clientSecret\n oauthEndpointCAFile: /var/run/secrets/kubernetes.io/serviceaccount/ca.crt\nclusterInfo:\n consoleBaseAddress: https://` + host + `\n masterPublicURL: ` + mockAPIServer + `\n controlPlaneTopology: HighlyAvailable\n releaseVersion: ` + testReleaseVersion + `\ncustomization:\n branding: ` + DEFAULT_BRAND + `\n documentationBaseURL: ` + DEFAULT_DOC_URL + `\nmonitoringInfo:\n alertmanagerTenancyHost: alertmanager-user-workload.openshift-user-workload-monitoring.svc:9092\n alertmanagerUserWorkloadHost: alertmanager-user-workload.openshift-user-workload-monitoring.svc:9094\nservingInfo:\n bindAddress: https://[::]:8443\n certFile: /var/serving-cert/tls.crt\n keyFile: /var/serving-cert/tls.key\nproviders: {}\n`,\n\t\t\t\t},\n\t\t\t},\n\t\t},\n\t}\n\tfor _, tt := range tests {\n\t\tt.Run(tt.name, func(t *testing.T) {\n\t\t\tcm, _, _ := DefaultConfigMap(\n\t\t\t\ttt.args.operatorConfig,\n\t\t\t\ttt.args.consoleConfig,\n\t\t\t\ttt.args.managedConfig,\n\t\t\t\ttt.args.monitoringSharedConfig,\n\t\t\t\ttt.args.infrastructureConfig,\n\t\t\t\ttt.args.rt,\n\t\t\t\ttt.args.useDefaultCAFile,\n\t\t\t\ttt.args.inactivityTimeoutSeconds,\n\t\t\t\ttt.args.availablePlugins,\n\t\t\t\ttt.args.nodeArchitectures,\n\t\t\t\ttt.args.nodeOperatingSystems,\n\t\t\t\ttt.args.copiedCSVsDisabled,\n\t\t\t)\n\n\t\t\t// marshall the exampleYaml to map[string]interface{} so we can use it in diff below\n\t\t\tvar exampleConfig map[string]interface{}\n\t\t\texampleBytes := []byte(tt.want.Data[configKey])\n\t\t\terr := yaml.Unmarshal(exampleBytes, &exampleConfig)\n\t\t\tif err != nil {\n\t\t\t\tt.Error(err)\n\t\t\t}\n\n\t\t\t// the reason we have to marshall blindly into map[string]interface{}\n\t\t\t// is that we don't have the definition for the console config struct.\n\t\t\t// it exists in the console repo under cmd/bridge/config.go and is not\n\t\t\t// available as an api object\n\t\t\tvar actualConfig map[string]interface{}\n\t\t\t// convert the string back into a []byte\n\t\t\tconfigBytes := []byte(cm.Data[configKey])\n\n\t\t\terr = yaml.Unmarshal(configBytes, &actualConfig)\n\t\t\tif err != nil {\n\t\t\t\tt.Error(\"Problem with consoleConfig.Data[console-config.yaml]\", err)\n\t\t\t}\n\n\t\t\t// compare the configs\n\t\t\tif diff := deep.Equal(exampleConfig, actualConfig); diff != nil {\n\t\t\t\tfmt.Printf(\"\\n EXAMPLE: %#v\\n\\n ACTUAL: %#v\\n\", exampleConfig, actualConfig)\n\t\t\t\tt.Error(diff)\n\t\t\t}\n\n\t\t\t// nil them out, we already compared them, and unfortunately we can't trust\n\t\t\t// that the ordering will be stable. this avoids a flaky test.\n\t\t\tcm.Data = nil\n\t\t\ttt.want.Data = nil\n\n\t\t\t// and then we can test the rest of the struct\n\t\t\tif diff := deep.Equal(cm, tt.want); diff != nil {\n\t\t\t\tt.Error(diff)\n\t\t\t}\n\t\t})\n\t}\n}", "func GenerateCA(commonName string,\n serialNumber int64,\n countryCode string,\n organizationalUnit string,\n algo string,\n ecCurve string) (rootCADER []byte, rootPrivateKeyDER []byte, err error) {\n\n notBefore := time.Now().UTC()\n notAfter := notBefore.AddDate(CAValidity, 0, 0) // (years, months. 
days)\n\n // Hashing algorithm should match the private key type that signs the certificate.\n // In this case we are self-signing so the key generation algorithm and signature hashing algorithm are both of the same type\n hashingAlgorithm := x509.SHA256WithRSA\n switch strings.ToUpper(algo) {\n case \"RSA\":\n // pass\n case \"ECDSA\":\n hashingAlgorithm = x509.ECDSAWithSHA256\n default:\n return nil, nil, errors.New(\"Unrecognized algorithm, valid options are RSA and ECDSA\")\n }\n\n // https://golang.org/pkg/crypto/x509/#Certificate\n myCACertTemplate := x509.Certificate{\n\n // https://golang.org/pkg/crypto/x509/pkix/#Name\n Subject: pkix.Name{\n CommonName: commonName,\n Country: []string{countryCode},\n Organization: []string{organizationalUnit},\n },\n\n NotBefore: notBefore,\n NotAfter: notAfter,\n SerialNumber: big.NewInt(serialNumber), // returns *big.Int\n KeyUsage: RootCAKeyUsage,\n\n // For CAs we at least want []x509.ExtKeyUsage{x509.ExtKeyUsageAny | x509.KeyUsageCertSign}\n // More info: https://golang.org/pkg/crypto/x509/#ExtKeyUsage\n ExtKeyUsage: []x509.ExtKeyUsage{x509.ExtKeyUsageAny}, // this should work\n BasicConstraintsValid: true,\n IsCA: true,\n MaxPathLen: 3, // 1 is enough for our purpose\n SignatureAlgorithm: hashingAlgorithm, // other options are at https://golang.org/pkg/crypto/x509/#SignatureAlgorithm\n }\n\n privKey, pubKey, err := generateKeyPair(algo, ecCurve)\n if err != nil {\n return nil, nil, err\n }\n\n // https://golang.org/pkg/crypto/x509/#CreateCertificate\n // Both the signee and singer are the same template because rootCAs are always self-signed\n rootCADER, err = x509.CreateCertificate(rand.Reader, &myCACertTemplate, &myCACertTemplate, pubKey, privKey)\n if err != nil {\n return nil, nil, err\n }\n\n rootPrivateKeyDER, err = MarshalPrivateKey(privKey)\n\n return rootCADER, rootPrivateKeyDER, err\n}", "func CreateDefaultFlags(ctx *E2EContext) {\n\tflag.StringVar(&ctx.Settings.ConfigPath, \"config-path\", \"\", \"path to the e2e config file\")\n\tflag.StringVar(&ctx.Settings.ArtifactFolder, \"artifacts-folder\", \"\", \"folder where e2e test artifact should be stored\")\n\tflag.BoolVar(&ctx.Settings.UseCIArtifacts, \"kubetest.use-ci-artifacts\", false, \"use the latest build from the main branch of the Kubernetes repository\")\n\tflag.StringVar(&ctx.Settings.KubetestConfigFilePath, \"kubetest.config-file\", \"\", \"path to the kubetest configuration file\")\n\tflag.IntVar(&ctx.Settings.GinkgoNodes, \"kubetest.ginkgo-nodes\", 1, \"number of ginkgo nodes to use\")\n\tflag.IntVar(&ctx.Settings.GinkgoSlowSpecThreshold, \"kubetest.ginkgo-slowSpecThreshold\", 120, \"time in s before spec is marked as slow\")\n\tflag.BoolVar(&ctx.Settings.UseExistingCluster, \"use-existing-cluster\", false, \"if true, the test uses the current cluster instead of creating a new one (default discovery rules apply)\")\n\tflag.BoolVar(&ctx.Settings.SkipCleanup, \"skip-cleanup\", false, \"if true, the resource cleanup after tests will be skipped\")\n\tflag.BoolVar(&ctx.Settings.SkipCloudFormationDeletion, \"skip-cloudformation-deletion\", false, \"if true, an AWS CloudFormation stack will not be deleted\")\n\tflag.BoolVar(&ctx.Settings.SkipCloudFormationCreation, \"skip-cloudformation-creation\", false, \"if true, an AWS CloudFormation stack will not be created\")\n\tflag.StringVar(&ctx.Settings.DataFolder, \"data-folder\", \"\", \"path to the data folder\")\n\tflag.StringVar(&ctx.Settings.SourceTemplate, \"source-template\", \"infrastructure-aws/cluster-template.yaml\", 
\"path to the data folder\")\n}", "func createCertificates(_ *testing.T) error {\n\tvar err error\n\tvar srcCaCrt *os.File\n\tvar srcTLSCrt *os.File\n\tvar srcTLSKey *os.File\n\tvar destCaCrt *os.File\n\tvar destTLSCrt *os.File\n\tvar destTLSKey *os.File\n\n\tdir := \"/tmp/k8s-webhook-server/serving-certs\"\n\n\t// create directory if not existing yet\n\t_ = os.Mkdir(\"/tmp/k8s-webhook-server\", os.ModePerm)\n\t_ = os.Mkdir(dir, os.ModePerm)\n\n\t// open src files\n\tif srcCaCrt, err = os.Open(\"../../test/certs/ca.crt\"); err != nil {\n\t\treturn err\n\t}\n\tdefer srcCaCrt.Close()\n\tif srcTLSCrt, err = os.Open(\"../../test/certs/tls.crt\"); err != nil {\n\t\treturn err\n\t}\n\tdefer srcTLSCrt.Close()\n\tif srcTLSKey, err = os.Open(\"../../test/certs/tls.key\"); err != nil {\n\t\treturn err\n\t}\n\tdefer srcTLSKey.Close()\n\n\t// open dest files\n\tif destCaCrt, err = os.Create(fmt.Sprintf(\"%s/%s\", dir, \"ca.crt\")); err != nil {\n\t\treturn err\n\t}\n\tdefer destCaCrt.Close()\n\tif destTLSCrt, err = os.Create(fmt.Sprintf(\"%s/%s\", dir, \"tls.crt\")); err != nil {\n\t\treturn err\n\t}\n\tdefer destTLSCrt.Close()\n\tif destTLSKey, err = os.Create(fmt.Sprintf(\"%s/%s\", dir, \"tls.key\")); err != nil {\n\t\treturn err\n\t}\n\tdefer destTLSKey.Close()\n\n\t// copy ca.crt\n\tif _, err := io.Copy(destCaCrt, srcCaCrt); err != nil {\n\t\treturn err\n\t}\n\t// copy tls.crt\n\tif _, err := io.Copy(destTLSCrt, srcTLSCrt); err != nil {\n\t\treturn err\n\t}\n\t// copy tls.key\n\tif _, err := io.Copy(destTLSKey, srcTLSKey); err != nil {\n\t\treturn err\n\t}\n\n\treturn nil\n}", "func defaultConfig() interface{} {\n\treturn &config{\n\t\tPools: make(pools),\n\t\tConfDirPath: \"/etc/cmk\",\n\t}\n}", "func (c *networkPolicyController) initializeDefaultAzureNpmChain() error {\n\tif c.isAzureNpmChainCreated {\n\t\treturn nil\n\t}\n\n\tipsMgr := c.npMgr.NsMap[util.KubeAllNamespacesFlag].IpsMgr\n\tiptMgr := c.npMgr.NsMap[util.KubeAllNamespacesFlag].iptMgr\n\tif err := ipsMgr.CreateSet(util.KubeSystemFlag, []string{util.IpsetNetHashFlag}); err != nil {\n\t\treturn fmt.Errorf(\"[initializeDefaultAzureNpmChain] Error: failed to initialize kube-system ipset with err %s\", err)\n\t}\n\tif err := iptMgr.InitNpmChains(); err != nil {\n\t\treturn fmt.Errorf(\"[initializeDefaultAzureNpmChain] Error: failed to initialize azure-npm chains with err %s\", err)\n\t}\n\n\tc.isAzureNpmChainCreated = true\n\treturn nil\n}", "func genCertsIfMIssing(\n\tt *testing.T,\n\tcapem string,\n\tcakey string,\n) error {\n\t_, err := os.Stat(capem)\n\tif err == nil {\n\t\t_, err = os.Stat(cakey)\n\t\tif err == nil {\n\t\t\treturn nil\n\t\t}\n\t}\n\n\tserialNumberLimit := new(big.Int).Lsh(big.NewInt(1), 128)\n\tserialNumber, err := rand.Int(rand.Reader, serialNumberLimit)\n\tif err != nil {\n\t\tt.Fatalf(\"failed to generate serial number: %s\", err)\n\t}\n\n\tcaTemplate := x509.Certificate{\n\t\tSerialNumber: serialNumber,\n\t\tSubject: pkix.Name{\n\t\t\tCountry: []string{\"US\"},\n\t\t\tOrganization: []string{\"elastic\"},\n\t\t\tOrganizationalUnit: []string{\"beats\"},\n\t\t},\n\t\tIssuer: pkix.Name{\n\t\t\tCountry: []string{\"US\"},\n\t\t\tOrganization: []string{\"elastic\"},\n\t\t\tOrganizationalUnit: []string{\"beats\"},\n\t\t\tLocality: []string{\"locality\"},\n\t\t\tProvince: []string{\"province\"},\n\t\t\tStreetAddress: []string{\"Mainstreet\"},\n\t\t\tPostalCode: []string{\"12345\"},\n\t\t\tSerialNumber: \"23\",\n\t\t\tCommonName: \"*\",\n\t\t},\n\t\tIPAddresses: []net.IP{\n\t\t\tnet.IP{127, 0, 0, 
1},\n\t\t},\n\n\t\tSignatureAlgorithm: x509.SHA512WithRSA,\n\t\tPublicKeyAlgorithm: x509.ECDSA,\n\t\tNotBefore: time.Now(),\n\t\tNotAfter: time.Now().AddDate(10, 0, 0),\n\t\tSubjectKeyId: []byte(\"12345\"),\n\t\tBasicConstraintsValid: true,\n\t\tIsCA: true,\n\t\tExtKeyUsage: []x509.ExtKeyUsage{\n\t\t\tx509.ExtKeyUsageClientAuth,\n\t\t\tx509.ExtKeyUsageServerAuth},\n\t\tKeyUsage: x509.KeyUsageKeyEncipherment |\n\t\t\tx509.KeyUsageDigitalSignature |\n\t\t\tx509.KeyUsageCertSign,\n\t}\n\n\t// generate keys\n\tpriv, err := rsa.GenerateKey(rand.Reader, 4096)\n\tif err != nil {\n\t\tt.Fatalf(\"failed to generate ca private key: %v\", err)\n\t}\n\tpub := &priv.PublicKey\n\n\t// generate certificate\n\tcaBytes, err := x509.CreateCertificate(\n\t\trand.Reader,\n\t\t&caTemplate,\n\t\t&caTemplate,\n\t\tpub, priv)\n\tif err != nil {\n\t\tt.Fatalf(\"failed to generate ca certificate: %v\", err)\n\t}\n\n\t// write key file\n\tkeyOut, err := os.OpenFile(cakey, os.O_WRONLY|os.O_CREATE|os.O_TRUNC, 0600)\n\tif err != nil {\n\t\tt.Fatalf(\"failed to open key file for writing: %v\", err)\n\t}\n\tpem.Encode(keyOut, &pem.Block{\n\t\tType: \"RSA PRIVATE KEY\",\n\t\tBytes: x509.MarshalPKCS1PrivateKey(priv)})\n\tkeyOut.Close()\n\n\t// write certificate\n\tcertOut, err := os.Create(capem)\n\tif err != nil {\n\t\tt.Fatalf(\"failed to open cert.pem for writing: %s\", err)\n\t}\n\tpem.Encode(certOut, &pem.Block{Type: \"CERTIFICATE\", Bytes: caBytes})\n\tcertOut.Close()\n\n\treturn nil\n}", "func createConfigMapFunc(f *framework.Framework, tc *nodeConfigTestCase) error {\n\ttc.configMap.ResourceVersion = \"\"\n\tcm, err := f.ClientSet.CoreV1().ConfigMaps(tc.configMap.Namespace).Create(tc.configMap)\n\tif err != nil {\n\t\treturn err\n\t}\n\t// update tc.configMap's UID and ResourceVersion to match the new ConfigMap, this makes\n\t// sure our derived status checks have up-to-date information\n\ttc.configMap.UID = cm.UID\n\ttc.configMap.ResourceVersion = cm.ResourceVersion\n\treturn nil\n}", "func DefaultConfigMapName(controllerName string) string {\n\treturn fmt.Sprintf(\"%s-configmap\", controllerName)\n}", "func DefaultConfigMapName(controllerName string) string {\n\treturn fmt.Sprintf(\"%s-configmap\", controllerName)\n}", "func createAWSKMSClients(arnMap map[string]string) ([]AWSKMSClient, error) {\n\tsess, err := session.NewSession()\n\tif err != nil {\n\t\treturn nil, errors.Wrap(err, \"unable to create new session\")\n\t}\n\n\tclients := make([]AWSKMSClient, 0)\n\n\tfor region, arn := range arnMap {\n\t\tclients = append(clients, newAWSKMSClient(sess, region, arn))\n\t}\n\n\treturn clients, nil\n}", "func ArbitrateConfigs(c *Configure) {\n\t//check the ClusterName, ClusterName is used to Identify the clusters in the Local NetWork\n\tif c.HttpPort == c.MsgPort {\n\t\tpanic(\"port conflict\")\n\t}\n\tif c.HttpPort > math.MaxInt16 || c.HttpPort < 1024 {\n\t\tpanic(fmt.Errorf(\"illegal http port %d\", c.HttpPort))\n\t}\n\n\tif c.MsgPort > math.MaxInt16 || c.MsgPort < 1024 {\n\t\tpanic(fmt.Errorf(\"illegal msg port %d\", c.MsgPort))\n\t}\n\n\tif c.Retry > 10 {\n\t\tc.Retry = 10\n\t}\n\tif c.Retry < 1 {\n\t\tc.Retry = 1\n\t}\n\tif c.SyncType < 0 || c.SyncType > 2 {\n\t\tc.SyncType = 0\n\t}\n\tif c.Threshold < 1000 {\n\t\tc.Threshold = 1000\n\t}\n\tif c.Threshold > 1000000 {\n\t\tc.Threshold = 1000000\n\t}\n}", "func EnsureDefaults(cfg *config.Config) {\n\t// provide with defaults for shared logging, since we need a valid destination address for \"envdecode\".\n\tif cfg.Log == nil && cfg.Commons != nil && 
cfg.Commons.Log != nil {\n\t\tcfg.Log = &config.Log{\n\t\t\tLevel: cfg.Commons.Log.Level,\n\t\t\tPretty: cfg.Commons.Log.Pretty,\n\t\t\tColor: cfg.Commons.Log.Color,\n\t\t\tFile: cfg.Commons.Log.File,\n\t\t}\n\t} else if cfg.Log == nil {\n\t\tcfg.Log = &config.Log{}\n\t}\n\t// provide with defaults for shared tracing, since we need a valid destination address for \"envdecode\".\n\tif cfg.Tracing == nil && cfg.Commons != nil && cfg.Commons.Tracing != nil {\n\t\tcfg.Tracing = &config.Tracing{\n\t\t\tEnabled: cfg.Commons.Tracing.Enabled,\n\t\t\tType: cfg.Commons.Tracing.Type,\n\t\t\tEndpoint: cfg.Commons.Tracing.Endpoint,\n\t\t\tCollector: cfg.Commons.Tracing.Collector,\n\t\t}\n\t} else if cfg.Tracing == nil {\n\t\tcfg.Tracing = &config.Tracing{}\n\t}\n\n\tif cfg.Reva == nil && cfg.Commons != nil {\n\t\tcfg.Reva = structs.CopyOrZeroValue(cfg.Commons.Reva)\n\t}\n\n\tif cfg.TokenManager == nil && cfg.Commons != nil && cfg.Commons.TokenManager != nil {\n\t\tcfg.TokenManager = &config.TokenManager{\n\t\t\tJWTSecret: cfg.Commons.TokenManager.JWTSecret,\n\t\t}\n\t} else if cfg.TokenManager == nil {\n\t\tcfg.TokenManager = &config.TokenManager{}\n\t}\n\n\tif cfg.GRPC.TLS == nil && cfg.Commons != nil {\n\t\tcfg.GRPC.TLS = structs.CopyOrZeroValue(cfg.Commons.GRPCServiceTLS)\n\t}\n}", "func CreateDefaultUserConfigFile(keyPath string) error {\n\tuser, err := GetDefaultSSHUser()\n\tif err != nil {\n\t\treturn err\n\t}\n\tif user == \"\" {\n\t\treturn nil\n\t}\n\n\terr = MakeDotSSH()\n\tif err != nil {\n\t\treturn err\n\t}\n\n\tif _, err := os.Stat(shared.ExpandPathWithTilde(\"~/.ssh/config\")); os.IsNotExist(err) {\n\t\tf, err := os.OpenFile(shared.ExpandPathWithTilde(\"~/.ssh/config\"), os.O_RDONLY|os.O_CREATE, 0644)\n\t\tif err != nil {\n\t\t\treturn fmt.Errorf(\"failed to touch ~/.ssh/config: %v\", err)\n\t\t}\n\t\tf.Close()\n\t}\n\n\t// This config file sets a default ssh user and a default ssh key. 
This ensures that kssh's signed key will be used.\n\tconfig := fmt.Sprintf(\"# kssh config file to set a default SSH user\\n\"+\n\t\t\"Include config\\n\"+\n\t\t\"Host *\\n\"+\n\t\t\" User %s\\n\"+\n\t\t\" IdentityFile %s\\n\"+\n\t\t\" IdentitiesOnly yes\\n\", user, keyPath)\n\n\tf, err := os.OpenFile(AlternateSSHConfigFile, os.O_CREATE|os.O_WRONLY, 0644)\n\tif err != nil {\n\t\treturn err\n\t}\n\tdefer f.Close()\n\t_, err = f.WriteString(config)\n\tif err != nil {\n\t\treturn err\n\t}\n\treturn nil\n}", "func createProviders(tmpFiles *tmpCredsFiles) (certprovider.Provider, certprovider.Provider, certprovider.Provider, certprovider.Provider, error) {\n\tclientIdentityOptions := pemfile.Options{\n\t\tCertFile: tmpFiles.clientCertTmp.Name(),\n\t\tKeyFile: tmpFiles.clientKeyTmp.Name(),\n\t\tRefreshDuration: credRefreshingInterval,\n\t}\n\tclientIdentityProvider, err := pemfile.NewProvider(clientIdentityOptions)\n\tif err != nil {\n\t\treturn nil, nil, nil, nil, err\n\t}\n\tclientRootOptions := pemfile.Options{\n\t\tRootFile: tmpFiles.clientTrustTmp.Name(),\n\t\tRefreshDuration: credRefreshingInterval,\n\t}\n\tclientRootProvider, err := pemfile.NewProvider(clientRootOptions)\n\tif err != nil {\n\t\treturn nil, nil, nil, nil, err\n\t}\n\tserverIdentityOptions := pemfile.Options{\n\t\tCertFile: tmpFiles.serverCertTmp.Name(),\n\t\tKeyFile: tmpFiles.serverKeyTmp.Name(),\n\t\tRefreshDuration: credRefreshingInterval,\n\t}\n\tserverIdentityProvider, err := pemfile.NewProvider(serverIdentityOptions)\n\tif err != nil {\n\t\treturn nil, nil, nil, nil, err\n\t}\n\tserverRootOptions := pemfile.Options{\n\t\tRootFile: tmpFiles.serverTrustTmp.Name(),\n\t\tRefreshDuration: credRefreshingInterval,\n\t}\n\tserverRootProvider, err := pemfile.NewProvider(serverRootOptions)\n\tif err != nil {\n\t\treturn nil, nil, nil, nil, err\n\t}\n\treturn clientIdentityProvider, clientRootProvider, serverIdentityProvider, serverRootProvider, nil\n}", "func (o Options) ConstructClientConfigs() (mgmtKubeCfg, remoteKubeCfg clientcmd.ClientConfig, err error) {\n\tmgmtKubeCfg, err = kubeconfig.GetClientConfigWithContext(o.KubeConfigPath, o.MgmtContext, \"\")\n\tif err != nil {\n\t\treturn nil, nil, err\n\t}\n\tremoteKubeCfg, err = kubeconfig.GetClientConfigWithContext(o.KubeConfigPath, o.RemoteContext, \"\")\n\tif err != nil {\n\t\treturn nil, nil, err\n\t}\n\n\treturn mgmtKubeCfg, remoteKubeCfg, nil\n}", "func GenerateBinaries(c Calls, provider fsProviderFn) map[string]string {\n\t// Load all binaries\n\tbinaries := make(map[string]string)\n\tfor project, config := range c {\n\t\tbinaries[project] = loadBinary(provider, *config)\n\t}\n\treturn binaries\n}", "func createContrailConfig(fqNameTable *FQNameTableType, tp, name, parentType string, fqName []string) (*ContrailConfig, error) {\n\tu, err := uuid.NewUUID()\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\tus := u.String()\n\tif (*fqNameTable)[tp] == nil {\n\t\t(*fqNameTable)[tp] = map[string]string{}\n\t}\n\tt := time.Now().String()\n\tts := strings.ReplaceAll(t, \" \", \"T\")\n\tc := ContrailConfig{\n\t\tUUID: us,\n\t\tType: tp,\n\t\tParentType: parentType,\n\t\tDisplayName: name,\n\t\tPerms2: types.PermType2{\n\t\t\tOwner: \"cloud-admin\",\n\t\t\tOwnerAccess: 7,\n\t\t\tGlobalAccess: 5,\n\t\t},\n\t\tIdPerms: types.IdPermsType{\n\t\t\tEnable: true,\n\t\t\tUuid: &types.UuidType{\n\t\t\t\tUuidMslong: binary.BigEndian.Uint64(u[:8]),\n\t\t\t\tUuidLslong: binary.BigEndian.Uint64(u[8:]),\n\t\t\t},\n\t\t\tCreated: ts,\n\t\t\tLastModified: ts,\n\t\t\tUserVisible: 
true,\n\t\t\tPermissions: &types.PermType{\n\t\t\t\tOwner: \"cloud-admin\",\n\t\t\t\tOwnerAccess: 7,\n\t\t\t\tOtherAccess: 7,\n\t\t\t\tGroup: \"cloud-admin-group\",\n\t\t\t\tGroupAccess: 7,\n\t\t\t},\n\t\t\tDescription: \"\",\n\t\t\tCreator: \"\",\n\t\t},\n\t\tFqName: fqName,\n\t}\n\t(*fqNameTable)[tp][fmt.Sprintf(\"%s:%s\", strings.Join(fqName, \":\"), us)] = \"null\"\n\treturn &c, nil\n}", "func initializeDefaults() error {\n\tif err := AddDefaultProfiles(); err != nil {\n\t\tlogger.Get().Error(\"Default Storage profiles create failed: %v\", err)\n\t\treturn err\n\t}\n\treturn nil\n}", "func sshDefaultConfig(userName, identity string) (*ssh.ClientConfig, error) {\n\tcontents, err := loadIdentity(userName, identity)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\tsigner, err := ssh.ParsePrivateKey(contents)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\n\treturn &ssh.ClientConfig{\n\t\tUser: userName,\n\t\tAuth: []ssh.AuthMethod{\n\t\t\tssh.PublicKeys(signer),\n\t\t},\n\t}, nil\n}", "func defaultConfigs() lint.Configs {\n\treturn lint.Configs{}\n}", "func GetDefaultCaCertFilePath() string {\n\tif runtime.GOOS == \"windows\" {\n\t\treturn windowsServerCaCertPath\n\t}\n\treturn nixServerCaCertPath\n}", "func createConfigFile(filename string) {\n\tcontents := `; Default .skicka.config file. See \n; https://github.com/google/skicka/blob/master/README.md for more\n; information about setting up skicka.\n[google]\n\tclientid=YOUR_GOOGLE_APP_CLIENT_ID\n\tclientsecret=YOUR_GOOGLE_APP_SECRET\n[encryption]\n ; Run 'skicka genkey' to generate an encyption key.\n\t;salt=\n\t;passphrase-hash=\n\t;encrypted-key=\n\t;encrypted-key-iv=\n[upload]\n\t; You may want to specify regular expressions to match local filenames\n\t; that you want to be ignored by 'skicka upload'. 
Use one ignored-regexp\n ; line for each such regular expression.\n\t;ignored-regexp=\"\\\\.o$\"\n\t;ignored-regexp=~$\n\t;\n\t; To limit upload bandwidth, you can set the maximum (average)\n\t; bytes per second that will be used for uploads\n\t;bytes-per-second-limit=524288 ; 512kB\n`\n\t// Don't overwrite an already-existing configuration file.\n\tif _, err := os.Stat(filename); os.IsNotExist(err) {\n\t\terr := ioutil.WriteFile(filename, []byte(contents), 0600)\n\t\tif err != nil {\n\t\t\tprintErrorAndExit(fmt.Errorf(\"skicka: unable to create \"+\n\t\t\t\t\"configuration file %s: %v\\n\", filename, err))\n\t\t}\n\t\tfmt.Printf(\"skicka: created configuration file %s.\\n\", filename)\n\t} else {\n\t\tprintErrorAndExit(fmt.Errorf(\"skicka: %s: file already exists; \"+\n\t\t\t\"leaving it alone.\\n\", filename))\n\t}\n}", "func DefaultConfig() *config.Config {\n\treturn &config.Config{\n\t\tDebug: config.Debug{\n\t\t\tAddr: \"127.0.0.1:9147\",\n\t\t\tToken: \"\",\n\t\t\tPprof: false,\n\t\t\tZpages: false,\n\t\t},\n\t\tGRPC: config.GRPCConfig{\n\t\t\tAddr: \"127.0.0.1:9146\",\n\t\t\tNamespace: \"com.owncloud.api\",\n\t\t\tProtocol: \"tcp\",\n\t\t},\n\t\tService: config.Service{\n\t\t\tName: \"auth-basic\",\n\t\t},\n\t\tReva: shared.DefaultRevaConfig(),\n\t\tAuthProvider: \"ldap\",\n\t\tAuthProviders: config.AuthProviders{\n\t\t\tLDAP: config.LDAPProvider{\n\t\t\t\tURI: \"ldaps://localhost:9235\",\n\t\t\t\tCACert: filepath.Join(defaults.BaseDataPath(), \"idm\", \"ldap.crt\"),\n\t\t\t\tInsecure: false,\n\t\t\t\tUserBaseDN: \"ou=users,o=libregraph-idm\",\n\t\t\t\tGroupBaseDN: \"ou=groups,o=libregraph-idm\",\n\t\t\t\tUserScope: \"sub\",\n\t\t\t\tGroupScope: \"sub\",\n\t\t\t\tLoginAttributes: []string{\"uid\"},\n\t\t\t\tUserFilter: \"\",\n\t\t\t\tGroupFilter: \"\",\n\t\t\t\tUserObjectClass: \"inetOrgPerson\",\n\t\t\t\tGroupObjectClass: \"groupOfNames\",\n\t\t\t\tBindDN: \"uid=reva,ou=sysusers,o=libregraph-idm\",\n\t\t\t\tDisableUserMechanism: \"attribute\",\n\t\t\t\tLdapDisabledUsersGroupDN: \"cn=DisabledUsersGroup,ou=groups,o=libregraph-idm\",\n\t\t\t\tIDP: \"https://localhost:9200\",\n\t\t\t\tUserSchema: config.LDAPUserSchema{\n\t\t\t\t\tID: \"ownclouduuid\",\n\t\t\t\t\tMail: \"mail\",\n\t\t\t\t\tDisplayName: \"displayname\",\n\t\t\t\t\tUsername: \"uid\",\n\t\t\t\t\tEnabled: \"ownCloudUserEnabled\",\n\t\t\t\t},\n\t\t\t\tGroupSchema: config.LDAPGroupSchema{\n\t\t\t\t\tID: \"ownclouduuid\",\n\t\t\t\t\tMail: \"mail\",\n\t\t\t\t\tDisplayName: \"cn\",\n\t\t\t\t\tGroupname: \"cn\",\n\t\t\t\t\tMember: \"member\",\n\t\t\t\t},\n\t\t\t},\n\t\t\tJSON: config.JSONProvider{},\n\t\t\tOwnCloudSQL: config.OwnCloudSQLProvider{\n\t\t\t\tDBUsername: \"owncloud\",\n\t\t\t\tDBHost: \"mysql\",\n\t\t\t\tDBPort: 3306,\n\t\t\t\tDBName: \"owncloud\",\n\t\t\t\tIDP: \"https://localhost:9200\",\n\t\t\t\tNobody: 90,\n\t\t\t\tJoinUsername: false,\n\t\t\t\tJoinOwnCloudUUID: false,\n\t\t\t},\n\t\t},\n\t}\n}", "func NewDefaultSourcesWithDefaults(filePaths []string, prefix, delimiter string, defaults Source, additionalSources ...Source) (sources []Source) {\n\tsources = []Source{defaults}\n\n\tsources = append(sources, NewDefaultSources(filePaths, prefix, delimiter, additionalSources...)...)\n\n\treturn sources\n}", "func GetAllConfigs(\n\ttoData *t3cutil.ConfigData,\n\tcfg config.Cfg,\n) ([]t3cutil.ATSConfigFile, error) {\n\tif toData.Server.HostName == nil {\n\t\treturn nil, errors.New(\"server hostname is nil\")\n\t}\n\n\tconfigFiles, warnings, err := MakeConfigFilesList(toData, cfg.Dir, 
cfg.ATSMajorVersion)\n\tlogWarnings(\"generating config files list: \", warnings)\n\tif err != nil {\n\t\treturn nil, errors.New(\"creating meta: \" + err.Error())\n\t}\n\n\tgenTime := time.Now()\n\thdrCommentTxt := makeHeaderComment(*toData.Server.HostName, cfg.AppVersion(), toData.TrafficOpsURL, toData.TrafficOpsAddresses, genTime)\n\n\thasSSLMultiCertConfig := false\n\tconfigs := []t3cutil.ATSConfigFile{}\n\tfor _, fi := range configFiles {\n\t\tif cfg.RevalOnly && fi.Name != atscfg.RegexRevalidateFileName {\n\t\t\tcontinue\n\t\t}\n\t\ttxt, contentType, secure, lineComment, warnings, err := GetConfigFile(toData, fi, hdrCommentTxt, cfg)\n\t\tif err != nil {\n\t\t\treturn nil, errors.New(\"getting config file '\" + fi.Name + \"': \" + err.Error())\n\t\t}\n\t\tif fi.Name == atscfg.SSLMultiCertConfigFileName {\n\t\t\thasSSLMultiCertConfig = true\n\t\t}\n\t\tconfigs = append(configs, t3cutil.ATSConfigFile{\n\t\t\tName: fi.Name,\n\t\t\tPath: fi.Path,\n\t\t\tText: txt,\n\t\t\tSecure: secure,\n\t\t\tContentType: contentType,\n\t\t\tLineComment: lineComment,\n\t\t\tWarnings: warnings,\n\t\t})\n\t}\n\n\tif hasSSLMultiCertConfig {\n\t\tsslConfigs, err := GetSSLCertsAndKeyFiles(toData)\n\t\tif err != nil {\n\t\t\treturn nil, errors.New(\"getting ssl key and cert config files: \" + err.Error())\n\t\t}\n\t\tconfigs = append(configs, sslConfigs...)\n\t}\n\n\treturn configs, nil\n}", "func genConfig(log logging.Logger, newEngineCfg newEngineCfgFn, accessPoints []string, nd *networkDetails, sd *storageDetails, ccs numaCoreCountsMap) (*config.Server, error) {\n\tif nd.engineCount == 0 {\n\t\treturn nil, errors.Errorf(errInvalNrEngines, 1, 0)\n\t}\n\n\tif len(nd.numaIfaces) < nd.engineCount {\n\t\treturn nil, errors.Errorf(errInsufNrIfaces, \"\", nd.engineCount,\n\t\t\tlen(nd.numaIfaces), nd.numaIfaces)\n\t}\n\n\tif len(sd.numaPMems) < nd.engineCount {\n\t\treturn nil, errors.Errorf(errInsufNrPMemGroups, sd.numaPMems, nd.engineCount,\n\t\t\tlen(sd.numaPMems))\n\t}\n\n\t// enforce consistent ssd count across engine configs\n\tminSsds := math.MaxUint32\n\tnumaWithMinSsds := 0\n\tif len(sd.numaSSDs) > 0 {\n\t\tif len(sd.numaSSDs) < nd.engineCount {\n\t\t\treturn nil, errors.New(\"invalid number of ssd groups\") // should never happen\n\t\t}\n\n\t\tfor numa, ssds := range sd.numaSSDs {\n\t\t\tif len(ssds) < minSsds {\n\t\t\t\tminSsds = len(ssds)\n\t\t\t\tnumaWithMinSsds = numa\n\t\t\t}\n\t\t}\n\t}\n\n\tif len(ccs) < nd.engineCount {\n\t\treturn nil, errors.New(\"invalid number of core count groups\") // should never happen\n\t}\n\t// enforce consistent target and helper count across engine configs\n\tnrTgts := ccs[numaWithMinSsds].nrTgts\n\tnrHlprs := ccs[numaWithMinSsds].nrHlprs\n\n\tengines := make([]*engine.Config, 0, nd.engineCount)\n\tfor nn := 0; nn < nd.engineCount; nn++ {\n\t\tengineCfg := newEngineCfg(nn).\n\t\t\tWithTargetCount(nrTgts).\n\t\t\tWithHelperStreamCount(nrHlprs)\n\t\tif len(sd.numaPMems) > 0 {\n\t\t\tengineCfg.WithStorage(\n\t\t\t\tstorage.NewTierConfig().\n\t\t\t\t\tWithStorageClass(storage.ClassDcpm.String()).\n\t\t\t\t\tWithScmMountPoint(fmt.Sprintf(\"%s%d\", scmMountPrefix, nn)).\n\t\t\t\t\tWithScmDeviceList(sd.numaPMems[nn][0]),\n\t\t\t)\n\t\t}\n\t\tif len(sd.numaSSDs) > 0 && len(sd.numaSSDs[nn]) > 0 {\n\t\t\tengineCfg.WithStorage(\n\t\t\t\tstorage.NewTierConfig().\n\t\t\t\t\tWithStorageClass(storage.ClassNvme.String()).\n\t\t\t\t\tWithBdevDeviceList(sd.numaSSDs[nn][:minSsds]...),\n\t\t\t)\n\t\t}\n\n\t\tpnn := uint(nn)\n\t\tengineCfg.PinnedNumaNode = &pnn\n\t\tengineCfg.Fabric = 
engine.FabricConfig{\n\t\t\tProvider: nd.numaIfaces[nn].Provider,\n\t\t\tInterface: nd.numaIfaces[nn].Device,\n\t\t\tInterfacePort: int(defaultFiPort + (nn * defaultFiPortInterval)),\n\t\t}\n\n\t\tengines = append(engines, engineCfg)\n\t}\n\n\tnumTargets := 0\n\tfor _, e := range engines {\n\t\tnumTargets += e.TargetCount\n\t}\n\n\treqHugePages, err := common.CalcMinHugePages(sd.hugePageSize, numTargets)\n\tif err != nil {\n\t\treturn nil, errors.Wrap(err, \"unable to calculate minimum hugepages\")\n\t}\n\n\tcfg := config.DefaultServer().\n\t\tWithAccessPoints(accessPoints...).\n\t\tWithFabricProvider(engines[0].Fabric.Provider).\n\t\tWithEngines(engines...).\n\t\tWithControlLogFile(defaultControlLogFile).\n\t\tWithNrHugePages(reqHugePages)\n\n\treturn cfg, cfg.Validate(log, sd.hugePageSize, nil)\n}", "func (a *AdminKubeConfigCABundle) Generate(deps asset.Parents) error {\n\tvar certs []CertInterface\n\tfor _, asset := range a.Dependencies() {\n\t\tdeps.Get(asset)\n\t\tcerts = append(certs, asset.(CertInterface))\n\t}\n\treturn a.CertBundle.Generate(\"admin-kubeconfig-ca-bundle\", certs...)\n}", "func TestDefaultAuthorizer_DefaultACLs(t *testing.T) {\n\tctx, env := repotesting.NewEnvironment(t, repotesting.FormatNotImportant)\n\n\tfor _, e := range auth.DefaultACLs {\n\t\trequire.NoError(t, acl.AddACL(ctx, env.RepositoryWriter, e, false))\n\t}\n\n\tverifyLegacyAuthorizer(ctx, t, env.Repository, auth.DefaultAuthorizer())\n}", "func EnsureDefaults(cfg *config.Config) {\n\t// provide with defaults for shared logging, since we need a valid destination address for \"envdecode\".\n\tif cfg.Log == nil && cfg.Commons != nil && cfg.Commons.Log != nil {\n\t\tcfg.Log = &config.Log{\n\t\t\tLevel: cfg.Commons.Log.Level,\n\t\t\tPretty: cfg.Commons.Log.Pretty,\n\t\t\tColor: cfg.Commons.Log.Color,\n\t\t\tFile: cfg.Commons.Log.File,\n\t\t}\n\t} else if cfg.Log == nil {\n\t\tcfg.Log = &config.Log{}\n\t}\n\t// provide with defaults for shared tracing, since we need a valid destination address for \"envdecode\".\n\tif cfg.Tracing == nil && cfg.Commons != nil && cfg.Commons.Tracing != nil {\n\t\tcfg.Tracing = &config.Tracing{\n\t\t\tEnabled: cfg.Commons.Tracing.Enabled,\n\t\t\tType: cfg.Commons.Tracing.Type,\n\t\t\tEndpoint: cfg.Commons.Tracing.Endpoint,\n\t\t\tCollector: cfg.Commons.Tracing.Collector,\n\t\t}\n\t} else if cfg.Tracing == nil {\n\t\tcfg.Tracing = &config.Tracing{}\n\t}\n\n\tif cfg.Reva == nil && cfg.Commons != nil {\n\t\tcfg.Reva = structs.CopyOrZeroValue(cfg.Commons.Reva)\n\t}\n\n\tif cfg.MachineAuthAPIKey == \"\" && cfg.Commons != nil && cfg.Commons.MachineAuthAPIKey != \"\" {\n\t\tcfg.MachineAuthAPIKey = cfg.Commons.MachineAuthAPIKey\n\t}\n}", "func NewCertPool(CAFiles []string) (*x509.CertPool, error) {\n\tcertPool := x509.NewCertPool()\n\n\tfor _, CAFile := range CAFiles {\n\t\tpemByte, err := ioutil.ReadFile(CAFile)\n\t\tif err != nil {\n\t\t\treturn nil, err\n\t\t}\n\n\t\tfor {\n\t\t\tvar block *pem.Block\n\t\t\tblock, pemByte = pem.Decode(pemByte)\n\t\t\tif block == nil {\n\t\t\t\tbreak\n\t\t\t}\n\t\t\tcert, err := x509.ParseCertificate(block.Bytes)\n\t\t\tif err != nil {\n\t\t\t\treturn nil, err\n\t\t\t}\n\n\t\t\tcertPool.AddCert(cert)\n\t\t}\n\t}\n\n\treturn certPool, nil\n}", "func NewSecurityKeyManagerKeyServersCreateDefault(code int) *SecurityKeyManagerKeyServersCreateDefault {\n\treturn &SecurityKeyManagerKeyServersCreateDefault{\n\t\t_statusCode: code,\n\t}\n}", "func defaultFileNames() map[string]struct{} {\n\treturn map[string]struct{}{\n\t\tzkOperatorFile: 
{},\n\t\tzkOperatorVersionFile: {},\n\t\tzkPod2File: {},\n\t\tzkLog2Container1File: {},\n\t\tzkServicesFile: {},\n\t\tzkPod0File: {},\n\t\tzkLog0Container1File: {},\n\t\tzkLog0Container2File: {},\n\t\tzkInstanceFile: {},\n\t\tzkPod1File: {},\n\t\tzkLog1Container1File: {},\n\t\tzkStatefulSetsFile: {},\n\t\tchildOperatorFile: {},\n\t\tchildOperatorVersionFile: {},\n\t\tchildInstanceFile: {},\n\t\tversionFile: {},\n\t\tkmServicesFile: {},\n\t\tkmPodFile: {},\n\t\tkmLogFile: {},\n\t\tkmServiceAccountsFile: {},\n\t\tkmStatefulSetsFile: {},\n\t\tsettingsFile: {},\n\t}\n}", "func CreateAppConfigmap(name, trainingType, namespace, configFileName, appInfoFileName, chartName, chartVersion string) (err error) {\n\tif _, err = os.Stat(configFileName); os.IsNotExist(err) {\n\t\treturn err\n\t}\n\n\tif _, err = os.Stat(appInfoFileName); os.IsNotExist(err) {\n\t\treturn err\n\t}\n\n\targs := []string{\"create\", \"configmap\", fmt.Sprintf(\"%s-%s\", name, trainingType),\n\t\t\"--namespace\", namespace,\n\t\tfmt.Sprintf(\"--from-file=%s=%s\", \"values\", configFileName),\n\t\tfmt.Sprintf(\"--from-file=%s=%s\", \"app\", appInfoFileName),\n\t\tfmt.Sprintf(\"--from-literal=%s=%s\", chartName, chartVersion)}\n\t// \"--overrides='{\\\"metadata\\\":{\\\"label\\\":\\\"createdBy\\\": \\\"arena\\\"}}'\"}\n\tout, err := kubectl(args)\n\n\tfmt.Printf(\"%s\", string(out))\n\tif err != nil {\n\t\tlog.Debugf(\"Failed to execute %s, %v with %v\", \"kubectl\", args, err)\n\t}\n\n\treturn err\n}", "func configBuildAllConfigs(svc *route53.Route53, path string) {\n\n\t// Make sure the path exists to the best of our ability\n\tif _, err := os.Stat(path); os.IsNotExist(err) {\n\t\tos.Mkdir(path, os.ModeDir)\n\t}\n\n\tzones, err := getHostedZones(svc)\n\tif err != nil {\n\t\tlog.Fatalf(\"error obtaining hosted zones list with error: %s\", err)\n\t}\n\n\t// Iterate over all the hosted zones in the account\n\tfor _, val := range zones {\n\n\t\tvar config route53Zone\n\t\tzoneID := aws.StringValue(val.Id)\n\t\tzoneName := aws.StringValue(val.Name)\n\n\t\t// remove the /hostedzone/ path if it's there\n\t\tif strings.HasPrefix(zoneID, \"/hostedzone/\") {\n\t\t\tzoneID = strings.TrimPrefix(zoneID, \"/hostedzone/\")\n\t\t}\n\n\t\trrsets, err := listAllRecordSets(svc, zoneID)\n\t\tif err != nil {\n\t\t\tlog.Fatalf(\"Error obtaining recordset for hosted zoneid %s with error: %s\", zoneID, err)\n\t\t}\n\n\t\tconfig.Name = zoneName\n\n\t\tfmt.Println(\"*****************************************\")\n\t\tfmt.Printf(\"Name: %s\\n\", zoneName)\n\t\tfmt.Println(\"*****************************************\")\n\n\t\tfor _, rrset := range rrsets {\n\n\t\t\tgetRoute53ZoneConfig(&config, rrset)\n\n\t\t}\n\n\t\t// Marshal data structure into YAML file\n\t\tyamlFile, err := yaml.Marshal(config)\n\t\tif err != nil {\n\t\t\tlog.Fatalf(\"Error serializing config struct to YAML with error: %s\", err)\n\t\t}\n\n\t\t// Build the file path\n\t\tfilePath := path + string(os.PathSeparator) + strings.TrimSuffix(zoneName, \".\") + \".yaml\"\n\n\t\t// Write the file out\n\t\terr = ioutil.WriteFile(filePath, yamlFile, 0644)\n\t\tif err != nil {\n\t\t\tlog.Fatalf(\"Error generating configuration file %s with error %s\", filePath, err)\n\t\t}\n\n\t\t// Display some useful information\n\t\tfmt.Println(fmt.Sprintf(\"Records: %d\", len(config.ResourceRecordSets)))\n\t\tfmt.Println(fmt.Sprintf(\"Status: Created file %s\", filePath))\n\t}\n}", "func makeTestConfig(amount int) Config {\n\tvar conf Config\n\tconf.Cluster = make([]ClusterConfig, amount, amount)\n\tfor i := 
0; i < amount; i++ {\n\t\tconf.Cluster[i].Name = \"cluster\" + strconv.Itoa(i)\n\t\tconf.Cluster[i].Address = \"10.0.0.\" + strconv.Itoa(i%255)\n\t\tconf.Cluster[i].ProtocolVersion = \"v1\"\n\t}\n\treturn conf\n}", "func CreateConfigMap(name string) *corev1.ConfigMap {\n\treturn &corev1.ConfigMap{\n\t\tTypeMeta: genTypeMeta(gvk.ConfigMap),\n\t\tObjectMeta: genObjectMeta(name, true),\n\t}\n}", "func newCA() {\n\terr := os.MkdirAll(shrubCA, 0700)\n\n\tpriv, err := ecdsa.GenerateKey(elliptic.P256(), rand.Reader)\n\tfatalIfErr(err, \"failed to generate the CA key\")\n\tpub := priv.PublicKey\n\n\tserialNumberLimit := new(big.Int).Lsh(big.NewInt(1), 128)\n\tserialNumber, err := rand.Int(rand.Reader, serialNumberLimit)\n\tfatalIfErr(err, \"failed to generate serial number\")\n\n\tspkiASN1, err := x509.MarshalPKIXPublicKey(&pub)\n\tfatalIfErr(err, \"failed to encode public key\")\n\n\tvar spki struct {\n\t\tAlgorithm pkix.AlgorithmIdentifier\n\t\tSubjectPublicKey asn1.BitString\n\t}\n\t_, err = asn1.Unmarshal(spkiASN1, &spki)\n\tfatalIfErr(err, \"failed to decode public key\")\n\n\tskid := sha1.Sum(spki.SubjectPublicKey.Bytes)\n\n\ttpl := &x509.Certificate{\n\t\tSerialNumber: serialNumber,\n\t\tSubject: pkix.Name{\n\t\t\tOrganization: []string{\"shrubgateway local CA\"},\n\t\t\tOrganizationalUnit: []string{userAndHostname},\n\n\t\t\t// The CommonName is required by iOS to show the certificate in the\n\t\t\t// \"Certificate Trust Settings\" menu.\n\t\t\t// https://github.com/FiloSottile/mkcert/issues/47\n\t\t\tCommonName: \"shrubgateway \" + userAndHostname,\n\t\t},\n\t\tSubjectKeyId: skid[:],\n\n\t\tNotAfter: time.Now().AddDate(10, 0, 0),\n\t\tNotBefore: time.Now().AddDate(0, 0, -1),\n\n\t\tKeyUsage: x509.KeyUsageCertSign,\n\n\t\tBasicConstraintsValid: true,\n\t\tIsCA: true,\n\t\tMaxPathLenZero: true,\n\n\t\t// for security reasons the local CA generated is restricted\n\t\t// to subdomains of \".localhost\"\n\t\tPermittedDNSDomains: []string{\".localhost\"},\n\t}\n\n\tcert, err := x509.CreateCertificate(rand.Reader, tpl, tpl, &pub, priv)\n\tfatalIfErr(err, \"failed to generate CA certificate\")\n\n\tprivDER, err := x509.MarshalPKCS8PrivateKey(priv)\n\tfatalIfErr(err, \"failed to encode CA key\")\n\terr = ioutil.WriteFile(filepath.Join(shrubCA, rootKeyName), pem.EncodeToMemory(\n\t\t&pem.Block{Type: \"PRIVATE KEY\", Bytes: privDER}), 0400)\n\tfatalIfErr(err, \"failed to save CA key\")\n\n\terr = ioutil.WriteFile(filepath.Join(shrubCA, rootName), pem.EncodeToMemory(\n\t\t&pem.Block{Type: \"CERTIFICATE\", Bytes: cert}), 0644)\n\tfatalIfErr(err, \"failed to save CA key\")\n\n\tlog.Printf(\"Created a new local CA at \\\"%s\\\"\\n\", shrubCA)\n}", "func (c *Client) CreateAutosalesConfigLines(acls []*AutosalesConfigLine) ([]int64, error) {\n\tvar vv []interface{}\n\tfor _, v := range acls {\n\t\tvv = append(vv, v)\n\t}\n\treturn c.Create(AutosalesConfigLineModel, vv)\n}", "func (f Factory) CreateDefaultConfig() configmodels.Exporter {\n\tcfg := f.Wrapped.CreateDefaultConfig().(*kafkaexporter.Config)\n\n\t// InitFromViper fails if certain fields are not set. 
Setting them here\n\t// to prevent the process from exiting.\n\tf.Viper.Set(\"kafka.producer.required-acks\", \"local\")\n\tf.Viper.Set(\"kafka.producer.compression\", \"none\")\n\n\topts := &kafka.Options{}\n\topts.InitFromViper(f.Viper)\n\n\tcfg.Encoding = mustOtelEncodingForJaegerEncoding(opts.Encoding)\n\tcfg.Topic = opts.Topic\n\tcfg.Brokers = opts.Config.Brokers\n\tcfg.ProtocolVersion = opts.Config.ProtocolVersion\n\n\tif opts.Config.Authentication == \"kerberos\" {\n\t\tcfg.Authentication.Kerberos = &kafkaexporter.KerberosConfig{\n\t\t\tServiceName: opts.Config.Kerberos.ServiceName,\n\t\t\tRealm: opts.Config.Kerberos.Realm,\n\t\t\tUseKeyTab: opts.Config.Kerberos.UseKeyTab,\n\t\t\tUsername: opts.Config.Kerberos.Username,\n\t\t\tPassword: opts.Config.Kerberos.Password,\n\t\t\tConfigPath: opts.Config.Kerberos.ConfigPath,\n\t\t\tKeyTabPath: opts.Config.Kerberos.KeyTabPath,\n\t\t}\n\t}\n\n\tif opts.Config.Authentication == \"plaintext\" {\n\t\tcfg.Authentication.PlainText = &kafkaexporter.PlainTextConfig{\n\t\t\tUsername: opts.Config.PlainText.Username,\n\t\t\tPassword: opts.Config.PlainText.Password,\n\t\t}\n\t}\n\n\tif opts.Config.Authentication == \"tls\" && opts.Config.TLS.Enabled {\n\t\tcfg.Authentication.TLS = &configtls.TLSClientSetting{\n\t\t\tTLSSetting: configtls.TLSSetting{\n\t\t\t\tCAFile: opts.Config.TLS.CAPath,\n\t\t\t\tCertFile: opts.Config.TLS.CertPath,\n\t\t\t\tKeyFile: opts.Config.TLS.KeyPath,\n\t\t\t},\n\t\t\tServerName: opts.Config.TLS.ServerName,\n\t\t\tInsecure: opts.Config.TLS.SkipHostVerify,\n\t\t}\n\t}\n\n\treturn cfg\n}", "func DefaultConfig() *Config {\n\thostname, err := os.Hostname()\n\tif err != nil {\n\t\tpanic(err)\n\t}\n\n\tconf := &Config{\n\t\tBuild: version.Version,\n\t\tDatacenter: DefaultDC,\n\t\tNodeName: hostname,\n\t\tRPCAddr: DefaultRPCAddr,\n\t\tRaftConfig: raft.DefaultConfig(),\n\t\tSerfLANConfig: lib.SerfDefaultConfig(),\n\t\tSerfWANConfig: lib.SerfDefaultConfig(),\n\t\tSerfFloodInterval: 60 * time.Second,\n\t\tReconcileInterval: 60 * time.Second,\n\t\tProtocolVersion: ProtocolVersion2Compatible,\n\t\tACLPolicyTTL: 30 * time.Second,\n\t\tACLTokenTTL: 30 * time.Second,\n\t\tACLDefaultPolicy: \"allow\",\n\t\tACLDownPolicy: \"extend-cache\",\n\t\tACLReplicationRate: 1,\n\t\tACLReplicationBurst: 5,\n\t\tACLReplicationApplyLimit: 100, // ops / sec\n\t\tTombstoneTTL: 15 * time.Minute,\n\t\tTombstoneTTLGranularity: 30 * time.Second,\n\t\tSessionTTLMin: 10 * time.Second,\n\n\t\t// These are tuned to provide a total throughput of 128 updates\n\t\t// per second. 
If you update these, you should update the client-\n\t\t// side SyncCoordinateRateTarget parameter accordingly.\n\t\tCoordinateUpdatePeriod: 5 * time.Second,\n\t\tCoordinateUpdateBatchSize: 128,\n\t\tCoordinateUpdateMaxBatches: 5,\n\n\t\tRPCRate: rate.Inf,\n\t\tRPCMaxBurst: 1000,\n\n\t\tTLSMinVersion: \"tls10\",\n\n\t\t// TODO (slackpad) - Until #3744 is done, we need to keep these\n\t\t// in sync with agent/config/default.go.\n\t\tAutopilotConfig: &autopilot.Config{\n\t\t\tCleanupDeadServers: true,\n\t\t\tLastContactThreshold: 200 * time.Millisecond,\n\t\t\tMaxTrailingLogs: 250,\n\t\t\tServerStabilizationTime: 10 * time.Second,\n\t\t},\n\n\t\tCAConfig: &structs.CAConfiguration{\n\t\t\tProvider: \"consul\",\n\t\t\tConfig: map[string]interface{}{\n\t\t\t\t\"RotationPeriod\": \"2160h\",\n\t\t\t\t\"LeafCertTTL\": \"72h\",\n\t\t\t},\n\t\t},\n\n\t\tServerHealthInterval: 2 * time.Second,\n\t\tAutopilotInterval: 10 * time.Second,\n\t}\n\n\t// Increase our reap interval to 3 days instead of 24h.\n\tconf.SerfLANConfig.ReconnectTimeout = 3 * 24 * time.Hour\n\tconf.SerfWANConfig.ReconnectTimeout = 3 * 24 * time.Hour\n\n\t// WAN Serf should use the WAN timing, since we are using it\n\t// to communicate between DC's\n\tconf.SerfWANConfig.MemberlistConfig = memberlist.DefaultWANConfig()\n\n\t// Ensure we don't have port conflicts\n\tconf.SerfLANConfig.MemberlistConfig.BindPort = DefaultLANSerfPort\n\tconf.SerfWANConfig.MemberlistConfig.BindPort = DefaultWANSerfPort\n\n\t// Raft protocol version 3 only works with other Consul servers running\n\t// 0.8.0 or later.\n\tconf.RaftConfig.ProtocolVersion = 3\n\n\t// Disable shutdown on removal\n\tconf.RaftConfig.ShutdownOnRemove = false\n\n\t// Check every 5 seconds to see if there are enough new entries for a snapshot, can be overridden\n\tconf.RaftConfig.SnapshotInterval = 30 * time.Second\n\n\t// Snapshots are created every 16384 entries by default, can be overridden\n\tconf.RaftConfig.SnapshotThreshold = 16384\n\n\treturn conf\n}", "func (Scanner) expoDefaultConfigs() (models.BitriseConfigMap, error) {\n\tconfigMap := models.BitriseConfigMap{}\n\n\t// primary workflow\n\tconfigBuilder := models.NewDefaultConfigBuilder()\n\n\tconfigBuilder.AppendStepListItemsTo(models.PrimaryWorkflowID, steps.DefaultPrepareStepList(false)...)\n\tconfigBuilder.AppendStepListItemsTo(models.PrimaryWorkflowID, steps.YarnStepListItem(envmanModels.EnvironmentItemModel{workDirInputKey: \"$WORKDIR\"}, envmanModels.EnvironmentItemModel{\"command\": \"install\"}))\n\tconfigBuilder.AppendStepListItemsTo(models.PrimaryWorkflowID, steps.YarnStepListItem(envmanModels.EnvironmentItemModel{workDirInputKey: \"$WORKDIR\"}, envmanModels.EnvironmentItemModel{\"command\": \"test\"}))\n\tconfigBuilder.AppendStepListItemsTo(models.PrimaryWorkflowID, steps.DefaultDeployStepList(false)...)\n\n\t// deploy workflow\n\tconfigBuilder.SetWorkflowDescriptionTo(models.DeployWorkflowID, deployWorkflowDescription)\n\tconfigBuilder.AppendStepListItemsTo(models.DeployWorkflowID, steps.DefaultPrepareStepList(false)...)\n\tconfigBuilder.AppendStepListItemsTo(models.DeployWorkflowID, steps.YarnStepListItem(envmanModels.EnvironmentItemModel{workDirInputKey: \"$WORKDIR\"}, envmanModels.EnvironmentItemModel{\"command\": \"install\"}))\n\n\tconfigBuilder.AppendStepListItemsTo(models.DeployWorkflowID, steps.ScriptSteplistItem(expoBareAddIdentiferScriptTitle,\n\t\tenvmanModels.EnvironmentItemModel{\"content\": expoBareAddIdentifiersScript(filepath.Join(\".\", expoAppJSONName), androidPackageEnvKey, 
iosBundleIDEnvKey)},\n\t))\n\n\tconfigBuilder.AppendStepListItemsTo(models.DeployWorkflowID, steps.ExpoDetachStepListItem(\n\t\tenvmanModels.EnvironmentItemModel{\"project_path\": \"$WORKDIR\"},\n\t))\n\n\t// android build\n\tconfigBuilder.AppendStepListItemsTo(models.DeployWorkflowID, steps.InstallMissingAndroidToolsStepListItem(\n\t\tenvmanModels.EnvironmentItemModel{android.GradlewPathInputKey: \"$\" + android.ProjectLocationInputEnvKey + \"/gradlew\"},\n\t))\n\tconfigBuilder.AppendStepListItemsTo(models.DeployWorkflowID, steps.AndroidBuildStepListItem(\n\t\tenvmanModels.EnvironmentItemModel{android.ProjectLocationInputKey: \"$\" + android.ProjectLocationInputEnvKey},\n\t\tenvmanModels.EnvironmentItemModel{android.ModuleInputKey: \"$\" + android.ModuleInputEnvKey},\n\t\tenvmanModels.EnvironmentItemModel{android.VariantInputKey: \"$\" + android.VariantInputEnvKey},\n\t))\n\n\t// ios build\n\tconfigBuilder.AppendStepListItemsTo(models.DeployWorkflowID, steps.CertificateAndProfileInstallerStepListItem())\n\tconfigBuilder.AppendStepListItemsTo(models.DeployWorkflowID, steps.XcodeArchiveStepListItem(\n\t\tenvmanModels.EnvironmentItemModel{ios.ProjectPathInputKey: \"$\" + ios.ProjectPathInputEnvKey},\n\t\tenvmanModels.EnvironmentItemModel{ios.SchemeInputKey: \"$\" + ios.SchemeInputEnvKey},\n\t\tenvmanModels.EnvironmentItemModel{ios.ExportMethodInputKey: \"$\" + ios.ExportMethodInputEnvKey},\n\t\tenvmanModels.EnvironmentItemModel{ios.ConfigurationInputKey: \"Release\"},\n\t))\n\n\tconfigBuilder.AppendStepListItemsTo(models.DeployWorkflowID, steps.DefaultDeployStepList(false)...)\n\n\tbitriseDataModel, err := configBuilder.Generate(scannerName)\n\tif err != nil {\n\t\treturn models.BitriseConfigMap{}, err\n\t}\n\n\tdata, err := yaml.Marshal(bitriseDataModel)\n\tif err != nil {\n\t\treturn models.BitriseConfigMap{}, err\n\t}\n\n\tconfigMap[expoDefaultConfigName] = string(data)\n\n\treturn configMap, nil\n}", "func configDefault(config ...Config) Config {\n\t// Return default config if nothing provided\n\tif len(config) < 1 {\n\t\treturn ConfigDefault\n\t}\n\n\t// Override default config\n\tcfg := config[0]\n\n\t// Set default values\n\n\tif cfg.Next == nil {\n\t\tcfg.Next = ConfigDefault.Next\n\t}\n\n\tif cfg.Lifetime.Nanoseconds() == 0 {\n\t\tcfg.Lifetime = ConfigDefault.Lifetime\n\t}\n\n\tif cfg.KeyHeader == \"\" {\n\t\tcfg.KeyHeader = ConfigDefault.KeyHeader\n\t}\n\tif cfg.KeyHeaderValidate == nil {\n\t\tcfg.KeyHeaderValidate = ConfigDefault.KeyHeaderValidate\n\t}\n\n\tif cfg.KeepResponseHeaders != nil && len(cfg.KeepResponseHeaders) == 0 {\n\t\tcfg.KeepResponseHeaders = ConfigDefault.KeepResponseHeaders\n\t}\n\n\tif cfg.Lock == nil {\n\t\tcfg.Lock = NewMemoryLock()\n\t}\n\n\tif cfg.Storage == nil {\n\t\tcfg.Storage = memory.New(memory.Config{\n\t\t\tGCInterval: cfg.Lifetime / 2, // Half the lifetime interval\n\t\t})\n\t}\n\n\treturn cfg\n}", "func (g *Gemini) GenerateDefaultSubscriptions() ([]stream.ChannelSubscription, error) {\n\t// See gemini_types.go for more subscription/candle vars\n\tvar channels = []string{\n\t\tmarketDataLevel2,\n\t\tcandles1d,\n\t}\n\n\tpairs, err := g.GetEnabledPairs(asset.Spot)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\n\tvar subscriptions []stream.ChannelSubscription\n\tfor x := range channels {\n\t\tfor y := range pairs {\n\t\t\tsubscriptions = append(subscriptions, stream.ChannelSubscription{\n\t\t\t\tChannel: channels[x],\n\t\t\t\tCurrency: pairs[y],\n\t\t\t\tAsset: asset.Spot,\n\t\t\t})\n\t\t}\n\t}\n\treturn subscriptions, nil\n}", "func 
DefaultClientResources(target, nodeID, host string, port uint32) UpdateOptions {\n\tconst routeConfigName = \"route\"\n\tconst clusterName = \"cluster\"\n\tconst endpointsName = \"endpoints\"\n\n\treturn UpdateOptions{\n\t\tNodeID: nodeID,\n\t\tListeners: []*v3listenerpb.Listener{DefaultListener(target, routeConfigName)},\n\t\tRoutes: []*v3routepb.RouteConfiguration{DefaultRouteConfig(routeConfigName, target, clusterName)},\n\t\tClusters: []*v3clusterpb.Cluster{DefaultCluster(clusterName, endpointsName)},\n\t\tEndpoints: []*v3endpointpb.ClusterLoadAssignment{DefaultEndpoint(endpointsName, host, port)},\n\t}\n}", "func getDefaultKubeConfigFiles() []string {\n\tkc := os.Getenv(\"KUBECONFIG\")\n\tif kc != \"\" {\n\t\treturn strings.Split(kc, strconv.QuoteRune(filepath.ListSeparator))\n\t}\n\tvar homeDir string\n\tu, err := user.Current()\n\tif err != nil {\n\t\tlog.Printf(\"error getting current user, using $HOME, %v\\n\", err)\n\t\thomeDir = os.Getenv(\"HOME\")\n\t} else {\n\t\thomeDir = u.HomeDir\n\t}\n\tif homeDir != \"\" {\n\t\treturn []string{filepath.Join(homeDir, \".kube\", \"config\")}\n\t}\n\treturn nil\n}", "func SetDefaultAttributes(attrOriginal map[string]string) (map[string]string, error) {\n\tattr := make(map[string]string)\n\tfor k, v := range attrOriginal {\n\t\tattr[k] = v\n\t}\n\n\tsetDefaultIfEmpty(attr, csiapi.IssuerKindKey, cmapi.IssuerKind)\n\tsetDefaultIfEmpty(attr, csiapi.IssuerGroupKey, certmanager.GroupName)\n\n\tsetDefaultIfEmpty(attr, csiapi.IsCAKey, \"false\")\n\tsetDefaultIfEmpty(attr, csiapi.DurationKey, cmapi.DefaultCertificateDuration.String())\n\n\tsetDefaultIfEmpty(attr, csiapi.CAFileKey, \"ca.crt\")\n\tsetDefaultIfEmpty(attr, csiapi.CertFileKey, \"tls.crt\")\n\tsetDefaultIfEmpty(attr, csiapi.KeyFileKey, \"tls.key\")\n\n\tsetDefaultIfEmpty(attr, csiapi.KeyUsagesKey, strings.Join([]string{string(cmapi.UsageDigitalSignature), string(cmapi.UsageKeyEncipherment)}, \",\"))\n\n\treturn attr, nil\n}", "func ReadConfigFiles(filenames []string) (*Configuration, error) {\n\tconfig := DefaultConfiguration()\n\tfor _, filename := range filenames {\n\t\tif err := readConfigFile(config, filename); err != nil {\n\t\t\treturn config, err\n\t\t}\n\t}\n\t// Set default values for slices. 
These add rather than overwriting so we can't set\n\t// them upfront as we would with other config values.\n\tsetDefault(&config.Please.BuildFileName, []string{\"BUILD\"})\n\tsetDefault(&config.Build.Path, []string{\"/usr/local/bin\", \"/usr/bin\", \"/bin\"})\n\tsetDefault(&config.Cover.FileExtension, []string{\".go\", \".py\", \".java\", \".js\", \".cc\", \".h\", \".c\"})\n\tsetDefault(&config.Cover.ExcludeExtension, []string{\".pb.go\", \"_pb2.py\", \".pb.cc\", \".pb.h\", \"_test.py\", \"_test.go\", \"_pb.go\", \"_bindata.go\", \"_test_main.cc\"})\n\tsetDefault(&config.Proto.Language, []string{\"cc\", \"py\", \"java\", \"go\", \"js\"})\n\n\t// Default values for these guys depend on config.Please.Location.\n\tdefaultPath(&config.Cache.DirCacheCleaner, config.Please.Location, \"cache_cleaner\")\n\tdefaultPath(&config.Go.TestTool, config.Please.Location, \"please_go_test\")\n\tdefaultPath(&config.Python.PexTool, config.Please.Location, \"please_pex\")\n\tdefaultPath(&config.Java.JavacWorker, config.Please.Location, \"javac_worker\")\n\tdefaultPath(&config.Java.JarCatTool, config.Please.Location, \"jarcat\")\n\tdefaultPath(&config.Java.PleaseMavenTool, config.Please.Location, \"please_maven\")\n\tdefaultPath(&config.Java.JUnitRunner, config.Please.Location, \"junit_runner.jar\")\n\n\tif (config.Cache.RpcPrivateKey == \"\") != (config.Cache.RpcPublicKey == \"\") {\n\t\treturn config, fmt.Errorf(\"Must pass both rpcprivatekey and rpcpublickey properties for cache\")\n\t}\n\treturn config, nil\n}", "func DefaultConfig() *Config {\n\tconfig := &Config{\n\t\tOwnerProcName: \"\",\n\t\tOwnerReleaseInterval: 1 * time.Second,\n\t\tOwnerReleaseTimeout: 5 * time.Minute,\n\t\tSourcePattern: \"/tmp/rotate/source\",\n\t\tTempStorage: \"/tmp/rotate/tmp\",\n\t\tArchiveStorage: \"/tmp/rotate/archive\",\n\t\tFinalizeCommands: []string{},\n\t}\n\treturn config\n}", "func NewConfig(essentialfiles, commonfiles []string) (*Config, error) {\n\t// created config factory object\n\tfactory, err := goarchaius.NewConfigFactory(lager.Logger)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\tfactory.DeInit()\n\tfactory.Init()\n\n\tfiles := make([]string, 0)\n\t// created file source object\n\tfileSource := filesource.NewYamlConfigurationSource()\n\t// adding all files with file source\n\tfor _, v := range essentialfiles {\n\t\tif err := fileSource.AddFileSource(v, filesource.DefaultFilePriority); err != nil {\n\t\t\tlager.Logger.Errorf(err, \"add file source error.\")\n\t\t\treturn nil, err\n\t\t}\n\t\tfiles = append(files, v)\n\t}\n\tfor _, v := range commonfiles {\n\t\t_, err := os.Stat(v)\n\t\tif os.IsNotExist(err) {\n\t\t\tlager.Logger.Infof(\"[%s] not exist\", v)\n\t\t\tcontinue\n\t\t}\n\t\tif err := fileSource.AddFileSource(v, filesource.DefaultFilePriority); err != nil {\n\t\t\tlager.Logger.Infof(\"%v\", err)\n\t\t\treturn nil, err\n\t\t}\n\t\tfiles = append(files, v)\n\t}\n\n\terr = factory.AddSource(fileSource)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\tconf := &Config{\n\t\tConfigFiles: files,\n\t\tConfigFactory: factory,\n\t}\n\n\teventHandler := EventListener{\n\t\tName: \"EventHandler\",\n\t\tFactory: factory,\n\t}\n\n\tfactory.RegisterListener(eventHandler, \"a*\")\n\n\tlager.Logger.Infof(\"Configuration files: %s\", strings.Join(files, \", \"))\n\treturn conf, nil\n}", "func CreateInitialConfig(kubeconfigPath string) error {\n\tparentDir := filepath.Dir(kubeconfigPath)\n\tif err := os.MkdirAll(parentDir, 0755); err != nil {\n\t\treturn errors.WithStackTrace(err)\n\t}\n\n\tif err := 
ioutil.WriteFile(kubeconfigPath, []byte(INITIAL_BLANK_KUBECONFIG), 0644); err != nil {\n\t\treturn errors.WithStackTrace(err)\n\t}\n\treturn nil\n}", "func configureCAVolumes(d *appsv1.Deployment, tenants *lokiv1.TenantsSpec) error {\n\tif tenants.Authentication == nil {\n\t\treturn nil // nothing to do\n\t}\n\n\tmountCAConfigMap := func(container *corev1.Container, volumes *[]corev1.Volume, tennantName, configmapName string) {\n\t\tcontainer.VolumeMounts = append(container.VolumeMounts, corev1.VolumeMount{\n\t\t\tName: tenantCAVolumeName(tennantName),\n\t\t\tMountPath: tenantCADir(tennantName),\n\t\t})\n\t\t*volumes = append(*volumes, corev1.Volume{\n\t\t\tName: tenantCAVolumeName(tennantName),\n\t\t\tVolumeSource: corev1.VolumeSource{\n\t\t\t\tConfigMap: &corev1.ConfigMapVolumeSource{\n\t\t\t\t\tLocalObjectReference: corev1.LocalObjectReference{\n\t\t\t\t\t\tName: configmapName,\n\t\t\t\t\t},\n\t\t\t\t},\n\t\t\t},\n\t\t})\n\t}\n\n\tvar gwIndex int\n\tfor i, c := range d.Spec.Template.Spec.Containers {\n\t\tif c.Name == gatewayContainerName {\n\t\t\tgwIndex = i\n\t\t\tbreak\n\t\t}\n\t}\n\n\tgwContainer := d.Spec.Template.Spec.Containers[gwIndex].DeepCopy()\n\tgwArgs := gwContainer.Args\n\tgwVolumes := d.Spec.Template.Spec.Volumes\n\n\tmTLS := false\n\tfor _, tenant := range tenants.Authentication {\n\t\tswitch {\n\t\tcase tenant.OIDC != nil:\n\t\t\tif tenant.OIDC.IssuerCA != nil {\n\t\t\t\tmountCAConfigMap(gwContainer, &gwVolumes, tenant.TenantName, tenant.OIDC.IssuerCA.CA)\n\t\t\t}\n\t\tcase tenant.MTLS != nil:\n\t\t\tmountCAConfigMap(gwContainer, &gwVolumes, tenant.TenantName, tenant.MTLS.CA.CA)\n\t\t\tmTLS = true\n\t\t}\n\t}\n\n\tif mTLS {\n\t\t// Remove old tls.client-auth-type\n\t\tfor i, arg := range gwArgs {\n\t\t\tif strings.HasPrefix(arg, \"--tls.client-auth-type=\") {\n\t\t\t\tgwArgs = append(gwArgs[:i], gwArgs[i+1:]...)\n\t\t\t\tbreak\n\t\t\t}\n\t\t}\n\t\tgwArgs = append(gwArgs, \"--tls.client-auth-type=RequestClientCert\")\n\t\tgwContainer.Args = gwArgs\n\t}\n\n\tp := corev1.PodSpec{\n\t\tContainers: []corev1.Container{\n\t\t\t*gwContainer,\n\t\t},\n\t\tVolumes: gwVolumes,\n\t}\n\tif err := mergo.Merge(&d.Spec.Template.Spec, p, mergo.WithOverride); err != nil {\n\t\treturn kverrors.Wrap(err, \"failed to merge server pki into container spec \")\n\t}\n\treturn nil\n}", "func makeTestCreateConfig() *CreateConfig {\n\tcc := new(CreateConfig)\n\tcc.Resources = CreateResourceConfig{}\n\tcc.User.IDMappings = new(storage.IDMappingOptions)\n\tcc.User.IDMappings.UIDMap = []idtools.IDMap{}\n\tcc.User.IDMappings.GIDMap = []idtools.IDMap{}\n\n\treturn cc\n}", "func (gw *Gateway) GetDefaultConnections() []string {\n\tvar conns []string\n\tgw.strand(\"GetDefaultConnections\", func() {\n\t\tconns = make([]string, len(gw.d.Config.DefaultConnections))\n\t\tcopy(conns[:], gw.d.Config.DefaultConnections[:])\n\t})\n\treturn conns\n}", "func (scl *SimpleConfigurationLayer) setDefaults() {\n\tscl.Changelog = ent.NewChangelogConfiguration()\n\tscl.CommitMessageConventions = ent.NewCommitMessageConventions()\n\tscl.Git = ent.NewGitConfiguration()\n\tsvra := make(map[string]*ent.Attachment)\n\tscl.ReleaseAssets = &svra\n\tscl.ReleaseTypes = ent.NewReleaseTypes()\n\tsvsc := make(map[string]*ent.ServiceConfiguration)\n\tscl.Services = &svsc\n\tscl.Substitutions = ent.NewSubstitutions()\n}", "func createTlsConfiguration(certFile string, keyFile string, caFile string, verifySsl bool) (t *tls.Config) {\n\tif certFile != \"\" && keyFile != \"\" && caFile != \"\" {\n\t\tcert, err := 
tls.LoadX509KeyPair(certFile, keyFile)\n\t\tif err != nil {\n\t\t\tlog.Fatal(err)\n\t\t}\n\n\t\tcaCert, err := ioutil.ReadFile(caFile)\n\t\tif err != nil {\n\t\t\tlog.Fatal(err)\n\t\t}\n\n\t\tcaCertPool := x509.NewCertPool()\n\t\tcaCertPool.AppendCertsFromPEM(caCert)\n\n\t\tt = &tls.Config{\n\t\t\tCertificates: []tls.Certificate{cert},\n\t\t\tRootCAs: caCertPool,\n\t\t\tInsecureSkipVerify: verifySsl,\n\t\t}\n\t}\n\treturn t\n}", "func (f *ConfigMapFactory) MakeConfigMap(\n\targs *types.ConfigMapArgs, options *types.GeneratorOptions) (*corev1.ConfigMap, error) {\n\tvar all []kv.Pair\n\tvar err error\n\tcm := f.makeFreshConfigMap(args)\n\n\tpairs, err := keyValuesFromEnvFile(f.ldr, args.EnvSource)\n\tif err != nil {\n\t\treturn nil, errors.Wrap(err, fmt.Sprintf(\n\t\t\t\"env source file: %s\",\n\t\t\targs.EnvSource))\n\t}\n\tall = append(all, pairs...)\n\n\tpairs, err = keyValuesFromLiteralSources(args.LiteralSources)\n\tif err != nil {\n\t\treturn nil, errors.Wrap(err, fmt.Sprintf(\n\t\t\t\"literal sources %v\", args.LiteralSources))\n\t}\n\tall = append(all, pairs...)\n\n\tpairs, err = keyValuesFromFileSources(f.ldr, args.FileSources)\n\tif err != nil {\n\t\treturn nil, errors.Wrap(err, fmt.Sprintf(\n\t\t\t\"file sources: %v\", args.FileSources))\n\t}\n\tall = append(all, pairs...)\n\n\tfor _, p := range all {\n\t\terr = addKvToConfigMap(cm, p.Key, p.Value)\n\t\tif err != nil {\n\t\t\treturn nil, err\n\t\t}\n\t}\n\tif options != nil {\n\t\tcm.SetLabels(options.Labels)\n\t\tcm.SetAnnotations(options.Annotations)\n\t}\n\treturn cm, nil\n}", "func ClientDefault(certPath, keyPath, caPath string) *tls.Config {\n\t// Load our TLS key pair to use for authentication\n\tcert, err := tls.LoadX509KeyPair(certPath, keyPath)\n\tif err != nil {\n\t\tlog.Fatalln(\"Unable to load cert\", err)\n\t}\n\n\t// Load our CA certificate\n\tclientCACert, err := ioutil.ReadFile(caPath)\n\tif err != nil {\n\t\tlog.Fatal(\"Unable to open cert\", err)\n\t}\n\n\t// Append client certificate to cert pool\n\tclientCertPool := x509.NewCertPool()\n\tclientCertPool.AppendCertsFromPEM(clientCACert)\n\n\t// Build default TLS configuration\n\tconfig := &tls.Config{\n\t\t// Perfect Forward Secrecy + ECDSA only\n\t\tCipherSuites: []uint16{\n\t\t\ttls.TLS_ECDHE_ECDSA_WITH_AES_256_GCM_SHA384,\n\t\t\ttls.TLS_ECDHE_ECDSA_WITH_CHACHA20_POLY1305,\n\t\t},\n\t\t// Force server cipher suites\n\t\tPreferServerCipherSuites: true,\n\t\t// TLS 1.2 only\n\t\tMinVersion: tls.VersionTLS12,\n\t\t// Client certificate to use\n\t\tCertificates: []tls.Certificate{cert},\n\t\t// Root CA of the client certificate\n\t\tRootCAs: clientCertPool,\n\t}\n\n\t// Parse CommonName and SubjectAlternateName\n\tconfig.BuildNameToCertificate()\n\n\t// Return configuration\n\treturn config\n}" ]
[ "0.606134", "0.5944276", "0.5602694", "0.55962527", "0.55962527", "0.55585545", "0.5549638", "0.5523388", "0.54623073", "0.5458472", "0.5448604", "0.5401323", "0.5397742", "0.5371387", "0.53665453", "0.5318795", "0.52953887", "0.5283596", "0.52765644", "0.5207789", "0.5194221", "0.5145408", "0.5129437", "0.5124677", "0.51239437", "0.5122584", "0.51117843", "0.5106644", "0.5106321", "0.5102221", "0.50967526", "0.50556296", "0.5054974", "0.5053191", "0.50494623", "0.50284", "0.50173044", "0.50062907", "0.49924394", "0.49808714", "0.4971099", "0.49599737", "0.49571127", "0.4957111", "0.49495143", "0.49428684", "0.49420324", "0.49226743", "0.49204516", "0.49190122", "0.49188405", "0.49188405", "0.489607", "0.48872036", "0.48742706", "0.48735633", "0.48731256", "0.4866776", "0.48628104", "0.48601794", "0.48529717", "0.48487666", "0.48487246", "0.48469576", "0.4831076", "0.48290133", "0.482697", "0.48258132", "0.48208842", "0.48131657", "0.4812396", "0.4790829", "0.47795194", "0.4779109", "0.47778204", "0.4775613", "0.47717565", "0.47663456", "0.47629893", "0.47585297", "0.4758382", "0.4752952", "0.4752416", "0.47502923", "0.4748012", "0.47461212", "0.4745685", "0.4745025", "0.47442764", "0.474315", "0.47421402", "0.47415712", "0.47371775", "0.47332105", "0.47306758", "0.4721189", "0.4720949", "0.4715708", "0.4714553", "0.4705125" ]
0.84748524
0
GetCA returns the CA given its name
func (s *Server) GetCA(name string) (*CA, error) {
	// Lookup the CA from the server
	ca := s.caMap[name]
	if ca == nil {
		return nil, caerrors.NewHTTPErr(404, caerrors.ErrCANotFound, "CA '%s' does not exist", name)
	}
	return ca, nil
}
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "func (c *CertGenerator) GetCA() *CA { return c.CA }", "func (ctx *serverRequestContextImpl) getCA() (*CA, error) {\n\tif ctx.ca == nil {\n\t\t// Get the CA name\n\t\tname, err := ctx.getCAName()\n\t\tif err != nil {\n\t\t\treturn nil, err\n\t\t}\n\t\t// Get the CA by its name\n\t\tctx.ca, err = ctx.endpoint.Server.GetCA(name)\n\t\tif err != nil {\n\t\t\treturn nil, err\n\t\t}\n\t}\n\treturn ctx.ca, nil\n}", "func (h *CAHandler) Get(ctx context.Context, project string) (string, error) {\n\tbts, err := h.client.doGetRequest(ctx, buildPath(\"project\", project, \"kms\", \"ca\"), nil)\n\tif err != nil {\n\t\treturn \"\", err\n\t}\n\n\tvar r ProjectCAResponse\n\terrR := checkAPIResponse(bts, &r)\n\n\treturn r.CACertificate, errR\n}", "func (o *LDAPIdentityProvider) GetCA() (value string, ok bool) {\n\tok = o != nil && o.bitmap_&1 != 0\n\tif ok {\n\t\tvalue = o.ca\n\t}\n\treturn\n}", "func (o *LDAPIdentityProvider) CA() string {\n\tif o != nil && o.bitmap_&1 != 0 {\n\t\treturn o.ca\n\t}\n\treturn \"\"\n}", "func (ctx *serverRequestContextImpl) GetCA() (*CA, error) {\n\t_, err := ctx.getCA()\n\tif err != nil {\n\t\treturn nil, errors.WithMessage(err, \"Failed to get CA instance\")\n\t}\n\tif ctx.ca.db == nil || !ctx.ca.db.IsInitialized() {\n\t\terr := ctx.ca.initDB(ctx.ca.server.dbMetrics)\n\t\tif err != nil {\n\t\t\treturn nil, errors.WithMessage(err, fmt.Sprintf(\"%s handler failed to initialize DB\", strings.TrimLeft(ctx.req.URL.String(), \"/\")))\n\t\t}\n\t\terr = ctx.ca.issuer.Init(false, ctx.ca.db, ctx.ca.levels)\n\t\tif err != nil {\n\t\t\treturn nil, nil\n\t\t}\n\t}\n\treturn ctx.ca, nil\n}", "func TestGetCAName(t *testing.T) {\n\tfabricCAClient, err := NewFabricCAClient(org1, configImp, cryptoSuiteProvider)\n\tif err != nil {\n\t\tt.Fatalf(\"NewFabricCAClient returned error: %v\", err)\n\t}\n\tif fabricCAClient.CAName() != \"test\" {\n\t\tt.Fatalf(\"CAName returned wrong value: %s\", fabricCAClient.CAName())\n\t}\n}", "func (ctx *serverRequestContextImpl) getCAName() (string, error) {\n\t// Check the query parameters first\n\tca := ctx.req.URL.Query().Get(\"ca\")\n\tif ca != \"\" {\n\t\treturn ca, nil\n\t}\n\t// Next, check the request body, if there is one\n\tvar body caNameReqBody\n\t_, err := ctx.TryReadBody(&body)\n\tif err != nil {\n\t\treturn \"\", err\n\t}\n\tif body.CAName != \"\" {\n\t\treturn body.CAName, nil\n\t}\n\t// No CA name in the request body either, so use the default CA name\n\treturn ctx.endpoint.Server.CA.Config.CA.Name, nil\n}", "func caInfo(orgName string, sdk *fabsdk.FabricSDK) (*msp.GetCAInfoResponse, error) {\n\tmspClient, err := msp.New(sdk.Context(), msp.WithOrg(orgName))\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\treturn mspClient.GetCAInfo()\n}", "func (w *FabricSDKWrapper) getCAForOrganization(context context.Client, org string) (string, error) {\n\torganizations := context.EndpointConfig().NetworkConfig().Organizations\n\tif organizations == nil || len(organizations) == 0 {\n\t\treturn \"\", errors.New(\"no organizations found\")\n\t}\n\tif organizations[org].CertificateAuthorities == nil || len(organizations[org].CertificateAuthorities) == 0 {\n\t\treturn \"\", errors.New(\"no certificate authorities found\")\n\t}\n\tca := organizations[org].CertificateAuthorities[0]\n\treturn ca, errors.New(\"no CA found for the organization\")\n}", "func pathFetchCA(b *backend) *framework.Path {\n\treturn &framework.Path{\n\t\tPattern: `ca(/pem)?`,\n\n\t\tCallbacks: map[logical.Operation]framework.OperationFunc{\n\t\t\tlogical.ReadOperation: 
b.pathFetchRead,\n\t\t},\n\n\t\tHelpSynopsis: pathFetchHelpSyn,\n\t\tHelpDescription: pathFetchHelpDesc,\n\t}\n}", "func (a tlsCredentials) getCAFilename() string {\n\tif a.clientCA.Secret != nil {\n\t\treturn a.clientCA.Secret.Key\n\t} else if a.clientCA.ConfigMap != nil {\n\t\treturn a.clientCA.ConfigMap.Key\n\t}\n\n\treturn \"\"\n}", "func (m *MockPKIService) GetCA() ([]byte, error) {\n\tm.ctrl.T.Helper()\n\tret := m.ctrl.Call(m, \"GetCA\")\n\tret0, _ := ret[0].([]byte)\n\tret1, _ := ret[1].(error)\n\treturn ret0, ret1\n}", "func (s Signer) ReadCA() (string, error) {\n\treturn string(ssh.MarshalAuthorizedKey(s.CACert)), nil\n}", "func (impl *ldapAuthImpl) GetCAPath() string {\n\timpl.RLock()\n\tdefer impl.RUnlock()\n\n\treturn impl.caPath\n}", "func GenerateCA(commonName, organizationalUnit string) (*CA, error) {\n\tcfg := &CertCfg{\n\t\tSubject: pkix.Name{CommonName: commonName, OrganizationalUnit: []string{organizationalUnit}},\n\t\tKeyUsages: x509.KeyUsageKeyEncipherment | x509.KeyUsageDigitalSignature | x509.KeyUsageCertSign,\n\t\tExtKeyUsages: []x509.ExtKeyUsage{x509.ExtKeyUsageClientAuth, x509.ExtKeyUsageServerAuth},\n\t\tValidity: ValidityTenYears,\n\t\tIsCA: true,\n\t}\n\n\tkey, crt, err := GenerateSelfSignedCertificate(cfg)\n\tif err != nil {\n\t\treturn nil, errors.Wrapf(err, \"failed to generate CA with cn=%s,ou=%s\", commonName, organizationalUnit)\n\t}\n\treturn &CA{Key: key, Cert: crt}, nil\n}", "func caFromContext(ctx context.Context) CA {\n\tca, _ := ctx.Value(ctxKeyCA).(CA)\n\treturn ca\n}", "func (h *Handler) GetCACert(ctx context.Context) (SCEPResponse, error) {\n\n\tcerts, err := h.Auth.GetCACertificates()\n\tif err != nil {\n\t\treturn SCEPResponse{}, err\n\t}\n\n\tif len(certs) == 0 {\n\t\treturn SCEPResponse{}, errors.New(\"missing CA cert\")\n\t}\n\n\tresponse := SCEPResponse{\n\t\tOperation: opnGetCACert,\n\t\tCACertNum: len(certs),\n\t}\n\n\tif len(certs) == 1 {\n\t\tresponse.Data = certs[0].Raw\n\t} else {\n\t\t// create degenerate pkcs7 certificate structure, according to\n\t\t// https://tools.ietf.org/html/rfc8894#section-4.2.1.2, because\n\t\t// not signed or encrypted data has to be returned.\n\t\tdata, err := microscep.DegenerateCertificates(certs)\n\t\tif err != nil {\n\t\t\treturn SCEPResponse{}, err\n\t\t}\n\t\tresponse.Data = data\n\t}\n\n\treturn response, nil\n}", "func New() (*CA, error) {\n\treturn &CA{}, nil\n}", "func getIdentityByCaName(id, caName, orgName string, sdk *fabsdk.FabricSDK) (*msp.IdentityResponse, error) {\n\tmspClient, err := msp.New(sdk.Context(), msp.WithOrg(orgName))\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\treturn mspClient.GetIdentity(id, msp.WithCA(caName))\n}", "func (s TLSSpec) GetCASecretName() string {\n\treturn util.TypeOrDefault[string](s.CASecretName)\n}", "func (cfg X509Config) GetCAPemFileName() string {\n\treturn cfg.RootCA.CAName + \".\" + certFileExt\n}", "func (o *Operator) GetLicenseCA() ([]byte, error) {\n\tif !o.isOpsCenter() {\n\t\treturn nil, trace.BadParameter(\"not a Gravity Hub\")\n\t}\n\tca, err := pack.ReadCertificateAuthority(o.packages())\n\tif err != nil {\n\t\treturn nil, trace.Wrap(err)\n\t}\n\treturn ca.CertPEM, nil\n}", "func TemplateCA(cn string) *x509.Certificate {\n\tca := Template(cn)\n\tca.KeyUsage = x509.KeyUsageDigitalSignature | x509.KeyUsageCertSign | x509.KeyUsageCRLSign\n\tca.IsCA = true\n\tca.MaxPathLen = certMaxPathLen\n\tca.MaxPathLenZero = true\n\treturn ca\n}", "func New() *CA {\n\treturn &CA{\n\t\tPair: &Pair{},\n\t\tCertFileName: RootCertFileName,\n\t\tKeyFileName: 
RootKeyFileName,\n\t}\n}", "func (o LookupDatabaseMysqlResultOutput) CaCert() pulumi.StringOutput {\n\treturn o.ApplyT(func(v LookupDatabaseMysqlResult) string { return v.CaCert }).(pulumi.StringOutput)\n}", "func (c *CertInfo) CA() *x509.Certificate {\n\treturn c.ca\n}", "func (c *CertificateClient) GetByName(ctx context.Context, name string) (*Certificate, *Response, error) {\n\tif name == \"\" {\n\t\treturn nil, nil, nil\n\t}\n\tCertificate, response, err := c.List(ctx, CertificateListOpts{Name: name})\n\tif len(Certificate) == 0 {\n\t\treturn nil, response, err\n\t}\n\treturn Certificate[0], response, err\n}", "func (s impl) Get(name string) (*computev1.SslCertificate, error) {\n\treturn s.service.SslCertificates.Get(s.projectID, name).Do()\n}", "func Get(c *deis.Client, name string) (api.Cert, error) {\n\turl := fmt.Sprintf(\"/v2/certs/%s\", name)\n\tres, reqErr := c.Request(\"GET\", url, nil)\n\tif reqErr != nil {\n\t\treturn api.Cert{}, reqErr\n\t}\n\tdefer res.Body.Close()\n\n\tresCert := api.Cert{}\n\tif err := json.NewDecoder(res.Body).Decode(&resCert); err != nil {\n\t\treturn api.Cert{}, err\n\t}\n\n\treturn resCert, reqErr\n}", "func GetOrCreateCaCert() (*x509.Certificate, *rsa.PrivateKey, error) {\n\tvar tlsCACertPath, tlsCAKeyPath string\n\tif os.Getenv(\"CI\") == \"true\" {\n\t\t// When in CI we create the cert dir in the root directory instead.\n\t\ttlsCACertPath = path.Join(\"/ca\", \"ca.crt\")\n\t\ttlsCAKeyPath = path.Join(\"/ca\", \"ca.key\")\n\t} else {\n\t\twd, err := os.Getwd()\n\t\tif err != nil {\n\t\t\treturn nil, nil, err\n\t\t}\n\t\ttlsCACertPath = path.Join(wd, \"ca\", \"ca.crt\")\n\t\ttlsCAKeyPath = path.Join(wd,\"ca\", \"ca.key\")\n\t\tif _, err := os.Stat(path.Join(wd, \"ca\")); os.IsNotExist(err) {\n\t\t\terr = os.Mkdir(path.Join(wd, \"ca\"), 0770)\n\t\t\tif err != nil {\n\t\t\t\treturn nil, nil, err\n\t\t\t}\n\t\t}\n\t}\n\tif _, err := os.Stat(tlsCACertPath); err == nil {\n\t\tif _, err := os.Stat(tlsCAKeyPath); err == nil {\n\t\t\t// We already created a CA cert, let's use that.\n\t\t\tdat, err := ioutil.ReadFile(tlsCACertPath)\n\t\t\tif err != nil {\n\t\t\t\treturn nil, nil, err\n\t\t\t}\n\t\t\tblock, _ := pem.Decode([]byte(dat))\n\t\t\tif block == nil || block.Type != \"CERTIFICATE\" {\n\t\t\t\treturn nil, nil, errors.New(\"ca.crt is not a valid pem encoded x509 cert\")\n\t\t\t}\n\t\t\tcaCerts, err := x509.ParseCertificates(block.Bytes)\n\t\t\tif err != nil {\n\t\t\t\treturn nil, nil, err\n\t\t\t}\n\t\t\tif len(caCerts) != 1 {\n\t\t\t\treturn nil, nil, errors.New(\"ca.crt contains none or more than one cert\")\n\t\t\t}\n\t\t\tcaCert := caCerts[0]\n\t\t\tdat, err = ioutil.ReadFile(tlsCAKeyPath)\n\t\t\tif err != nil {\n\t\t\t\treturn nil, nil, err\n\t\t\t}\n\t\t\tblock, _ = pem.Decode([]byte(dat))\n\t\t\tif block == nil || block.Type != \"RSA PRIVATE KEY\" {\n\t\t\t\treturn nil, nil, errors.New(\"ca.key is not a valid pem encoded rsa private key\")\n\t\t\t}\n\t\t\tpriv, err := x509.ParsePKCS1PrivateKey(block.Bytes)\n\t\t\tif err != nil {\n\t\t\t\treturn nil, nil, err\n\t\t\t}\n\t\t\treturn caCert, priv, nil\n\t\t}\n\t}\n\n\tcertificateDuration := time.Hour * 5\n\tpriv, err := rsa.GenerateKey(rand.Reader, 4096)\n\tif err != nil {\n\t\treturn nil, nil, err\n\t}\n\tnotBefore := time.Now()\n\tnotAfter := notBefore.Add(certificateDuration)\n\tserialNumberLimit := new(big.Int).Lsh(big.NewInt(1), 128)\n\tserialNumber, err := rand.Int(rand.Reader, serialNumberLimit)\n\tif err != nil {\n\t\treturn nil, nil, err\n\t}\n\tcaCert := 
x509.Certificate{\n\t\tSerialNumber: serialNumber,\n\t\tNotBefore: notBefore,\n\t\tNotAfter: notAfter,\n\t\tIsCA: true,\n\t\tKeyUsage: x509.KeyUsageCertSign | x509.KeyUsageDigitalSignature | x509.KeyUsageCRLSign,\n\t\tExtKeyUsage: []x509.ExtKeyUsage{x509.ExtKeyUsageServerAuth},\n\t\tBasicConstraintsValid: true,\n\t}\n\n\tderBytes, err := x509.CreateCertificate(rand.Reader, &caCert, &caCert, &priv.PublicKey, priv)\n\tif err != nil {\n\t\treturn nil, nil, err\n\t}\n\tcertOut, err := os.Create(tlsCACertPath)\n\tif err != nil {\n\t\treturn nil, nil, err\n\t}\n\t\n\tdefer certOut.Close() // nolint: errcheck\n\tif err = pem.Encode(certOut, &pem.Block{Type: \"CERTIFICATE\", Bytes: derBytes}); err != nil {\n\t\treturn nil, nil, err\n\t}\n\n\tkeyOut, err := os.OpenFile(tlsCAKeyPath, os.O_WRONLY|os.O_CREATE|os.O_TRUNC, 0600)\n\tif err != nil {\n\t\treturn nil, nil, err\n\t}\n\tdefer keyOut.Close() // nolint: errcheck\n\terr = pem.Encode(keyOut, &pem.Block{\n\t\tType: \"RSA PRIVATE KEY\",\n\t\tBytes: x509.MarshalPKCS1PrivateKey(priv),\n\t})\n\tif err != nil {\n\t\treturn nil, nil, err\n\t}\n\treturn &caCert, priv, nil\n}", "func loadCA(caFile string) *x509.CertPool {\n\tpool := x509.NewCertPool()\n\n\tif ca, err := ioutil.ReadFile(caFile); err != nil {\n\t\tlog.Fatal(\"Fatal Error at Certification ReadFile: \", err)\n\t} else {\n\t\tpool.AppendCertsFromPEM(ca)\n\t}\n\treturn pool\n}", "func readCAFile(f string) ([]byte, error) {\n\tdata, err := ioutil.ReadFile(f)\n\tif err != nil {\n\t\treturn nil, fmt.Errorf(\"unable to load specified CA cert %s: %s\", f, err)\n\t}\n\treturn data, nil\n}", "func GetOrCreateCaCert() (*x509.Certificate, *rsa.PrivateKey, error) {\n\tvar tlsCACertPath, tlsCAKeyPath string\n\tif os.Getenv(\"CI\") == \"true\" {\n\t\t// When in CI we create the cert dir in the root directory instead.\n\t\ttlsCACertPath = path.Join(\"/ca\", \"ca.crt\")\n\t\ttlsCAKeyPath = path.Join(\"/ca\", \"ca.key\")\n\t} else {\n\t\twd, err := os.Getwd()\n\t\tif err != nil {\n\t\t\treturn nil, nil, err\n\t\t}\n\t\ttlsCACertPath = path.Join(wd, \"ca\", \"ca.crt\")\n\t\ttlsCAKeyPath = path.Join(wd, \"ca\", \"ca.key\")\n\t\tif _, err := os.Stat(path.Join(wd, \"ca\")); os.IsNotExist(err) {\n\t\t\terr = os.Mkdir(path.Join(wd, \"ca\"), 0770)\n\t\t\tif err != nil {\n\t\t\t\treturn nil, nil, err\n\t\t\t}\n\t\t}\n\t}\n\tif _, err := os.Stat(tlsCACertPath); err == nil {\n\t\tif _, err := os.Stat(tlsCAKeyPath); err == nil {\n\t\t\t// We already created a CA cert, let's use that.\n\t\t\tdat, err := ioutil.ReadFile(tlsCACertPath)\n\t\t\tif err != nil {\n\t\t\t\treturn nil, nil, err\n\t\t\t}\n\t\t\tblock, _ := pem.Decode([]byte(dat))\n\t\t\tif block == nil || block.Type != \"CERTIFICATE\" {\n\t\t\t\treturn nil, nil, errors.New(\"ca.crt is not a valid pem encoded x509 cert\")\n\t\t\t}\n\t\t\tcaCerts, err := x509.ParseCertificates(block.Bytes)\n\t\t\tif err != nil {\n\t\t\t\treturn nil, nil, err\n\t\t\t}\n\t\t\tif len(caCerts) != 1 {\n\t\t\t\treturn nil, nil, errors.New(\"ca.crt contains none or more than one cert\")\n\t\t\t}\n\t\t\tcaCert := caCerts[0]\n\t\t\tdat, err = ioutil.ReadFile(tlsCAKeyPath)\n\t\t\tif err != nil {\n\t\t\t\treturn nil, nil, err\n\t\t\t}\n\t\t\tblock, _ = pem.Decode([]byte(dat))\n\t\t\tif block == nil || block.Type != \"RSA PRIVATE KEY\" {\n\t\t\t\treturn nil, nil, errors.New(\"ca.key is not a valid pem encoded rsa private key\")\n\t\t\t}\n\t\t\tpriv, err := x509.ParsePKCS1PrivateKey(block.Bytes)\n\t\t\tif err != nil {\n\t\t\t\treturn nil, nil, err\n\t\t\t}\n\t\t\treturn caCert, priv, 
nil\n\t\t}\n\t}\n\n\t// valid for 10 years\n\tcertificateDuration := time.Hour * 24 * 365 * 10\n\tpriv, err := rsa.GenerateKey(rand.Reader, 4096)\n\tif err != nil {\n\t\treturn nil, nil, err\n\t}\n\tnotBefore := time.Now()\n\tnotAfter := notBefore.Add(certificateDuration)\n\tserialNumberLimit := new(big.Int).Lsh(big.NewInt(1), 128)\n\tserialNumber, err := rand.Int(rand.Reader, serialNumberLimit)\n\tif err != nil {\n\t\treturn nil, nil, err\n\t}\n\tcaCert := x509.Certificate{\n\t\tSerialNumber: serialNumber,\n\t\tNotBefore: notBefore,\n\t\tNotAfter: notAfter,\n\t\tIsCA: true,\n\t\tKeyUsage: x509.KeyUsageCertSign | x509.KeyUsageDigitalSignature | x509.KeyUsageCRLSign,\n\t\tExtKeyUsage: []x509.ExtKeyUsage{x509.ExtKeyUsageServerAuth},\n\t\tBasicConstraintsValid: true,\n\t\tSubject: pkix.Name{\n\t\t\tOrganization: []string{\"matrix.org\"},\n\t\t\tCountry: []string{\"GB\"},\n\t\t\tProvince: []string{\"London\"},\n\t\t\tLocality: []string{\"London\"},\n\t\t\tStreetAddress: []string{\"123 Street\"},\n\t\t\tPostalCode: []string{\"12345\"},\n\t\t},\n\t}\n\n\tderBytes, err := x509.CreateCertificate(rand.Reader, &caCert, &caCert, &priv.PublicKey, priv)\n\tif err != nil {\n\t\treturn nil, nil, err\n\t}\n\tcertOut, err := os.Create(tlsCACertPath)\n\tif err != nil {\n\t\treturn nil, nil, err\n\t}\n\n\tdefer certOut.Close() // nolint: errcheck\n\tif err = pem.Encode(certOut, &pem.Block{Type: \"CERTIFICATE\", Bytes: derBytes}); err != nil {\n\t\treturn nil, nil, err\n\t}\n\n\tkeyOut, err := os.OpenFile(tlsCAKeyPath, os.O_WRONLY|os.O_CREATE|os.O_TRUNC, 0600)\n\tif err != nil {\n\t\treturn nil, nil, err\n\t}\n\tdefer keyOut.Close() // nolint: errcheck\n\terr = pem.Encode(keyOut, &pem.Block{\n\t\tType: \"RSA PRIVATE KEY\",\n\t\tBytes: x509.MarshalPKCS1PrivateKey(priv),\n\t})\n\tif err != nil {\n\t\treturn nil, nil, err\n\t}\n\treturn &caCert, priv, nil\n}", "func getCaFileAndDownloadIfRequired(c *cli.Context) (string, error) {\n\t// have we done this already?\n\tif len(caFile) > 0 {\n\t\treturn caFile, nil\n\t}\n\tca := c.String(FlagCa)\n\t// Detect if using a URL scheme\n\tif uri, _ := url.ParseRequestURI(ca); uri != nil {\n\t\tif uri.Scheme == \"\" {\n\t\t\t// Not a URL, get out of here\n\t\t\treturn ca, nil\n\t\t}\n\t}\n\t// Where should we save the ca?\n\tif c.IsSet(FlagCaFile) {\n\t\tcaFile = c.String(FlagCaFile)\n\t} else {\n\t\t// This is used by cleanup\n\t\tcaFile = filepath.Join(getKdTempDir(), \"kube-ca.pem\")\n\t}\n\n\t// skip download if ca file already exists\n\tif found, err := FilesExists(caFile); err != nil {\n\t\treturn \"\", err\n\t} else if found {\n\t\tlogDebug.Printf(\"ca file (%s) already exists, skipping download from: %s\", caFile, ca)\n\t\treturn caFile, nil\n\t}\n\n\tlogDebug.Printf(\"ca file specified as %s, to download from %s\", caFile, ca)\n\t// download the ca...\n\tresp, err := grab.Get(caFile, ca)\n\tif err != nil {\n\t\treturn \"\", fmt.Errorf(\n\t\t\t\"problem downloading ca from %s:%s\", resp.Filename, err)\n\t}\n\treturn caFile, nil\n}", "func (c *Certinator) CreateCA(name string) (err error) {\n\tconfig := map[string]interface{}{\n\t\t\"options\": nil,\n\t\t\"default_lease_ttl\": \"0s\",\n\t\t\"max_lease_ttl\": DEFAULT_CA_MAX_LEASE,\n\t\t\"force_no_cache\": false,\n\t}\n\n\tdata := map[string]interface{}{\n\t\t\"type\": \"pki\",\n\t\t\"description\": fmt.Sprintf(\"%s certificate authority\", name),\n\t\t\"config\": config,\n\t}\n\n\tpath := fmt.Sprintf(\"sys/mounts/%s\", name)\n\n\t_, err = c.Client.Logical().Write(path, data)\n\tif err != nil {\n\t\terr = 
errors.Wrapf(err, \"failed creating %s CA\", name)\n\t\treturn err\n\t}\n\n\treturn err\n}", "func (o ArgoCDSpecTlsOutput) Ca() ArgoCDSpecTlsCaPtrOutput {\n\treturn o.ApplyT(func(v ArgoCDSpecTls) *ArgoCDSpecTlsCa { return v.Ca }).(ArgoCDSpecTlsCaPtrOutput)\n}", "func loadCA(caFile string) *x509.CertPool {\n\tpool := x509.NewCertPool()\n\n\tif ca, e := ioutil.ReadFile(caFile); e != nil {\n\t\tlog.Fatal(\"ReadFile: \", e)\n\t} else {\n\t\tpool.AppendCertsFromPEM(ca)\n\t}\n\treturn pool\n}", "func (c *IdentityConfig) CAConfig(caID string) (*msp.CAConfig, bool) {\n\tcfg, ok := c.caConfigs[strings.ToLower(caID)]\n\treturn cfg, ok\n}", "func pathFetchCAChain(b *backend) *framework.Path {\n\treturn &framework.Path{\n\t\tPattern: `(cert/)?ca_chain`,\n\n\t\tCallbacks: map[logical.Operation]framework.OperationFunc{\n\t\t\tlogical.ReadOperation: b.pathFetchRead,\n\t\t},\n\n\t\tHelpSynopsis: pathFetchHelpSyn,\n\t\tHelpDescription: pathFetchHelpDesc,\n\t}\n}", "func GenerateCA(c *cli.Context) error {\n\thost := c.String(\"host\")\n\n\trsaBits := c.Int(\"rsa-bits\")\n\tecdsaCurve := c.String(\"ecdsa-curve\")\n\n\tvalidFrom := c.String(\"start-date\")\n\n\tvalidFor := c.Duration(\"duration\")\n\tcert, key, err := Ca(host, rsaBits, ecdsaCurve, validFrom, validFor)\n\tif err != nil {\n\t\tlog.Fatalf(\"failed to create certificate: %s\", err)\n\t}\n\tvar certname = \"0.cert\"\n\tvar keyname = \"0.key\"\n\n\tcertout, err := os.Create(certname)\n\tif err != nil {\n\t\tlog.Fatalf(\"failed to open \"+certname+\" for writing: %s\", err)\n\t}\n\tpem.Encode(certout, &cert)\n\tcertout.Close()\n\tlog.Print(\"written \" + certname + \"\\n\")\n\n\tkeyout, err := os.OpenFile(keyname, os.O_WRONLY|os.O_CREATE|os.O_TRUNC, 0600)\n\tif err != nil {\n\t\tlog.Print(\"failed to open \"+keyname+\" for writing:\", err)\n\t\treturn nil\n\t}\n\tpem.Encode(keyout, &key)\n\tkeyout.Close()\n\tlog.Print(\"written \" + keyname + \"\\n\")\n\treturn nil\n}", "func (*CaSigningCas) GetPath() string { return \"/api/objects/ca/signing_ca/\" }", "func (mr *MockPKIServiceMockRecorder) GetCA() *gomock.Call {\n\tmr.mock.ctrl.T.Helper()\n\treturn mr.mock.ctrl.RecordCallWithMethodType(mr.mock, \"GetCA\", reflect.TypeOf((*MockPKIService)(nil).GetCA))\n}", "func getAffiliationByCaName(affiliation, orgName, caName string, sdk *fabsdk.FabricSDK) (*msp.AffiliationResponse, error) {\n\tmspClient, err := msp.New(sdk.Context(), msp.WithOrg(orgName))\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\treturn mspClient.GetAffiliation(affiliation, msp.WithCA(caName))\n}", "func (s *LocalNamedStore) GetCertificateByName(name string) *Certificate {\n\tfor _, cert := range s.GetCertificates() {\n\t\tif cert.Domain.Contains(name) {\n\t\t\treturn cert\n\t\t}\n\t}\n\n\treturn nil\n}", "func Get(domain string) (*acm.CertificateDetail, error) {\n\t// List all issued certificates\n\tlistCertInput := acm.ListCertificatesInput{\n\t\tCertificateStatuses: []*string{aws.String(\"ISSUED\")},\n\t}\n\tlistCertOutput, err := ACM.ListCertificates(&listCertInput)\n\tif err != nil {\n\t\tpanic(err)\n\t}\n\n\t// Check if any certificate covers the domain name\n\tfor _, certificate := range listCertOutput.CertificateSummaryList {\n\t\tif *certificate.DomainName == domain {\n\t\t\t// Get all certificate information for a cert given ARN\n\t\t\tdescribeCertInput := acm.DescribeCertificateInput{\n\t\t\t\tCertificateArn: certificate.CertificateArn,\n\t\t\t}\n\n\t\t\tdescribeCertOutput, describeCertErr := ACM.DescribeCertificate(&describeCertInput)\n\t\t\tif describeCertErr != nil 
{\n\t\t\t\tpanic(describeCertErr)\n\t\t\t}\n\n\t\t\treturn describeCertOutput.Certificate, nil\n\t\t}\n\t}\n\n\t// Check all certificates for aliases\n\tfor _, certificate := range listCertOutput.CertificateSummaryList {\n\t\t// Get all certificate information for a cert given ARN\n\t\tdescribeCertInput := acm.DescribeCertificateInput{\n\t\t\tCertificateArn: certificate.CertificateArn,\n\t\t}\n\n\t\tdescribeCertOutput, describeCertErr := ACM.DescribeCertificate(&describeCertInput)\n\t\tif describeCertErr != nil {\n\t\t\tpanic(describeCertErr)\n\t\t}\n\n\t\t// Check certificates\n\t\tfor _, alias := range describeCertOutput.Certificate.SubjectAlternativeNames {\n\t\t\t// If alias matches domain, that domain is covered by a certificate\n\t\t\tif *alias == domain {\n\t\t\t\treturn describeCertOutput.Certificate, nil\n\t\t\t}\n\t\t}\n\t}\n\n\treturn &acm.CertificateDetail{}, nil\n}", "func NewCA(config *NewCertConfig, names ...string) (*Certificate, error) {\n\tconfig.IsCA = true\n\n\tcert, err := newCert(config, names...)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\n\tcertPool := x509.NewCertPool()\n\tcertPool.AddCert(cert.Cert)\n\tcert.CertPool = certPool\n\n\treturn cert, nil\n}", "func getCertificateFromLibraryByName(client *Client, name string, additionalHeader map[string]string) (*Certificate, error) {\n\tslowSearch, params := shouldDoSlowSearch(\"alias\", name)\n\n\tvar foundCertificates []*Certificate\n\tcertificates, err := getAllCertificateFromLibrary(client, params, additionalHeader)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\tif len(certificates) == 0 {\n\t\treturn nil, ErrorEntityNotFound\n\t}\n\tfoundCertificates = append(foundCertificates, certificates[0])\n\n\tif slowSearch {\n\t\tfoundCertificates = nil\n\t\tfor _, certificate := range certificates {\n\t\t\tif certificate.CertificateLibrary.Alias == name {\n\t\t\t\tfoundCertificates = append(foundCertificates, certificate)\n\t\t\t}\n\t\t}\n\t\tif len(foundCertificates) == 0 {\n\t\t\treturn nil, ErrorEntityNotFound\n\t\t}\n\t\tif len(foundCertificates) > 1 {\n\t\t\treturn nil, fmt.Errorf(\"more than one certificate found with name '%s'\", name)\n\t\t}\n\t}\n\n\tif len(certificates) > 1 && !slowSearch {\n\t\t{\n\t\t\treturn nil, fmt.Errorf(\"more than one certificate found with name '%s'\", name)\n\t\t}\n\t}\n\treturn foundCertificates[0], nil\n}", "func CA(cn string) x509.Certificate {\n\treturn x509.Certificate{\n\t\tSubject: pkix.Name{\n\t\t\tCommonName: cn,\n\t\t},\n\t\tIsCA: true,\n\t\tKeyUsage: x509.KeyUsageCertSign | x509.KeyUsageCRLSign | x509.KeyUsageDigitalSignature,\n\t\tExtKeyUsage: []x509.ExtKeyUsage{x509.ExtKeyUsageClientAuth, x509.ExtKeyUsageServerAuth, x509.ExtKeyUsageOCSPSigning},\n\t}\n}", "func GetCaMaterial(ctx *pulumi.Context) string {\n\treturn config.Get(ctx, \"docker:caMaterial\")\n}", "func (h *Handler) GetCACaps(ctx context.Context) (SCEPResponse, error) {\n\n\tcaps := h.Auth.GetCACaps(ctx)\n\n\tresponse := SCEPResponse{\n\t\tOperation: opnGetCACaps,\n\t\tData: formatCapabilities(caps),\n\t}\n\n\treturn response, nil\n}", "func (m *MockCAClient) GetCAInfo() (*api.GetCAInfoResponse, error) {\n\tm.ctrl.T.Helper()\n\tret := m.ctrl.Call(m, \"GetCAInfo\")\n\tret0, _ := ret[0].(*api.GetCAInfoResponse)\n\tret1, _ := ret[1].(error)\n\treturn ret0, ret1\n}", "func (s *SecurityConfig) RootCA() *RootCA {\n\ts.mu.Lock()\n\tdefer s.mu.Unlock()\n\n\treturn s.rootCA\n}", "func (o ArgoCDSpecTlsPtrOutput) Ca() ArgoCDSpecTlsCaPtrOutput {\n\treturn o.ApplyT(func(v *ArgoCDSpecTls) *ArgoCDSpecTlsCa {\n\t\tif v == nil 
{\n\t\t\treturn nil\n\t\t}\n\t\treturn v.Ca\n\t}).(ArgoCDSpecTlsCaPtrOutput)\n}", "func (c *Catalog) DefaultCA(name string, ca credsgen.Certificate) corev1.Secret {\n\treturn corev1.Secret{\n\t\tObjectMeta: metav1.ObjectMeta{Name: name},\n\t\tData: map[string][]byte{\n\t\t\t\"ca\": ca.Certificate,\n\t\t\t\"ca_key\": ca.PrivateKey,\n\t\t},\n\t}\n}", "func CABundleName(stackName string) string {\n\treturn fmt.Sprintf(\"%s-ca-bundle\", stackName)\n}", "func (op *outputProvider) GetCertAuthority(ctx context.Context, id types.CertAuthID, loadKeys bool) (types.CertAuthority, error) {\n\treturn op.impersonatedClient.GetCertAuthority(ctx, id, loadKeys)\n}", "func RoleNameForCertAuthority(name string) string {\n\treturn \"ca:\" + name\n}", "func (o LookupOrganizationResultOutput) CaCertificate() pulumi.StringOutput {\n\treturn o.ApplyT(func(v LookupOrganizationResult) string { return v.CaCertificate }).(pulumi.StringOutput)\n}", "func (o GithubEnterpriseConfigOutput) SslCa() pulumi.StringOutput {\n\treturn o.ApplyT(func(v *GithubEnterpriseConfig) pulumi.StringOutput { return v.SslCa }).(pulumi.StringOutput)\n}", "func (*CaCsrs) GetPath() string { return \"/api/objects/ca/csr/\" }", "func (cm *ComponentManager) Get(name string) Component {\n\tfor _, c := range cm.cc {\n\t\tif c.Name() == name {\n\t\t\treturn c\n\t\t}\n\t}\n\n\treturn nil\n}", "func getCACert(configURL string, certHash string, timeout int64) ([]byte, error) {\n\tc := &http.Client{Timeout: time.Second * time.Duration(timeout)}\n\tresp, err := c.Get(fixURL(configURL) + \"cacert\")\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\tp, err := readBody(resp.Body)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\ttestHash, err := cahash.Hash(p)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\tcertHashb, err := hex.DecodeString(certHash)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\tif !bytes.Equal(testHash, certHashb) {\n\t\treturn nil, ErrHashWrong\n\t}\n\treturn p, nil\n}", "func (o ArgoCDSpecGrafanaRouteTlsOutput) CaCertificate() pulumi.StringPtrOutput {\n\treturn o.ApplyT(func(v ArgoCDSpecGrafanaRouteTls) *string { return v.CaCertificate }).(pulumi.StringPtrOutput)\n}", "func getCountryCode(name string) (string, error) {\n\tfor _, country := range consts.Countries {\n\t\tif strings.EqualFold(strings.ToUpper(country.Name), strings.ToUpper(name)) {\n\t\t\treturn country.Alpha3Code, nil\n\t\t}\n\t}\n\treturn \"OTH\", fmt.Errorf(\"cannot find country code for country %s\", name)\n}", "func fetchCAInfo(ctx context.Context, req *logical.Request) (*caInfoBundle, error) {\n\tbundleEntry, err := req.Storage.Get(ctx, \"config/ca_bundle\")\n\tif err != nil {\n\t\treturn nil, errutil.InternalError{Err: fmt.Sprintf(\"unable to fetch local CA certificate/key: %v\", err)}\n\t}\n\tif bundleEntry == nil {\n\t\treturn nil, errutil.UserError{Err: \"backend must be configured with a CA certificate/key\"}\n\t}\n\n\tvar bundle certutil.CertBundle\n\tif err := bundleEntry.DecodeJSON(&bundle); err != nil {\n\t\treturn nil, errutil.InternalError{Err: fmt.Sprintf(\"unable to decode local CA certificate/key: %v\", err)}\n\t}\n\n\tparsedBundle, err := bundle.ToParsedCertBundle()\n\tif err != nil {\n\t\treturn nil, errutil.InternalError{Err: err.Error()}\n\t}\n\n\tif parsedBundle.Certificate == nil {\n\t\treturn nil, errutil.InternalError{Err: \"stored CA information not able to be parsed\"}\n\t}\n\n\tcaInfo := &caInfoBundle{*parsedBundle, nil}\n\n\tentries, err := getURLs(ctx, req)\n\tif err != nil {\n\t\treturn nil, errutil.InternalError{Err: fmt.Sprintf(\"unable to 
fetch URL information: %v\", err)}\n\t}\n\tif entries == nil {\n\t\tentries = &urlEntries{\n\t\t\tIssuingCertificates: []string{},\n\t\t\tCRLDistributionPoints: []string{},\n\t\t\tOCSPServers: []string{},\n\t\t}\n\t}\n\tcaInfo.URLs = entries\n\n\treturn caInfo, nil\n}", "func (cs *CachingAuthClient) GetCertAuthority(id services.CertAuthID, loadKeys bool) (ca services.CertAuthority, err error) {\n\tcs.fetch(params{\n\t\tkey: certKey(id, loadKeys),\n\t\tfetch: func() error {\n\t\t\tca, err = cs.ap.GetCertAuthority(id, loadKeys)\n\t\t\treturn err\n\t\t},\n\t\tuseCache: func() error {\n\t\t\tca, err = cs.trust.GetCertAuthority(id, loadKeys)\n\t\t\treturn err\n\t\t},\n\t\tupdateCache: func() (keys []string, cerr error) {\n\t\t\tif err := cs.trust.DeleteCertAuthority(id); err != nil {\n\t\t\t\tif !trace.IsNotFound(err) {\n\t\t\t\t\treturn nil, trace.Wrap(err)\n\t\t\t\t}\n\t\t\t}\n\t\t\tcs.setTTL(ca)\n\t\t\tif err := cs.trust.UpsertCertAuthority(ca); err != nil {\n\t\t\t\treturn nil, trace.Wrap(err)\n\t\t\t}\n\t\t\treturn\n\t\t},\n\t})\n\treturn\n}", "func GetCertificateAuthority(ctx *pulumi.Context,\n\tname string, id pulumi.IDInput, state *CertificateAuthorityState, opts ...pulumi.ResourceOption) (*CertificateAuthority, error) {\n\tvar resource CertificateAuthority\n\terr := ctx.ReadResource(\"consul:index/certificateAuthority:CertificateAuthority\", name, id, state, &resource, opts...)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\treturn &resource, nil\n}", "func (client *Client) GetCertificateFromLibraryByName(name string) (*Certificate, error) {\n\treturn getCertificateFromLibraryByName(client, name, nil)\n}", "func (adminOrg *AdminOrg) GetCertificateFromLibraryByName(name string) (*Certificate, error) {\n\ttenantContext, err := adminOrg.getTenantContext()\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\treturn getCertificateFromLibraryByName(adminOrg.client, name, getTenantContextHeader(tenantContext))\n}", "func FindLCA(dag *Dag, employeeOne, employeeTwo string) (string, error) {\n\troot := dag.root\n\tkeyOne := dag.contacts[employeeOne]\n\tkeyTwo := dag.contacts[employeeTwo]\n\n\tvar keyOnePath []int\n\tvar keyTwoPath []int\n\n\tif dag.isCyclic() {\n\t\treturn \"\", errors.New(\"not a DAG\") // lca not defined if not a dag\n\t}\n\tif !findPath(dag, root, &keyOnePath, keyOne) || !findPath(dag, root, &keyTwoPath, keyTwo) {\n\t\treturn \"\", nil\n\t}\n\tvar i int\n\tfor i = 0; i < len(keyOnePath) && i < len(keyTwoPath); i++ {\n\t\tif keyOnePath[i] != keyTwoPath[i] {\n\t\t\tbreak\n\t\t}\n\t}\n\treturn dag.nodes[keyOnePath[i-1]].name, nil\n}", "func getX509CACert(ctx context.Context, key config.KeyConfig, pool sPool, hostname string, ips []net.IP) (*x509.Certificate, error) {\n\t// Try parse certificate in the given location.\n\tif certBytes, err := os.ReadFile(key.X509CACertLocation); err == nil {\n\t\tblock, _ := pem.Decode(certBytes)\n\t\tif cert, err := x509.ParseCertificate(block.Bytes); err != nil {\n\t\t\tlog.Printf(\"unable to parse x509 certificate: %v\", err)\n\t\t} else if time.Now().After(cert.NotAfter) || time.Now().Before(cert.NotBefore) {\n\t\t\tlog.Printf(\"invalid x509 CA certificate: valid between %s and %s\", cert.NotBefore.Format(time.RFC822), cert.NotAfter.Format(time.RFC822))\n\t\t} else {\n\t\t\t// x509 CA certificate is good. 
return it.\n\t\t\treturn cert, nil\n\t\t}\n\t} else {\n\t\tlog.Printf(\"unable to read file %s: %v\", key.X509CACertLocation, err)\n\t}\n\tif !key.CreateCACertIfNotExist {\n\t\treturn nil, errors.New(\"unable to get x509 CA certificate, but CreateCACertIfNotExist is set to false\")\n\t}\n\t// Create x509 CA cert.\n\tsigner, err := pool.get(ctx)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\tdefer pool.put(signer)\n\n\tcaConfig := &crypki.CAConfig{\n\t\tCountry: key.Country,\n\t\tState: key.State,\n\t\tLocality: key.Locality,\n\t\tOrganization: key.Organization,\n\t\tOrganizationalUnit: key.OrganizationalUnit,\n\t\tCommonName: key.CommonName,\n\t\tValidityPeriod: key.ValidityPeriod,\n\t}\n\tcaConfig.LoadDefaults()\n\n\tout, err := x509cert.GenCACert(caConfig, signer, hostname, ips, signer.publicKeyAlgorithm(), signer.signAlgorithm())\n\tif err != nil {\n\t\treturn nil, fmt.Errorf(\"unable to generate x509 CA certificate: %v\", err)\n\t}\n\tif err := os.WriteFile(key.X509CACertLocation, out, 0644); err != nil {\n\t\tlog.Printf(\"new CA cert generated, but unable to write to file %s: %v\", key.X509CACertLocation, err)\n\t\tlog.Printf(\"cert generated: %q\", string(out))\n\t} else {\n\t\tlog.Printf(\"new x509 CA cert written to %s\", key.X509CACertLocation)\n\t}\n\tcd, _ := pem.Decode(out)\n\tcert, _ := x509.ParseCertificate(cd.Bytes)\n\treturn cert, nil\n}", "func (acm *AcmeFS) CreateCA(acct tlsfs.NewDomain, tos tlsfs.TOSAction) (tlsfs.TLSDomainCertificate, tlsfs.Status, error) {\n\treturn tlsfs.TLSDomainCertificate{}, tlsfs.WithStatus(tlsfs.OPFailed, tlsfs.ErrNotSupported), tlsfs.ErrNotSupported\n}", "func (t *SimpleChaincode) readCert(APIstub shim.ChaincodeStubInterface, args []string) pb.Response {\n\tvar name, jsonResp string\n\tvar err error\n\tif len(args) != 1 {\n\t\treturn shim.Error(\"Incorrect number of arguments. 
Expecting 1\")\n\t}\n\tname = args[0]\n\tvalAsbytes, err := APIstub.GetState(name)\n\tif err != nil {\n\t\tjsonResp = \"{\\\"Error\\\":\\\"Failed to get state for \" + name + \"\\\"}\"\n\t\treturn shim.Error(jsonResp)\n\t} else if valAsbytes == nil {\n\t\tjsonResp = \"{\\\"Error\\\":\\\"Certificate does not exist: \" + name + \"\\\"}\"\n\t\treturn shim.Error(jsonResp)\n\t}\n\treturn shim.Success(valAsbytes)\n}", "func (c *CertificateClient) Get(ctx context.Context, idOrName string) (*Certificate, *Response, error) {\n\tif id, err := strconv.Atoi(idOrName); err == nil {\n\t\treturn c.GetByID(ctx, int(id))\n\t}\n\treturn c.GetByName(ctx, idOrName)\n}", "func (a *AdminKubeConfigCABundle) Name() string {\n\treturn \"Certificate (admin-kubeconfig-ca-bundle)\"\n}", "func (c *Certsrv) RetrieveCACerts() []*x509.Certificate {\n\tcaCertURL := fmt.Sprintf(\"%s/certsrv/certnew.p7b?ReqID=CACert&Renewal=0&Enc=bin\", c.Server)\n\n\tlog.Printf(\"Requesting CA certificates from %s\", caCertURL)\n\n\trequest, _ := http.NewRequest(\"GET\", caCertURL, nil)\n\trequest.SetBasicAuth(c.Username, c.Password)\n\tclient := http.Client{}\n\n\tresponse, _ := client.Do(request)\n\tcertBytes, _ := ioutil.ReadAll(response.Body)\n\n\tbag, _ := PKCS7.Parse(certBytes)\n\n\treturn bag.Certificates\n}", "func (c *IdentityConfig) CAConfig(org string) (*msp.CAConfig, error) {\n\tnetworkConfig, err := c.networkConfig()\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\n\treturn c.getCAConfig(networkConfig, org)\n}", "func (r *reconciler) currentRouterCAConfigMap() (*corev1.ConfigMap, error) {\n\tname := routerCAConfigMapName()\n\tcm := &corev1.ConfigMap{}\n\tif err := r.Client.Get(context.TODO(), name, cm); err != nil {\n\t\tif errors.IsNotFound(err) {\n\t\t\treturn nil, nil\n\t\t}\n\t\treturn nil, err\n\t}\n\treturn cm, nil\n}", "func (spec *Spec) CAExpireTime() time.Time {\n\treturn spec.expiry.CA\n}", "func (spec *Spec) CAExpireTime() time.Time {\n\treturn spec.expiry.CA\n}", "func (a tlsCredentials) getCAMountPath() string {\n\tif a.clientCA.ConfigMap != nil || a.clientCA.Secret != nil {\n\t\treturn a.tlsPathForSelector(a.clientCA, \"ca\")\n\t}\n\n\treturn \"\"\n}", "func (k Keeper) GetCert(ctx sdk.Context, addr sdk.AccAddress, property string, certifier sdk.AccAddress) (cert Cert, found bool) {\n\tstore := ctx.KVStore(k.storeKey)\n\tbz := store.Get(KeyCert(addr, property, certifier))\n\tif bz == nil {\n\t\treturn\n\t}\n\tk.cdc.MustUnmarshalBinary(bz, &cert)\n\tfound = true\n\treturn\n}", "func caBundle(opts *git.AuthOptions) []byte {\n\tif opts == nil {\n\t\treturn nil\n\t}\n\treturn opts.CAFile\n}", "func SigningCASecretName(stackName string) string {\n\treturn fmt.Sprintf(\"%s-signing-ca\", stackName)\n}", "func readCACert(caCertPath string) ([]byte, error) {\n\tcaCert, err := os.ReadFile(caCertPath)\n\tif err != nil {\n\t\tlog.Errorf(\"failed to read CA cert, cert. path: %v, error: %v\", caCertPath, err)\n\t\treturn nil, fmt.Errorf(\"failed to read CA cert, cert. 
path: %v, error: %v\", caCertPath, err)\n\t}\n\n\tb, _ := pem.Decode(caCert)\n\tif b == nil {\n\t\treturn nil, fmt.Errorf(\"could not decode pem\")\n\t}\n\tif b.Type != \"CERTIFICATE\" {\n\t\treturn nil, fmt.Errorf(\"ca certificate contains wrong type: %v\", b.Type)\n\t}\n\tif _, err := x509.ParseCertificate(b.Bytes); err != nil {\n\t\treturn nil, fmt.Errorf(\"ca certificate parsing returns an error: %v\", err)\n\t}\n\n\treturn caCert, nil\n}", "func GenerateCA(commonName string,\n serialNumber int64,\n countryCode string,\n organizationalUnit string,\n algo string,\n ecCurve string) (rootCADER []byte, rootPrivateKeyDER []byte, err error) {\n\n notBefore := time.Now().UTC()\n notAfter := notBefore.AddDate(CAValidity, 0, 0) // (years, months. days)\n\n // Hashing algorithm should match the private key type that signs the certificate.\n // In this case we are self-signing so the key generation algorithm and signature hashing algorithm are both of the same type\n hashingAlgorithm := x509.SHA256WithRSA\n switch strings.ToUpper(algo) {\n case \"RSA\":\n // pass\n case \"ECDSA\":\n hashingAlgorithm = x509.ECDSAWithSHA256\n default:\n return nil, nil, errors.New(\"Unrecognized algorithm, valid options are RSA and ECDSA\")\n }\n\n // https://golang.org/pkg/crypto/x509/#Certificate\n myCACertTemplate := x509.Certificate{\n\n // https://golang.org/pkg/crypto/x509/pkix/#Name\n Subject: pkix.Name{\n CommonName: commonName,\n Country: []string{countryCode},\n Organization: []string{organizationalUnit},\n },\n\n NotBefore: notBefore,\n NotAfter: notAfter,\n SerialNumber: big.NewInt(serialNumber), // returns *big.Int\n KeyUsage: RootCAKeyUsage,\n\n // For CAs we at least want []x509.ExtKeyUsage{x509.ExtKeyUsageAny | x509.KeyUsageCertSign}\n // More info: https://golang.org/pkg/crypto/x509/#ExtKeyUsage\n ExtKeyUsage: []x509.ExtKeyUsage{x509.ExtKeyUsageAny}, // this should work\n BasicConstraintsValid: true,\n IsCA: true,\n MaxPathLen: 3, // 1 is enough for our purpose\n SignatureAlgorithm: hashingAlgorithm, // other options are at https://golang.org/pkg/crypto/x509/#SignatureAlgorithm\n }\n\n privKey, pubKey, err := generateKeyPair(algo, ecCurve)\n if err != nil {\n return nil, nil, err\n }\n\n // https://golang.org/pkg/crypto/x509/#CreateCertificate\n // Both the signee and singer are the same template because rootCAs are always self-signed\n rootCADER, err = x509.CreateCertificate(rand.Reader, &myCACertTemplate, &myCACertTemplate, pubKey, privKey)\n if err != nil {\n return nil, nil, err\n }\n\n rootPrivateKeyDER, err = MarshalPrivateKey(privKey)\n\n return rootCADER, rootPrivateKeyDER, err\n}", "func (s *Server) loadCA(caFile string, renew bool) error {\n\tlog.Infof(\"Loading CA from %s\", caFile)\n\tvar err error\n\n\tif !util.FileExists(caFile) {\n\t\treturn errors.Errorf(\"%s file does not exist\", caFile)\n\t}\n\n\t// Creating new Viper instance, to prevent any server level environment variables or\n\t// flags from overridding the configuration options specified in the\n\t// CA config file\n\tcfg := &CAConfig{}\n\tcaViper := viper.New()\n\terr = UnmarshalConfig(cfg, caViper, caFile, false)\n\tif err != nil {\n\t\treturn err\n\t}\n\n\t// Need to error if no CA name provided in config file, we cannot revert to using\n\t// the name of default CA cause CA names must be unique\n\tcaName := cfg.CA.Name\n\tif caName == \"\" {\n\t\treturn errors.Errorf(\"No CA name provided in CA configuration file. 
CA name is required in %s\", caFile)\n\t}\n\n\t// Replace missing values in CA configuration values with values from the\n\t// default CA configuration\n\tutil.CopyMissingValues(s.CA.Config, cfg)\n\n\t// Integers and boolean values are handled outside the util.CopyMissingValues\n\t// because there is no way through reflect to detect if a value was explicitly\n\t// set to 0 or false, or it is using the default value for its type. Viper is\n\t// employed here to help detect.\n\tif !caViper.IsSet(\"registry.maxenrollments\") {\n\t\tcfg.Registry.MaxEnrollments = s.CA.Config.Registry.MaxEnrollments\n\t}\n\n\tif !caViper.IsSet(\"db.tls.enabled\") {\n\t\tcfg.DB.TLS.Enabled = s.CA.Config.DB.TLS.Enabled\n\t}\n\n\tlog.Debugf(\"CA configuration after checking for missing values: %+v\", cfg)\n\n\tca, err := newCA(caFile, cfg, s, renew)\n\tif err != nil {\n\t\treturn err\n\t}\n\terr = s.addCA(ca)\n\tif err != nil {\n\t\terr2 := ca.closeDB()\n\t\tif err2 != nil {\n\t\t\tlog.Errorf(\"Close DB failed: %s\", err2)\n\t\t}\n\t}\n\treturn err\n}", "func (*CaCrls) GetPath() string { return \"/api/objects/ca/crl/\" }", "func caNewCmd(cmd *cli.Cmd) {\n\tcmd.Spec = \"NAME [OPTIONS]\"\n\n\tparams := controller.NewCAParams()\n\tparams.Name = cmd.StringArg(\"NAME\", \"\", \"name of CA\")\n\n\tparams.CertFile = cmd.StringOpt(\"cert\", \"\", \"certificate PEM file\")\n\tparams.KeyFile = cmd.StringOpt(\"key\", \"\", \"key PEM file\")\n\tparams.Tags = cmd.StringOpt(\"tags\", \"NAME\", \"comma separated list of tags\")\n\tparams.CaExpiry = cmd.IntOpt(\"ca-expiry\", 365, \"CA expiry period in days\")\n\tparams.CertExpiry = cmd.IntOpt(\"cert-expiry\", 90, \"Certificate expiry period in days\")\n\tparams.KeyType = cmd.StringOpt(\"key-type\", \"ec\", \"Key type (ec or rsa)\")\n\tparams.DnLocality = cmd.StringOpt(\"dn-l\", \"\", \"Locality for DN scope\")\n\tparams.DnState = cmd.StringOpt(\"dn-st\", \"\", \"State/province for DN scope\")\n\tparams.DnOrg = cmd.StringOpt(\"dn-o\", \"\", \"Organization for DN scope\")\n\tparams.DnOrgUnit = cmd.StringOpt(\"dn-ou\", \"\", \"Organizational unit for DN scope\")\n\tparams.DnCountry = cmd.StringOpt(\"dn-c\", \"\", \"Country for DN scope\")\n\tparams.DnStreet = cmd.StringOpt(\"dn-street\", \"\", \"Street for DN scope\")\n\tparams.DnPostal = cmd.StringOpt(\"dn-postal\", \"\", \"PostalCode for DN scope\")\n\n\tcmd.Action = func() {\n\t\tapp := NewAdminApp()\n\t\tlogger.Info(\"creating new CA\")\n\n\t\tcont, err := controller.NewCA(app.env)\n\t\tif err != nil {\n\t\t\tapp.Fatal(err)\n\t\t}\n\n\t\tca, err := cont.New(params)\n\t\tif err != nil {\n\t\t\tapp.Fatal(err)\n\t\t}\n\n\t\tif ca != nil {\n\t\t\ttable := app.NewTable()\n\n\t\t\tcaData := [][]string{\n\t\t\t\t[]string{\"Id\", ca.Id()},\n\t\t\t\t[]string{\"Name\", ca.Name()},\n\t\t\t}\n\n\t\t\ttable.AppendBulk(caData)\n\n\t\t\tapp.RenderTable(table)\n\t\t}\n\n\t}\n}", "func GenerateCA() (*util_tls.KeyPair, error) {\n\tsubject := pkix.Name{\n\t\tOrganization: []string{\"Kuma\"},\n\t\tOrganizationalUnit: []string{\"Mesh\"},\n\t\tCommonName: \"Envoy Admin CA\",\n\t}\n\treturn util_tls.GenerateCA(util_tls.DefaultKeyType, subject)\n}", "func (p *aksClusterProvider) Get(ctx *provider.Context, clusterID string, identity provider.Identity) (*provider.Cluster, error) {\n\tif err := p.setup(ctx.ConfigurationItems(), identity); err != nil {\n\t\treturn nil, fmt.Errorf(\"setting up aks provider: %w\", err)\n\t}\n\tp.logger.Infow(\"getting AKS cluster\", \"id\", clusterID)\n\n\tresourceID, err := id.FromClusterID(clusterID)\n\tif err != nil 
{\n\t\treturn nil, fmt.Errorf(\"getting resource id: %w\", err)\n\t}\n\n\tclient := containerservice.NewManagedClustersClient(resourceID.SubscriptionID)\n\tclient.Authorizer = p.authorizer\n\n\tresult, err := client.Get(ctx.Context, resourceID.ResourceGroupName, resourceID.ResourceName)\n\tif err != nil {\n\t\treturn nil, fmt.Errorf(\"getting cluster: %w\", err)\n\t}\n\n\tcluster := &provider.Cluster{\n\t\tName: *result.Name,\n\t\tID: clusterID,\n\t}\n\n\treturn cluster, nil\n}", "func New(cfg *config.Config, opts ...Option) (*CA, error) {\n\tca := &CA{\n\t\tconfig: cfg,\n\t\topts: new(options),\n\t\tcompactStop: make(chan struct{}),\n\t}\n\tca.opts.apply(opts)\n\treturn ca.Init(cfg)\n}", "func (o ArgoCDSpecGrafanaRouteTlsPtrOutput) CaCertificate() pulumi.StringPtrOutput {\n\treturn o.ApplyT(func(v *ArgoCDSpecGrafanaRouteTls) *string {\n\t\tif v == nil {\n\t\t\treturn nil\n\t\t}\n\t\treturn v.CaCertificate\n\t}).(pulumi.StringPtrOutput)\n}", "func (cs *CachingAuthClient) GetCertAuthorities(ct services.CertAuthType, loadKeys bool) ([]services.CertAuthority, error) {\n\tcs.try(func() error {\n\t\tretval, err := cs.ap.GetCertAuthorities(ct, loadKeys)\n\t\tif err == nil {\n\t\t\tcs.Lock()\n\t\t\tdefer cs.Unlock()\n\t\t\tif ct == services.UserCA {\n\t\t\t\tcs.userCAs = retval\n\t\t\t} else {\n\t\t\t\tcs.hostCAs = retval\n\t\t\t}\n\t\t}\n\t\treturn err\n\t})\n\tcs.RLock()\n\tdefer cs.RUnlock()\n\tif ct == services.UserCA {\n\t\treturn cs.userCAs, nil\n\t}\n\treturn cs.hostCAs, nil\n}", "func Get() *cobra.Command {\n\tcmd := &cobra.Command{\n\t\tUse: \"get\",\n\t\tShort: \"Gets CA certificate for SSH engine.\",\n\t\tRunE: func(cmd *cobra.Command, args []string) error {\n\t\t\tengineName, _ := cmd.Flags().GetString(\"engine\")\n\n\t\t\tapi, err := vault.NewAPI()\n\t\t\tif err != nil {\n\t\t\t\treturn err\n\t\t\t}\n\n\t\t\tpublicKey, err := controller.NewController(api).GetCACertificate(engineName)\n\t\t\tif err != nil {\n\t\t\t\treturn err\n\t\t\t}\n\n\t\t\tlog.Infoln(publicKey)\n\n\t\t\treturn nil\n\t\t},\n\t}\n\n\tcmd.Flags().String(\"engine\", vault.SSHEngineDefaultName, \"SSH engine path\")\n\n\treturn cmd\n}", "func getTLScerts(c, k, ca string) ([]byte, []byte, []byte, error) {\n\tres := [][]byte{}\n\tvar err error\n\tvar a []byte\n\tfor _, l := range []string{c, k, ca} {\n\t\ta, err = ioutil.ReadFile(l)\n\t\tif err != nil {\n\t\t\tlog.Printf(\"getTLScerts failed to load file %s: %s\", l, err)\n\t\t\tbreak\n\t\t}\n\t\tres = append(res, a)\n\t}\n\tif err != nil {\n\t\tisX := false\n\t\thost := \"host\"\n\t\trsaBits := 2048\n\t\tecdsaCurve := \"\"\n\t\tvalidFor := 365 * 24 * time.Hour\n\t\tvalidFrom := \"\"\n\t\tisCA := true\n\t\tlog.Println(\"creating CA\")\n\t\tcacert, cakey, err := internal.Ca(host, rsaBits, ecdsaCurve, validFrom, validFor)\n\t\tif err != nil {\n\t\t\tlog.Fatalf(\"failed to create certificate: %s\", err)\n\t\t}\n\t\tca_key_pair, err := tls.X509KeyPair(pem.EncodeToMemory(&cacert), pem.EncodeToMemory(&cakey))\n\t\tif err != nil {\n\t\t\tlog.Fatalf(\"failed to make ca key pair: %s\", err)\n\t\t}\n\t\tlog.Println(\"creating certificate\")\n\t\tisCA = false\n\t\tcert, priv, err := internal.CaSignedCert(cert_common_name, host, rsaBits, ecdsaCurve, validFrom, validFor, isCA, isX, &ca_key_pair)\n\t\tif err != nil {\n\t\t\tlog.Fatalf(\"failed to make signed cert %s\", err)\n\t\t}\n\t\treturn pem.EncodeToMemory(&cert), pem.EncodeToMemory(&priv), pem.EncodeToMemory(&cacert), nil\n\t}\n\treturn res[0], res[1], res[2], nil\n}", "func (s CertStore) getCert(findType C.DWORD, findPara 
unsafe.Pointer) C.PCCERT_CONTEXT {\n\treturn C.CertFindCertificateInStore(s.hStore, C.MY_ENC_TYPE, 0, findType, findPara, nil)\n}", "func LookupCaCertificate(ctx *pulumi.Context, args *LookupCaCertificateArgs, opts ...pulumi.InvokeOption) (*LookupCaCertificateResult, error) {\n\topts = internal.PkgInvokeDefaultOpts(opts)\n\tvar rv LookupCaCertificateResult\n\terr := ctx.Invoke(\"aws-native:iot:getCaCertificate\", args, &rv, opts...)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\treturn &rv, nil\n}", "func ToCASecretAndConfigMapName(kind, name string) string {\n\treturn strings.ToLower(kind) + \"-\" + name + \"-ca\"\n}" ]
[ "0.7360491", "0.726224", "0.72147197", "0.6874108", "0.68455625", "0.68152076", "0.6745675", "0.6694911", "0.6577618", "0.65335727", "0.63607407", "0.6278475", "0.620559", "0.6117742", "0.6054124", "0.605299", "0.59357685", "0.5926828", "0.59215695", "0.5918912", "0.58893293", "0.58397883", "0.58239144", "0.57313544", "0.5639739", "0.5624446", "0.5599071", "0.55881006", "0.5586986", "0.55858135", "0.55654204", "0.55633", "0.5546263", "0.5539011", "0.55280554", "0.551508", "0.5509083", "0.5504979", "0.54810596", "0.54777485", "0.5438806", "0.543195", "0.54291767", "0.5427303", "0.5424624", "0.5402963", "0.5387399", "0.538231", "0.5374052", "0.5366905", "0.53653175", "0.5360482", "0.5358148", "0.53479445", "0.5339548", "0.5337381", "0.533115", "0.5299234", "0.52979296", "0.52971524", "0.52936715", "0.5288304", "0.52735645", "0.5266777", "0.52564543", "0.523319", "0.52287614", "0.52282035", "0.5227455", "0.52121365", "0.5207129", "0.52043504", "0.5186174", "0.5155394", "0.5154163", "0.51446974", "0.5142235", "0.5117329", "0.5115635", "0.51155776", "0.51155776", "0.5105813", "0.50895476", "0.5077161", "0.5074913", "0.5072088", "0.5070764", "0.50650924", "0.5057988", "0.5056779", "0.5056214", "0.50481987", "0.5045245", "0.50374216", "0.5034446", "0.50344193", "0.5033928", "0.50171065", "0.5015127", "0.49862114" ]
0.7994458
0
Register all endpoint handlers
func (s *Server) registerHandlers() { s.mux.Use(s.cors, s.middleware) s.registerHandler(newCAInfoEndpoint(s)) s.registerHandler(newRegisterEndpoint(s)) s.registerHandler(newEnrollEndpoint(s)) s.registerHandler(newIdemixEnrollEndpoint(s)) s.registerHandler(newIdemixCRIEndpoint(s)) s.registerHandler(newReenrollEndpoint(s)) s.registerHandler(newRevokeEndpoint(s)) s.registerHandler(newGenCRLEndpoint(s)) s.registerHandler(newIdentitiesStreamingEndpoint(s)) s.registerHandler(newIdentitiesEndpoint(s)) s.registerHandler(newAffiliationsStreamingEndpoint(s)) s.registerHandler(newAffiliationsEndpoint(s)) s.registerHandler(newCertificateEndpoint(s)) }
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "func RegisterHandlers(router EchoRouter, si ServerInterface) {\n\n\twrapper := ServerInterfaceWrapper{\n\t\tHandler: si,\n\t}\n\n\trouter.GET(\"/v1/api/claims\", wrapper.GetClaims)\n\trouter.POST(\"/v1/api/claims\", wrapper.CreateClaim)\n\trouter.GET(\"/v1/api/claims/find\", wrapper.FindClaimByName)\n\trouter.DELETE(\"/v1/api/claims/:id\", wrapper.DeleteClaim)\n\trouter.GET(\"/v1/api/claims/:id\", wrapper.GetClaim)\n\trouter.PUT(\"/v1/api/claims/:id\", wrapper.UpdateClaim)\n\trouter.GET(\"/v1/api/scopes\", wrapper.GetScopes)\n\trouter.POST(\"/v1/api/scopes\", wrapper.CreateScope)\n\trouter.GET(\"/v1/api/scopes/find\", wrapper.FindScopeByName)\n\trouter.DELETE(\"/v1/api/scopes/:id\", wrapper.DeleteScope)\n\trouter.GET(\"/v1/api/scopes/:id\", wrapper.GetScope)\n\trouter.PUT(\"/v1/api/scopes/:id\", wrapper.UpdateScope)\n\trouter.POST(\"/v1/api/scopes/:id/claim\", wrapper.AddClaimToScope)\n\trouter.DELETE(\"/v1/api/scopes/:id/claim/:claimId\", wrapper.RemoveClaimFromScope)\n\trouter.GET(\"/v1/api/secretchannels\", wrapper.GetSecretChannels)\n\trouter.POST(\"/v1/api/secretchannels\", wrapper.CreateSecretChannel)\n\trouter.GET(\"/v1/api/secretchannels/find/algouse\", wrapper.FindSecretChannelByAlgouse)\n\trouter.GET(\"/v1/api/secretchannels/find/name\", wrapper.FindSecretChannelByName)\n\trouter.DELETE(\"/v1/api/secretchannels/:id\", wrapper.DeleteSecretChannel)\n\trouter.GET(\"/v1/api/secretchannels/:id\", wrapper.GetSecretChannel)\n\trouter.POST(\"/v1/api/secretchannels/:id\", wrapper.RenewSecretChannel)\n\trouter.GET(\"/v1/api/serviceproviders\", wrapper.GetServiceProviders)\n\trouter.POST(\"/v1/api/serviceproviders\", wrapper.CreateServiceProvider)\n\trouter.GET(\"/v1/api/serviceproviders/find\", wrapper.FindServiceProvider)\n\trouter.DELETE(\"/v1/api/serviceproviders/:id\", wrapper.DeleteServiceProvider)\n\trouter.GET(\"/v1/api/serviceproviders/:id\", wrapper.GetServiceProvider)\n\trouter.PATCH(\"/v1/api/serviceproviders/:id\", wrapper.PatchServiceProvider)\n\trouter.PUT(\"/v1/api/serviceproviders/:id\", wrapper.UpdateServiceProvider)\n\trouter.GET(\"/v1/api/serviceproviders/:id/credentials\", wrapper.GetCredentials)\n\trouter.POST(\"/v1/api/serviceproviders/:id/credentials\", wrapper.GenerateCredentials)\n\trouter.POST(\"/v1/api/serviceproviders/:id/status\", wrapper.UpdateServiceProviderStatus)\n\trouter.GET(\"/v1/api/users\", wrapper.GetUsers)\n\trouter.POST(\"/v1/api/users\", wrapper.CreateUser)\n\trouter.GET(\"/v1/api/users/find\", wrapper.FindUser)\n\trouter.POST(\"/v1/api/users/recover/password\", wrapper.InitiatePasswordRecovery)\n\trouter.PUT(\"/v1/api/users/recover/password\", wrapper.ResetUserPassword)\n\trouter.DELETE(\"/v1/api/users/:id\", wrapper.DeleteUser)\n\trouter.GET(\"/v1/api/users/:id\", wrapper.GetUser)\n\trouter.PUT(\"/v1/api/users/:id\", wrapper.UpdateUser)\n\trouter.POST(\"/v1/api/users/:id/password\", wrapper.ChangeUserPassword)\n\trouter.POST(\"/v1/api/users/:id/status\", wrapper.UpdateUserStatus)\n\n}", "func (ws *WebServer) registerHandlers() {\n\t// --------------------------------\n\t// AVAILABLE WITHOUT AUTH\n\n\tws.router.Use(\n\t\tws.addHeaders, ws.optionsHandler,\n\t\tws.handlerFiles, ws.handleMetrics)\n\n\tws.router.Get(\"/ota\", ws.handlerGetOta)\n\n\timagestore := ws.router.Group(\"/imagestore\")\n\timagestore.\n\t\tGet(\"/<id>\", ws.handlerGetImage)\n\n\tutils := ws.router.Group(\"/api/util\")\n\tutils.\n\t\tGet(`/color/<hexcode:[\\da-fA-F]{6,8}>`, ws.handlerGetColor)\n\tutils.\n\t\tGet(\"/commands\", 
ws.handlerGetCommands)\n\tutils.\n\t\tGet(\"/landingpageinfo\", ws.handlerGetLandingPageInfo)\n\n\tws.router.Get(\"/invite\", ws.handlerGetInvite)\n\n\t// --------------------------------\n\t// ONLY AVAILABLE AFTER AUTH\n\n\tws.router.Get(endpointLogInWithDC, ws.dcoauth.HandlerInit)\n\tws.router.Get(endpointAuthCB, ws.dcoauth.HandlerCallback)\n\n\tws.router.Use(ws.auth.checkAuth)\n\tif !util.DevModeEnabled {\n\t\tws.router.Use(ws.af.Handler)\n\t}\n\n\tapi := ws.router.Group(\"/api\")\n\tapi.\n\t\tGet(\"/me\", ws.af.SessionSetHandler, ws.handlerGetMe)\n\tapi.\n\t\tPost(\"/logout\", ws.auth.LogOutHandler)\n\tapi.\n\t\tGet(\"/sysinfo\", ws.handlerGetSystemInfo)\n\n\tsettings := api.Group(\"/settings\")\n\tsettings.\n\t\tGet(\"/presence\", ws.handlerGetPresence).\n\t\tPost(ws.handlerPostPresence)\n\tsettings.\n\t\tGet(\"/noguildinvite\", ws.handlerGetInviteSettings).\n\t\tPost(ws.handlerPostInviteSettings)\n\n\tguilds := api.Group(\"/guilds\")\n\tguilds.\n\t\tGet(\"\", ws.handlerGuildsGet)\n\n\tguild := guilds.Group(\"/<guildid:[0-9]+>\")\n\tguild.\n\t\tGet(\"\", ws.handlerGuildsGetGuild)\n\tguild.\n\t\tGet(\"/permissions\", ws.handlerGetGuildPermissions).\n\t\tPost(ws.handlerPostGuildPermissions)\n\tguild.\n\t\tGet(\"/members\", ws.handlerGetGuildMembers)\n\tguild.\n\t\tPost(\"/inviteblock\", ws.handlerPostGuildInviteBlock)\n\tguild.\n\t\tGet(\"/scoreboard\", ws.handlerGetGuildScoreboard)\n\tguild.\n\t\tGet(\"/antiraid/joinlog\", ws.handlerGetGuildAntiraidJoinlog).\n\t\tDelete(ws.handlerDeleteGuildAntiraidJoinlog)\n\n\tguildUnbanRequests := guild.Group(\"/unbanrequests\")\n\tguildUnbanRequests.\n\t\tGet(\"\", ws.handlerGetGuildUnbanrequests)\n\tguildUnbanRequests.\n\t\tGet(\"/count\", ws.handlerGetGuildUnbanrequestsCount)\n\tguildUnbanRequests.\n\t\tGet(\"/<id:[0-9]+>\", ws.handlerGetGuildUnbanrequest).\n\t\tPost(ws.handlerPostGuildUnbanrequest)\n\n\tguildSettings := guild.Group(\"/settings\")\n\tguildSettings.\n\t\tGet(\"/karma\", ws.handlerGetGuildSettingsKarma).\n\t\tPost(ws.handlerPostGuildSettingsKarma)\n\tguildSettings.\n\t\tGet(\"/antiraid\", ws.handlerGetGuildSettingsAntiraid).\n\t\tPost(ws.handlerPostGuildSettingsAntiraid)\n\n\tguildSettingsKarmaBlocklist := guildSettings.Group(\"/karma/blocklist\")\n\tguildSettingsKarmaBlocklist.\n\t\tGet(\"\", ws.handlerGetGuildSettingsKarmaBlocklist)\n\tguildSettingsKarmaBlocklist.\n\t\tPut(\"/<memberid>\", ws.handlerPutGuildSettingsKarmaBlocklist).\n\t\tDelete(ws.handlerDeleteGuildSettingsKarmaBlocklist)\n\n\tguild.\n\t\tGet(\"/settings\", ws.handlerGetGuildSettings).\n\t\tPost(ws.handlerPostGuildSettings)\n\n\tguildReports := guild.Group(\"/reports\")\n\tguildReports.\n\t\tGet(\"\", ws.handlerGetReports)\n\tguildReports.\n\t\tGet(\"/count\", ws.handlerGetReportsCount)\n\n\tguildBackups := guild.Group(\"/backups\")\n\tguildBackups.\n\t\tGet(\"\", ws.handlerGetGuildBackups)\n\tguildBackups.\n\t\tPost(\"/toggle\", ws.handlerPostGuildBackupsToggle)\n\tguildBackups.\n\t\tGet(\"/<backupid:[0-9]+>/download\", ws.handlerGetGuildBackupDownload)\n\n\tmember := guilds.Group(\"/<guildid:[0-9]+>/<memberid:[0-9]+>\")\n\tmember.\n\t\tGet(\"\", ws.handlerGuildsGetMember)\n\tmember.\n\t\tGet(\"/permissions\", ws.handlerGetMemberPermissions)\n\tmember.\n\t\tGet(\"/permissions/allowed\", ws.handlerGetMemberPermissionsAllowed)\n\tmember.\n\t\tPost(\"/kick\", ws.handlerPostGuildMemberKick)\n\tmember.\n\t\tPost(\"/ban\", ws.handlerPostGuildMemberBan)\n\tmember.\n\t\tPost(\"/mute\", ws.handlerPostGuildMemberMute)\n\tmember.\n\t\tPost(\"/unmute\", 
ws.handlerPostGuildMemberUnmute)\n\tmember.\n\t\tGet(\"/unbanrequests\", ws.handlerGetGuildMemberUnbanrequests)\n\n\tmemberReports := member.Group(\"/reports\")\n\tmemberReports.\n\t\tGet(\"\", ws.handlerGetReports).\n\t\tPost(ws.handlerPostGuildMemberReport)\n\tmemberReports.\n\t\tGet(\"/count\", ws.handlerGetReportsCount)\n\n\treports := api.Group(\"/reports\")\n\treport := reports.Group(\"/<id:[0-9]+>\")\n\treport.\n\t\tGet(\"\", ws.handlerGetReport)\n\treport.\n\t\tPost(\"/revoke\", ws.handlerPostReportRevoke)\n\n\tunbanReqeusts := api.Group(\"/unbanrequests\")\n\tunbanReqeusts.\n\t\tGet(\"\", ws.handlerGetUnbanrequest).\n\t\tPost(ws.handlerPostUnbanrequest)\n\tunbanReqeusts.\n\t\tGet(\"/bannedguilds\", ws.handlerGetUnbanrequestBannedguilds)\n\n\tapi.\n\t\tGet(\"/token\", ws.handlerGetToken).\n\t\tPost(ws.handlerPostToken).\n\t\tDelete(ws.handlerDeleteToken)\n\n\tusersettings := api.Group(\"/usersettings\")\n\tusersettings.\n\t\tGet(\"/ota\", ws.handlerGetUsersettingsOta).\n\t\tPost(ws.handlerPostUsersettingsOta)\n}", "func RegisterRestEndpoints(endpointInsts map[string]RestEndpointInst) {\n\n\tfor url, endpointInst := range endpointInsts {\n\t\tregistered[url] = endpointInst\n\n\t\tHandleFunc(url, func() func(w http.ResponseWriter, r *http.Request) {\n\t\t\tvar handlerURL = url\n\t\t\tvar handlerInst = endpointInst\n\n\t\t\treturn func(w http.ResponseWriter, r *http.Request) {\n\n\t\t\t\t// Create a new handler instance\n\n\t\t\t\thandler := handlerInst()\n\n\t\t\t\t// Handle request in appropriate method\n\n\t\t\t\tres := strings.TrimSpace(r.URL.Path[len(handlerURL):])\n\n\t\t\t\tif len(res) > 0 && res[len(res)-1] == '/' {\n\t\t\t\t\tres = res[:len(res)-1]\n\t\t\t\t}\n\n\t\t\t\tvar resources []string\n\n\t\t\t\tif res != \"\" {\n\t\t\t\t\tresources = strings.Split(res, \"/\")\n\t\t\t\t}\n\n\t\t\t\tswitch r.Method {\n\t\t\t\tcase \"GET\":\n\t\t\t\t\thandler.HandleGET(w, r, resources)\n\n\t\t\t\tcase \"POST\":\n\t\t\t\t\thandler.HandlePOST(w, r, resources)\n\n\t\t\t\tcase \"PUT\":\n\t\t\t\t\thandler.HandlePUT(w, r, resources)\n\n\t\t\t\tcase \"DELETE\":\n\t\t\t\t\thandler.HandleDELETE(w, r, resources)\n\n\t\t\t\tdefault:\n\t\t\t\t\thttp.Error(w, http.StatusText(http.StatusMethodNotAllowed),\n\t\t\t\t\t\thttp.StatusMethodNotAllowed)\n\t\t\t\t}\n\t\t\t}\n\t\t}())\n\t}\n}", "func RegisterHandlers(router EchoRouter, si ServerInterface) {\n\n\twrapper := ServerInterfaceWrapper{\n\t\tHandler: si,\n\t}\n\n\trouter.GET(\"/architectures/:distribution\", wrapper.GetArchitectures)\n\trouter.POST(\"/compose\", wrapper.ComposeImage)\n\trouter.GET(\"/composes\", wrapper.GetComposes)\n\trouter.GET(\"/composes/:composeId\", wrapper.GetComposeStatus)\n\trouter.GET(\"/composes/:composeId/metadata\", wrapper.GetComposeMetadata)\n\trouter.GET(\"/distributions\", wrapper.GetDistributions)\n\trouter.GET(\"/openapi.json\", wrapper.GetOpenapiJson)\n\trouter.GET(\"/packages\", wrapper.GetPackages)\n\trouter.GET(\"/ready\", wrapper.GetReadiness)\n\trouter.GET(\"/version\", wrapper.GetVersion)\n\n}", "func RegisterHandlers(r *routing.RouteGroup, service Service, logger log.Logger) {\n\tres := resource{service, logger}\n\tr.Post(\"/webhooks\", res.create)\n r.Delete(\"/webhooks/<id>\", res.delete)\n}", "func RegisterHandlers(router EchoRouter, si ServerInterface) {\n\n\twrapper := ServerInterfaceWrapper{\n\t\tHandler: si,\n\t}\n\n\trouter.POST(\"/search/apiCatalog/entries\", wrapper.SearchApiCatalog1)\n\trouter.GET(\"/search/apiCatalog/namespaces\", wrapper.GetApiNamespaces1)\n\trouter.POST(\"/search/apis\", 
wrapper.SearchApis1)\n\trouter.POST(\"/search/clients\", wrapper.SearchClients1)\n\trouter.POST(\"/search/organizations\", wrapper.SearchOrgs1)\n\trouter.POST(\"/search/roles\", wrapper.SearchRoles1)\n\trouter.POST(\"/search/users\", wrapper.SearchUsers1)\n\n}", "func RegisterHandlers(router EchoRouter, si ServerInterface) {\n\n\twrapper := ServerInterfaceWrapper{\n\t\tHandler: si,\n\t}\n\n\trouter.GET(\"/gateways\", wrapper.ListGateways)\n\trouter.POST(\"/gateways\", wrapper.PostGateways)\n\trouter.PUT(\"/gateways\", wrapper.PutGateways)\n\trouter.DELETE(\"/gateways/:gatewayId\", wrapper.DeleteGatewayById)\n\trouter.GET(\"/gateways/:gatewayId\", wrapper.GetGatewayById)\n\trouter.PUT(\"/gateways/:gatewayId\", wrapper.PutGatewayById)\n\trouter.GET(\"/gateways/:gatewayId/endpoint\", wrapper.GetGatewayEndpoint1)\n\n}", "func (c *Operation) registerHandler() {\n\t// Add more protocol endpoints here to expose them as controller API endpoints\n\tc.handlers = []Handler{\n\t\tsupport.NewHTTPHandler(login, http.MethodGet, c.login),\n\t\tsupport.NewHTTPHandler(settings, http.MethodGet, c.settings),\n\t\tsupport.NewHTTPHandler(getCreditScore, http.MethodGet, c.getCreditScore),\n\t\tsupport.NewHTTPHandler(callback, http.MethodGet, c.callback),\n\t\tsupport.NewHTTPHandler(oidcRedirectPath, http.MethodGet, c.oidcRedirect),\n\n\t\t// issuer rest apis (html decoupled)\n\t\tsupport.NewHTTPHandler(authPath, http.MethodGet, c.auth),\n\t\tsupport.NewHTTPHandler(searchPath, http.MethodGet, c.search),\n\t\tsupport.NewHTTPHandler(verifyDIDAuthPath, http.MethodPost, c.verifyDIDAuthHandler),\n\t\tsupport.NewHTTPHandler(createCredentialPath, http.MethodPost, c.createCredentialHandler),\n\t\tsupport.NewHTTPHandler(generateCredentialPath, http.MethodPost, c.generateCredentialHandler),\n\n\t\t// chapi\n\t\tsupport.NewHTTPHandler(revoke, http.MethodPost, c.revokeVC),\n\t\tsupport.NewHTTPHandler(generate, http.MethodPost, c.generateVC),\n\n\t\t// didcomm\n\t\tsupport.NewHTTPHandler(didcommToken, http.MethodPost, c.didcommTokenHandler),\n\t\tsupport.NewHTTPHandler(didcommCallback, http.MethodGet, c.didcommCallbackHandler),\n\t\tsupport.NewHTTPHandler(didcommCredential, http.MethodPost, c.didcommCredentialHandler),\n\t\tsupport.NewHTTPHandler(didcommAssuranceData, http.MethodPost, c.didcommAssuraceHandler),\n\n\t\tsupport.NewHTTPHandler(didcommInit, http.MethodGet, c.initiateDIDCommConnection),\n\t\tsupport.NewHTTPHandler(didcommUserEndpoint, http.MethodGet, c.getIDHandler),\n\n\t\t// oidc\n\t\tsupport.NewHTTPHandler(oauth2GetRequestPath, http.MethodGet, c.createOIDCRequest),\n\t\tsupport.NewHTTPHandler(oauth2CallbackPath, http.MethodGet, c.handleOIDCCallback),\n\n\t\t// JSON-LD contexts API\n\t\tsupport.NewHTTPHandler(jsonldcontextrest.AddContextPath, http.MethodPost, c.addJSONLDContextHandler),\n\t}\n}", "func RegisterEndpoints(router *fasthttprouter.Router) {\n\thandle(router, \"POST\", \"/submit/question\", SubmitQuestion)\n\thandle(router, \"POST\", \"/submit/answer\", SubmitAnswer)\n\thandleWs(router, wsPath)\n\thandleFS(router, \"/\", \"public\")\n\thandleFS(router, \"/images/*filepath\", \"public/images\")\n\thandleFS(router, \"/js/*filepath\", \"public/js\")\n\thandleFS(router, \"/vendor/*filepath\", \"public/vendor\")\n\thandleFS(router, \"/css/*filepath\", \"public/css\")\n\thandleNotFound(router)\n}", "func RegisterHandlers(router runtime.EchoRouter, si ServerInterface) {\n\n\twrapper := ServerInterfaceWrapper{\n\t\tHandler: si,\n\t}\n\n\trouter.POST(\"/v1/auth/auth\", 
wrapper.PerformAuth)\n\trouter.GET(\"/v1/auth/check\", wrapper.Check)\n\trouter.POST(\"/v1/auth/passwordreset/request\", wrapper.Passwordresetrequest)\n\trouter.POST(\"/v1/auth/passwordreset/verify\", wrapper.Passwordresetverify)\n\trouter.POST(\"/v1/auth/register\", wrapper.CreateAccount)\n\trouter.POST(\"/v1/auth/verifyemail\", wrapper.Verify)\n\n}", "func (s *server) RegisterHandlers(rs *runtime.ServeMux, conn *grpc.ClientConn) {\n\tttnpb.RegisterApplicationPackageRegistryHandler(s.ctx, rs, conn)\n\tfor _, subsystem := range s.handlers {\n\t\tif subsystem, ok := subsystem.(rpcserver.ServiceRegisterer); ok {\n\t\t\tsubsystem.RegisterHandlers(rs, conn)\n\t\t}\n\t}\n}", "func RegisterHandlers(router EchoRouter, si ServerInterface) {\n\tRegisterHandlersWithBaseURL(router, si, \"\")\n}", "func RegisterHandlers(router EchoRouter, si ServerInterface) {\n\tRegisterHandlersWithBaseURL(router, si, \"\")\n}", "func RegisterHandlers(router EchoRouter, si ServerInterface) {\n\tRegisterHandlersWithBaseURL(router, si, \"\")\n}", "func RegisterHandlers(router EchoRouter, si ServerInterface) {\n\tRegisterHandlersWithBaseURL(router, si, \"\")\n}", "func RegisterHandlers(router EchoRouter, si ServerInterface) {\n\tRegisterHandlersWithBaseURL(router, si, \"\")\n}", "func RegisterHandlers(router EchoRouter, si ServerInterface) {\n\tRegisterHandlersWithBaseURL(router, si, \"\")\n}", "func RegisterHandlers(router EchoRouter, si ServerInterface) {\n\tRegisterHandlersWithBaseURL(router, si, \"\")\n}", "func (e *Endpoint) RegisterRoutes(g *echo.Group) {\n\tg.GET(\"/health\", e.Check)\n}", "func RegisterHandlersWithBaseURL(router EchoRouter, si ServerInterface, baseURL string) {\n\n\twrapper := ServerInterfaceWrapper{\n\t\tHandler: si,\n\t}\n\n\trouter.GET(baseURL+\"/accounts\", wrapper.GetAccounts)\n\trouter.GET(baseURL+\"/accounts/:account/:currency\", wrapper.GetAccountByTypeAndCurrency)\n\trouter.GET(baseURL+\"/deposits\", wrapper.GetDeposits)\n\trouter.POST(baseURL+\"/deposits/:currency\", wrapper.GetDepositAddress)\n\trouter.GET(baseURL+\"/deposits/:depositId\", wrapper.GetDepositById)\n\trouter.GET(baseURL+\"/fees\", wrapper.GetFees)\n\trouter.GET(baseURL+\"/fills\", wrapper.GetFills)\n\trouter.GET(baseURL+\"/l2/:symbol\", wrapper.GetL2OrderBook)\n\trouter.GET(baseURL+\"/l3/:symbol\", wrapper.GetL3OrderBook)\n\trouter.DELETE(baseURL+\"/orders\", wrapper.DeleteAllOrders)\n\trouter.GET(baseURL+\"/orders\", wrapper.GetOrders)\n\trouter.POST(baseURL+\"/orders\", wrapper.CreateOrder)\n\trouter.DELETE(baseURL+\"/orders/:orderId\", wrapper.DeleteOrder)\n\trouter.GET(baseURL+\"/orders/:orderId\", wrapper.GetOrderById)\n\trouter.GET(baseURL+\"/symbols\", wrapper.GetSymbols)\n\trouter.GET(baseURL+\"/symbols/:symbol\", wrapper.GetSymbolByName)\n\trouter.GET(baseURL+\"/tickers\", wrapper.GetTickers)\n\trouter.GET(baseURL+\"/tickers/:symbol\", wrapper.GetTickerBySymbol)\n\trouter.GET(baseURL+\"/trades\", wrapper.GetTrades)\n\trouter.GET(baseURL+\"/whitelist\", wrapper.GetWhitelist)\n\trouter.GET(baseURL+\"/whitelist/:currency\", wrapper.GetWhitelistByCurrency)\n\trouter.GET(baseURL+\"/withdrawals\", wrapper.GetWithdrawals)\n\trouter.POST(baseURL+\"/withdrawals\", wrapper.CreateWithdrawal)\n\trouter.GET(baseURL+\"/withdrawals/:withdrawalId\", wrapper.GetWithdrawalById)\n\n}", "func RegisterHandlers(router runtime.EchoRouter, si ServerInterface) {\n\n\twrapper := ServerInterfaceWrapper{\n\t\tHandler: si,\n\t}\n\n\trouter.GET(\"/zones\", wrapper.GetZones)\n\trouter.DELETE(\"/zones/:zone\", 
wrapper.DeleteZone)\n\trouter.GET(\"/zones/:zone\", wrapper.GetZone)\n\trouter.POST(\"/zones/:zone\", wrapper.UpdateZone)\n\trouter.PUT(\"/zones/:zone\", wrapper.CreateZone)\n\trouter.DELETE(\"/zones/:zone/:domain/:recordType\", wrapper.DeleteDomain)\n\trouter.GET(\"/zones/:zone/:domain/:recordType\", wrapper.GetDomain)\n\trouter.POST(\"/zones/:zone/:domain/:recordType\", wrapper.UpdateDomain)\n\trouter.PUT(\"/zones/:zone/:domain/:recordType\", wrapper.CreateDomain)\n\n}", "func RegisterHandlersWithBaseURL(router EchoRouter, si ServerInterface, baseURL string) {\n\n\twrapper := ServerInterfaceWrapper{\n\t\tHandler: si,\n\t}\n\n\trouter.GET(baseURL+\"/customers\", wrapper.GetCustomers)\n\trouter.POST(baseURL+\"/customers\", wrapper.PostCustomers)\n\trouter.DELETE(baseURL+\"/customers/:id\", wrapper.DeleteCustomersId)\n\trouter.GET(baseURL+\"/customers/:id\", wrapper.GetCustomersId)\n\trouter.PUT(baseURL+\"/customers/:id\", wrapper.PutCustomersId)\n\trouter.GET(baseURL+\"/employees\", wrapper.GetEmployees)\n\trouter.POST(baseURL+\"/employees\", wrapper.PostEmployees)\n\trouter.DELETE(baseURL+\"/employees/:id\", wrapper.DeleteEmployeesId)\n\trouter.GET(baseURL+\"/employees/:id\", wrapper.GetEmployeesId)\n\trouter.PUT(baseURL+\"/employees/:id\", wrapper.PutEmployeesId)\n\trouter.GET(baseURL+\"/expenses\", wrapper.GetExpenses)\n\trouter.POST(baseURL+\"/expenses\", wrapper.PostExpenses)\n\trouter.DELETE(baseURL+\"/expenses/:id\", wrapper.DeleteExpensesId)\n\trouter.GET(baseURL+\"/expenses/:id\", wrapper.GetExpensesId)\n\trouter.PUT(baseURL+\"/expenses/:id\", wrapper.PutExpensesId)\n\trouter.GET(baseURL+\"/invoices\", wrapper.GetInvoices)\n\trouter.POST(baseURL+\"/invoices\", wrapper.PostInvoices)\n\trouter.DELETE(baseURL+\"/invoices/:id\", wrapper.DeleteInvoicesId)\n\trouter.GET(baseURL+\"/invoices/:id\", wrapper.GetInvoicesId)\n\trouter.PUT(baseURL+\"/invoices/:id\", wrapper.PutInvoicesId)\n\trouter.GET(baseURL+\"/misc_records\", wrapper.GetMiscRecords)\n\trouter.POST(baseURL+\"/misc_records\", wrapper.PostMiscRecords)\n\trouter.DELETE(baseURL+\"/misc_records/:id\", wrapper.DeleteMiscRecordsId)\n\trouter.GET(baseURL+\"/misc_records/:id\", wrapper.GetMiscRecordsId)\n\trouter.PUT(baseURL+\"/misc_records/:id\", wrapper.PutMiscRecordsId)\n\trouter.GET(baseURL+\"/projects\", wrapper.GetProjects)\n\trouter.POST(baseURL+\"/projects\", wrapper.PostProjects)\n\trouter.DELETE(baseURL+\"/projects/:id\", wrapper.DeleteProjectsId)\n\trouter.GET(baseURL+\"/projects/:id\", wrapper.GetProjectsId)\n\trouter.PUT(baseURL+\"/projects/:id\", wrapper.PutProjectsId)\n\n}", "func (self *CentralBooking) InstallHandlers(router *mux.Router) {\n router.\n Methods(\"POST\").\n Path(\"/register/instance\").\n HandlerFunc(self.RegisterInstance)\n\n // apeing vault\n router.\n Methods(\"GET\").\n Path(\"/sys/health\").\n HandlerFunc(self.CheckHealth)\n}", "func (o *Operation) registerHandler() {\n\t// Add more protocol endpoints here to expose them as controller API endpoints\n\to.handlers = []operation.Handler{\n\t\tsupport.NewHTTPHandler(createPublicDIDPath, http.MethodPost, o.CreatePublicDID),\n\t\tsupport.NewHTTPHandler(registerMsgService, http.MethodPost, o.RegisterMessageService),\n\t\tsupport.NewHTTPHandler(unregisterMsgService, http.MethodPost, o.UnregisterMessageService),\n\t\tsupport.NewHTTPHandler(msgServiceList, http.MethodGet, o.RegisteredServices),\n\t\tsupport.NewHTTPHandler(sendNewMsg, http.MethodPost, o.SendNewMessage),\n\t\tsupport.NewHTTPHandler(sendReplyMsg, http.MethodPost, 
o.SendReplyMessage),\n\t\tsupport.NewHTTPHandler(registerHTTPOverDIDCommService, http.MethodPost, o.RegisterHTTPMessageService),\n\t}\n}", "func RegisterHandlers(r *routing.RouteGroup, service Service, authHandler routing.Handler, logger log.Logger) {\n\tres := resource{service, logger}\n\tr.Use(authHandler)\n\t// the following endpoints require a valid JWT\n\tr.Get(\"/users/<id>\", res.get)\n\tr.Get(\"/users\", res.query)\n}", "func RegisterHandlers(g *echo.Group, service Service, logger log.Logger,\n\tmailer Mailer, templater tpl.Service, UIAddress string) {\n\n\tres := resource{service, logger, mailer, templater, UIAddress}\n\n\tg.POST(\"/auth/login/\", res.login)\n\tg.POST(\"/auth/reset-password/\", res.resetPassword)\n\tg.POST(\"/auth/password/\", res.createNewPassword)\n\tg.GET(\"/auth/verify-account/\", res.verifyAccount)\n}", "func (c *Operation) registerHandler() {\n\t// Add more protocol endpoints here to expose them as controller API endpoints\n\tc.handlers = []rest.Handler{\n\t\tcmdutil.NewHTTPHandler(CreateBlocDIDPath, http.MethodPost, c.CreateTrustBlocDID),\n\t\tcmdutil.NewHTTPHandler(CreatePeerDIDPath, http.MethodPost, c.CreatePeerDID),\n\t}\n}", "func CreateEndpointHandlers(lbc *controller.LoadBalancerController) cache.ResourceEventHandlerFuncs {\n\treturn cache.ResourceEventHandlerFuncs{\n\t\tAddFunc: func(obj interface{}) {\n\t\t\tendpoint := obj.(*api_v1.Endpoints)\n\t\t\tlog.Printf(\"Adding endpoints: %v\", endpoint.Name)\n\t\t\tlbc.AddSyncQueue(obj)\n\t\t},\n\t\tDeleteFunc: func(obj interface{}) {\n\t\t\tendpoint, isEndpoint := obj.(*api_v1.Endpoints)\n\t\t\tif !isEndpoint {\n\t\t\t\tdeletedState, ok := obj.(cache.DeletedFinalStateUnknown)\n\t\t\t\tif !ok {\n\t\t\t\t\tlog.Printf(\"Error received unexpected object: %v\", obj)\n\t\t\t\t\treturn\n\t\t\t\t}\n\t\t\t\tendpoint, ok = deletedState.Obj.(*api_v1.Endpoints)\n\t\t\t\tif !ok {\n\t\t\t\t\tlog.Printf(\"Error DeletedFinalStateUnknown contained non-Endpoints object: %v\", deletedState.Obj)\n\t\t\t\t\treturn\n\t\t\t\t}\n\t\t\t}\n\t\t\tlog.Printf(\"Removing endpoints: %v\", endpoint.Name)\n\t\t\tlbc.AddSyncQueue(obj)\n\t\t},\n\t\tUpdateFunc: func(old, cur interface{}) {\n\t\t\tif !reflect.DeepEqual(old, cur) {\n\t\t\t\tlog.Printf(\"Endpoints %v changed, syncing\", cur.(*api_v1.Endpoints).Name)\n\t\t\t\tlbc.AddSyncQueue(cur)\n\t\t\t}\n\t\t},\n\t}\n}", "func (o *Operation) registerHandler() {\n\t// Add more protocol endpoints here to expose them as controller API endpoints\n\to.handlers = []rest.Handler{\n\t\tcmdutil.NewHTTPHandler(RegisterPath, http.MethodPost, o.Register),\n\t\tcmdutil.NewHTTPHandler(UnregisterPath, http.MethodDelete, o.Unregister),\n\t\tcmdutil.NewHTTPHandler(GetConnectionsPath, http.MethodGet, o.Connections),\n\t\tcmdutil.NewHTTPHandler(ReconnectPath, http.MethodPost, o.Reconnect),\n\t\tcmdutil.NewHTTPHandler(StatusPath, http.MethodPost, o.Status),\n\t\tcmdutil.NewHTTPHandler(BatchPickupPath, http.MethodPost, o.BatchPickup),\n\t\tcmdutil.NewHTTPHandler(ReconnectAllPath, http.MethodGet, o.ReconnectAll),\n\t}\n}", "func registerAllHandlers(mux *http.ServeMux) {\n\tpodnodesselector.Register(mux)\n}", "func RegisterHandlers(router EchoRouter, si ServerInterface) {\n\n\twrapper := ServerInterfaceWrapper{\n\t\tHandler: si,\n\t}\n\n\trouter.POST(\"/v1/clinics/:clinicid/summaries\", wrapper.PostV1ClinicsCliniidSummary)\n\trouter.POST(\"/v1/users/:userid/summaries\", wrapper.PostV1UsersUseridSummaries)\n\trouter.POST(\"/v1/users/:userid/summary\", wrapper.PostV1UsersUseridSummary)\n\n}", "func 
registerAllHandlers(mux *http.ServeMux) {\n\tmux.HandleFunc(\"/\", handleRoot)\n\tpodnodesselector.Register(mux)\n}", "func RegisterHandlers(router EchoRouter, si ServerInterface) {\n\n\twrapper := ServerInterfaceWrapper{\n\t\tHandler: si,\n\t}\n\n\trouter.GET(\"/roles\", wrapper.List2)\n\trouter.POST(\"/roles\", wrapper.Create3)\n\trouter.DELETE(\"/roles/:roleId\", wrapper.Delete3)\n\trouter.GET(\"/roles/:roleId\", wrapper.Get3)\n\trouter.PUT(\"/roles/:roleId\", wrapper.Update2)\n\n}", "func RegisterEndpoints(router *mux.Router) {\n\tschemaConfig := graphql.SchemaConfig{Query: queries.RootQuery}\n\tschema, err := graphql.NewSchema(schemaConfig)\n\tif err != nil {\n\t\tlog.Fatal(\"Schema was unable to be started up\")\n\t}\n\n\tgraphqlHandler := handler.New(&handler.Config{\n\t\tSchema: &schema,\n\t\tPretty: true,\n\t\tGraphiQL: !secret.Production,\n\t})\n\n\trouter.Handle(\"/graphql\", graphqlHandler)\n}", "func RegisterHandlers(rg *routing.RouteGroup, service Service, authHandler routing.Handler, logger log.Logger) {\n rg.Post(\"/login\", login(service, logger))\n rg.Post(\"/oauth2/github\", authenticateGitHub(service))\n\n rg.Use(authHandler)\n rg.Get(\"/verify_token\", verifyToken(logger))\n}", "func setupHandlers() error {\n\treturn pluginRegistry.Do(func(plugin core.Pluginer) error {\n\t\tfor _, url := range plugin.URLs() {\n\t\t\tif err := url.Register(Context.Router, middlewares); err != nil {\n\t\t\t\treturn err\n\t\t\t}\n\n\t\t}\n\t\treturn nil\n\t})\n}", "func (g gateway) registerEndpoints() (*http.ServeMux, error) {\n\tfor prefix, registers := range g.endpoints {\n\t\tgwmux := runtime.NewServeMux(\n\t\t\tappend([]runtime.ServeMuxOption{runtime.WithErrorHandler(ProtoMessageErrorHandler),\n\t\t\t\truntime.WithMetadata(MetadataAnnotator)}, g.gatewayMuxOptions...)...,\n\t\t)\n\t\tfor _, register := range registers {\n\t\t\tif err := register(\n\t\t\t\tcontext.Background(), gwmux, g.serverAddress, g.serverDialOptions,\n\t\t\t); err != nil {\n\t\t\t\treturn nil, err\n\t\t\t}\n\t\t}\n\t\t// strip prefix from testRequest URI, but leave the trailing \"/\"\n\t\tg.mux.Handle(prefix, http.StripPrefix(prefix[:len(prefix)-1], gwmux))\n\t}\n\treturn g.mux, nil\n}", "func RegisterHandlersWithBaseURL(router EchoRouter, si ServerInterface, baseURL string) {\n\n\twrapper := ServerInterfaceWrapper{\n\t\tHandler: si,\n\t}\n\n\trouter.GET(baseURL+\"/inventory\", wrapper.GetInventory)\n\trouter.POST(baseURL+\"/inventory\", wrapper.UpsertInventory)\n\trouter.GET(baseURL+\"/products\", wrapper.ListProducts)\n\trouter.POST(baseURL+\"/products\", wrapper.UpsertProducts)\n\trouter.GET(baseURL+\"/productstock\", wrapper.ListProductStocks)\n\trouter.POST(baseURL+\"/sell\", wrapper.SellFromInventory)\n\n}", "func (s *server) RegisterRoutes(ws *web.Server) {\n\tfor _, subsystem := range s.handlers {\n\t\tif subsystem, ok := subsystem.(web.Registerer); ok {\n\t\t\tsubsystem.RegisterRoutes(ws)\n\t\t}\n\t}\n}", "func RegisterHandlers(r *routing.RouteGroup, service Service, authHandler routing.Handler, logger log.Logger) {\n\tres := resource{service, logger}\n\tr.Use(authHandler)\n\n\tr.Get(\"/orders/<id>\", res.getOrder)\n\tr.Post(\"/orders\", res.placeOrder)\n\tr.Put(\"/orders\", res.updateOrder)\n}", "func registerHandlers() Option {\n\treturn optionFunc(func(c *config) {\n\t\tmux := http.NewServeMux()\n\t\tfor pattern, h := range c.handlers {\n\t\t\tmux.Handle(pattern, h)\n\t\t}\n\t\tc.httpServer.Handler = mux\n\t})\n}", "func (e *EnterpriseEndpoints) Register(s *rpc.Server) {}", "func RegisterHandlers(mux 
*http.ServeMux) {\n\tprefix := \"/api/v1\"\n\n\tregisterWithAuth(mux, prefix+\"/auth\", HandleAuth)\n\tregisterWithAuth(mux, prefix+\"/agents\", HandleAgents)\n\tregisterWithAuth(mux, prefix+\"/vms\", HandleVMs)\n\tregisterWithAuth(mux, prefix+\"/vms/\", HandleVMs)\n\tregisterWithAuth(mux, prefix+\"/forwards\", HandleForwards)\n\tregisterWithAuth(mux, prefix+\"/images\", HandleImages)\n\tregisterWithAuth(mux, prefix+\"/metrics/json\", HandleJsonMetrics)\n\n\tmux.HandleFunc(prefix+\"/login\", HandleOIDCCallback)\n\n\tInitMetricsHandler()\n\tmux.Handle(prefix+\"/metrics\", GetMetricsHandler())\n}", "func (h *Handlers) RegisterRoutesTo(router gin.IRouter) {\n\trouter.GET(\"/health\", wrapHandler(h.handleGetHealth))\n\trouter.POST(\"/dummy\", wrapHandler(h.handleDummy))\n\n\trouter.POST(\"/collection\", wrapHandler(h.handleCreateCollection))\n\trouter.DELETE(\"/collection\", wrapHandler(h.handleDropCollection))\n\trouter.GET(\"/collection/existence\", wrapHandler(h.handleHasCollection))\n\trouter.GET(\"/collection\", wrapHandler(h.handleDescribeCollection))\n\trouter.POST(\"/collection/load\", wrapHandler(h.handleLoadCollection))\n\trouter.DELETE(\"/collection/load\", wrapHandler(h.handleReleaseCollection))\n\trouter.GET(\"/collection/statistics\", wrapHandler(h.handleGetCollectionStatistics))\n\trouter.GET(\"/collections\", wrapHandler(h.handleShowCollections))\n\n\trouter.POST(\"/partition\", wrapHandler(h.handleCreatePartition))\n\trouter.DELETE(\"/partition\", wrapHandler(h.handleDropPartition))\n\trouter.GET(\"/partition/existence\", wrapHandler(h.handleHasPartition))\n\trouter.POST(\"/partitions/load\", wrapHandler(h.handleLoadPartitions))\n\trouter.DELETE(\"/partitions/load\", wrapHandler(h.handleReleasePartitions))\n\trouter.GET(\"/partition/statistics\", wrapHandler(h.handleGetPartitionStatistics))\n\trouter.GET(\"/partitions\", wrapHandler(h.handleShowPartitions))\n\n\trouter.POST(\"/alias\", wrapHandler(h.handleCreateAlias))\n\trouter.DELETE(\"/alias\", wrapHandler(h.handleDropAlias))\n\trouter.PATCH(\"/alias\", wrapHandler(h.handleAlterAlias))\n\n\trouter.POST(\"/index\", wrapHandler(h.handleCreateIndex))\n\trouter.GET(\"/index\", wrapHandler(h.handleDescribeIndex))\n\trouter.GET(\"/index/state\", wrapHandler(h.handleGetIndexState))\n\trouter.GET(\"/index/progress\", wrapHandler(h.handleGetIndexBuildProgress))\n\trouter.DELETE(\"/index\", wrapHandler(h.handleDropIndex))\n\n\trouter.POST(\"/entities\", wrapHandler(h.handleInsert))\n\trouter.DELETE(\"/entities\", wrapHandler(h.handleDelete))\n\trouter.POST(\"/search\", wrapHandler(h.handleSearch))\n\trouter.POST(\"/query\", wrapHandler(h.handleQuery))\n\n\trouter.POST(\"/persist\", wrapHandler(h.handleFlush))\n\trouter.GET(\"/distance\", wrapHandler(h.handleCalcDistance))\n\trouter.GET(\"/persist/state\", wrapHandler(h.handleGetFlushState))\n\trouter.GET(\"/persist/segment-info\", wrapHandler(h.handleGetPersistentSegmentInfo))\n\trouter.GET(\"/query-segment-info\", wrapHandler(h.handleGetQuerySegmentInfo))\n\trouter.GET(\"/replicas\", wrapHandler(h.handleGetReplicas))\n\n\trouter.GET(\"/metrics\", wrapHandler(h.handleGetMetrics))\n\trouter.POST(\"/load-balance\", wrapHandler(h.handleLoadBalance))\n\trouter.GET(\"/compaction/state\", wrapHandler(h.handleGetCompactionState))\n\trouter.GET(\"/compaction/plans\", wrapHandler(h.handleGetCompactionStateWithPlans))\n\trouter.POST(\"/compaction\", wrapHandler(h.handleManualCompaction))\n\n\trouter.POST(\"/import\", wrapHandler(h.handleImport))\n\trouter.GET(\"/import/state\", 
wrapHandler(h.handleGetImportState))\n\trouter.GET(\"/import/tasks\", wrapHandler(h.handleListImportTasks))\n\n\trouter.POST(\"/credential\", wrapHandler(h.handleCreateCredential))\n\trouter.PATCH(\"/credential\", wrapHandler(h.handleUpdateCredential))\n\trouter.DELETE(\"/credential\", wrapHandler(h.handleDeleteCredential))\n\trouter.GET(\"/credential/users\", wrapHandler(h.handleListCredUsers))\n\n}", "func RegisterHandlers(router runtime.EchoRouter, si ServerInterface) {\n\twrapper := ServerInterfaceWrapper{\n\t\tHandler: si,\n\t}\n\n\trouter.GET(\"/customers\", wrapper.Customers)\n\trouter.POST(\"/customers\", wrapper.NewCustomer)\n\trouter.GET(\"/customers/:id\", wrapper.GetCustomer)\n\trouter.PUT(\"/customers/:id\", wrapper.UpdateCustomer)\n\trouter.GET(\"/projects\", wrapper.Projects)\n\trouter.POST(\"/projects\", wrapper.NewProject)\n\trouter.GET(\"/projects/:id\", wrapper.GetProject)\n\trouter.PUT(\"/projects/:id\", wrapper.UpdateProject)\n\trouter.GET(\"/projects/:project_id/issues\", wrapper.Issues)\n\trouter.POST(\"/projects/:project_id/issues\", wrapper.NewIssue)\n\trouter.GET(\"/projects/:project_id/issues/:id\", wrapper.GetIssue)\n\trouter.PUT(\"/projects/:project_id/issues/:id\", wrapper.UpdateIssue)\n\trouter.GET(\"/projects/:project_id/issues/:issue_id/comments\", wrapper.Comments)\n\trouter.POST(\"/projects/:project_id/issues/:issue_id/comments\", wrapper.NewComment)\n\trouter.PUT(\"/projects/:project_id/issues/:issue_id/comments/:id\", wrapper.UpdateComment)\n\trouter.GET(\"/users\", wrapper.Users)\n\trouter.GET(\"/users/:id\", wrapper.GetUser)\n}", "func RegisterAPIHandlers(r pure.IRouteGroup) {\n\tr.Get(\"/token\", tokenGetAPI)\n}", "func (h *RestHandler) Register(api *operations.RestAPISvcAPI) {\n\tapi.EventCreateHandler = operations.EventCreateHandlerFunc(h.eventCreate)\n\tapi.EventReadHandler = operations.EventReadHandlerFunc(h.eventRead)\n\tapi.EventsListHandler = operations.EventsListHandlerFunc(h.eventsList)\n\tapi.EventUpdateHandler = operations.EventUpdateHandlerFunc(h.eventUpdate)\n\tapi.EventDeleteHandler = operations.EventDeleteHandlerFunc(h.eventDelete)\n}", "func (o *Operation) registerHandler() {\n\t// Add more protocol endpoints here to expose them as controller API endpoints\n\to.handlers = []rest.Handler{\n\t\tcmdutil.NewHTTPHandler(SaveDIDPath, http.MethodPost, o.SaveDID),\n\t\tcmdutil.NewHTTPHandler(ResolveDIDPath, http.MethodGet, o.ResolveDID),\n\t\tcmdutil.NewHTTPHandler(CreateDIDPath, http.MethodPost, o.CreateDID),\n\t\tcmdutil.NewHTTPHandler(GetDIDRecordsPath, http.MethodGet, o.GetDIDRecords),\n\t\tcmdutil.NewHTTPHandler(GetDIDPath, http.MethodGet, o.GetDID),\n\t}\n}", "func registerPeerRESTHandlers(router *mux.Router) {\n\tserver := &peerRESTServer{}\n\tsubrouter := router.PathPrefix(peerRESTPath).Subrouter()\n\tsubrouter.Methods(http.MethodPost).Path(SlashSeparator + peerRESTMethodNetReadPerfInfo).HandlerFunc(httpTraceHdrs(server.NetReadPerfInfoHandler)).Queries(restQueries(peerRESTNetPerfSize)...)\n\tsubrouter.Methods(http.MethodPost).Path(SlashSeparator + peerRESTMethodCollectNetPerfInfo).HandlerFunc(httpTraceHdrs(server.CollectNetPerfInfoHandler)).Queries(restQueries(peerRESTNetPerfSize)...)\n\tsubrouter.Methods(http.MethodPost).Path(SlashSeparator + peerRESTMethodGetLocks).HandlerFunc(httpTraceHdrs(server.GetLocksHandler))\n\tsubrouter.Methods(http.MethodPost).Path(SlashSeparator + peerRESTMethodServerInfo).HandlerFunc(httpTraceHdrs(server.ServerInfoHandler))\n\tsubrouter.Methods(http.MethodPost).Path(SlashSeparator + 
peerRESTMethodCPULoadInfo).HandlerFunc(httpTraceHdrs(server.CPULoadInfoHandler))\n\tsubrouter.Methods(http.MethodPost).Path(SlashSeparator + peerRESTMethodMemUsageInfo).HandlerFunc(httpTraceHdrs(server.MemUsageInfoHandler))\n\tsubrouter.Methods(http.MethodPost).Path(SlashSeparator + peerRESTMethodDrivePerfInfo).HandlerFunc(httpTraceHdrs(server.DrivePerfInfoHandler)).Queries(restQueries(peerRESTDrivePerfSize)...)\n\tsubrouter.Methods(http.MethodPost).Path(SlashSeparator + peerRESTMethodDeleteBucket).HandlerFunc(httpTraceHdrs(server.DeleteBucketHandler)).Queries(restQueries(peerRESTBucket)...)\n\tsubrouter.Methods(http.MethodPost).Path(SlashSeparator + peerRESTMethodSignalService).HandlerFunc(httpTraceHdrs(server.SignalServiceHandler)).Queries(restQueries(peerRESTSignal)...)\n\tsubrouter.Methods(http.MethodPost).Path(SlashSeparator + peerRESTMethodServerUpdate).HandlerFunc(httpTraceHdrs(server.ServerUpdateHandler)).Queries(restQueries(peerRESTUpdateURL, peerRESTSha256Hex, peerRESTLatestRelease)...)\n\n\tsubrouter.Methods(http.MethodPost).Path(SlashSeparator + peerRESTMethodBucketPolicyRemove).HandlerFunc(httpTraceAll(server.RemoveBucketPolicyHandler)).Queries(restQueries(peerRESTBucket)...)\n\tsubrouter.Methods(http.MethodPost).Path(SlashSeparator + peerRESTMethodBucketPolicySet).HandlerFunc(httpTraceHdrs(server.SetBucketPolicyHandler)).Queries(restQueries(peerRESTBucket)...)\n\n\tsubrouter.Methods(http.MethodPost).Path(SlashSeparator + peerRESTMethodDeletePolicy).HandlerFunc(httpTraceAll(server.DeletePolicyHandler)).Queries(restQueries(peerRESTPolicy)...)\n\tsubrouter.Methods(http.MethodPost).Path(SlashSeparator + peerRESTMethodLoadPolicy).HandlerFunc(httpTraceAll(server.LoadPolicyHandler)).Queries(restQueries(peerRESTPolicy)...)\n\tsubrouter.Methods(http.MethodPost).Path(SlashSeparator + peerRESTMethodLoadPolicyMapping).HandlerFunc(httpTraceAll(server.LoadPolicyMappingHandler)).Queries(restQueries(peerRESTUserOrGroup)...)\n\tsubrouter.Methods(http.MethodPost).Path(SlashSeparator + peerRESTMethodDeleteUser).HandlerFunc(httpTraceAll(server.LoadUserHandler)).Queries(restQueries(peerRESTUser)...)\n\tsubrouter.Methods(http.MethodPost).Path(SlashSeparator + peerRESTMethodLoadUser).HandlerFunc(httpTraceAll(server.LoadUserHandler)).Queries(restQueries(peerRESTUser, peerRESTUserTemp)...)\n\tsubrouter.Methods(http.MethodPost).Path(SlashSeparator + peerRESTMethodLoadUsers).HandlerFunc(httpTraceAll(server.LoadUsersHandler))\n\tsubrouter.Methods(http.MethodPost).Path(SlashSeparator + peerRESTMethodLoadGroup).HandlerFunc(httpTraceAll(server.LoadGroupHandler)).Queries(restQueries(peerRESTGroup)...)\n\n\tsubrouter.Methods(http.MethodPost).Path(SlashSeparator + peerRESTMethodStartProfiling).HandlerFunc(httpTraceAll(server.StartProfilingHandler)).Queries(restQueries(peerRESTProfiler)...)\n\tsubrouter.Methods(http.MethodPost).Path(SlashSeparator + peerRESTMethodDownloadProfilingData).HandlerFunc(httpTraceHdrs(server.DownloadProflingDataHandler))\n\n\tsubrouter.Methods(http.MethodPost).Path(SlashSeparator + peerRESTMethodTargetExists).HandlerFunc(httpTraceHdrs(server.TargetExistsHandler)).Queries(restQueries(peerRESTBucket)...)\n\tsubrouter.Methods(http.MethodPost).Path(SlashSeparator + peerRESTMethodSendEvent).HandlerFunc(httpTraceHdrs(server.SendEventHandler)).Queries(restQueries(peerRESTBucket)...)\n\tsubrouter.Methods(http.MethodPost).Path(SlashSeparator + 
peerRESTMethodBucketNotificationPut).HandlerFunc(httpTraceHdrs(server.PutBucketNotificationHandler)).Queries(restQueries(peerRESTBucket)...)\n\tsubrouter.Methods(http.MethodPost).Path(SlashSeparator + peerRESTMethodBucketNotificationListen).HandlerFunc(httpTraceHdrs(server.ListenBucketNotificationHandler)).Queries(restQueries(peerRESTBucket)...)\n\n\tsubrouter.Methods(http.MethodPost).Path(SlashSeparator + peerRESTMethodReloadFormat).HandlerFunc(httpTraceHdrs(server.ReloadFormatHandler)).Queries(restQueries(peerRESTDryRun)...)\n\tsubrouter.Methods(http.MethodPost).Path(SlashSeparator + peerRESTMethodBucketLifecycleSet).HandlerFunc(httpTraceHdrs(server.SetBucketLifecycleHandler)).Queries(restQueries(peerRESTBucket)...)\n\tsubrouter.Methods(http.MethodPost).Path(SlashSeparator + peerRESTMethodBucketLifecycleRemove).HandlerFunc(httpTraceHdrs(server.RemoveBucketLifecycleHandler)).Queries(restQueries(peerRESTBucket)...)\n\tsubrouter.Methods(http.MethodPost).Path(SlashSeparator + peerRESTMethodBackgroundOpsStatus).HandlerFunc(server.BackgroundOpsStatusHandler)\n\n\tsubrouter.Methods(http.MethodPost).Path(SlashSeparator + peerRESTMethodTrace).HandlerFunc(server.TraceHandler)\n\tsubrouter.Methods(http.MethodPost).Path(SlashSeparator + peerRESTMethodBackgroundHealStatus).HandlerFunc(server.BackgroundHealStatusHandler)\n\tsubrouter.Methods(http.MethodPost).Path(SlashSeparator + peerRESTMethodLog).HandlerFunc(server.ConsoleLogHandler)\n\n\trouter.NotFoundHandler = http.HandlerFunc(httpTraceAll(notFoundHandler))\n}", "func InstallHandlers(r *router.Router, base router.MiddlewareChain) {\n\tr.GET(\"/auth/api/v1/server/certificates\", base, certsHandler)\n\tr.GET(\"/auth/api/v1/server/info\", base, infoHandler)\n\tr.GET(\"/auth/api/v1/server/client_id\", base, clientIDHandler)\n}", "func setuphandlers() {\n\thttp.HandleFunc(\"/\", rootHandler)\n\thttp.HandleFunc(\"/status\", statusHandler)\n\thttp.HandleFunc(\"/stats\", statsHandler)\n\thttp.HandleFunc(\"/request\", requestHandler)\n}", "func (h *handler) RegisterHandlers(r *mux.Router) {\n\tr.HandleFunc(\"/publickey\", h.publicKey).Methods(\"GET\")\n\tr.HandleFunc(\"/discharge\", h.discharge).Methods(\"POST\")\n}", "func (s *Server) RegisterHandlers(r *mux.Router) {\n\tlogStr := \"{logid}\"\n\tr.HandleFunc(fmt.Sprintf(api.HTTPGetSTH, logStr), s.getSTH).Methods(\"GET\")\n\tr.HandleFunc(fmt.Sprintf(api.HTTPUpdate, logStr), s.update).Methods(\"PUT\")\n\tr.HandleFunc(api.HTTPGetLogs, s.getLogs).Methods(\"GET\")\n}", "func RegisterEndpoints(router *mux.Router) {\n\tfmt.Println(\"Registering wheel endpoints...\")\n\twheelDriver = NewDriver(adaptor.RPi)\n\trouter.HandleFunc(\"/wheels\", setWheelsMovement).Methods(http.MethodPut)\n}", "func (endpoints *endpointDetails) Install(mux login.Mux, paths ...string) {\n\tfor _, prefix := range paths {\n\t\tmux.HandleFunc(path.Join(prefix, RequestTokenEndpoint), endpoints.readyHandler(endpoints.requestToken))\n\t\tmux.HandleFunc(path.Join(prefix, DisplayTokenEndpoint), endpoints.readyHandler(endpoints.displayToken))\n\t\tmux.HandleFunc(path.Join(prefix, ImplicitTokenEndpoint), endpoints.implicitToken)\n\t}\n}", "func (w *AdminWebInterface) registerHandlers(s *iris.Station) {\n\tadmin := s.Party(w.options.Path)\n\t{\n\t\tadmin.Get(\"/\", func(c *iris.Context) {\n\t\t\tw.templates.ExecuteTemplate(c.ResponseWriter, \"index.html\", IndexPage{w.GetName(), \"Welcome to Iris admin panel\"})\n\t\t})\n\t}\n}", "func RegisterHandlers() {\n\tweb.HandleGet(\"/\", handle)\n}", "func RegisterHandlers(healthchecks *health.Health) 
{\n\thttp.Handle(\"/_/health.json\", healthhttp.HandleHealthJSON(*healthchecks))\n\t// BEGIN __INCLUDE_GRPC__\n\n\t// zpages open a UI for gRPC tracing.\n\t// See http://localhost:8080/_/zpages/rpcz\n\t// And http://localhost:8080/_/zpages/tracez\n\tzpages.Handle(http.DefaultServeMux, \"/_/zpages\")\n\t// END __INCLUDE_GRPC__\n}", "func (s Service) Register(engine *gin.RouterGroup) {\n\tfor i := 0; i < len(s.Routes); i++ {\n\t\tengine.Handle(s.Routes[i].Method, s.Prefix+s.Routes[i].Url, s.Routes[i].Handler)\n\t}\n}", "func (s *Server) setupEndpoints(r *chi.Mux) {\n\tr.Route(\"/api/v1\", func(r chi.Router) {\n\t\tr.Route(\"/users\", func(r chi.Router) {\n\n\t\t})\n\t})\n}", "func AttachAll(e *echo.Echo, m ...echo.MiddlewareFunc) {\n\tfor method, registry := range registries {\n\t\tei := echoHandlerInitializers[method]\n\t\tif ei != nil {\n\t\t\tfor path, handler := range registry {\n\t\t\t\tei(e, path, handler, m...)\n\t\t\t}\n\t\t}\n\t}\n}", "func SetupHandlers(r *mux.Router) {\n\t//object operations\n\tr.HandleFunc(\"/v1/file/upload/{allocation}\", common.UserRateLimit(common.ToJSONResponse(WithConnection(UploadHandler))))\n\tr.HandleFunc(\"/v1/file/download/{allocation}\", common.UserRateLimit(common.ToByteStream(WithConnection(DownloadHandler)))).Methods(\"POST\")\n\tr.HandleFunc(\"/v1/file/rename/{allocation}\", common.UserRateLimit(common.ToJSONResponse(WithConnection(RenameHandler))))\n\tr.HandleFunc(\"/v1/file/copy/{allocation}\", common.UserRateLimit(common.ToJSONResponse(WithConnection(CopyHandler))))\n\tr.HandleFunc(\"/v1/file/attributes/{allocation}\", common.UserRateLimit(common.ToJSONResponse(WithConnection(UpdateAttributesHandler))))\n\tr.HandleFunc(\"/v1/dir/{allocation}\", common.UserRateLimit(common.ToJSONResponse(WithConnection(CreateDirHandler)))).Methods(\"POST\")\n\tr.HandleFunc(\"/v1/dir/{allocation}\", common.UserRateLimit(common.ToJSONResponse(WithConnection(CreateDirHandler)))).Methods(\"DELETE\")\n\tr.HandleFunc(\"/v1/dir/rename/{allocation}\", common.UserRateLimit(common.ToJSONResponse(WithConnection(CreateDirHandler)))).Methods(\"POST\")\n\n\tr.HandleFunc(\"/v1/connection/commit/{allocation}\", common.UserRateLimit(common.ToJSONResponse(WithConnection(CommitHandler))))\n\tr.HandleFunc(\"/v1/file/commitmetatxn/{allocation}\", common.UserRateLimit(common.ToJSONResponse(WithConnection(CommitMetaTxnHandler))))\n\tr.HandleFunc(\"/v1/file/collaborator/{allocation}\", common.UserRateLimit(common.ToJSONResponse(WithConnection(CollaboratorHandler))))\n\tr.HandleFunc(\"/v1/file/calculatehash/{allocation}\", common.UserRateLimit(common.ToJSONResponse(WithConnection(CalculateHashHandler))))\n\n\t//object info related apis\n\tr.HandleFunc(\"/allocation\", common.UserRateLimit(common.ToJSONResponse(WithConnection(AllocationHandler))))\n\tr.HandleFunc(\"/v1/file/meta/{allocation}\", common.UserRateLimit(common.ToJSONResponse(WithReadOnlyConnection(FileMetaHandler))))\n\tr.HandleFunc(\"/v1/file/stats/{allocation}\", common.UserRateLimit(common.ToJSONResponse(WithReadOnlyConnection(FileStatsHandler))))\n\tr.HandleFunc(\"/v1/file/list/{allocation}\", common.UserRateLimit(common.ToJSONResponse(WithReadOnlyConnection(ListHandler))))\n\tr.HandleFunc(\"/v1/file/objectpath/{allocation}\", common.UserRateLimit(common.ToJSONResponse(WithReadOnlyConnection(ObjectPathHandler))))\n\tr.HandleFunc(\"/v1/file/referencepath/{allocation}\", common.UserRateLimit(common.ToJSONResponse(WithReadOnlyConnection(ReferencePathHandler))))\n\tr.HandleFunc(\"/v1/file/objecttree/{allocation}\", 
common.UserRateLimit(common.ToJSONResponse(WithReadOnlyConnection(ObjectTreeHandler))))\n\tr.HandleFunc(\"/v1/file/refs/{allocation}\", common.UserRateLimit(common.ToJSONResponse(WithReadOnlyConnection(RefsHandler)))).Methods(\"GET\")\n\t//admin related\n\tr.HandleFunc(\"/_debug\", common.UserRateLimit(common.ToJSONResponse(DumpGoRoutines)))\n\tr.HandleFunc(\"/_config\", common.UserRateLimit(common.ToJSONResponse(GetConfig)))\n\tr.HandleFunc(\"/_stats\", common.UserRateLimit(stats.StatsHandler))\n\tr.HandleFunc(\"/_statsJSON\", common.UserRateLimit(common.ToJSONResponse(stats.StatsJSONHandler)))\n\tr.HandleFunc(\"/_cleanupdisk\", common.UserRateLimit(common.ToJSONResponse(WithReadOnlyConnection(CleanupDiskHandler))))\n\tr.HandleFunc(\"/getstats\", common.UserRateLimit(common.ToJSONResponse(stats.GetStatsHandler)))\n\n\t//marketplace related\n\tr.HandleFunc(\"/v1/marketplace/shareinfo/{allocation}\", common.UserRateLimit(common.ToJSONResponse(WithConnection(MarketPlaceShareInfoHandler))))\n}", "func registerRoutes(r *mux.Router) {\n\t// authed router that enforces authentication\n\tauthed := r.PathPrefix(conf.AuthedPathName).Subrouter()\n\n\t// attach middleware for all routes\n\tauthed.Use(auth)\n\n\t//Get Calendar\n\tauthed.HandleFunc(fmt.Sprintf(\"/c/{%s}/{%s}\", userIDStr, calendarIDStr), getCalendarHandler).Methods(\"GET\")\n\tauthed.HandleFunc(fmt.Sprintf(\"/c/{%s}\", calendarIDStr), getCalendarHandler).Methods(\"GET\")\n\tauthed.HandleFunc(\"/c\", getCalendarHandler).Methods(\"GET\")\n\tauthed.Handle(\"/calendar.xsl\", loadedXSLHandler(loaded.calendar)).Methods(\"GET\")\n\tauthed.Handle(\"/projectView.xsl\", loadedXSLHandler(loaded.project)).Methods(\"GET\")\n\tauthed.Handle(\"/editItem.xsl\", loadedXSLHandler(loaded.editItem)).Methods(\"GET\")\n\n\t//Get all Calendars of User\n\tauthed.HandleFunc(\"/calendars\", getUserCalendarsHandler).Methods(\"GET\")\n\tauthed.Handle(\"/showCalendars.xsl\", loadedXSLHandler(loaded.showCalendars)).Methods(\"GET\")\n\n\t// Modify Calendar\n\tauthed.HandleFunc(\"/c\", methodHandler(postCalendarHandler, putCalendarHandler, nil)).Methods(\"POST\")\n\tauthed.HandleFunc(fmt.Sprintf(\"/c/{%s}/{%s}\", userIDStr, calendarIDStr),\n\t\tmethodHandler(nil, putCalendarHandler, deleteCalendarHandler)).Methods(\"POST\")\n\tauthed.HandleFunc(fmt.Sprintf(\"/c/{%s}\", calendarIDStr),\n\t\tmethodHandler(nil, putCalendarHandler, deleteCalendarHandler)).Methods(\"POST\")\n\n\t// Delete User\n\tauthed.HandleFunc(\"/api/user\", methodHandler(nil, nil, deleteUserHandler)).Methods(\"POST\")\n\n\tauthed.HandleFunc(\"/api/sharing\", sharingHandler).Methods(\"POST\")\n\n\t// attach auto generated endpoint routes\n\tattachEndpoints(authed)\n\n\tauthed.HandleFunc(\"/logout\", logoutHandler).Methods(\"GET\")\n\n\tr.HandleFunc(\"/api/login\", loginHandler).Methods(\"POST\")\n\tr.HandleFunc(\"/api/register\", registerHandler).Methods(\"POST\")\n\n\t// serve static files (index, impressum, login, register ...). 
Note that this has to be registered last.\n\tr.PathPrefix(\"/\").Handler(http.FileServer(http.Dir(conf.FrontendDir)))\n}", "func Register(handler Handler) error {\n\tif (handler.Methods & ^ALL) != 0 {\n\t\treturn fmt.Errorf(\"Invalid handler method[s]: %b\", handler.Methods)\n\t}\n\n\tif len(handler.Path) == 0 {\n\t\treturn errors.New(\"Empty path is not supported\")\n\t}\n\tfor method := GET; method < ALL; method <<= 1 {\n\t\tif (method & handler.Methods) != 0 {\n\t\t\terr := register(registries[method], handler)\n\t\t\tif err != nil {\n\t\t\t\treturn err\n\t\t\t}\n\t\t}\n\t}\n\treturn nil\n}", "func RegisterHandlersWithBaseURL(router EchoRouter, si ServerInterface, baseURL string) {\n\n\twrapper := ServerInterfaceWrapper{\n\t\tHandler: si,\n\t}\n\n\trouter.POST(baseURL+\"/category\", wrapper.CreateCategory)\n\trouter.GET(baseURL+\"/category/:categoryId\", wrapper.GetCategoryCategoryId)\n\n}", "func registerHandlers(s *server) error {\n\terr := s.AddHandler(\"set\", cmdSet)\n\tif err != nil {\n\t\treturn err\n\t}\n\terr = s.AddHandler(\"get\", cmdGet)\n\tif err != nil {\n\t\treturn err\n\t}\n\terr = s.AddHandler(\"delete\", cmdDelete)\n\tif err != nil {\n\t\treturn err\n\t}\n\terr = s.AddHandler(\"stats\", cmdStats)\n\tif err != nil {\n\t\treturn err\n\t}\n\terr = s.AddHandler(\"quit\", cmdQuit)\n\tif err != nil {\n\t\treturn err\n\t}\n\treturn nil\n}", "func SetHandlers(s *http.ServeMux) {\n\tnotificationRequestPool = &sync.Pool{\n\t\tNew: func() interface{} {\n\t\t\treturn new(sendManyRequest)\n\t\t},\n\t}\n\ts.HandleFunc(\"/json\", jsonHandler)\n\ts.HandleFunc(\"/send\", sendHandler)\n\ts.HandleFunc(\"/sendMany\", sendManyHandler)\n\ts.HandleFunc(\"/home\", homeHandler)\n\n}", "func Handlers() *httprouter.Router {\n\n\t// Create a new router\n\tr := apirouter.New()\n\n\t// Based on service mode\n\tif config.Values.ServiceMode == config.ServiceModeAPI {\n\t\t// r.CrossOriginAllowOriginAll = false\n\t\t// r.CrossOriginAllowOrigin = \"*\"\n\n\t\t// This is used for the \"Origin\" to be returned as the origin\n\t\tr.CrossOriginAllowOriginAll = true\n\n\t\t// Create a middleware stack:\n\t\t// s := apirouter.NewStack()\n\n\t\t// Use your middleware:\n\t\t// s.Use(passThrough)\n\n\t\tapi.RegisterRoutes(r)\n\t\tpersons.RegisterRoutes(r)\n\n\t} // else (another service mode?)\n\n\t// Return the router\n\treturn r.HTTPRouter.Router\n}", "func InitHandlers(e *echo.Echo) error {\n\t// Define prefix\n\tp := \"/api/\" + gaia.APIVersion + \"/\"\n\n\t// --- Register handlers at echo instance ---\n\n\t// Users\n\te.POST(p+\"login\", UserLogin)\n\te.GET(p+\"users\", UserGetAll)\n\te.POST(p+\"user/password\", UserChangePassword)\n\te.DELETE(p+\"user/:username\", UserDelete)\n\te.GET(p+\"user/:username/permissions\", UserGetPermissions)\n\te.PUT(p+\"user/:username/permissions\", UserPutPermissions)\n\te.POST(p+\"user\", UserAdd)\n\te.PUT(p+\"user/:username/reset-trigger-token\", UserResetTriggerToken)\n\n\tperms := e.Group(p + \"permission\")\n\tperms.GET(\"\", PermissionGetAll)\n\n\t// Pipelines\n\te.POST(p+\"pipeline\", CreatePipeline)\n\te.POST(p+\"pipeline/gitlsremote\", PipelineGitLSRemote)\n\te.GET(p+\"pipeline/name\", PipelineNameAvailable)\n\te.POST(p+\"pipeline/githook\", GitWebHook)\n\te.GET(p+\"pipeline/created\", CreatePipelineGetAll)\n\te.GET(p+\"pipeline\", PipelineGetAll)\n\te.GET(p+\"pipeline/:pipelineid\", PipelineGet)\n\te.PUT(p+\"pipeline/:pipelineid\", PipelineUpdate)\n\te.DELETE(p+\"pipeline/:pipelineid\", PipelineDelete)\n\te.POST(p+\"pipeline/:pipelineid/start\", 
PipelineStart)\n\te.POST(p+\"pipeline/:pipelineid/:pipelinetoken/trigger\", PipelineTrigger)\n\te.PUT(p+\"pipeline/:pipelineid/reset-trigger-token\", PipelineResetToken)\n\te.GET(p+\"pipeline/latest\", PipelineGetAllWithLatestRun)\n\te.POST(p+\"pipeline/periodicschedules\", PipelineCheckPeriodicSchedules)\n\n\t// Settings\n\te.POST(p+\"settings/poll/on\", SettingsPollOn)\n\te.POST(p+\"settings/poll/off\", SettingsPollOff)\n\te.GET(p+\"settings/poll\", SettingsPollGet)\n\n\t// PipelineRun\n\te.POST(p+\"pipelinerun/:pipelineid/:runid/stop\", PipelineStop)\n\te.GET(p+\"pipelinerun/:pipelineid/:runid\", PipelineRunGet)\n\te.GET(p+\"pipelinerun/:pipelineid\", PipelineGetAllRuns)\n\te.GET(p+\"pipelinerun/:pipelineid/latest\", PipelineGetLatestRun)\n\te.GET(p+\"pipelinerun/:pipelineid/:runid/log\", GetJobLogs)\n\n\t// Secrets\n\te.GET(p+\"secrets\", ListSecrets)\n\te.DELETE(p+\"secret/:key\", RemoveSecret)\n\te.POST(p+\"secret\", SetSecret)\n\te.PUT(p+\"secret/update\", SetSecret)\n\n\t// Worker\n\te.GET(p+\"worker/secret\", GetWorkerRegisterSecret)\n\te.POST(p+\"worker/register\", RegisterWorker)\n\te.GET(p+\"worker/status\", GetWorkerStatusOverview)\n\te.GET(p+\"worker\", GetWorker)\n\te.DELETE(p+\"worker/:workerid\", DeregisterWorker)\n\te.POST(p+\"worker/secret\", ResetWorkerRegisterSecret)\n\n\t// Middleware\n\te.Use(middleware.Recover())\n\t//e.Use(middleware.Logger())\n\te.Use(middleware.BodyLimit(\"32M\"))\n\te.Use(AuthMiddleware(&AuthConfig{\n\t\tRoleCategories: rolehelper.DefaultUserRoles,\n\t}))\n\n\t// Extra options\n\te.HideBanner = true\n\n\t// Are we in production mode?\n\tif !gaia.Cfg.DevMode {\n\t\tstaticAssets, err := rice.FindBox(\"../frontend/dist\")\n\t\tif err != nil {\n\t\t\tgaia.Cfg.Logger.Error(\"Cannot find assets in production mode.\")\n\t\t\treturn err\n\t\t}\n\n\t\t// Register handler for static assets\n\t\tassetHandler := http.FileServer(staticAssets.HTTPBox())\n\t\te.GET(\"/\", echo.WrapHandler(assetHandler))\n\t\te.GET(\"/favicon.ico\", echo.WrapHandler(assetHandler))\n\t\te.GET(\"/css/*\", echo.WrapHandler(http.StripPrefix(\"/\", assetHandler)))\n\t\te.GET(\"/js/*\", echo.WrapHandler(http.StripPrefix(\"/\", assetHandler)))\n\t\te.GET(\"/fonts/*\", echo.WrapHandler(http.StripPrefix(\"/\", assetHandler)))\n\t\te.GET(\"/img/*\", echo.WrapHandler(http.StripPrefix(\"/\", assetHandler)))\n\t}\n\n\t// Setup ignored vault keys which cannot be modified directly via the Vault API\n\tignoredVaultKeys = make([]string, 0, 1)\n\tignoredVaultKeys = append(ignoredVaultKeys, gaia.WorkerRegisterKey)\n\n\treturn nil\n}", "func RegisterHandlers(r *routing.Router, version string) {\n\tr.To(\"GET,HEAD\", \"/metrics\", routing.HTTPHandler(promhttp.Handler()))\n}", "func (o *Operation) GetRESTHandlers() []Handler {\n\treturn []Handler{\n\t\tsupport.NewHTTPHandler(keystoresEndpoint, keystoresEndpoint, http.MethodPost, o.createKeystoreHandler),\n\t\tsupport.NewHTTPHandler(keysEndpoint, keysEndpoint, http.MethodPost, o.createKeyHandler),\n\t\tsupport.NewHTTPHandler(capabilityEndpoint, capabilityEndpoint, http.MethodPost, o.updateCapabilityHandler),\n\t\tsupport.NewHTTPHandler(exportEndpoint, exportEndpoint, http.MethodGet, o.exportKeyHandler),\n\t\tsupport.NewHTTPHandler(importEndpoint, importEndpoint, http.MethodPost, o.importKeyHandler),\n\t\tsupport.NewHTTPHandler(signEndpoint, signEndpoint, http.MethodPost, o.signHandler),\n\t\tsupport.NewHTTPHandler(verifyEndpoint, verifyEndpoint, http.MethodPost, o.verifyHandler),\n\t\tsupport.NewHTTPHandler(encryptEndpoint, encryptEndpoint, 
http.MethodPost, o.encryptHandler),\n\t\tsupport.NewHTTPHandler(decryptEndpoint, decryptEndpoint, http.MethodPost, o.decryptHandler),\n\t\tsupport.NewHTTPHandler(computeMACEndpoint, computeMACEndpoint, http.MethodPost, o.computeMACHandler),\n\t\tsupport.NewHTTPHandler(verifyMACEndpoint, verifyMACEndpoint, http.MethodPost, o.verifyMACHandler),\n\t\tsupport.NewHTTPHandler(wrapEndpoint, wrapEndpoint, http.MethodPost, o.wrapHandler),\n\t\tsupport.NewHTTPHandler(unwrapEndpoint, unwrapEndpoint, http.MethodPost, o.unwrapHandler),\n\t\t// CryptoBox operations\n\t\tsupport.NewHTTPHandler(easyEndpoint, easyEndpoint, http.MethodPost, o.easyHandler),\n\t\tsupport.NewHTTPHandler(easyOpenEndpoint, easyOpenEndpoint, http.MethodPost, o.easyOpenHandler),\n\t\tsupport.NewHTTPHandler(sealOpenEndpoint, sealOpenEndpoint, http.MethodPost, o.sealOpenHandler),\n\t\t// BBS+ operations\n\t\tsupport.NewHTTPHandler(signMultiEndpoint, signMultiEndpoint, http.MethodPost, o.signMultiHandler),\n\t\tsupport.NewHTTPHandler(verifyMultiEndpoint, verifyMultiEndpoint, http.MethodPost, o.verifyMultiHandler),\n\t\tsupport.NewHTTPHandler(deriveProofEndpoint, deriveProofEndpoint, http.MethodPost, o.deriveProofHandler),\n\t\tsupport.NewHTTPHandler(verifyProofEndpoint, verifyProofEndpoint, http.MethodPost, o.verifyProofHandler),\n\t}\n}", "func AddHandlers(r *mux.Router) {\n\t// GET hyperledger fabric status.\n\tr.HandleFunc(\"/fabric/status\", StatusHandler).Methods(http.MethodGet, http.MethodOptions)\n\n\t// POST hyperledger fabric channel.\n\tr.HandleFunc(\"/fabric/channel\", ChannelHandler).Methods(http.MethodPost, http.MethodOptions)\n\n}", "func regHandlers(version string) error {\n\tihandler, err := newIndexHandler(version)\n\tif err != nil {\n\t\treturn err\n\t}\n\thttp.Handle(\"/\", ihandler)\n\thttp.Handle(\"/by-cost\", citiesHandler{\"cost\"})\n\thttp.Handle(\"/by-population\", citiesHandler{\"population\"})\n\thttp.Handle(\"/by-climate\", citiesHandler{\"climate\"})\n\thttp.HandleFunc(\"/city\", addCityHandler)\n\thttp.HandleFunc(\"/talk\", talkHandler)\n\thttp.HandleFunc(\"/message\", messageHandler)\n\treturn nil\n}", "func (endpoints *endpointDetails) Install(mux login.Mux, paths ...string) {\n\tfor _, prefix := range paths {\n\t\tmux.HandleFunc(path.Join(prefix, RequestTokenEndpoint), endpoints.requestToken)\n\t\tmux.HandleFunc(path.Join(prefix, DisplayTokenEndpoint), endpoints.displayToken)\n\t\tmux.HandleFunc(path.Join(prefix, ImplicitTokenEndpoint), endpoints.implicitToken)\n\t}\n}", "func (a *Api) SetHandlers(prefix string, rtr *mux.Router) {\n\t//Health checks and metrics\n\trtr.Handle(\"/metrics\", promhttp.Handler())\n\trtr.HandleFunc(\"/status\", a.GetStatus).Methods(\"GET\")\n\n\t//User management methods\n\trtr.HandleFunc(\"/users\", a.GetUsers).Methods(\"GET\")\n\n\trtr.Handle(\"/user\", varsHandler(a.GetUserInfo)).Methods(\"GET\")\n\trtr.Handle(\"/user/{userid}\", varsHandler(a.GetUserInfo)).Methods(\"GET\")\n\trtr.HandleFunc(\"/user\", a.CreateUser).Methods(\"POST\")\n\trtr.Handle(\"/user\", varsHandler(a.UpdateUser)).Methods(\"PUT\")\n\trtr.Handle(\"/user/{userid}\", varsHandler(a.UpdateUser)).Methods(\"PUT\")\n\trtr.Handle(\"/user/{userid}\", varsHandler(a.DeleteUser)).Methods(\"DELETE\")\n\n\t//Login and token management methods\n\trtr.HandleFunc(\"/login\", a.Login).Methods(\"POST\")\n\trtr.HandleFunc(\"/logout\", a.Logout).Methods(\"POST\")\n\trtr.HandleFunc(\"/login\", a.RefreshSession).Methods(\"GET\")\n\trtr.HandleFunc(\"/serverlogin\", 
a.ServerLogin).Methods(\"POST\")\n\trtr.Handle(\"/token/{token}\", varsHandler(a.ServerCheckToken)).Methods(\"GET\")\n\n}", "func (s *Server) RegisterHTTPHandlers() {\n\ts.router.HandleFunc(\"/api/v1/chaincode/query\", s.httpHandleCCQuery)\n\ts.router.HandleFunc(\"/api/v1/chaincode/invoke\", s.httpHandleCCInvoke)\n\ts.router.HandleFunc(\"/api/v1/config\", s.httpHandleGetConfig)\n}", "func RouteHandlers() {\n\trouter := mux.NewRouter()\n\n\trouter.HandleFunc(\"/registry\", middlew.CheckDB(routers.Registry)).Methods(\"POST\")\n\n\tPORT := os.Getenv(\"PORT\")\n\n\tif PORT == \"\" {\n\t\tPORT = \"8080\"\n\t}\n\n\thandlersCors := cors.AllowAll().Handler(router)\n\n\tlog.Fatal(http.ListenAndServe(\":\" + PORT, handlersCors))\n\n}", "func (s *Witness) RegisterHandlers(r *mux.Router) {\n\tr.HandleFunc(fmt.Sprintf(\"/%s\", api.WitnessGetCheckpoint), s.getCheckpoint).Methods(\"GET\")\n}", "func registerEndpoint(pattern string, methods []string, fn unboundEndpoint) {\n\tif endpoints == nil {\n\t\tendpoints = make(map[string]unboundEndpoint)\n\t}\n\tif endpoints[pattern] != nil || allowedMethods[pattern] != nil {\n\t\tpanic(fmt.Errorf(\"Pattern %q is already registered\", pattern))\n\t}\n\n\tendpoints[pattern] = fn\n\tallowedMethods[pattern] = methods\n}", "func (e *Endpoint) RegisterRoutes(r gin.IRoutes) {\n\tr.POST(\"/github\", e.rateLimiter.Handler(), e.CreateGithubIssue)\n\tr.POST(\"/intercom\", e.rateLimiter.Handler(), e.CreateIntercomIssue)\n}", "func (s *GaiaHandler) InitHandlers(e *echo.Echo) error {\n\t// Define prefix\n\tp := \"/api/\" + gaia.APIVersion + \"/\"\n\n\t// --- Register handlers at echo instance ---\n\n\t// API router group.\n\tapiGrp := e.Group(p)\n\n\t// API router group with auth middleware.\n\tapiAuthGrp := e.Group(p, AuthMiddleware(&AuthConfig{\n\t\tRoleCategories: rolehelper.DefaultUserRoles,\n\t}))\n\n\t// Endpoints for Gaia primary instance\n\tif gaia.Cfg.Mode == gaia.ModeServer {\n\t\t// Users\n\t\tapiGrp.POST(\"login\", UserLogin)\n\n\t\tapiAuthGrp.GET(\"users\", UserGetAll)\n\t\tapiAuthGrp.POST(\"user/password\", UserChangePassword)\n\t\tapiAuthGrp.DELETE(\"user/:username\", UserDelete)\n\t\tapiAuthGrp.GET(\"user/:username/permissions\", UserGetPermissions)\n\t\tapiAuthGrp.PUT(\"user/:username/permissions\", UserPutPermissions)\n\t\tapiAuthGrp.POST(\"user\", UserAdd)\n\t\tapiAuthGrp.PUT(\"user/:username/reset-trigger-token\", UserResetTriggerToken)\n\n\t\tapiAuthGrp.GET(\"permission\", PermissionGetAll)\n\n\t\t// Pipelines\n\t\t// Create pipeline provider\n\t\tpipelineProvider := pipelines.NewPipelineProvider(pipelines.Dependencies{\n\t\t\tScheduler: s.deps.Scheduler,\n\t\t\tPipelineService: s.deps.PipelineService,\n\t\t})\n\t\tapiAuthGrp.POST(\"pipeline\", pipelineProvider.CreatePipeline)\n\t\tapiAuthGrp.POST(\"pipeline/gitlsremote\", pipelineProvider.PipelineGitLSRemote)\n\t\tapiAuthGrp.GET(\"pipeline/name\", pipelineProvider.PipelineNameAvailable)\n\t\tapiAuthGrp.GET(\"pipeline/created\", pipelineProvider.CreatePipelineGetAll)\n\t\tapiAuthGrp.GET(\"pipeline\", pipelineProvider.PipelineGetAll)\n\t\tapiAuthGrp.GET(\"pipeline/:pipelineid\", pipelineProvider.PipelineGet)\n\t\tapiAuthGrp.PUT(\"pipeline/:pipelineid\", pipelineProvider.PipelineUpdate)\n\t\tapiAuthGrp.DELETE(\"pipeline/:pipelineid\", pipelineProvider.PipelineDelete)\n\t\tapiAuthGrp.POST(\"pipeline/:pipelineid/start\", pipelineProvider.PipelineStart)\n\t\tapiAuthGrp.PUT(\"pipeline/:pipelineid/reset-trigger-token\", pipelineProvider.PipelineResetToken)\n\t\tapiAuthGrp.GET(\"pipeline/latest\", 
pipelineProvider.PipelineGetAllWithLatestRun)\n\t\tapiAuthGrp.POST(\"pipeline/periodicschedules\", pipelineProvider.PipelineCheckPeriodicSchedules)\n\t\tapiGrp.POST(\"pipeline/githook\", GitWebHook)\n\t\tapiGrp.POST(\"pipeline/:pipelineid/:pipelinetoken/trigger\", pipelineProvider.PipelineTrigger)\n\n\t\t// Settings\n\t\tapiAuthGrp.POST(\"settings/poll/on\", SettingsPollOn)\n\t\tapiAuthGrp.POST(\"settings/poll/off\", SettingsPollOff)\n\t\tapiAuthGrp.GET(\"settings/poll\", SettingsPollGet)\n\n\t\t// PipelineRun\n\t\tapiAuthGrp.POST(\"pipelinerun/:pipelineid/:runid/stop\", pipelineProvider.PipelineStop)\n\t\tapiAuthGrp.GET(\"pipelinerun/:pipelineid/:runid\", pipelineProvider.PipelineRunGet)\n\t\tapiAuthGrp.GET(\"pipelinerun/:pipelineid\", pipelineProvider.PipelineGetAllRuns)\n\t\tapiAuthGrp.GET(\"pipelinerun/:pipelineid/latest\", pipelineProvider.PipelineGetLatestRun)\n\t\tapiAuthGrp.GET(\"pipelinerun/:pipelineid/:runid/log\", pipelineProvider.GetJobLogs)\n\n\t\t// Secrets\n\t\tapiAuthGrp.GET(\"secrets\", ListSecrets)\n\t\tapiAuthGrp.DELETE(\"secret/:key\", RemoveSecret)\n\t\tapiAuthGrp.POST(\"secret\", SetSecret)\n\t\tapiAuthGrp.PUT(\"secret/update\", SetSecret)\n\t}\n\n\t// Worker\n\t// initialize the worker provider\n\tworkerProvider := workers.NewWorkerProvider(workers.Dependencies{\n\t\tScheduler: s.deps.Scheduler,\n\t\tCertificate: s.deps.Certificate,\n\t})\n\tapiAuthGrp.GET(\"worker/secret\", workerProvider.GetWorkerRegisterSecret)\n\tapiAuthGrp.GET(\"worker/status\", workerProvider.GetWorkerStatusOverview)\n\tapiAuthGrp.GET(\"worker\", workerProvider.GetWorker)\n\tapiAuthGrp.DELETE(\"worker/:workerid\", workerProvider.DeregisterWorker)\n\tapiAuthGrp.POST(\"worker/secret\", workerProvider.ResetWorkerRegisterSecret)\n\tapiGrp.POST(\"worker/register\", workerProvider.RegisterWorker)\n\n\t// Middleware\n\te.Use(middleware.Recover())\n\t// e.Use(middleware.Logger())\n\te.Use(middleware.BodyLimit(\"32M\"))\n\n\t// Extra options\n\te.HideBanner = true\n\n\t// Are we in production mode?\n\tif !gaia.Cfg.DevMode {\n\t\tstaticAssets, err := rice.FindBox(\"../frontend/dist\")\n\t\tif err != nil {\n\t\t\tgaia.Cfg.Logger.Error(\"Cannot find assets in production mode.\")\n\t\t\treturn err\n\t\t}\n\n\t\t// Register handler for static assets\n\t\tassetHandler := http.FileServer(staticAssets.HTTPBox())\n\t\te.GET(\"/\", echo.WrapHandler(assetHandler))\n\t\te.GET(\"/favicon.ico\", echo.WrapHandler(assetHandler))\n\t\te.GET(\"/css/*\", echo.WrapHandler(http.StripPrefix(\"/\", assetHandler)))\n\t\te.GET(\"/js/*\", echo.WrapHandler(http.StripPrefix(\"/\", assetHandler)))\n\t\te.GET(\"/fonts/*\", echo.WrapHandler(http.StripPrefix(\"/\", assetHandler)))\n\t\te.GET(\"/img/*\", echo.WrapHandler(http.StripPrefix(\"/\", assetHandler)))\n\t}\n\n\t// Setup ignored vault keys which cannot be modified directly via the Vault API\n\tignoredVaultKeys = make([]string, 0, 1)\n\tignoredVaultKeys = append(ignoredVaultKeys, gaia.WorkerRegisterKey)\n\n\treturn nil\n}", "func registerHandlers() {\n\thttp.HandleFunc(\"/\", handleIndex)\n\thttp.HandleFunc(\"/register\", handleRegister)\n\thttp.HandleFunc(\"/register/ajax/register\", handleRegisterAjaxRegister)\n\thttp.HandleFunc(\"/register/ajax/vericode\", handleRegisterAjaxVericode)\n\thttp.HandleFunc(\"/login\", handleLogin)\n\thttp.HandleFunc(\"/login/ajax\", handleLoginAjax)\n\thttp.HandleFunc(\"/blog/\", handleBlog)\n\thttp.HandleFunc(\"/about/\", handleAbout)\n\thttp.HandleFunc(\"/app/\", handleApp)\n\thttp.HandleFunc(\"/app/chatroom/\", 
handleAppChatroom)\n\thttp.HandleFunc(\"/app/chatroom/ws\", handleAppChatroomWs)\n\thttp.HandleFunc(\"/app/translator/\", handleAppTranslator)\n\thttp.HandleFunc(\"/app/translator/ajax\", handleAppTranslatorAjax)\n\thttp.HandleFunc(\"/app/todolist/\", handleAppTodoList)\n\thttp.HandleFunc(\"/app/todolist/ajax/add\", handleAppTodoListAjaxAdd)\n\thttp.HandleFunc(\"/app/todolist/ajax/delete\", handleAppTodoListAjaxDelete)\n\thttp.HandleFunc(\"/app/todolist/ajax/moveup\", handleAppTodoListAjaxMoveUp)\n\thttp.HandleFunc(\"/app/todolist/ajax/movedown\", handleAppTodoListAjaxMoveDown)\n\thttp.Handle(\"/static/css/\", http.StripPrefix(\"/static/css\", http.FileServer(http.Dir(filepath.Join(rootPath, \"static/css\")))))\n\thttp.Handle(\"/static/js/\", http.StripPrefix(\"/static/js\", http.FileServer(http.Dir(filepath.Join(rootPath, \"static/js\")))))\n}", "func (a *API) SetHandlers(prefix string, rtr *mux.Router) {\n\n\ta.setHandlers(prefix+\"/v1\", rtr)\n\n\trtr.HandleFunc(\"/export/{userID}\", a.middleware(a.exportController.ExportData, true, \"userID\")).Methods(http.MethodGet)\n\n\t// v0 routes:\n\trtr.HandleFunc(\"/status\", a.getStatus).Methods(http.MethodGet)\n}", "func initApiHandlers(router *itineris.ApiRouter) {\n\trouter.SetHandler(\"pgsqlListDepartments\", apiListDepartments)\n\trouter.SetHandler(\"pgsqlCreateDepartment\", apiCreateDepartment)\n\trouter.SetHandler(\"pgsqlGetDepartment\", apiGetDepartment)\n\trouter.SetHandler(\"pgsqlUpdateDepartment\", apiUpdateDepartment)\n\trouter.SetHandler(\"pgsqlDeleteDepartment\", apiDeleteDepartment)\n}", "func RegisterHandlersWithBaseURL(router EchoRouter, si ServerInterface, baseURL string) {\n\n\twrapper := ServerInterfaceWrapper{\n\t\tHandler: si,\n\t}\n\n\trouter.POST(baseURL+\"/internal/dispatch\", wrapper.ApiInternalRunsCreate)\n\n}", "func (hk *Kernel) RegisterHandlerToEcho(e *echo.Echo) {\n\tfor path, stat := range hk.stat {\n\t\tlog.Println(\"Register Handler to \" + path)\n\t\te.GET(path, stat.Handler)\n\t}\n}", "func (proc *schedulerProcess) registerEventHandlers() {\n\thttp.Handle(makeProcEventPath(proc, FRAMEWORK_REGISTERED_EVENT), proc)\n\thttp.Handle(makeProcEventPath(proc, FRAMEWORK_REREGISTERED_EVENT), proc)\n\thttp.Handle(makeProcEventPath(proc, RESOURCE_OFFERS_EVENT), proc)\n}", "func (service *MetadataService) Endpoints() map[string]map[string]http.HandlerFunc {\n\thandlers := map[string]map[string]http.HandlerFunc{}\n\n\tfor index, value := range service.config.MetadataPrefixes {\n\t\tserver.Log.Info(\"adding Metadata prefix (\", index, \") \", value)\n\t\thandlers[value+\"/\"] = map[string]http.HandlerFunc{\n\t\t\t\"GET\": plainText(service.GetMetadataIndex),\n\t\t}\n\t\thandlers[value+\"/ami-id\"] = map[string]http.HandlerFunc{\n\t\t\t\"GET\": plainText(service.GetAmiId),\n\t\t}\n\t\thandlers[value+\"/iam\"] = map[string]http.HandlerFunc{\n\t\t\t\"GET\": plainText(service.GetMetadataIAM),\n\t\t}\n\t\thandlers[value+\"/ami-launch-index\"] = map[string]http.HandlerFunc{\n\t\t\t\"GET\": plainText(service.GetAmiLaunchIndex),\n\t\t}\n\t\thandlers[value+\"/ami-manifest-path\"] = map[string]http.HandlerFunc{\n\t\t\t\"GET\": plainText(service.GetAmiManifestPath),\n\t\t}\n\t\thandlers[value+\"/placement/availability-zone\"] = map[string]http.HandlerFunc{\n\t\t\t\"GET\": plainText(service.GetAvailabilityZone),\n\t\t}\n\t\thandlers[value+\"/hostname\"] = map[string]http.HandlerFunc{\n\t\t\t\"GET\": plainText(service.GetHostName),\n\t\t}\n\t\thandlers[value+\"/public-hostname\"] = map[string]http.HandlerFunc{\n\t\t\t\"GET\": 
plainText(service.GetPublicHostName),\n\t\t}\n\t\thandlers[value+\"/public-ipv4\"] = map[string]http.HandlerFunc{\n\t\t\t\"GET\": plainText(service.GetPublicIpv4),\n\t\t}\n\t\thandlers[value+\"/instance-action\"] = map[string]http.HandlerFunc{\n\t\t\t\"GET\": plainText(service.GetInstanceAction),\n\t\t}\n\t\thandlers[value+\"/instance-id\"] = map[string]http.HandlerFunc{\n\t\t\t\"GET\": plainText(service.GetInstanceId),\n\t\t}\n\t\thandlers[value+\"/instance-type\"] = map[string]http.HandlerFunc{\n\t\t\t\"GET\": plainText(service.GetInstanceType),\n\t\t}\n\t\thandlers[value+\"/iam/\"] = map[string]http.HandlerFunc{\n\t\t\t\"GET\": plainText(service.GetIAM),\n\t\t}\n\t\thandlers[value+\"/iam/security-credentials\"] = map[string]http.HandlerFunc{\n\t\t\t\"GET\": movedPermanently(value + \"/iam/security-credentials/\"),\n\t\t}\n\t\thandlers[value+\"/iam/security-credentials/\"] = map[string]http.HandlerFunc{\n\t\t\t\"GET\": plainText(service.GetSecurityCredentials),\n\t\t}\n\t\thandlers[value+\"/iam/security-credentials/{username}\"] = map[string]http.HandlerFunc{\n\t\t\t\"GET\": service.GetSecurityCredentialDetails,\n\t\t}\n\t\thandlers[value+\"/local-hostname\"] = map[string]http.HandlerFunc{\n\t\t\t\"GET\": plainText(service.GetLocalHostName),\n\t\t}\n\t\thandlers[value+\"/local-ipv4\"] = map[string]http.HandlerFunc{\n\t\t\t\"GET\": plainText(service.GetLocalIpv4),\n\t\t}\n\t\thandlers[value+\"/mac\"] = map[string]http.HandlerFunc{\n\t\t\t\"GET\": plainText(service.GetMac),\n\t\t}\n\t\thandlers[value+\"/profile\"] = map[string]http.HandlerFunc{\n\t\t\t\"GET\": plainText(service.GetProfile),\n\t\t}\n\t\thandlers[value+\"/reservation-id\"] = map[string]http.HandlerFunc{\n\t\t\t\"GET\": plainText(service.GetReservationId),\n\t\t}\n\t\thandlers[value+\"/security-groups\"] = map[string]http.HandlerFunc{\n\t\t\t\"GET\": plainText(service.GetSecurityGroups),\n\t\t}\n\t\thandlers[value+\"/network/interfaces/macs/{mac}/subnet-id\"] = map[string]http.HandlerFunc{\n\t\t\t\"GET\": plainText(service.GetSubnetId),\n\t\t}\n\t\thandlers[value+\"/network/interfaces/macs/{mac}/vpc-id\"] = map[string]http.HandlerFunc{\n\t\t\t\"GET\": plainText(service.GetVpcId),\n\t\t}\n\t}\n\n\tfor index, value := range service.config.UserdataPrefixes {\n\t\tserver.Log.Info(\"adding Userdata prefix (\", index, \") \", value)\n\n\t\thandlers[value+\"/\"] = map[string]http.HandlerFunc{\n\t\t\t\"GET\": plainText(service.GetUserData),\n\t\t}\n\t}\n\thandlers[\"/latest/dynamic/\"] = map[string]http.HandlerFunc{\n\t\t\"GET\": service.GetDynamicIndex,\n\t}\n\thandlers[\"/latest/dynamic/instance-identity/\"] = map[string]http.HandlerFunc{\n\t\t\"GET\": service.GetDynamicInstanceIdentityIndex,\n\t}\n\thandlers[\"/latest/dynamic/instance-identity/document\"] = map[string]http.HandlerFunc{\n\t\t\"GET\": service.GetDynamicDocument,\n\t}\n\thandlers[\"/latest/api/token\"] = map[string]http.HandlerFunc{\n\t\t\"PUT\": service.GetToken,\n\t}\n\thandlers[\"/\"] = map[string]http.HandlerFunc{\n\t\t\"GET\": service.GetIndex,\n\t}\n\treturn handlers\n}", "func InitHandlers() {\n\thttp.HandleFunc(routes.DedupURI, loghttp.Adapter(dedupHTTP))\n}", "func (host *Host) AddEndpoint(method string, path string, handler HTTPHandler, middlewares ...Middleware) (err error) {\n\t{\n\t\thost.initCheck()\n\t\tpath = host.basepath + solveBasePath(path)\n\t\tdefer func() {\n\t\t\tif err != nil {\n\t\t\t\thost.errList = append(host.errList, err)\n\t\t\t}\n\t\t}()\n\t}\n\tif _, existed := host.handlers[method]; !existed {\n\t\thost.handlers[method] = 
&endpoint{}\n\t}\n\tif len(host.mstack) > 0 {\n\t\tmiddlewares = append(host.mstack, middlewares...)\n\t}\n\terr = host.handlers[method].Add(path, pipeline(func(context *Context, _ ...string) {\n\t\thandler(context)\n\t}, middlewares...))\n\tif !host.conf.DisableAutoReport {\n\t\tos.Stdout.WriteString(fmt.Sprintf(\"[%4s]\\t%s\\r\\n\", method, path))\n\t}\n\treturn\n}", "func RegisterHandlers(r chi.Router, db *sqlx.DB, logger log.Logger, validate *validator.Validate) {\n\tr.Mount(\"/users\", RegisterHTTPHandlers(NewUserHTTP(db, logger, validate)))\n}", "func (r registerer) registerHandlers(ctx context.Context, extra map[string]interface{}, handler http.Handler) (http.Handler, error) {\n\tname, ok := extra[\"name\"].([]interface{})\n\n\tif !ok {\n\t\treturn nil, errors.New(\"wrong config\")\n\t}\n\n\tif name[0] != string(r) {\n\t\treturn nil, fmt.Errorf(\"unknown register %s\", name)\n\t}\n\n\t// return the actual handler wrapping with custom logic\n\treturn http.HandlerFunc(func(w http.ResponseWriter, req *http.Request) {\n\t\tpublic := checkPublicRoutes(req)\n\n\t\tif public {\n\t\t\thandler.ServeHTTP(w, req)\n\t\t\treturn\n\t\t}\n\n\t\tcontext, authErr := GetAuthorizeContext(req)\n\n\t\tif authErr != nil {\n\t\t\tfmt.Println(\"Auth error:\", authErr)\n\t\t\tSendError(w, \"Invalid User Token\", http.StatusForbidden)\n\t\t\treturn\n\t\t}\n\n\t\tif context.Status != 200 {\n\t\t\tSendError(w, context.Error.Message, http.StatusForbidden)\n\t\t\treturn\n\t\t}\n\n\t\t//Set Headers for upstream services\n\t\treq.Header.Set(\"X-Tenant-Id\", context.Data.Auth.TenantId)\n\n\t\tif len(context.Data.Auth.UserId) > 0 {\n\t\t\treq.Header.Set(\"X-User-Id\", context.Data.Auth.UserId)\n\t\t}\n\n\t\tif len(context.Data.Auth.Type) > 0 {\n\t\t\treq.Header.Set(\"X-Auth-Type\", context.Data.Auth.Type)\n\t\t}\n\n\t\tif len(context.Data.Auth.ReferenceId) > 0 {\n\t\t\treq.Header.Set(\"X-Reference-Id\", context.Data.Auth.ReferenceId)\n\t\t}\n\n\t\thandler.ServeHTTP(w, req)\n\n\t\treturn\n\t}), nil\n}", "func RegisterRoutes(router *apirouter.Router) {\n\n\t// Load the service dependencies\n\tloadService()\n\n\t// Set the main index page (navigating to slash)\n\trouter.HTTPRouter.GET(\"/\", router.Request(index))\n\trouter.HTTPRouter.OPTIONS(\"/\", router.SetCrossOriginHeaders)\n\n\t// Set the health request (used for load balancers)\n\trouter.HTTPRouter.GET(\"/\"+config.HealthRequestPath, router.Request(health))\n\trouter.HTTPRouter.OPTIONS(\"/\"+config.HealthRequestPath, router.SetCrossOriginHeaders)\n\trouter.HTTPRouter.HEAD(\"/\"+config.HealthRequestPath, router.SetCrossOriginHeaders)\n\n\t// Set the 404 handler (any request not detected)\n\trouter.HTTPRouter.NotFound = http.HandlerFunc(notFound)\n\n\t// Set the method not allowed\n\trouter.HTTPRouter.MethodNotAllowed = http.HandlerFunc(notAllowed)\n}", "func RegisterHandlersWithBaseURL(router EchoRouter, si ServerInterface, baseURL string) {\n\n\twrapper := ServerInterfaceWrapper{\n\t\tHandler: si,\n\t}\n\n\trouter.POST(baseURL+\"/user/login\", wrapper.LoginUser)\n\trouter.POST(baseURL+\"/user/register\", wrapper.RegisterUser)\n\n}", "func AddHandlers(\n\tservices *Services,\n\tmalformedHandler func(w http.ResponseWriter, r *http.Request)) {\n\thttp.HandleFunc(\"/swift/register\", HandlerRegister(services))\n\thttp.HandleFunc(\"/swift/api/v1/create\", HandlerCreate(services))\n\thttp.HandleFunc(\"/swift/api/v1/encrypt\", HandlerEncrypt(services))\n\thttp.HandleFunc(\"/swift/api/v1/decrypt\", 
HandlerDecrypt(services))\n\thttp.HandleFunc(\"/swift/api/v1/decode-as-json\", HandlerDecodeAsJSON(services))\n\thttp.HandleFunc(\"/\", HandlerStore(services, malformedHandler))\n}", "func RegisterRoutes(e *echo.Echo) {\n\tfor _, route := range router.List() {\n\t\tutil.CallFuncByName(e, route.Method, route.Path, route.Controller)\n\t}\n}", "func registerRoutes(router *httprouter.Router, ctrl *webhook.Controller) {\n\trouter.POST(\"/auto-deploy/docker-hub\", ctrl.AutoDeployDockerHub)\n}", "func registerRoutes(jwtMiddleware *jwtmiddleware.JWTMiddleware) *mux.Router {\n\tr := mux.NewRouter()\n\n\tr.Handle(\"/healthcheck\", http.HandlerFunc(healthCheck)).Methods(\"GET\")\n\n\tr.Handle(\"/message\", http.HandlerFunc(message)).Methods(\"POST\")\n\tr.Handle(\"/message/{id}\", http.HandlerFunc(messageDelete)).Methods(\"DELETE\")\n\tr.Handle(\"/publish\", http.HandlerFunc(publish)).Methods(\"POST\")\n\n\tmsgRouter := mux.NewRouter().PathPrefix(\"/message\").Subrouter()\n\tpubRouter := mux.NewRouter().PathPrefix(\"/publish\").Subrouter()\n\n\tr.PathPrefix(\"/message\").Handler(negroni.New(\n\t\tnegroni.HandlerFunc(jwtMiddleware.HandlerWithNext),\n\t\tnegroni.Wrap(msgRouter),\n\t))\n\n\tr.PathPrefix(\"/publish\").Handler(negroni.New(\n\t\tnegroni.HandlerFunc(jwtMiddleware.HandlerWithNext),\n\t\tnegroni.Wrap(pubRouter),\n\t))\n\n\t// GET - handles upgrading http/https connections to ws/wss.\n\t// the JWT middleware is expecting an access_token\n\t// query parameter within the request\n\tr.Handle(\"/ws\", negroni.New(\n\t\tnegroni.HandlerFunc(jwtMiddleware.HandlerWithNext),\n\t\tnegroni.HandlerFunc(AddUserID),\n\t\tnegroni.Wrap(broker),\n\t))\n\n\treturn r\n}", "func RegisterHttpsServicesAndStartListener() {\n\taph := aphService{}\n\n\t// HelloWorldHandler := httptransport.NewServer(\n\t// \tmakeHelloWorldEndpoint(aph),\n\t// \tdecodeHelloWorldRequest,\n\t// \tencodeResponse,\n\t// )\n\t// HelloDaerahHandler := httptransport.NewServer(\n\t// \tmakeHelloDaerahEndpoint(aph),\n\t// \tdecodeHelloDaerahRequest,\n\t// \tencodeResponse,\n\t// )\n\tGetItenaryHandler := httptransport.NewServer(\n\t\tmakeGetItenaryEndpoint(aph),\n\t\tdecodeGetItenaryRequest,\n\t\tencodeResponse,\n\t)\n\t// http.Handle(\"/HelloWorld\", HelloWorldHandler)\n\t// http.Handle(\"/HelloDaerah\", HelloDaerahHandler)\n\thttp.Handle(\"/GetItenary\", GetItenaryHandler)\n}", "func (g *Glutton) registerHandlers() {\n\n\tfor _, rule := range g.rules {\n\n\t\tif rule.Type == \"conn_handler\" && rule.Target != \"\" {\n\n\t\t\tvar handler string\n\n\t\t\tswitch rule.Name {\n\n\t\t\tcase \"proxy_tcp\":\n\t\t\t\thandler = rule.Name\n\t\t\t\tg.protocolHandlers[rule.Target] = g.protocolHandlers[handler]\n\t\t\t\tdelete(g.protocolHandlers, handler)\n\t\t\t\thandler = rule.Target\n\t\t\t\tbreak\n\n\t\t\tcase \"proxy_ssh\":\n\t\t\t\thandler = rule.Name\n\t\t\t\terr := g.NewSSHProxy(rule.Target)\n\t\t\t\tif err != nil {\n\t\t\t\t\tg.logger.Error(fmt.Sprintf(\"[ssh.prxy] failed to initialize SSH proxy\"))\n\t\t\t\t\tcontinue\n\t\t\t\t}\n\t\t\t\trule.Target = handler\n\t\t\t\tbreak\n\t\t\tcase \"proxy_telnet\":\n\t\t\t\thandler = rule.Name\n\t\t\t\terr := g.NewTelnetProxy(rule.Target)\n\t\t\t\tif err != nil {\n\t\t\t\t\tg.logger.Error(fmt.Sprint(\"[telnet.prxy] failed to initialize TELNET proxy\"))\n\t\t\t\t\tcontinue\n\t\t\t\t}\n\t\t\t\trule.Target = handler\n\t\t\t\tbreak\n\t\t\tdefault:\n\t\t\t\thandler = rule.Target\n\t\t\t\tbreak\n\t\t\t}\n\n\t\t\tif g.protocolHandlers[handler] == nil {\n\t\t\t\tg.logger.Warn(fmt.Sprintf(\"[glutton ] no handler 
found for %v protocol\", handler))\n\t\t\t\tcontinue\n\t\t\t}\n\n\t\t\tg.processor.RegisterConnHandler(handler, func(conn net.Conn, md *freki.Metadata) error {\n\t\t\t\thost, port, err := net.SplitHostPort(conn.RemoteAddr().String())\n\t\t\t\tif err != nil {\n\t\t\t\t\treturn err\n\t\t\t\t}\n\n\t\t\t\tif md == nil {\n\t\t\t\t\tg.logger.Debug(fmt.Sprintf(\"[glutton ] connection not tracked: %s:%s\", host, port))\n\t\t\t\t\treturn nil\n\t\t\t\t}\n\t\t\t\tg.logger.Debug(\n\t\t\t\t\tfmt.Sprintf(\"[glutton ] new connection: %s:%s -> %d\", host, port, md.TargetPort),\n\t\t\t\t\tzap.String(\"host\", host),\n\t\t\t\t\tzap.String(\"sport\", port),\n\t\t\t\t\tzap.String(\"dport\", strconv.Itoa(int(md.TargetPort))),\n\t\t\t\t\tzap.String(\"handler\", handler),\n\t\t\t\t)\n\n\t\t\t\tif g.producer != nil {\n\t\t\t\t\terr = g.producer.LogHTTP(conn, md, nil, \"\")\n\t\t\t\t\tif err != nil {\n\t\t\t\t\t\tg.logger.Error(fmt.Sprintf(\"[glutton ] error: %v\", err))\n\t\t\t\t\t}\n\t\t\t\t}\n\n\t\t\t\tdone := make(chan struct{})\n\t\t\t\tgo g.closeOnShutdown(conn, done)\n\t\t\t\tconn.SetDeadline(time.Now().Add(45 * time.Second))\n\t\t\t\tctx := g.contextWithTimeout(72)\n\t\t\t\terr = g.protocolHandlers[handler](ctx, conn)\n\t\t\t\tdone <- struct{}{}\n\t\t\t\treturn err\n\t\t\t})\n\t\t}\n\t}\n}" ]
[ "0.71475", "0.70961374", "0.70783144", "0.70294124", "0.69553304", "0.6940985", "0.6937912", "0.69187754", "0.6887951", "0.68357706", "0.6795475", "0.6793343", "0.6793343", "0.6793343", "0.6793343", "0.6793343", "0.6793343", "0.6793343", "0.67871064", "0.67544246", "0.67344135", "0.67239845", "0.6710781", "0.67080337", "0.66899025", "0.6688774", "0.66878796", "0.66696244", "0.664495", "0.66356343", "0.66082454", "0.65991944", "0.6565711", "0.6560089", "0.6552516", "0.65479195", "0.6537488", "0.6532624", "0.6520676", "0.64554894", "0.6450964", "0.6416724", "0.64045537", "0.6398637", "0.6370842", "0.6365544", "0.6342779", "0.6333526", "0.63242644", "0.63209885", "0.63153917", "0.6298752", "0.62973934", "0.6297279", "0.62967557", "0.62890387", "0.6287266", "0.6285902", "0.62856627", "0.62530553", "0.62476826", "0.62422633", "0.6232324", "0.62183946", "0.62171817", "0.6208761", "0.6208681", "0.6198971", "0.6186093", "0.61772877", "0.6175722", "0.61717063", "0.61700076", "0.6163922", "0.61572367", "0.61539984", "0.6133772", "0.6126306", "0.61113495", "0.61094767", "0.6106152", "0.6097361", "0.607883", "0.60540366", "0.60502416", "0.6040928", "0.60375917", "0.60330516", "0.60313946", "0.60299695", "0.60286796", "0.60262215", "0.6024786", "0.601375", "0.60081506", "0.59983426", "0.59967875", "0.59933215", "0.5984549", "0.5980967" ]
0.69995373
4
Starting listening and serving
func (s *Server) listenAndServe() (err error) {

	var listener net.Listener
	var clientAuth tls.ClientAuthType
	var ok bool

	c := s.Config

	// Set default listening address and port
	if c.Address == "" {
		c.Address = DefaultServerAddr
	}
	if c.Port == 0 {
		c.Port = DefaultServerPort
	}
	addr := net.JoinHostPort(c.Address, strconv.Itoa(c.Port))
	var addrStr string

	if c.TLS.Enabled {
		log.Debug("TLS is enabled")
		addrStr = fmt.Sprintf("https://%s", addr)

		// If key file is specified and it does not exist or its corresponding certificate file does not exist
		// then need to return error and not start the server. The TLS key file is specified when the user
		// wants the server to use custom tls key and cert and don't want server to auto generate its own. So,
		// when the key file is specified, it must exist on the file system
		if c.TLS.KeyFile != "" {
			if !util.FileExists(c.TLS.KeyFile) {
				return fmt.Errorf("File specified by 'tls.keyfile' does not exist: %s", c.TLS.KeyFile)
			}
			if !util.FileExists(c.TLS.CertFile) {
				return fmt.Errorf("File specified by 'tls.certfile' does not exist: %s", c.TLS.CertFile)
			}
			log.Debugf("TLS Certificate: %s, TLS Key: %s", c.TLS.CertFile, c.TLS.KeyFile)
		} else if !util.FileExists(c.TLS.CertFile) {
			// TLS key file is not specified, generate TLS key and cert if they are not already generated
			err = s.autoGenerateTLSCertificateKey()
			if err != nil {
				return fmt.Errorf("Failed to automatically generate TLS certificate and key: %s", err)
			}
		}

		cer, err := util.LoadX509KeyPair(c.TLS.CertFile, c.TLS.KeyFile, s.csp)
		if err != nil {
			return err
		}

		if c.TLS.ClientAuth.Type == "" {
			c.TLS.ClientAuth.Type = defaultClientAuth
		}

		log.Debugf("Client authentication type requested: %s", c.TLS.ClientAuth.Type)

		authType := strings.ToLower(c.TLS.ClientAuth.Type)
		if clientAuth, ok = clientAuthTypes[authType]; !ok {
			return errors.New("Invalid client auth type provided")
		}

		var certPool *x509.CertPool
		if authType != defaultClientAuth {
			certPool, err = LoadPEMCertPool(c.TLS.ClientAuth.CertFiles)
			if err != nil {
				return err
			}
		}

		config := &tls.Config{
			Certificates: []tls.Certificate{*cer},
			ClientAuth:   clientAuth,
			ClientCAs:    certPool,
			MinVersion:   tls.VersionTLS12,
			MaxVersion:   tls.VersionTLS13,
			CipherSuites: stls.DefaultCipherSuites,
		}

		listener, err = tls.Listen("tcp", addr, config)
		if err != nil {
			return errors.Wrapf(err, "TLS listen failed for %s", addrStr)
		}
	} else {
		addrStr = fmt.Sprintf("http://%s", addr)
		listener, err = net.Listen("tcp", addr)
		if err != nil {
			return errors.Wrapf(err, "TCP listen failed for %s", addrStr)
		}
	}
	s.listener = listener
	log.Infof("Listening on %s", addrStr)

	err = s.checkAndEnableProfiling()
	if err != nil {
		s.closeListener()
		return errors.WithMessage(err, "TCP listen for profiling failed")
	}

	// Start serving requests, either blocking or non-blocking
	if s.BlockingStart {
		return s.serve()
	}
	s.wait = make(chan bool)
	go s.serve()
	return nil
}
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "func StartListening() {\n\thttp.HandleFunc(\"/health\", GenerateHandler(\"^/health$\", HealthHandler))\n\thttp.HandleFunc(\"/static/\", GenerateHandler(\"^/(static/(js/|css/|media/)[a-zA-Z0-9._]*)$\", FileHandler))\n\thttp.HandleFunc(\"/audits/\", GenerateHandler(\"^/(static/[a-zA-Z0-9._-]*)$\", FileHandler))\n\thttp.HandleFunc(\"/api/\", GenerateHandler(\"^/api/(get/(all|inventory|host))$\", APIHandler))\n\thttp.HandleFunc(\"/\", GenerateHandler(\"^/(.*)$\", FileHandler))\n\ta := fmt.Sprintf(\"%s:%s\", config.Host, config.Port)\n\tlogger.Infof(\"Start listening \\\"%s\\\"...\", a)\n\tlogger.Fatale(http.ListenAndServe(a, nil), \"Server crashed !\")\n}", "func (s *Server) Run() {\n\tvar l net.Listener\n\tvar err error\n\thost := s.ip+\":\"+s.port\n\tl, err = net.Listen(\"tcp\", host)\n\tif err != nil {\n\t\tlog.Fatal(\"Listen: %v\", err)\n\t}\n\tif s.connLimit > 0 {\n\t\tl = netutil.LimitListener(l, s.connLimit)\n\t}\n\n\terr = http.Serve(l, s)\n\tif err != nil {\n\t\tlog.Fatal(\"http.listenAndServe failed: %s\", err.Error())\n\t}\n\ts.l = l\n\treturn\n}", "func StartListen(request *restful.Request, response *restful.Response) {\n\tportstring := request.PathParameter(\"port-id\")\n\tglog.Info(\"get the port number\", portstring)\n\tportint, err := strconv.Atoi(portstring)\n\tif err != nil {\n\t\tresponse.WriteError(500, err)\n\t\treturn\n\t}\n\tpid, pname, err := lib.Getinfofromport(portint)\n\n\tif pid == -1 {\n\t\tresponse.WriteError(500, errors.New(\"the port is not be listend in this machine ( /proc/net/tcp and /proc/net/tcp6)\"))\n\t\treturn\n\t}\n\n\tif err != nil {\n\t\tresponse.WriteError(500, err)\n\t\treturn\n\n\t}\n\tglog.Info(pname, pid)\n\n\t//create the process instance and get the detail info of specified pid\n\tPdetail := &model.ProcessDetail{\n\t\tProcess: &process.Process{Pid: 22637},\n\t}\n\tcmd, err := Pdetail.Cmdinfo()\n\tif err != nil {\n\t\tglog.Info(err)\n\t}\n\tglog.Info(cmd)\n\t//TODO get more info of this instance\n\n\t//start listen to specific ip:port for 60s and send the data to es\n\ttimesignal := time.After(time.Second * 30)\n\n\t//start collect and check the timesignal every one minutes\n\tgo lib.Startcollect(portint, device, timesignal)\n\n\tresponse.Write([]byte(\"activated\"))\n\n}", "func (s *server) Run() error {\n\ts.logger.Info(\"starting http server\", logger.String(\"addr\", s.server.Addr))\n\ts.server.Handler = s.gin\n\t// Open listener.\n\ttrackedListener, err := conntrack.NewTrackedListener(\"tcp\", s.addr, s.r)\n\tif err != nil {\n\t\treturn err\n\t}\n\treturn s.server.Serve(trackedListener)\n}", "func StartListening() {\n\tlistenAddress := config.Config.HTTP.Address + \":\" + config.Config.HTTP.Port\n\tgo func() {\n\t\tE.Start(listenAddress)\n\t}()\n\n\tlog.Info().Msgf(\"Starting to listening for HTTP requests on %s\", listenAddress)\n}", "func (srv *Server) Run() {\n\tsrv.StartListen()\n}", "func (hs *HttpServer) Start() (err error) {\n\tpanic(\"todo - StartServer\")\n\n\t// Start listening to the server port\n\n\t// Accept connection from client\n\n\t// Spawn a go routine to handle request\n\n}", "func StartListen(request *restful.Request, response *restful.Response) {\n\tportstring := request.PathParameter(\"port-id\")\n\tglog.Info(\"get the port number\", portstring)\n\tportint, err := strconv.Atoi(portstring)\n\tif err != nil {\n\t\tresponse.WriteError(500, err)\n\t\treturn\n\t}\n\tpid, _, err := lib.Getinfofromport(portint)\n\n\tif pid == -1 {\n\t\tresponse.WriteError(500, errors.New(\"the port is not be listend in this machine ( 
/proc/net/tcp and /proc/net/tcp6)\"))\n\t\treturn\n\t}\n\tif err != nil {\n\t\tresponse.WriteError(500, err)\n\t\treturn\n\t}\n\t//start listen to specific ip:port for 60s and send the data to es\n\ttimesignal := time.After(time.Second * Defaulttime)\n\t//start collect and check the timesignal every one minutes\n\tif !lib.Activeflag {\n\t\tgo lib.Startcollect(portint, Device, timesignal)\n\t\tlib.Flagmutex.Lock()\n\t\tlib.Activeflag = true\n\t\tresponse.Write([]byte(\"activated\"))\n\t\tlib.Flagmutex.Unlock()\n\t} else {\n\t\tresponse.Write([]byte(\"the server is already been activatied\"))\n\t}\n}", "func startHTTPListener() {\n\thttp.ListenAndServe(\":\"+GetConfig().Port, nil)\n}", "func (hs *HttpServer) Start() (err error) {\n\t//panic(\"todo - StartServer\")\n\n\t// Start listening to the server port\n\n\t// Accept connection from client\n\n\t// Spawn a go routine to handle request\n\tport := hs.ServerPort\n\thost := \"0.0.0.0\"\n\t//delim := \"/r/n\"\n\tln, err := net.Listen(\"tcp\", host+port)\n\tdefer ln.Close()\n\tif err != nil {\n\t\tlog.Panicln(err)\n\t}\n\tlog.Println(\"Listening to connections at '\"+host+\"' on port\", port)\n\n\tfor {\n\t\tconn, err := ln.Accept()\n\t\tif err != nil{\n\t\t\tlog.Panicln(err)\n\t\t}\n\t\t\n\t\tgo hs.handleConnection(conn)\n\t}\n\n\treturn err\n\n\n}", "func (this *ReceiverHolder) Start() error {\n\n\tfmt.Println(\"Starting server ...\")\n\t//register shutdown hook\n\tthis.registerShutdownHook()\n\n\ts := http.Server{}\n\ts.Handler = this.engine\n\n\t//show a informative message\n\tthis.receiver.ShowMessage()\n\n\t//start serving.\n\t// go func() {\n\t// \ts.Serve(this.listener)\n\t// }()\n\t// this.registerShutdownHook()\n\treturn s.Serve(this.listener)\n}", "func Run() {\n\tgo listen()\n}", "func startListen() {\n\tconfig, err := config.Config()\n\tif err != nil {\n\t\tlog.Log().Error.Println(err)\n\t\tpanic(err)\n\t}\n\tstorage, err := storage.Instance()\n\tif err != nil {\n\t\tlog.Log().Error.Println(err)\n\t}\n\tprefix := config.Prefix\n\tconverter := convert.NewConverter(config.CodeLength, storage)\n\tinternalPort := strconv.Itoa(config.Port)\n\tinternalAddress := \":\" + internalPort\n\tln, err := net.Listen(\"tcp\", internalAddress)\n\tif err != nil {\n\t\tlog.Log().Error.Println(err)\n\t\tpanic(err)\n\t}\n\tdefer ln.Close()\n\tfor {\n\t\tconn, err := ln.Accept()\n\t\tif err != nil {\n\t\t\tlog.Log().Error.Println(err)\n\t\t\treturn\n\t\t}\n\t\tgo handleConnection(conn, converter, prefix)\n\t}\n}", "func (g *Goer) listen() {\n\tif g.socketName == \"\" {\n\t\treturn\n\t}\n\n\tif g.mainSocket == nil {\n\t\tswitch g.Transport {\n\t\tcase \"tcp\", \"tcp4\", \"tcp6\", \"unix\", \"unixpacket\", \"ssl\":\n\t\t\tif len(os.Args) > 2 && os.Args[2] == \"graceful\" {\n\t\t\t\tfile := os.NewFile(3, \"\")\n\t\t\t\tlistener, err := net.FileListener(file)\n\t\t\t\tif err != nil {\n\t\t\t\t\tlib.Fatal(\"Fail to listen tcp: %v\", err)\n\t\t\t\t}\n\t\t\t\tg.mainSocket = listener.(*net.TCPListener)\n\t\t\t} else {\n\t\t\t\taddr, err := net.ResolveTCPAddr(g.Transport, g.socketName)\n\t\t\t\tif err != nil {\n\t\t\t\t\tlib.Fatal(\"fail to resolve addr: %v\", err)\n\t\t\t\t}\n\t\t\t\tlistener, err := net.ListenTCP(\"tcp\", addr)\n\t\t\t\tif err != nil {\n\t\t\t\t\tlib.Fatal(\"fail to listen tcp: %v\", err)\n\t\t\t\t}\n\t\t\t\tg.mainSocket = listener\n\t\t\t}\n\t\tcase \"udp\", \"upd4\", \"udp6\", \"unixgram\":\n\t\t\tlistener, err := net.ListenPacket(g.Transport, g.socketName)\n\t\t\tif err != nil {\n\t\t\t\tlib.Fatal(err.Error())\n\t\t\t}\n\t\t\tg.mainSocket = 
listener\n\t\tdefault:\n\t\t\tlib.Fatal(\"unknown transport layer protocol\")\n\t\t}\n\n\t\tlib.Info(\"server start success...\")\n\t\tg.status = StatusRunning\n\n\t\tgo g.resumeAccept()\n\t}\n}", "func ListenAndServe(ctx context.Context, bin, address, port string) {\n\tfmt.Println(`\n\n███████╗███████╗██╗ ███████╗ ███████╗███████╗████████╗███████╗███████╗███╗ ███╗\n██╔════╝██╔════╝██║ ██╔════╝ ██╔════╝██╔════╝╚══██╔══╝██╔════╝██╔════╝████╗ ████║\n███████╗█████╗ ██║ █████╗█████╗█████╗ ███████╗ ██║ █████╗ █████╗ ██╔████╔██║\n╚════██║██╔══╝ ██║ ██╔══╝╚════╝██╔══╝ ╚════██║ ██║ ██╔══╝ ██╔══╝ ██║╚██╔╝██║\n███████║███████╗███████╗██║ ███████╗███████║ ██║ ███████╗███████╗██║ ╚═╝ ██║\n╚══════╝╚══════╝╚══════╝╚═╝ ╚══════╝╚══════╝ ╚═╝ ╚══════╝╚══════╝╚═╝ ╚═╝`)\n\tlog.Info(ctx, \"server listening\", \"bin\", bin, \"address\", address, \"port\", port)\n\thttp.ListenAndServe(fmt.Sprintf(\"%s:%s\", address, port), mux)\n}", "func main() {\n\tln, err := net.Listen(\"tcp\", \":8888\")\n\tif err != nil {\n\t\t// handle the error, e.g. `log.Fatal(err)`\n\t\tlog.Fatal(err)\n\t}\n\tfmt.Println(\"Listening on \", ln.Addr())\n\tfor {\n\t\tc, err := ln.Accept()\n\t\tif err == nil {\n\t\t\t// do something with `c`\n\t\t\tfmt.Println(\"Connection: \", c)\n\t\t\t// Start goroutines by prepending the `go` keyword to call the serve function\n\t\t\tgo serve(c)\n\t\t}\n\t}\n}", "func (s *Server) Run() {\n\tgo func() {\n\t\t// start serving\n\t\tif err := s.httpServer.ListenAndServe(); err != nil {\n\t\t\tlog.Errora(err)\n\t\t}\n\t}()\n}", "func (h *Handler) Run() {\n\tlog.Printf(\"Listening on %s\", h.Cfg.Server.Address)\n\tserver := &http.Server{\n\t\tHandler: getRouter(),\n\t\tAddr: h.Cfg.Server.Address,\n\t}\n\th.listenErrCh <- server.ListenAndServe()\n}", "func runServer() {\n\t// listen and serve on 0.0.0.0:8080 (for windows \"localhost:8080\")\n\tlog.Fatalln(router.Run(fmt.Sprintf(\":%s\", env.AppPort)))\n}", "func serve(app *app.CallMe) {\n\thandlers.Register(app)\n\n\tapp.Logger.Info(\n\t\t\"Ready to ListenIP\",\n\t\tzap.Int(\"ListenPort\", app.ListenPort),\n\t\tzap.String(\"IP\", app.ListenIP),\n\t)\n\n\tlistenOn := fmt.Sprintf(\"%s:%d\", app.ListenIP, app.ListenPort)\n\terr := http.ListenAndServe(listenOn, nil)\n\tif err != nil {\n\t\tapp.Logger.Error(\"Server error\", zap.Error(err))\n\t}\n}", "func (h *Server) Run() {\n\n\th.g.StartServer()\n}", "func Run(http_handler http.Handler, https_handler http.Handler) {\n\n\tvar server Server\n\tvar port int\n\tvar error error\n\n\tserver.Hostname = os.Getenv(\"HOSTNAME\")\n\tserver.UseHTTP = true\n\tserver.UseHTTPS = false\n\t\n\tport, error = strconv.Atoi(os.Getenv(\"HTTP_PORT\"))\n\tif error != nil {\n\t\tlog.Println(\"Config file does not specify a listener to start\")\n\t}\n\n\tserver.HTTPPort = port\n\tif server.HTTPPort == 0 {\n\t\tserver.HTTPPort = 8000\n\t}\n\n\tport, error = strconv.Atoi(os.Getenv(\"HTTPS_PORT\"))\n\tif error != nil {\n\t\tlog.Println(\"Config file does not specify a listener to start\")\n\t}\n\n\tserver.HTTPSPort = port\n\tif server.HTTPSPort == 0 {\n\t\tserver.HTTPSPort = 8443\n\t}\n\tserver.CertFile = os.Getenv(\"SSL_CERTIFICATE_FILE\")\n\tserver.KeyFile = os.Getenv(\"SSL_KEY_FILE\")\n\n\tif server.UseHTTP && server.UseHTTPS {\n\t\tgo func() {\n\t\t\tstart_HTTPS(https_handler, server)\n\t\t}()\n\n\t\tstart_HTTP(http_handler, server)\n\t} else if server.UseHTTP {\n\t\tstart_HTTP(http_handler, server)\n\t} else if server.UseHTTPS {\n\t\tstart_HTTPS(https_handler, server)\n\t} else {\n\t\tlog.Println(\"Config file does not specify a 
listener to start\")\n\t}\n}", "func (s *server) Start() {\n\ts.httpServer = &http.Server{Addr: s.listenAddr}\n\n\tgo func() {\n\t\ts.logger.Printf(\"listening on \\\"%s\\\"\\n\", s.httpServer.Addr)\n\n\t\terr := s.httpServer.ListenAndServe()\n\t\tif err != nil && err != http.ErrServerClosed {\n\t\t\ts.logger.Fatalf(\"could not listen on \\\"%s\\\": %v\\n\", s.httpServer.Addr, err)\n\t\t}\n\n\t\ts.done <- true\n\t}()\n}", "func (s *server) Run(addr string) error {\n\treturn http.ListenAndServe(addr, s.handler)\n}", "func (s *Server) Run() error {\n\tbg := s.logger.Bg()\n\tlis, err := net.Listen(\"tcp\", s.hostPort)\n\n\tif err != nil {\n\t\tbg.Fatal(\"Unable to start server\", zap.Error(err))\n\t\treturn err\n\t}\n\n\tbg.Info(\"Starting\", zap.String(\"address\", \"tcp://\"+s.hostPort))\n\treturn s.Gs.Serve(lis)\n}", "func (s *Server) Start(ctx context.Context) error {\n\tif s.ln != nil {\n\t\treturn errors.Reason(\"cannot call Start twice\").Err()\n\t}\n\tln, err := net.Listen(\"tcp\", fmt.Sprintf(\":%d\", s.cfg.Port))\n\tif err != nil {\n\t\treturn err\n\t}\n\ts.ln = ln\n\n\t_, port, err := net.SplitHostPort(s.ln.Addr().String())\n\tif err != nil {\n\t\treturn err\n\t}\n\ts.cfg.Port, err = strconv.Atoi(port)\n\tif err != nil {\n\t\treturn err\n\t}\n\n\tctx, s.cancel = context.WithCancel(ctx)\n\tgo s.serveLoop(ctx)\n\treturn nil\n}", "func startServer(port string, handler http.Handler) {\n\terr := http.ListenAndServe(port, handler)\n\tif err != nil {\n\t\tlogger.Fatal(\"ListenAndServe: \", err)\n\t}\n}", "func startServer(wg *sync.WaitGroup) {\n\tdefer wg.Done()\n\n\tlistener, err := net.Listen(\"tcp\", fmt.Sprintf(\"%s:%d\", MyHandle.Host, MyHandle.Port))\n\tif err != nil {\n\t\tlog.Fatalf(\"failed to startServer: %v\", err)\n\t}\n\n\tgrpcServer := grpc.NewServer()\n\tapi.RegisterGoChatServer(grpcServer, &chatServer{})\n\n\terr = grpcServer.Serve(listener)\n\tif err != nil {\n\t\tlog.Fatalf(\"failed to serve: %v\", err)\n\t}\n}", "func (s *Server) Run() {\n\ts.Config.set()\n\tfor {\n\t\tconn, err := s.Listener.Accept()\n\t\tif err != nil {\n\t\t\ts.Config.ErrorLog(\"fail to accept new connection: %s\", err)\n\t\t}\n\t\tgo s.handleConn(conn)\n\t}\n}", "func main() {\n\tlisten_fds := ListenFds()\n\n\tfor _, fd := range listen_fds {\n\t\tl, err := net.FileListener(fd)\n\t\tif err != nil {\n\t\t\t// handle error\n\t\t\tfmt.Println(\"got err\", err)\n\t\t}\n\n\t\thttp.HandleFunc(\"/\", handler)\n\t\thttp.Serve(l, nil)\n\t}\n}", "func (s Server) Start(port int, cb func(Message) error) {\n\tlistenTo := fmt.Sprintf(\":%d\", port)\n\tlog.Printf(\"Launching server on port %d\", port)\n\tlistener, err := net.Listen(\"tcp\", listenTo)\n\tif err != nil {\n\t\tlog.Fatal(err)\n\t}\n\n\tfor {\n\t\tconn, err := listener.Accept()\n\t\tif err != nil {\n\t\t\tlog.Println(err)\n\t\t\tcontinue\n\t\t}\n\n\t\tc := Client{conn: conn, Id: NewClientId(), raw: s.Raw, Index: s.nextIndex}\n\t\ts.nextIndex++\n\t\tgo handleClient(c, s.Raw, cb)\n\t}\n}", "func (m *Server) Start() error {\n\tgo func() {\n\t\tif err := http.Serve(m.Listener, m); err != http.ErrServerClosed {\n\t\t\tlog.Printf(\"Unable to listen and serve: %v\", err)\n\t\t}\n\t}()\n\treturn nil\n}", "func (sw *Switcher) Start() {\n\tsw.Server.Start()\n}", "func (s *Server) Start() {\n\tif s.URL != \"\" {\n\t\tpanic(\"Server already started\")\n\t}\n\ts.URL = \"amqp://\" + s.Listener.Addr().String()\n\ts.goServe()\n\tif *serve != \"\" {\n\t\tfmt.Fprintln(os.Stderr, \"amqptest: serving on\", s.URL)\n\t\tselect {}\n\t}\n}", "func main() {\n\tserver := 
server.NewHTTPServer()\n\tserver.Start(3000)\n}", "func (s *serv) Start() {\n\ts.running = true\n\n\tsem := make(chan byte)\n\n\tgo func() {\n\t\t// Start listening\n\t\ts.listen()\n\t\tsem <- 0\n\t}()\n\n\tgo func() {\n\t\tfor {\n\t\t\tp := <-s.pks\n\t\t\t// Dispatch work\n\t\t\tswitch p.(type) {\n\t\t\tcase *network.MessagePacket:\n\t\t\t\t_ = p.(*network.MessagePacket)\n\t\t\tcase *network.ConnectionPacket:\n\t\t\t\t_ = p.(*network.ConnectionPacket)\n\t\t\tdefault:\n\t\t\t\tcontinue\n\t\t\t}\n\t\t}\n\t}()\n\n\t<-sem\n}", "func main() {\n\tlog.Printf(\"listening on %s and serving files from %s\\n\", port, dir)\n\thttp.ListenAndServe(port, server.Handler(dir))\n}", "func (s *Server) Run(ctx context.Context) {\n\tlog.Trace(\"Starting Eco server\")\n\n\tgo func() {\n\t\t<-ctx.Done()\n\t\ts.listener.Close()\n\t}()\n\n\ts.ctx = ctx\n\t// Start serving.\n\tlog.Infof(\"Eco server running\")\n\tfor {\n\t\tconn, err := s.listener.Accept()\n\t\tif err != nil {\n\t\t\tvar opErr *net.OpError\n\t\t\tif errors.As(err, &opErr) && strings.Contains(opErr.Error(), \"use of closed network connection\") {\n\t\t\t\t// Probably a normal shutdown\n\t\t\t\treturn\n\t\t\t}\n\t\t\tlog.Errorf(\"Accept error: %v\", err)\n\t\t\treturn\n\t\t}\n\t\tif ctx.Err() != nil {\n\t\t\treturn\n\t\t}\n\t\tgo s.handleRequest(conn)\n\t}\n}", "func Run(p uint, s HandlerSpawner) {\n\t//Start listening for connections\n\tl, e := net.Listen(\"tcp\", \":\"+strconv.Itoa(int(p)))\n\tif e != nil {\n\t\tutil.Log(fmt.Errorf(\"error %q listening on port %d\", e, p), LOG_SERVER)\n\t\treturn\n\t}\n\tdefer l.Close()\n\tfor {\n\t\tif c, e := l.Accept(); e != nil {\n\t\t\tutil.Log(fmt.Errorf(\"error %q accepting connection\", e), LOG_SERVER)\n\t\t} else {\n\t\t\t//Spawn a handler for each new connection.\n\t\t\tgo func(cn net.Conn) {\n\t\t\t\th := s.Spawn()\n\t\t\t\th.Start(cn)\n\t\t\t}(c)\n\t\t}\n\t}\n}", "func (o *HttpServer) Start() error {\n\turi := fmt.Sprintf(\"%s:%d\", o.Host, o.Port)\n\tlog.Printf(\"[HTTP] Server listen on %s\\n\", uri)\n\treturn o.Server.ListenAndServe()\n}", "func (s *Server) Start() error {\n\t// return if already started\n\tif s.listener != nil {\n\t\treturn nil\n\t}\n\n\thttp.HandleFunc(\"/health\", func(w http.ResponseWriter, r *http.Request) {\n\t\tfmt.Fprintln(w, \"OK\")\n\t})\n\n\thttp.Handle(\"/\", registry.New(&registry.Config{\n\t\tIPFSHost: s.ipfsHost,\n\t\tIPFSGateway: s.ipfsGateway,\n\t\tCIDResolvers: s.cidResolvers,\n\t\tCIDStorePath: s.cidStorePath,\n\t}))\n\n\tvar err error\n\ts.listener, err = net.Listen(\"tcp\", s.host)\n\tif err != nil {\n\t\treturn err\n\t}\n\n\ts.Debugf(\"[registry/server] listening on %s\", s.listener.Addr())\n\tif s.tlsKeyPath != \"\" && s.tlsCertPath != \"\" {\n\t\treturn http.ServeTLS(s.listener, nil, s.tlsCertPath, s.tlsKeyPath)\n\t}\n\n\treturn http.Serve(s.listener, nil)\n}", "func (svr *Server) Start() (err error) {\n\n\tfor {\n\t\tcliConn, err := svr.listener.Accept()\n\t\tif err != nil {\n\t\t\treturn err\n\t\t}\n\n\t\t// save connection\n\t\tsvr.mtx.Lock()\n\t\tsvr.connList.PushBack(cliConn)\n\t\tsvr.mtx.Unlock()\n\n\t\tsvr.logger.Debug(\"Accept new connection\", \"RemoteAddr\", cliConn.RemoteAddr())\n\t\tgo svr.readRequest(cliConn)\n\t}\n}", "func startHttpServer(conn connectors.Clients) *http.Server {\n\n\t// set the server props\n\tsrv := &http.Server{Addr: \":\" + os.Getenv(\"SERVER_PORT\")}\n\n\t// set the router and endpoints\n\tr := mux.NewRouter()\n\tr.HandleFunc(\"/api/v1/streamdata\", func(w http.ResponseWriter, req *http.Request) 
{\n\t\thandlers.StreamHandler(w, req, conn)\n\t}).Methods(\"POST\", \"OPTIONS\")\n\n\tr.HandleFunc(\"/api/v2/sys/info/isalive\", handlers.IsAlive).Methods(\"GET\", \"OPTIONS\")\n\n\thttp.Handle(\"/\", r)\n\n\t// start our server (concurrent)\n\tif err := srv.ListenAndServe(); err != nil {\n\t\tconn.Error(\"Httpserver: ListenAndServe() error: %v\", err)\n\t\tos.Exit(0)\n\t}\n\n\t// return our srv object\n\treturn srv\n}", "func startServer() {\n\t// index file\n\thttp.HandleFunc(\"/\", func(w http.ResponseWriter, r *http.Request) {\n\t\thttp.Redirect(w, r, \"/static/\", http.StatusFound)\n\t}) //设置访问的路由\n\n\t// static file\n\thttp.HandleFunc(\"/static/\", func(w http.ResponseWriter, r *http.Request) {\n\t\thttp.ServeFile(w, r, r.URL.Path[1:])\n\t})\n\n\t// other logic handlers\n\thttp.HandleFunc(\"/rank\", rank)\n\thttp.HandleFunc(\"/top\", top)\n\t//\thttp.HandleFunc(\"/update\", update)\n\n\terr := http.ListenAndServe(\":9090\", nil) //设置监听的端口\n\tif err != nil {\n\t\tlog.Fatal(\"ListenAndServe: \", err)\n\t}\n}", "func Listen(appTitle, addr string, webfiles *fs.FS, hostname string) error {\n\tAPPTITLE = appTitle\n\tHOSTNAME = hostname\n\n\tif HOSTNAME == \"\" {\n\t\thn, err := os.Hostname()\n\t\tif err == nil {\n\t\t\tHOSTNAME = hn\n\t\t} else {\n\t\t\tHOSTNAME = \"localhost\"\n\t\t}\n\t}\n\n\t//webfiles := getFileSystem()\n\tt, err := template.ParseFS(*webfiles, \"*.html\")\n\tif err != nil {\n\t\tpanic(err)\n\t}\n\n\tstaticfiles, err := fs.Sub(*webfiles, \"static\")\n\tif err != nil {\n\t\tpanic(err)\n\t}\n\n\trouter := gin.Default()\n\trouter.Use(gzip.Gzip(gzip.DefaultCompression))\n\trouter.SetHTMLTemplate(t)\n\trouter.StaticFS(\"/img\", http.FS(staticfiles))\n\trouter.StaticFS(\"/js\", http.FS(staticfiles))\n\trouter.NoRoute(notFoundHandler)\n\trouter.GET(\"/\", viewHandler)\n\trouter.GET(\"/view/:page\", viewHandler)\n\n\tv1 := router.Group(\"/api/v1\")\n\tapiv1.V1Routes(v1)\n\n\treturn router.Run(addr)\n}", "func (s *Server) Run() error {\n\ttcpAddr, err := net.ResolveTCPAddr(\"tcp\", s.conf.Addr)\n\tif err != nil {\n\t\treturn err\n\t}\n\tvar listener net.Listener\n\tlistener, err = net.ListenTCP(\"tcp\", tcpAddr)\n\tif err != nil {\n\t\treturn err\n\t}\n\n\tif s.conf.TLSConfig != nil {\n\t\tlistener = tls.NewListener(listener, s.conf.TLSConfig)\n\t}\n\terr = s.Serve(listener)\n\treturn err\n}", "func (ss *StreamerServer) StartWebServer(bindAddr string) {\n\tmux := ss.webServerHandlers(bindAddr)\n\tsrv := &http.Server{\n\t\tAddr: bindAddr,\n\t\tHandler: mux,\n\t}\n\n\tglog.Info(\"Web server listening on \", bindAddr)\n\tsrv.ListenAndServe()\n}", "func serve(svr *http.Server) {\n\tlog.Info(\"accepting connections\", zap.String(\"addr\", config.Bind))\n\tif err := svr.ListenAndServe(); err != nil {\n\t\tlog.Fatal(\"error serving requests\", zap.Error(err))\n\t}\n}", "func serve() error {\n\n\trouter := configureRoutes()\n\n\thttp.Handle(\"/\", router)\n\n\t// Define port and set to default if environment variable is not set\n\tport := PORT\n\tif len(os.Getenv(\"GO_PORT\")) > 0 {\n\t\tport = os.Getenv(\"GO_PORT\")\n\t}\n\n\tlogger.Info(\"Initiating HTTP Server on Port %v\", port)\n\treturn (http.ListenAndServe(port, router))\n}", "func (s *Server) Start() {\n\tif s.URL != \"\" {\n\t\tpanic(\"Server already started\")\n\t}\n\ts.URL = s.Listener.Addr().String()\n\ts.goServe()\n\tif *serve != \"\" {\n\t\tfmt.Fprintln(os.Stderr, \"grpctest: serving on\", s.URL) // nolint: gas\n\t\tselect {}\n\t}\n}", "func Start() *http.Server {\n\t// Flamecast API\n\thttp.HandleFunc(\"/api/v1/stats\", 
statsHandler)\n\t// Icecast compatibility API\n\thttp.HandleFunc(\"/admin/metadata\", adminMetadataHandler)\n\t// Main handler for feeding and listening to sources\n\thttp.HandleFunc(\"/\", sourceHandler)\n\n\tfor path, source := range sourcesPathMap {\n\t\tif source.config.Type == configreader.SourceTypePull {\n\t\t\tlogger.Noticef(\"Starting pulling thread for source %s\", path)\n\t\t\tgo pullSource(source)\n\t\t}\n\t}\n\n\tsrv := &http.Server{Addr: config.Bind}\n\tlogger.Notice(\"Server is starting\")\n\tgo func() {\n\t\tif err := http.ListenAndServe(config.Bind, nil); err != nil {\n\t\t\tlogger.Errorf(\"error starting server: %s\", err.Error())\n\t\t}\n\t}()\n\n\treturn srv\n}", "func (s *Server) Start() {\n\ts.wrap()\n\n\tgo func() {\n\t\ts.Config.Serve(s.Listener)\n\t}()\n}", "func (srv *Server) StartListen() {\n\tvar err error\n\tsrv.Listener, err = net.Listen(\"tcp\", fmt.Sprintf(\"%s:%s\", srv.Host, srv.Port))\n\tif err != nil {\n\t\tlog.Fatal(err)\n\t}\n\tlog.Printf(\"🔓 Listening on %s:%s\", srv.Host, srv.Port)\n\thttp.Serve(srv.Listener, nil)\n}", "func (s *Server) startListening(host string, port uint) (err error) {\n\tln, err := net.Listen(\"tcp\", fmt.Sprintf(\"%s:%d\", host, port))\n\n\tif err == nil {\n\t\tif DEBUG {\n\t\t\tlog.Printf(\"Listening on %s\", ln.Addr())\n\t\t}\n\n\t\tfor _, h := range s.eventHandlers {\n\t\t\th.StartedListener(host, port)\n\t\t}\n\n\t\tctr := 0\n\t\tfor i := range s.listeners {\n\t\t\tctr++\n\t\t\tif s.listeners[i] == nil {\n\t\t\t\ts.listeners[i] = ln\n\t\t\t\tbreak\n\t\t\t}\n\t\t}\n\n\t\tif ctr >= len(s.listeners) {\n\t\t\t// we're done starting the valid listeners\n\t\t\t// inform Start() about it\n\t\t\tcurrentServerState <- state_listeners_started\n\t\t\tif DEBUG {\n\t\t\t\tlog.Printf(\"Listeners started.\")\n\t\t\t}\n\n\t\t\tfor _, h := range s.eventHandlers {\n\t\t\t\th.ListenersStarted()\n\t\t\t}\n\t\t}\n\n\t\tif ctr == 1 {\n\t\t\tdefer func() {\n\t\t\t\t//everything should be close now\n\t\t\t\tif DEBUG {\n\t\t\t\t\tlog.Printf(\"All Listeners closed.\")\n\t\t\t\t}\n\t\t\t\t// wait() will be waiting for this signal after issuin Close()\n\t\t\t\t// to all listeners\n\t\t\t\tcurrentServerState <- state_listeners_stopped\n\n\t\t\t\tfor _, h := range s.eventHandlers {\n\t\t\t\t\th.ListenersClosed()\n\t\t\t\t}\n\t\t\t}()\n\t\t}\n\n\t\trunning := true\n\t\tfor running {\n\t\t\tif conn, err := ln.Accept(); err == nil {\n\t\t\t\tif DEBUG {\n\t\t\t\t\tlog.Printf(\"Connection received from: %s\", conn.RemoteAddr())\n\t\t\t\t}\n\t\t\t\tpipe := NewPipeline()\n\t\t\t\tpipe.channelHandlers.PushFrontList(s.pipelineFactory.channelHandlers)\n\t\t\t\tcl := s.clientHandler.newConnection(conn, pipe)\n\n\t\t\t\tdefer func() {\n\t\t\t\t\tif DEBUG {\n\t\t\t\t\t\tlog.Printf(\"Closing %s\", conn.RemoteAddr())\n\t\t\t\t\t}\n\t\t\t\t\tconn.Close()\n\t\t\t\t\tpipe.closed(cl)\n\t\t\t\t}()\n\n\t\t\t\tselect {\n\t\t\t\tcase state := <-currentServerState:\n\t\t\t\t\tif state >= state_server_stopping {\n\t\t\t\t\t\trunning = false\n\t\t\t\t\t} else {\n\t\t\t\t\t\t//resend unhandled state\n\t\t\t\t\t\tcurrentServerState <- state\n\t\t\t\t\t}\n\t\t\t\tdefault:\n\t\t\t\t}\n\t\t\t} else {\n\t\t\t\tfor _, h := range s.eventHandlers {\n\t\t\t\t\th.ErrorEncountered(err)\n\t\t\t\t}\n\t\t\t\trunning = false\n\t\t\t}\n\t\t}\n\t\tif DEBUG {\n\t\t\tlog.Printf(\"%s close.\", ln.Addr())\n\t\t}\n\n\t\tfor _, h := range s.eventHandlers {\n\t\t\th.ClosedListener(ln)\n\t\t}\n\t} else {\n\t\tfor _, h := range s.eventHandlers {\n\t\t\th.ErrorEncountered(err)\n\t\t}\n\t}\n\n\treturn\n}", 
"func main() {\n\tservice.StartWebServer(\"8081\")\n}", "func Run() error {\n\tgo server.ListenAndServe()\n\t// TODO: Improve error handling\n\treturn nil\n}", "func (s *Server) Start(ctx context.Context) error {\n\tif err := s.listenAndEndpoint(); err != nil {\n\t\treturn err\n\t}\n\ts.BaseContext = func(net.Listener) context.Context {\n\t\treturn ctx\n\t}\n\tlog.Infof(\"[HTTP] server listening on: %s\", s.lis.Addr().String())\n\tvar err error\n\tif s.tlsConf != nil {\n\t\terr = s.ServeTLS(s.lis, \"\", \"\")\n\t} else {\n\t\terr = s.Serve(s.lis)\n\t}\n\tif !errors.Is(err, http.ErrServerClosed) {\n\t\treturn err\n\t}\n\treturn nil\n}", "func StartApplicatin() {\n\tmapUrls()\n\trouter.Run(\":8080\")\n}", "func init() {\n\tserver := &Server{Addr: \"127.0.0.1:52525\", Handler: nil}\n\tgo server.ListenAndServe()\n\ttime.Sleep(1 * time.Millisecond)\n}", "func (s *Server) Run() error {\n\tlogging.Server(fmt.Sprintf(\"Starting %s Listener at %s:%d\", s.Protocol, s.Interface, s.Port))\n\n\ttime.Sleep(45 * time.Millisecond) // Sleep to allow the shell to start up\n\tif s.psk == \"reaper\" {\n\t\tfmt.Println()\n\t\tmessage(\"warn\", \"Listener was started using \\\"reaper\\\" as the Pre-Shared Key (PSK) allowing anyone\"+\n\t\t\t\" decrypt message traffic.\")\n\t\tmessage(\"note\", \"Consider changing the PSK by using the -psk command line flag.\")\n\t}\n\tmessage(\"note\", fmt.Sprintf(\"Starting %s listener on %s:%d\", s.Protocol, s.Interface, s.Port))\n\n\tif s.Protocol == \"h2\" {\n\t\tserver := s.Server.(*http.Server)\n\n\t\tdefer func() {\n\t\t\terr := server.Close()\n\t\t\tif err != nil {\n\t\t\t\tm := fmt.Sprintf(\"There was an error starting the %s server:\\r\\n%s\", s.Protocol, err.Error())\n\t\t\t\tlogging.Server(m)\n\t\t\t\tmessage(\"warn\", m)\n\t\t\t\treturn\n\t\t\t}\n\t\t}()\n\t\tgo logging.Server(server.ListenAndServeTLS(s.Certificate, s.Key).Error())\n\t\treturn nil\n\t} else if s.Protocol == \"hq\" {\n\t\tserver := s.Server.(*h2quic.Server)\n\n\t\tdefer func() {\n\t\t\terr := server.Close()\n\t\t\tif err != nil {\n\t\t\t\tm := fmt.Sprintf(\"There was an error starting the hq server:\\r\\n%s\", err.Error())\n\t\t\t\tlogging.Server(m)\n\t\t\t\tmessage(\"warn\", m)\n\t\t\t\treturn\n\t\t\t}\n\t\t}()\n\t\tgo logging.Server(server.ListenAndServeTLS(s.Certificate, s.Key).Error())\n\t\treturn nil\n\t}\n\treturn fmt.Errorf(\"%s is an invalid server protocol\", s.Protocol)\n}", "func main() {\n\thttp.ListenAndServe(\"127.0.0.1:8080\", NewServer())\n}", "func (hSvr *HTTPServer) Start(_ context.Context) error {\n\tgo func() {\n\t\tif err := hSvr.svr.ListenAndServe(); err != nil && err != http.ErrServerClosed {\n\t\t\tlog.L().Fatal(\"Node failed to serve.\", zap.Error(err))\n\t\t}\n\t}()\n\treturn nil\n}", "func (s *Server) Run() error {\n\t// start fetcher, reporter and doc generator in goroutines\n\tgo s.fetcher.Run()\n\tgo s.reporter.Run()\n\tgo s.docGenerator.Run()\n\n\t// start webserver\n\tlistenAddress := s.listenAddress\n\tif listenAddress == \"\" {\n\t\tlistenAddress = DefaultAddress\n\t}\n\n\tr := mux.NewRouter()\n\n\t// register ping api\n\tr.HandleFunc(\"/_ping\", pingHandler).Methods(\"GET\")\n\n\t// github webhook API\n\tr.HandleFunc(\"/events\", s.gitHubEventHandler).Methods(\"POST\")\n\n\t// travisCI webhook API\n\tr.HandleFunc(\"/ci_notifications\", s.ciNotificationHandler).Methods(\"POST\")\n\n\tlogrus.Infof(\"start http server on address %s\", listenAddress)\n\treturn http.ListenAndServe(listenAddress, r)\n}", "func serve(c *cli.Context) (err error) {\n\t// Create server 
configuration\n\tvar conf config.Config\n\tif conf, err = config.New(); err != nil {\n\t\treturn cli.Exit(err, 1)\n\t}\n\n\t// Update from CLI flags\n\tif addr := c.String(\"addr\"); addr != \"\" {\n\t\tconf.BindAddr = addr\n\t}\n\n\t// Create and run the whisper server\n\tvar server *whisper.Server\n\tif server, err = whisper.New(conf); err != nil {\n\t\treturn cli.Exit(err, 1)\n\t}\n\n\tif err = server.Serve(); err != nil {\n\t\treturn cli.Exit(err, 1)\n\t}\n\treturn nil\n}", "func (m *MetaNode) startServer() (err error) {\n\t// initialize and start the server.\n\tm.httpStopC = make(chan uint8)\n\tln, err := net.Listen(\"tcp\", \":\"+m.listen)\n\tif err != nil {\n\t\treturn\n\t}\n\tgo func(stopC chan uint8) {\n\t\tdefer ln.Close()\n\t\tfor {\n\t\t\tconn, err := ln.Accept()\n\t\t\tselect {\n\t\t\tcase <-stopC:\n\t\t\t\treturn\n\t\t\tdefault:\n\t\t\t}\n\t\t\tif err != nil {\n\t\t\t\tcontinue\n\t\t\t}\n\t\t\tgo m.serveConn(conn, stopC)\n\t\t}\n\t}(m.httpStopC)\n\tlog.LogInfof(\"start server over...\")\n\treturn\n}", "func Start(addr string) {\n\tf = NewServer()\n\thttp.HandleFunc(\"/bayeux\", serveWs)\n\thttp.HandleFunc(\"/\", serveOther)\n\n\t// serve static assets workaround\n\t//http.Handle(\"/file/\", http.StripPrefix(\"/file\", http.FileServer(http.Dir(\"/Users/paul/go/src/github.com/pcrawfor/fayego/runner\"))))\n\n\terr := http.ListenAndServe(addr, nil)\n\tif err != nil {\n\t\tfmt.Println(\"Fatal error \", err.Error())\n\t\tos.Exit(1)\n\t}\n}", "func (s *server) startWith(readTimeout int64, writeTimeout int64) error\t{\n\ts.sock = &http.Server{\n\t\tAddr: \":\"+ strconv.Itoa(int(s.port)),\n\t\tHandler: s,\n\t\tReadTimeout: time.Duration(readTimeout),\n\t\tWriteTimeout: time.Duration(writeTimeout),\n\t\tMaxHeaderBytes: 1 << 20,\n\t}\n\n\tlogger.Infof(\" Go WebServer started at Port %d \", s.port )\n\treturn s.sock.ListenAndServe();\n}", "func main() {\n\tregisterHandlers()\n\tappChatroom.Run() // run the chatroom app\n\t// start the server\n\tch := make(chan bool) // a channel used to get errors\n\tdefer close(ch)\n\tgo startHTTPServer(ch)\n\tgo startHTTPSServer(ch)\n\t<-ch\n\t<-ch\n\tlog.Fatal(\"Servers stopped with errors.\")\n}", "func (s *HttpServer) Run() {\n\n\tgo s.httpServer()\n\t<-s.quitChan\n}", "func main() {\r\n\tbind := fmt.Sprintf(\"%s:%s\", getIP(), getPort())\r\n\tlog.Println(\"Listening on\", bind)\r\n\r\n\terr := http.ListenAndServe(bind, http.HandlerFunc(mainHandle))\r\n\tif err != nil {\r\n\t\tpanic(\"ListenAndServe: \" + err.Error())\r\n\t}\r\n}", "func (s *Server) Start() error {\n\ts.router = configureRouter(s.Config.StaticDir)\n\ts.Logger.Printf(\"serving %v at /static/\", s.Config.StaticDir)\n\ts.httpListener = s.configureHTTPListener()\n\n\tgo func() {\n\t\terr := s.httpListener.ListenAndServe()\n\t\tif err != nil {\n\t\t\t//Normal graceful shutdown error\n\t\t\tif err.Error() == \"http: Server closed\" {\n\t\t\t\ts.Logger.Info(err)\n\t\t\t} else {\n\t\t\t\ts.Logger.Fatal(err)\n\t\t\t}\n\t\t}\n\t}()\n\ts.Logger.Printf(\"listening on %v\", s.Config.Address)\n\treturn nil\n}", "func (f *Frontend) Start() error {\n\n\tlistenAddr := fmt.Sprintf(\"%s:%d\", f.cfg.Host, f.cfg.Port)\n\toriginalListener, err := net.Listen(\"tcp\", listenAddr)\n\tif err != nil {\n\t\treturn err\n\t}\n\n\tsl, err := stoppableListener.New(originalListener)\n\tif err != nil {\n\t\treturn err\n\t}\n\n\tserver := http.Server{Handler: context.ClearHandler(f.router)}\n\n\tstop := make(chan os.Signal)\n\tsignal.Notify(stop, syscall.SIGINT)\n\tvar wg sync.WaitGroup\n\tgo func() 
{\n\t\twg.Add(1)\n\t\tdefer wg.Done()\n\t\tserver.Serve(sl)\n\t}()\n\n\tf.log.Println(\"Start serving HTTP requests at \", listenAddr)\n\tselect {\n\tcase signal := <-stop:\n\t\tf.log.Println(\"Got signal: \", signal)\n\t}\n\tf.log.Println(\"Stopping listener\")\n\tsl.Stop()\n\tf.log.Println(\"Waiting on server\")\n\twg.Wait()\n\n\treturn nil\n}", "func (server *Server) Run(addr string) {\n\tlog.Println(\"Yinyo is ready and waiting.\")\n\tlog.Fatal(http.ListenAndServe(addr, server.router))\n}", "func (s *Server) serve(lis net.Listener) {\n\ts.wg.Add(1)\n\tgo func() {\n\t\tlog.Infof(\"Listening on %s\", lis.Addr())\n\t\terr := s.httpServer.Serve(lis)\n\t\tlog.Tracef(\"Finished serving RPC: %v\", err)\n\t\ts.wg.Done()\n\t}()\n}", "func (srv *Server) Start() error {\n\treturn srv.app.Listen(srv.config.BindIP + \":\" + strconv.Itoa(srv.config.Port))\n}", "func (s *SamFSServer) Run() error {\n\tlis, err := net.Listen(\"tcp\", s.port)\n\tif err != nil {\n\t\tglog.Fatalf(\"falied to listen on port :: %s(err=%s)\", s.port, err.Error())\n\t\treturn err\n\t}\n\n\trand.Seed(time.Now().UnixNano())\n\ts.sessionID = rand.Int63()\n\tglog.Infof(\"starting new server with sessionID %d\", s.sessionID)\n\n\tgs := grpc.NewServer()\n\tpb.RegisterNFSServer(gs, s)\n\ts.grpcServer = gs\n\treturn gs.Serve(lis)\n}", "func (w *Webserver) Start() error {\n\n\t// listenAndServe the server\n\tgo func() {\n\t\tw.logger.Infof(\"Http server listening at %d!\", w.config.Port)\n\t\terr := w.listenAndServe()\n\t\tif err != nil && err != http.ErrServerClosed {\n\t\t\tw.logger.Errorw(fmt.Sprintf(\"webserver listening at port [%v] stopped\", w.config.Port), \"error\", err.Error())\n\t\t}\n\t}()\n\n\treturn nil\n}", "func (server *Server) Start() {\n\tmux := http.NewServeMux()\n\n\tfileServer := server.attachStaticFileServer(mux)\n\tserver.attachSystemJSRewriteHandler(mux)\n\tserver.attachCustomHandlers(mux)\n\n\tif server.hub != nil {\n\t\t// add HMR support\n\t\tserver.attachIndexInjectionListener(mux, fileServer)\n\t\tserver.attachWebSocketListeners(mux, server.hub)\n\t\tgo server.hub.run()\n\t}\n\n\tserver.srv = &http.Server{\n\t\tAddr: makeServerAddress(server.port),\n\t\tHandler: mux,\n\t}\n\n\tif err := server.srv.ListenAndServe(); err != nil {\n\t\tpanic(err)\n\t}\n}", "func (server Server) Run() error {\n\terr := server.supervisor.SpawnClient()\n\tif err != nil {\n\t\tserver.logger.Fatalf(\"Error in starting client: %s\", err)\n\t}\n\n\tgo listenForSMS(server.upstreamChannel, server.logger)\n\tserver.logger.Info(\"Listening for SMS\")\n\tserver.logger.Info(\"Starting Webserver\")\n\n\treturn server.webserver.Server.ListenAndServe()\n}", "func (s *server) Run(ctx context.Context) {\n\tif s.banner {\n\t\tfmt.Printf(\"%s\\n\\n\", config.Banner)\n\t}\n\n\tet, err := NewEchoTCP(s.address, s.verbose)\n\tif err != nil {\n\t\tlog.Fatal(err) // exit if creating EchoTCP is failed.\n\t}\n\tdefer et.listener.Close()\n\n\tfmt.Printf(\"server is started at %s\\n\", s.address)\n\tet.Run(ctx)\n}", "func (s *ChartStreamServer) listen() error {\n\tg := gin.New()\n\n\tg.Use(ginrus.Ginrus(log.StandardLogger(), time.RFC3339, true))\n\n\tg.GET(\"/index.yaml\", s.IndexHandler)\n\tg.GET(\"/chart/:name/*version\", s.DirectLinkHandler)\n\n\treturn g.Run(s.config.ListenAddr)\n}", "func (s *server) Start(stop <-chan struct{}) error {\n\tlistener, err := newListener(s.bindAddress)\n\tif err != nil {\n\t\treturn err\n\t}\n\tserver := http.Server{\n\t\tHandler: s.mux,\n\t}\n\t// Run the server\n\tgo func() {\n\t\tlog.Info(\"starting http 
server\")\n\t\tif err := server.Serve(listener); err != nil && err != http.ErrServerClosed {\n\t\t\tlog.Error(err, \"http server error\")\n\t\t}\n\t}()\n\n\t// Shutdown the server when stop is close\n\t<-stop\n\treturn server.Shutdown(context.Background())\n}", "func (h *Handler) ListenAndServe() error {\n\t// Setup a watcher.\n\tWatchMPD(h.ServerConfig[\"mpd_domain\"]+\":\"+h.ServerConfig[\"mpd_control_port\"], h)\n\n\tport := \":\" + h.ServerConfig[\"server_port\"]\n\tlog.Println(\"Starting server on \" + port)\n\treturn http.ListenAndServe(port, h)\n}", "func (web *WebServer) Start() {\n\tlog.Println(http.ListenAndServe(web.listen, web.router))\n}", "func (s *Server) listen() {\n\tdefer (Track(\"listen\", s.log))()\n\tvar err error\n\ts.listener, err = net.Listen(\"tcp\", s.port)\n\tif err != nil {\n\t\ts.log(\"listen() failed to start per error: %+v\", err)\n\t\tpanic(err)\n\t}\n\ts.log(\"listening at %s\", s.port)\n\tfor {\n\t\tconn, err := s.listener.Accept()\n\t\tif err != nil {\n\t\t\ts.log(\"failed to accept client per error: %+v\", err)\n\t\t} else {\n\t\t\tclient := NewClient(conn, s.clientInput)\n\t\t\ts.log(\"accepted %s\", client.ID)\n\t\t\ts.newClients <- client\n\t\t}\n\t}\n}", "func (self *Proxy) Start() error {\n\n\tself.srv = http.Server{\n\t\tAddr: self.Bind,\n\t\tHandler: self,\n\t}\n\n\tlog.Printf(\"Listening for HTTP client requests at %s.\\n\", self.Bind)\n\n\treturn self.srv.ListenAndServe()\n}", "func Start() {\n\twebServer.Engine.Run(\":\" + strconv.Itoa(cfg.Read().App.WebServerPort))\n}", "func main() {\n\tserver.New().Start()\n}", "func (s *Server) Run(addr string) {\n\tfmt.Println(\"Listening to port 8080\")\n\tlog.Fatal(http.ListenAndServe(addr, s.Router))\n}", "func main() {\n\t// Spin off the hub\n\thub := newHub()\n\tgo hub.run()\n\n\thttp.Handle(\"/frontend/dist/\", http.StripPrefix(\"/frontend/dist/\", http.FileServer(http.Dir(\"./frontend/dist/\"))))\n\thttp.Handle(\"/assets/\", http.StripPrefix(\"/assets/\", http.FileServer(http.Dir(\"./assets/\"))))\n\t// Serve index.html specifically\n\thttp.HandleFunc(\"/\", func(w http.ResponseWriter, r *http.Request) {\n\t\thttp.ServeFile(w, r, \"index.html\")\n\t})\n\thttp.HandleFunc(\"/api/socket\", func(w http.ResponseWriter, r *http.Request) {\n\t\tserveWs(hub, w, r)\n\t})\n\n\tport := \":4567\"\n\tlog.Println(\"Server listening at localhost\" + port)\n\thttp.ListenAndServe(port, nil)\n\n}", "func ListenAndServe(host string, port uint16) {\n\taddr := tools.ToAddressString(host, port)\n\t// Listen to tcp addr\n\tlistener, err := net.Listen(\"tcp\", addr)\n\ttools.LogAndExitIfErr(err)\n\tlog.Printf(\"Start a Echo Server Success! 
on %s\\n\", addr)\n\t// Create a context\n\tctx := context.Background()\n\tfor {\n\t\t// Wait accept connection\n\t\tconn, err := listener.Accept()\n\t\ttools.LogAndExitIfErr(err)\n\t\tlog.Printf(\"Client %s connection success\\n\", conn.RemoteAddr().String())\n\t\t// Serve a client connection\n\t\tgo serve(ctx, conn)\n\t}\n}", "func (sw *SimpleWebServer) Serve() error {\n\tif sw.running {\n\t\treturn fmt.Errorf(\"already running\")\n\t}\n\tsw.running = true\n\tgo func() {\n\t\t_ = sw.ListenAndServe()\n\t}()\n\n\treturn nil\n}", "func (s *Server) Start() error {\n\tif s.HostPortHTTP == \"\" {\n\t\ts.HostPortHTTP = \":\" + common.DefaultServerPortHTTP\n\t}\n\n\ts.eHandler = endtoend.NewHandler(s.AgentHostPort, s.SamplingServerURL)\n\n\tmux := http.NewServeMux()\n\tmux.HandleFunc(\"/\", func(w http.ResponseWriter, r *http.Request) { return }) // health check\n\tmux.HandleFunc(\"/start_trace\", func(w http.ResponseWriter, r *http.Request) {\n\t\ts.handleJSON(w, r, func() interface{} {\n\t\t\treturn tracetest.NewStartTraceRequest()\n\t\t}, func(ctx context.Context, req interface{}) (interface{}, error) {\n\t\t\treturn s.doStartTrace(req.(*tracetest.StartTraceRequest))\n\t\t})\n\t})\n\tmux.HandleFunc(\"/join_trace\", func(w http.ResponseWriter, r *http.Request) {\n\t\ts.handleJSON(w, r, func() interface{} {\n\t\t\treturn tracetest.NewJoinTraceRequest()\n\t\t}, func(ctx context.Context, req interface{}) (interface{}, error) {\n\t\t\treturn s.doJoinTrace(ctx, req.(*tracetest.JoinTraceRequest))\n\t\t})\n\t})\n\tmux.HandleFunc(\"/create_traces\", s.eHandler.GenerateTraces)\n\n\tlistener, err := net.Listen(\"tcp\", s.HostPortHTTP)\n\tif err != nil {\n\t\treturn err\n\t}\n\ts.listener = listener\n\ts.HostPortHTTP = listener.Addr().String()\n\n\tvar started sync.WaitGroup\n\tstarted.Add(1)\n\tgo func() {\n\t\tstarted.Done()\n\t\thttp.Serve(listener, mux)\n\t}()\n\tstarted.Wait()\n\tlog.Printf(\"Started http server at %s\\n\", s.HostPortHTTP)\n\treturn nil\n}", "func StartServer(portNo string) {\n\ttcpAddr, err := net.ResolveTCPAddr(\"tcp4\", portNo)\n\thandleErr(err)\n\n\tlistener, err := net.ListenTCP(\"tcp\", tcpAddr)\n\thandleErr(err)\n\tfmt.Println(\"Server started successfully..!!\")\n\tfmt.Println(\"\")\n\tfmt.Println(\"\")\n\tfor {\n\t\tconn, err := listener.Accept()\n\t\tif err != nil {\n\t\t\tcontinue\n\t\t}\n\n\t\tgo HandleReceiver(conn)\n\t}\n}", "func (s *Service) serve() {\n\tvar err error\n\tif s.tls {\n\t\tlog.Printf(\" > httpd https://%s\", s.addr)\n\t\terr = s.ln.ListenAndServeTLS(s.certFile, s.keyFile)\n\t} else {\n\t\tlog.Printf(\" > httpd http://%s\", s.addr)\n\t\terr = s.ln.ListenAndServe()\n\t}\n\tif err != nil && !strings.Contains(err.Error(), \"closed\") {\n\t\ts.err <- fmt.Errorf(\"httpd http://%s\\n%s\", s.addr, err)\n\t}\n\t<-s.shutdownChan\n}", "func (s *Server) Start() error {\n\ts.RegisterHTTPHandlers()\n\tlog.Print(fmt.Sprintf(\"Listening HTTP on: %s\", s.url))\n\n\thandler := CORSWrap(s.router)\n\treturn http.ListenAndServe(s.url, handler)\n}", "func Run(httpHandlers http.Handler, httpsHandlers http.Handler, s Server) {\n\tif s.UseHTTP && s.UseHTTPS {\n\t\tgo func() {\n\t\t\tstartHTTPS(httpsHandlers, s)\n\t\t}()\n\n\t\tstartHTTP(httpHandlers, s)\n\t} else if s.UseHTTP {\n\t\tstartHTTP(httpHandlers, s)\n\t} else if s.UseHTTPS {\n\t\tstartHTTPS(httpsHandlers, s)\n\t} else {\n\t\tlog.Println(\"Config file does not specify a listener to start\")\n\t}\n}", "func main() {\n\tif err := http.ListenAndServe(port, handler()); err != nil {\n\t\tlog.Fatal(err)\n\t}\n}", "func 
(server *Server) Start(laddr *net.TCPAddr) error {\n\n\tlistener, err := server.dataStreamer.CreateListener(laddr)\n\tif err != nil {\n\t\tlog.Print(err)\n\t\treturn err\n\t}\n\tserver.dataStreamer = listener\n\n\tgo server.listen()\n\treturn nil\n\n}", "func startHTTPServer(ch chan<- bool) {\n\tserver := http.Server{\n\t\tAddr: \":80\",\n\t}\n\tlog.Println(\"HTTP server started (listening on port 80).\")\n\tlog.Println(\"HTTP server stopped with error:\", server.ListenAndServe())\n\tch <- true\n}", "func (s *Server) Start() {\n\tfor {\n\t\tc, err := s.rtspListener.AcceptTCP()\n\n\t\tif err != nil {\n\t\t\tif _, ok := err.(net.Error); ok && strings.HasSuffix(err.Error(), \": use of closed network connection\") {\n\t\t\t\tbreak\n\t\t\t}\n\n\t\t\tfmt.Println(err)\n\t\t\tcontinue\n\t\t}\n\n\t\tc.SetReadBuffer(osReceiveBufferSize)\n\n\t\t// Handle the new connection\n\t\tgo s.handleConnection(c)\n\t}\n}" ]
[ "0.7514135", "0.7163052", "0.70899796", "0.70569307", "0.70437276", "0.7033033", "0.6997294", "0.69901323", "0.6986287", "0.69766617", "0.69289196", "0.6922364", "0.69163877", "0.69059926", "0.6895818", "0.6890497", "0.68595886", "0.6855492", "0.68255186", "0.67886686", "0.67529345", "0.6718495", "0.6717718", "0.67032284", "0.6687472", "0.66867256", "0.6686361", "0.66814584", "0.66779697", "0.6672045", "0.6670178", "0.6668161", "0.6667711", "0.66662043", "0.66606444", "0.66588515", "0.6656232", "0.6656118", "0.6649336", "0.66388303", "0.66352606", "0.66336834", "0.66315687", "0.66248417", "0.6618385", "0.66122067", "0.6606447", "0.6604715", "0.66037935", "0.6602359", "0.66021705", "0.6594633", "0.6589462", "0.65882933", "0.65871453", "0.65737545", "0.6569605", "0.6562947", "0.65627295", "0.6560587", "0.65585095", "0.65511507", "0.65468043", "0.6542143", "0.6542142", "0.65385926", "0.6537695", "0.6537262", "0.6528638", "0.6527363", "0.6526039", "0.65244174", "0.65221316", "0.6519971", "0.6508976", "0.65056735", "0.6501346", "0.64994705", "0.64982706", "0.64978904", "0.64961237", "0.649189", "0.64908886", "0.6483623", "0.6481289", "0.6480357", "0.64744717", "0.64740205", "0.6473671", "0.6471281", "0.64678925", "0.64632726", "0.6462455", "0.6459916", "0.645825", "0.645569", "0.64549744", "0.6453543", "0.64454764", "0.6444189", "0.6443345" ]
0.0
-1
HealthCheck pings the database to determine if it is reachable
func (s *Server) HealthCheck(ctx context.Context) error { return s.db.PingContext(ctx) }
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "func (db *Database) HealthCheck() error {\n\tsqlDB, err := db.conn.DB()\n\tif err != nil {\n\t\treturn err\n\t}\n\n\terr = sqlDB.Ping()\n\tif err != nil {\n\t\treturn err\n\t}\n\n\treturn nil\n}", "func HealthCheck(w http.ResponseWriter, r *http.Request) {\n\tdbUp := DBClient.Check()\n\n\tif dbUp {\n\t\tdata, _ := json.Marshal(healthCheckResponse{Status: \"UP\"})\n\t\twriteJSONResponse(w, http.StatusOK, data)\n\t} else {\n\t\tdata, _ := json.Marshal(healthCheckResponse{Status: \"Database not accessible\"})\n\t\twriteJSONResponse(w, http.StatusServiceUnavailable, data)\n\t}\n}", "func (d *DBHealthChecker) CheckHealth() error {\n\treturn d.db.Ping()\n}", "func (db *Database) HealthCheck() error {\n\tif db == nil || db.conn == nil {\n\t\treturn hord.ErrNoDial\n\t}\n\terr := db.conn.Query(\"SELECT now() FROM system.local;\").Exec()\n\tif err != nil {\n\t\treturn fmt.Errorf(\"health check of Cassandra cluster failed\")\n\t}\n\treturn nil\n}", "func (h *handler) Health(ctx context.Context) error {\n\treturn h.dbClient.Ping()\n}", "func (db *sqlstore) Health() error {\n\tctx, cancel := context.WithTimeout(context.Background(), time.Second*5)\n\tdefer cancel()\n\n\treturn db.PingContext(ctx)\n}", "func (s *PostgresStorage) HealthCheck() error {\n\treturn s.db.Ping()\n}", "func (c *PostgreSQLConnection) Health() (bool, error) {\n\ttimer := time.Now().Add(1 * time.Second)\n\tctx, cancel := context.WithDeadline(context.Background(), timer)\n\tdefer cancel()\n\n\terr := c.db.PingContext(ctx)\n\n\tif err != nil {\n\t\treturn false, err\n\t}\n\n\treturn true, nil\n}", "func (e *Endpoint) Check(ctx echo.Context) error {\n\thealthData := e.service.HealthCheck()\n\n\tif !healthData.Database {\n\t\treturn ctx.JSON(http.StatusServiceUnavailable, healthData)\n\t}\n\treturn ctx.JSON(http.StatusOK, healthData)\n}", "func HealthCheck(w http.ResponseWriter, r *http.Request) {\n\tres, err := rc.Ping().Result()\n\tif err != nil || res != \"PONG\" {\n\t\tlog.Error(\"redis connection failed\")\n\t\tvar failure = map[string]string{\"redis\": \"connection failed\"}\n\t\tdata, _ := json.Marshal(&failure)\n\t\tw.WriteHeader(http.StatusInternalServerError)\n\t\tw.Write(data)\n\t\treturn\n\t}\n\tw.WriteHeader(http.StatusOK)\n}", "func Ping(){\n\tif err := db.Ping(); err != nil {\n\t\tpanic(err)\n\t}\n}", "func StatusCheck(ctx context.Context, db *DB) error {\n\tctx, span := otel.Tracer(\"database\").Start(ctx, \"foundation.database.statuscheck\")\n\tdefer span.End()\n\n\t// First check we can ping the database.\n\tvar pingError error\n\tfor attempts := 1; ; attempts++ {\n\t\tpingError = db.Ping(ctx)\n\t\tif pingError == nil {\n\t\t\tbreak\n\t\t}\n\t\ttime.Sleep(time.Duration(attempts) * 100 * time.Millisecond)\n\t\tif ctx.Err() != nil {\n\t\t\treturn ctx.Err()\n\t\t}\n\t}\n\n\t// Make sure we didn't timeout or be cancelled.\n\tif ctx.Err() != nil {\n\t\treturn ctx.Err()\n\t}\n\n\t// Run a simple query to determine connectivity. 
Running this query forces\n\t// a round trip to the database.\n\tconst q = `SELECT true`\n\tvar tmp bool\n\treturn db.QueryRow(ctx, q).Scan(&tmp)\n}", "func (p *PostgreSQL) Check(ctx context.Context) (err error) {\n\tdb, err := sql.Open(\"postgres\", p.dsn)\n\tif err != nil {\n\t\treturn err\n\t}\n\n\tdefer func(db *sql.DB) {\n\t\tif dberr := db.Close(); dberr != nil {\n\t\t\terr = dberr\n\t\t}\n\t}(db)\n\n\terr = db.PingContext(ctx)\n\tif err != nil {\n\t\tif checker.IsConnectionRefused(err) {\n\t\t\treturn checker.NewExpectedError(\n\t\t\t\t\"failed to establish a connection to the postgresql server\", err,\n\t\t\t\t\"dsn\", p.dsn,\n\t\t\t)\n\t\t}\n\n\t\treturn err\n\t}\n\n\treturn nil\n}", "func (v *VerticaDatasource) CheckHealth(ctx context.Context, req *backend.CheckHealthRequest) (*backend.CheckHealthResult, error) {\n\n\tlog.DefaultLogger.Debug(\"Inside datasource.CheckHealth Function\", \"request\", req)\n\n\tvar status = backend.HealthStatusOk\n\tconnDB, err := v.GetVerticaDb(req.PluginContext)\n\n\tif err != nil {\n\t\tlog.DefaultLogger.Error(\"unable to get sql.DB connection: \" + err.Error())\n\t\treturn &backend.CheckHealthResult{\n\t\t\tStatus: backend.HealthStatusError,\n\t\t\tMessage: fmt.Sprintf(\"%s\", err),\n\t\t}, nil\n\t}\n\t// https://golang.org/pkg/database/sql/#DBStats\n\tlog.DefaultLogger.Debug(fmt.Sprintf(\"%s connection stats open connections =%d, InUse = %d, Ideal = %d\", req.PluginContext.DataSourceInstanceSettings.Name, connDB.Stats().MaxOpenConnections, connDB.Stats().InUse, connDB.Stats().Idle))\n\tconnection, err := connDB.Conn(ctx)\n\n\tif err != nil {\n\t\tlog.DefaultLogger.Info(fmt.Sprintf(\"CheckHealth :connection: %s\", err))\n\t\treturn &backend.CheckHealthResult{\n\t\t\tStatus: backend.HealthStatusError,\n\t\t\tMessage: fmt.Sprintf(\"%s\", err),\n\t\t}, nil\n\t}\n\n\tif err = connection.PingContext(context.Background()); err != nil {\n\t\tlog.DefaultLogger.Error(\"Error while connecting to the Vertica Database: \" + err.Error())\n\t\treturn &backend.CheckHealthResult{\n\t\t\tStatus: backend.HealthStatusError,\n\t\t\tMessage: fmt.Sprintf(\"%s\", err),\n\t\t}, nil\n\t}\n\tdefer connection.Close()\n\n\tresult, err := connection.QueryContext(ctx, \"SELECT version()\")\n\n\tif err != nil {\n\t\tlog.DefaultLogger.Error(\"Health check error: \" + err.Error())\n\t\treturn &backend.CheckHealthResult{\n\t\t\tStatus: backend.HealthStatusError,\n\t\t\tMessage: fmt.Sprintf(\"%s\", err),\n\t\t}, nil\n\t}\n\n\tdefer result.Close()\n\n\tvar queryResult string\n\n\tif result.Next() {\n\t\terr = result.Scan(&queryResult)\n\t\tif err != nil {\n\t\t\tlog.DefaultLogger.Error(\"Health check error: \" + err.Error())\n\t\t\treturn &backend.CheckHealthResult{\n\t\t\t\tStatus: backend.HealthStatusError,\n\t\t\t\tMessage: fmt.Sprintf(\"%s\", err),\n\t\t\t}, nil\n\t\t}\n\t}\n\n\treturn &backend.CheckHealthResult{\n\t\tStatus: status,\n\t\tMessage: fmt.Sprintf(\"Successfully connected to %s\", queryResult),\n\t}, nil\n}", "func (rb *redisBackend) HealthCheck(ctx context.Context) error {\n\tredisConn, err := rb.healthCheckPool.GetContext(ctx)\n\tif err != nil {\n\t\treturn status.Errorf(codes.Unavailable, \"%v\", err)\n\t}\n\tdefer handleConnectionClose(&redisConn)\n\n\t_, err = redisConn.Do(\"PING\")\n\t// Encountered an issue getting a connection from the pool.\n\tif err != nil {\n\t\treturn status.Errorf(codes.Unavailable, \"%v\", err)\n\t}\n\treturn nil\n}", "func checkBeat(db *sql.DB) (err error) {\n\tctx := context.Background()\n\tctx, cancel := context.WithTimeout(ctx, 
heartbeatTimeout)\n\tdefer cancel()\n\n\treturn db.PingContext(ctx)\n}", "func (db DatabaseRedis) Health() error {\n\treturn db.Client.Ping().Err()\n}", "func DatabasePingCheck(database *sql.DB, timeout time.Duration) Check {\n\treturn func() error {\n\t\tctx, cancel := context.WithTimeout(context.Background(), timeout)\n\t\tdefer cancel()\n\t\tif database == nil {\n\t\t\treturn fmt.Errorf(\"database is nil\")\n\t\t}\n\t\treturn database.PingContext(ctx)\n\t}\n}", "func DatabasePingCheck(database *sql.DB, timeout time.Duration) Check {\n\treturn func() error {\n\t\tctx, cancel := context.WithTimeout(context.Background(), timeout)\n\t\tdefer cancel()\n\t\tif database == nil {\n\t\t\treturn fmt.Errorf(\"database is nil\")\n\t\t}\n\t\treturn database.PingContext(ctx)\n\t}\n}", "func check() {\n\tconn, err := pool.Acquire(context.Background())\n\n\tif err != nil {\n\t\tlogger.Error(\"connection acquirement failed, reason %v\", err)\n\t} else {\n\t\tctx, cancel := context.WithDeadline(context.Background(),\n\t\t\ttime.Now().Add(2*time.Second))\n\t\tdefer cancel()\n\n\t\terr = conn.Conn().Ping(ctx)\n\n\t\tif err != nil {\n\t\t\tlogger.Error(\"database pool is unhealthy, reason %v\", err)\n\t\t\tos.Exit(1)\n\t\t} else {\n\t\t\tlogger.Info(\"database connection ok\")\n\t\t}\n\t}\n}", "func RegisterHealthCheck(mux *goji.Mux, db *postgres.DB) {\n\thealth := healthcheck.NewHandler()\n\n\thealth.AddLivenessCheck(\"goroutine-threshold\", healthcheck.GoroutineCountCheck(100))\n\thealth.AddReadinessCheck(\"postgres\", healthcheck.DatabasePingCheck(db.DB.DB, 1*time.Second))\n\n\tmux.HandleFunc(pat.Get(\"/ready\"), health.ReadyEndpoint)\n\tmux.HandleFunc(pat.Get(\"/live\"), health.LiveEndpoint)\n}", "func (c *Connection) healthCheck() {\n\tfor {\n\t\tselect {\n\t\tcase <-time.After(healthCheckTime):\n\t\t\tif !c.Retrying {\n\t\t\t\t// capture current rmq host\n\t\t\t\toldHost := c.Config.Host\n\n\t\t\t\tif err := c.validateHost(); err != nil {\n\t\t\t\t\tkillService(\"failed to validate rmq host: \", err)\n\t\t\t\t}\n\n\t\t\t\t// this means new host was assigned meanwhile (in c.validateHost())\n\t\t\t\tif oldHost != c.Config.Host {\n\t\t\t\t\tif err := c.recreateConn(); err != nil {\n\t\t\t\t\t\tkillService(\"failed to recreate rmq connection: \", err)\n\t\t\t\t\t}\n\n\t\t\t\t\tlog.Println(\"rmq connected to new host: \", c.Config.Host)\n\t\t\t\t}\n\t\t\t}\n\t\t}\n\t}\n}", "func (m *MongoStore) HealthCheck() error {\n\n\t_, err := decodeBytes(m.db.RunCommand(context.Background(), bson.D{{\"serverStatus\", 1}}))\n\tif err != nil {\n\t\treturn pkgerrors.Wrap(err, \"Error getting server status\")\n\t}\n\n\treturn nil\n}", "func (dbProvider *dbProvider) Ping() error {\n\t// dbProvider.instance.DB().Ping()\n\t// Ping only checks if the connection is available in the pool\n\t// If not and connection limit is not reached it'll create one connection\n\t// If the instance of connection is available in the pool and if it was killed by the database server\n\t// \t- In this case Ping will return the available instance as it doesn't know the connection was closed by server\n\t//\t- To handle this we execute a query, by doing which the connection will re-established with the server\n\n\tvar err error\n\n\tif _, err = dbProvider.instance.DB().Exec(\"SELECT 1\"); err != nil {\n\t\tlogger.Error(nil, crerrors.CodeDatabaseError, err, nil)\n\t}\n\n\treturn err\n}", "func (c *Check) Health(ctx context.Context, w http.ResponseWriter, r *http.Request) error {\n\tctx, span := trace.StartSpan(ctx, 
\"handlers.Check.Health\")\n\tdefer span.End()\n\n\tvar health struct {\n\t\tStatus string `json:\"status\"`\n\t}\n\n\t// Check if the database is ready.\n\tif err := database.StatusCheck(ctx, c.db); err != nil {\n\n\t\t// If the database is not ready we will tell the client and use a 500\n\t\t// status. Do not respond by just returning an error because further up in\n\t\t// the call stack will interpret that as an unhandled error.\n\t\thealth.Status = \"db not ready\"\n\t\treturn web.Respond(ctx, w, health, http.StatusInternalServerError)\n\t}\n\n\thealth.Status = \"ok\"\n\treturn web.Respond(ctx, w, health, http.StatusOK)\n}", "func (s *orm) Check(ctx context.Context) {\n\ts.client.DB().PingContext(ctx)\n}", "func (db *DB) Check() ([]string, bool) {\n\tif err := db.Ping(); err != nil {\n\t\treturn []string{err.Error()}, false\n\t}\n\treturn []string{\"database connection ok\"}, true\n}", "func (s *Service) CheckHealth(ctx context.Context, req *backend.CheckHealthRequest) (*backend.CheckHealthResult, error) {\n\tdsHandler, err := s.getDSInfo(ctx, req.PluginContext)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\n\terr = dsHandler.Ping()\n\n\tif err != nil {\n\t\tlogger.Error(\"Check health failed\", \"error\", err)\n\t\treturn &backend.CheckHealthResult{Status: backend.HealthStatusError, Message: dsHandler.TransformQueryError(logger, err).Error()}, nil\n\t}\n\n\treturn &backend.CheckHealthResult{Status: backend.HealthStatusOk, Message: \"Database Connection OK\"}, nil\n}", "func HealthCheck(db *gorm.DB) func(c *gin.Context) {\n\treturn func(c *gin.Context) {\n\t\tginutils.FormatResponse(c, http.StatusNoContent, \"\", \"\", \"\", utils.WhereAmI())\n\t}\n}", "func (s *SQLDBMonitor) Ping() bool {\n\tdb, err := sql.Open(s.sqlType, s.URI)\n\tif err != nil {\n\t\treturn false\n\t}\n\tif err := db.Ping(); err != nil {\n\t\treturn false\n\t}\n\treturn true\n}", "func (h *MysqlHealthCheck) DoHealthCheck() (resHealthCheck *view.ResHealthCheck, err error) {\n\tif h.DSN == \"\" {\n\t\terr = errors.New(\"mysql dsn is nil\")\n\t\treturn\n\t}\n\t_, err = ParseDSN(h.DSN)\n\tif err != nil {\n\t\treturn\n\t}\n\tif !strings.Contains(h.DSN, \"timeout\") {\n\t\th.DSN += \"&timeout=\" + DefaultTimeOut\n\t}\n\tdb, err := Open(\"mysql\", h)\n\tif err != nil {\n\t\treturn\n\t}\n\tdefer db.Close()\n\tif db == nil {\n\t\terr = errors.New(\"can not get mysql connection\")\n\t\treturn\n\t}\n\tif err := db.DB().Ping(); err != nil {\n\t\treturn nil, err\n\t}\n\tresHealthCheck = view.HealthCheckResult(\"mysql\", true, \"success\")\n\treturn\n}", "func Health(pg *sqlx.DB, rClient *redis.Pool) Handler {\n\treturn func(ctx context.Context, w http.ResponseWriter, r *http.Request) {\n\t\tres := struct {\n\t\t\tHealthy bool `json:\"healthy\"`\n\t\t\tServices map[string]bool `json:\"services\"`\n\t\t}{\n\t\t\tHealthy: true,\n\t\t\tServices: map[string]bool{\n\t\t\t\t\"postgres\": true,\n\t\t\t\t\"redis\": true,\n\t\t\t},\n\t\t}\n\n\t\tif _, err := pg.Exec(pgHealthcheck); err != nil {\n\t\t\tres.Healthy = false\n\t\t\tres.Services[\"postgres\"] = false\n\n\t\t\trespondJSON(w, 500, &res)\n\t\t\treturn\n\t\t}\n\n\t\tconn := rClient.Get()\n\t\tif err := conn.Err(); err != nil {\n\t\t\tres.Healthy = false\n\t\t\tres.Services[\"redis\"] = false\n\n\t\t\trespondJSON(w, 500, &res)\n\t\t\treturn\n\t\t}\n\t\tdefer conn.Close()\n\n\t\trespondJSON(w, http.StatusOK, &res)\n\t}\n}", "func (secretsDb *DB) Ping() error {\n\treturn secretsDb.Db.Ping()\n}", "func (p *persistenceLayer) CheckHealth() error {\n\treturn p.dal.Ping()\n}", "func (d 
*Database) Ping() error {\n\treturn d.Database.Ping()\n}", "func (p Config) Ping() error {\n\tconn, err := db.DB()\n\tif err != nil {\n\t\treturn err\n\t}\n\n\treturn conn.Ping()\n}", "func DBCheck(next http.HandlerFunc) http.HandlerFunc {\n\n\treturn func(w http.ResponseWriter, r *http.Request) {\n\n\t\tif db.ConnectionCheck() == 0 {\n\t\t\thttp.Error(w, \"DB Connection lost.\", http.StatusInternalServerError)\n\t\t\treturn\n\t\t}\n\n\t\tnext.ServeHTTP(w, r)\n\n\t}\n\n}", "func (d *Database) Ping() error {\n\treturn d.Conn.Ping()\n}", "func (sp *ServerPool) HealthCheck() {\n\tfor _, server := range sp.servers {\n\t\tstatus, load := heartbeat.PingServer(server.URL)\n\t\t//TODO: Ping each server 3 times? To determine if healthy\n\t\tserver.SetOnline(status)\n\t\tserver.SetResponseTime(load)\n\t}\n}", "func (db *Redis) Health() error {\n\treturn db.Ping().Err()\n}", "func (env *Env) ReadinessCheck(w http.ResponseWriter, r *http.Request, ps httprouter.Params) {\n\terr := env.db.Ping()\n\tif err != nil {\n\t\tlog.Println(err)\n\t\thttputil.SendErr(w, httputil.InternalServerError)\n\t\treturn\n\t}\n\thttputil.SendOK(w)\n}", "func pingDatabase(db *sql.DB) (err error) {\n\tfor i := 0; i < 30; i++ {\n\t\terr = db.Ping()\n\t\tif err == nil {\n\t\t\treturn\n\t\t}\n\t\tlogrus.Infof(\"database ping failed. retry in 1s\")\n\t\ttime.Sleep(time.Second)\n\t}\n\treturn\n}", "func (d *DB) Ping() error {\n\t_, err := d.DB.Exec(\"SELECT 1\")\n\tif err != nil {\n\t\treturn err\n\t}\n\treturn nil\n}", "func (database *Database) Ping() error {\n\treturn database.PingContext(context.Background())\n}", "func (db *DB) Ping() (string, error) {\n\n\tvar response string\n\ttimeout := time.Second * 3\n\tconn, err := net.DialTimeout(\"tcp\", net.JoinHostPort(db.host, \"8093\"), timeout)\n\tif err != nil {\n\t\tresponse = fmt.Sprintf(\"Connection error %v\", err.Error())\n\t\treturn response, err\n\t}\n\tif conn != nil {\n\t\tdefer conn.Close()\n\t\tresponse = fmt.Sprintf(\"Connection successful to %v\", net.JoinHostPort(db.host, \"8093\"))\n\t}\n\n\treturn response, nil\n}", "func handleHealthCheck(m *MicroService, d *net.Dialer) bool {\r\n\tchange := false\r\n\tfor i, inst := range m.Instances {\r\n\t\t_, err := d.Dial(\"tcp\", inst.Host)\r\n\t\tif err != nil {\r\n\t\t\tif !m.isBlacklisted(i) {\r\n\t\t\t\tm.blackList(i, true)\r\n\t\t\t\tlogInfo(\"Instance: \" + inst.Host + \" is now marked as DOWN\")\r\n\t\t\t\tchange = true\r\n\t\t\t}\r\n\t\t} else {\r\n\t\t\tif m.isBlacklisted(i) {\r\n\t\t\t\tm.blackList(i, false)\r\n\t\t\t\tlogInfo(\"Instance: \" + inst.Host + \" is now marked as UP\")\r\n\t\t\t\tchange = true\r\n\t\t\t}\r\n\t\t}\r\n\t}\r\n\treturn change\r\n}", "func (redClient *RedisInstance) HealthCheck(w http.ResponseWriter, r *http.Request) {\n\n\t// [ Ping Redis server, for checking connection ]\n\tpingResponse := u.Ping(redClient.RInstance)\n\tif pingResponse[\"status\"] != true {\n\t\tu.Respond(w, u.Message(true, pingResponse[\"message\"].(string)))\n\t\treturn\n\t}\n\n\tu.Respond(w, u.Message(true, \"Health check OK\"))\n\treturn\n}", "func (a *adapter) HealthCheck() (string, error) {\n\terr := a.client.checkHealthy()\n\tif err == nil {\n\t\treturn model.Healthy, nil\n\t}\n\treturn model.Unhealthy, err\n}", "func (c *ConsulDB) CheckDatabase() error {\n\tkv := c.consulClient.KV()\n\t_, _, err := kv.Get(\"test\", nil)\n\tif err != nil {\n\t\treturn pkgerrors.New(\"[ERROR] Cannot talk to Datastore. 
Check if it is running/reachable.\")\n\t}\n\treturn nil\n}", "func (db *Postgres) ping() error {\n\treturn db.Db.Ping()\n}", "func (service *DaemonHeartbeat) Check(ctx context.Context, req *grpc_health_v1.HealthCheckRequest) (*grpc_health_v1.HealthCheckResponse, error) {\n\n\theartbeat, err := GetHeartbeat(config.GetString(config.HeartbeatServiceEndpoint), config.GetString(config.ServiceHeartbeatType),\n\t\tconfig.GetString(config.ServiceId))\n\n\tif strings.Compare(heartbeat.Status, Online.String()) == 0 {\n\t\treturn &grpc_health_v1.HealthCheckResponse{Status: grpc_health_v1.HealthCheckResponse_SERVING}, nil\n\t}\n\n\treturn &grpc_health_v1.HealthCheckResponse{Status: grpc_health_v1.HealthCheckResponse_SERVICE_UNKNOWN}, errors.New(\"Service heartbeat unknown \" + err.Error())\n}", "func (d *DB) Health() (bool, error) {\n\tsess, err := d.Session()\n\tif err != nil {\n\t\treturn false, err\n\t}\n\n\tq := sess.Query(\"SELECT now() FROM system.local\")\n\tif err = q.Exec(); err != nil {\n\t\treturn false, err\n\t}\n\n\treturn true, nil\n}", "func (ds *redisDatasource) CheckHealth(ctx context.Context, req *backend.CheckHealthRequest) (*backend.CheckHealthResult, error) {\n\tvar status backend.HealthStatus\n\tmessage := \"Data Source health is yet to become known.\"\n\n\t// Get Instance\n\tclient, err := ds.getInstance(req.PluginContext)\n\n\tif err != nil {\n\t\tstatus = backend.HealthStatusError\n\t\tmessage = fmt.Sprintf(\"getInstance error: %s\", err.Error())\n\t} else {\n\t\terr = client.RunCmd(&message, \"PING\")\n\n\t\t// Check errors\n\t\tif err != nil {\n\t\t\tstatus = backend.HealthStatusError\n\t\t\tmessage = fmt.Sprintf(\"PING command failed: %s\", err.Error())\n\t\t} else {\n\t\t\tstatus = backend.HealthStatusOk\n\t\t\tmessage = \"Data Source is working as expected.\"\n\t\t}\n\t}\n\n\t// Return Health result\n\treturn &backend.CheckHealthResult{\n\t\tStatus: status,\n\t\tMessage: message,\n\t}, nil\n}", "func (ds *redisDatasource) CheckHealth(ctx context.Context, req *backend.CheckHealthRequest) (*backend.CheckHealthResult, error) {\n\tvar status = backend.HealthStatusUnknown\n\tvar message = \"Data source health is yet to become known\"\n\n\t// Get Instance\n\tclient, err := ds.getInstance(req.PluginContext)\n\n\tif err != nil {\n\t\tstatus = backend.HealthStatusError\n\t\tmessage = fmt.Sprintf(\"getInstance error: %s\", err.Error())\n\t} else {\n\t\terr = client.Do(radix.Cmd(&message, \"PING\"))\n\n\t\t// Check errors\n\t\tif err != nil {\n\t\t\tstatus = backend.HealthStatusError\n\t\t} else {\n\t\t\tstatus = backend.HealthStatusOk\n\t\t\tmessage = \"Data source working as expected\"\n\t\t}\n\t}\n\n\t// Return Health result\n\treturn &backend.CheckHealthResult{\n\t\tStatus: status,\n\t\tMessage: message,\n\t}, nil\n}", "func health(w http.ResponseWriter, _ *http.Request) {\n\tif !Storage.Healthy() {\n\t\tmsg := \"database is not healthy\"\n\t\thttp.Error(w, msg, http.StatusServiceUnavailable)\n\t}\n}", "func (c *ConsulStore) HealthCheck() error {\n\t_, _, err := c.client.Get(\"test\", nil)\n\tif err != nil {\n\t\treturn pkgerrors.New(\"[ERROR] Cannot talk to Datastore. 
Check if it is running/reachable.\")\n\t}\n\treturn nil\n}", "func ping(credential *types.Credential) (string, error) {\n\tvar db *sql.DB\n\tvar err error\n\n\tdb, err = util.GetMonitoringConnection(credential.Host,\n\t\tcredential.Username, credential.Port, credential.Database,\n\t\tcredential.Password)\n\tdefer db.Close()\n\tif err != nil {\n\t\tlogit.Error.Println(\"error in getting connectionto \" + credential.Host)\n\t\treturn \"down\", err\n\t}\n\n\tvar result string\n\terr = db.QueryRow(\"select now()::text\").Scan(&result)\n\tif err != nil {\n\t\tlogit.Error.Println(\"could not ping db on \" + credential.Host)\n\t\treturn \"down\", err\n\t}\n\treturn \"up\", nil\n\n}", "func CheckConnection() int {\n err := MongoConnection.Ping(context.TODO(), nil)\n if err != nil {\n return 0\n }\n return 1\n}", "func (a adapter) HealthCheck() (model.HealthStatus, error) {\n\tvar err error\n\tif a.registry.Credential == nil ||\n\t\tlen(a.registry.Credential.AccessKey) == 0 || len(a.registry.Credential.AccessSecret) == 0 {\n\t\tlog.Errorf(\"no credential to ping registry %s\", a.registry.URL)\n\t\treturn model.Unhealthy, nil\n\t}\n\tif err = a.PingGet(); err != nil {\n\t\tlog.Errorf(\"failed to ping registry %s: %v\", a.registry.URL, err)\n\t\treturn model.Unhealthy, nil\n\t}\n\treturn model.Healthy, nil\n}", "func (s *Service) Ping(ctx context.Context) error {\n\treturn s.mysql.Ping(ctx)\n}", "func (a *AbuseIPDB) Check(ipaddr net.IP) error {\n\tapiKey, err := getConfigValue(\"ABUSEIPDB_API_KEY\")\n\tif err != nil {\n\t\treturn fmt.Errorf(\"can't call API: %w\", err)\n\t}\n\n\theaders := map[string]string{\n\t\t\"Key\": apiKey,\n\t\t\"Accept\": \"application/json\",\n\t\t\"Content-Type\": \"application/x-www-form-urlencoded\",\n\t}\n\n\tqueryParams := map[string]string{\n\t\t\"ipAddress\": ipaddr.String(),\n\t\t\"maxAgeInDays\": maxAgeInDays,\n\t}\n\n\tresp, err := makeAPIcall(\"https://api.abuseipdb.com/api/v2/check\", headers, queryParams)\n\tif err != nil {\n\t\treturn err\n\t}\n\tdefer resp.Body.Close()\n\n\tif resp.StatusCode != http.StatusOK {\n\t\treturn fmt.Errorf(\"calling API: %s\", resp.Status)\n\t}\n\n\tif err := json.NewDecoder(resp.Body).Decode(a); err != nil {\n\t\treturn err\n\t}\n\n\treturn nil\n}", "func (td *SampleDatasource) CheckHealth(ctx context.Context, req *backend.CheckHealthRequest) (*backend.CheckHealthResult, error) {\n\tvar status = backend.HealthStatusOk\n\tvar message = \"Data source is working\"\n\n\tif rand.Int()%2 == 0 {\n\t\tstatus = backend.HealthStatusError\n\t\tmessage = \"randomized error\"\n\t}\n\n\treturn &backend.CheckHealthResult{\n\t\tStatus: status,\n\t\tMessage: message,\n\t}, nil\n}", "func pingDatabase(ctx context.Context, dbConn *core_database.DatabaseConn) error {\n\tdb, err := dbConn.Engine.DB()\n\tif err != nil {\n\t\treturn err\n\t}\n\n\tif err := db.Ping(); err != nil {\n\t\treturn err\n\t}\n\n\tsetConnectionConfigs(ctx, dbConn)\n\treturn nil\n}", "func CheckDB(next http.HandlerFunc) http.HandlerFunc {\n\treturn func(w http.ResponseWriter, r *http.Request) {\n\t\tif bd.CheckConnection() == false {\n\t\t\thttp.Error(w, \"Lost database connection\", 500)\n\t\t\treturn\n\t\t}\n\t\tnext.ServeHTTP(w, r)\n\t}\n}", "func (p *RoundRobinPool) HealthCheck() {\n\n\tt := time.NewTicker(time.Minute * 2)\n\tfor {\n\t\tselect {\n\t\tcase <-t.C:\n\t\t\tlog.Println(\"starting health check...\")\n\t\t\tp.HealthCheckUp()\n\t\t\tlog.Println(\"Health check completed\")\n\t\t}\n\t}\n}", "func (c clientGRPC) HealthCheck(url *url.URL) error {\n\tconn, err := 
getConn(url.Host, c.options)\n\tif err != nil {\n\t\treturn err\n\t}\n\tdefer conn.Close()\n\n\thealthClient := healthpb.NewHealthClient(conn)\n\tctx, cancel := context.WithTimeout(context.Background(), c.options.Timeout)\n\tdefer cancel()\n\n\thealth, err := healthClient.Check(ctx, &healthpb.HealthCheckRequest{})\n\tif err != nil {\n\t\treturn err\n\t}\n\tif health.Status != healthpb.HealthCheckResponse_SERVING {\n\t\treturn ErrServiceNotAvailable\n\t}\n\treturn nil\n}", "func (dal *DAL) Ping() error {\n\ti := 0\n\t_, err := dal.db.QueryOne(pg.Scan(&i), \"SELECT 1\")\n\treturn err\n}", "func (dal *DAL) Ping() error {\n\ti := 0\n\t_, err := dal.db.QueryOne(pg.Scan(&i), \"SELECT 1\")\n\treturn err\n}", "func (v *View) health(c echo.Context) error {\n\tif err := v.core.DB.Ping(); err != nil {\n\t\tc.Logger().Error(err)\n\t\treturn c.String(http.StatusInternalServerError, \"unhealthy\")\n\t}\n\treturn c.String(http.StatusOK, \"healthy\")\n}", "func (c *Client) Ping() error {\n\treturn c.Client.DB().Ping()\n}", "func Health(port string, pg *pq.Listener) *http.ServeMux {\n\n\thealthz.Register(\"postgres\", time.Second*5, func() error {\n\t\treturn pg.Ping()\n\t})\n\n\tmux := http.NewServeMux()\n\n\tpath := os.Getenv(\"HEALTH_PATH\")\n\tif path == \"\" {\n\t\tpath = \"/health\"\n\t}\n\n\tmux.Handle(path, healthz.Handler())\n\thttp.ListenAndServe(\":\"+os.Getenv(\"HEALTH_PORT\"), mux)\n\treturn mux\n}", "func checkHealth(server *domain.Server) {\n\t// We will consider a server to be healthy if we can open a tcp connection\n\t// to the host:port of the server within a reasonable time frame.\n\t_, err := net.DialTimeout(\"tcp\", server.Url.Host, time.Second*5)\n\tif err != nil {\n\t\tlog.Errorf(\"Could not connect to the server at '%s'\", server.Url.Host)\n\t\told := server.SetLiveness(false)\n\t\tif old {\n\t\t\tlog.Warnf(\"Transitioning server '%s' from Live to Unavailable state\", server.Url.Host)\n\t\t}\n\t\treturn\n\t}\n\told := server.SetLiveness(true)\n\tif !old {\n\t\tlog.Infof(\"Transitioning server '%s' from Unavailable to Live state\", server.Url.Host)\n\t}\n}", "func Ping() {\n\terr := db.Ping()\n\tif err != nil {\n\t\tpanic(err)\n\t}\n\n\tfmt.Println(\"Successfully connected!\")\n}", "func (db *MockDB) Ping(context.Context) error {\n\treturn nil\n}", "func (db *DB) Ping() error {\n\tif err := db.master.Ping(); err != nil {\n\t\treturn err\n\t}\n\n\tdb.RLock()\n\tdefer db.RUnlock()\n\tfor _, readReplica := range db.readReplicas {\n\t\tif err := readReplica.Ping(); err != nil {\n\t\t\treturn err\n\t\t}\n\t}\n\n\treturn nil\n}", "func healthCheck(w http.ResponseWriter, r *http.Request) {\n\tw.Write([]byte(\"Ready\"))\n}", "func isAlive(db *gosql.DB, l *logger) bool {\n\t// The cluster might have just restarted, in which case the first call to db\n\t// might return an error. In fact, the first db.Ping() reliably returns an\n\t// error (but a db.Exec() only seldom returns an error). 
So, we're gonna\n\t// Ping() twice to allow connections to be re-established.\n\t_ = db.Ping()\n\tif err := db.Ping(); err != nil {\n\t\tl.Printf(\"isAlive returned err=%v (%T)\", err, err)\n\t} else {\n\t\treturn true\n\t}\n\treturn false\n}", "func (db Database) VerifyConnection() (err error) {\n\tif db.checkConnection() {\n\t\tlog.Info(\"Database is reachable\")\n\t} else {\n\t\tlog.Fatal(\"Database is unreachable\")\n\t}\n\treturn\n}", "func CheckConnectionDB() int {\n\terr := MongoCN.Ping(context.TODO(), nil)\n\tif err != nil {\n\t\treturn 0\n\t}\n\n\treturn 1\n}", "func databaseCheck(redisClient *redis.Client, logger log.Logger) (ok bool, err error) {\n\t// Checking if letters list exists.\n\tlogger.Info(\"Checking database\")\n\tvar flag int64\n\tflag, err = redisClient.Exists(\"letters\").Result()\n\tif err != nil {\n\t\treturn\n\t}\n\tif flag == 0 {\n\t\treturn\n\t}\n\tvar letters []string\n\tletters, err = redisClient.SMembers(\"letters\").Result()\n\tsort.Strings(letters)\n\n\tlogger.Info(\"Letters\", \"letters\", letters)\n\tfor _, i := range letters {\n\t\tvar flag int64\n\t\tflag, err = redisClient.Exists(\"letter:\" + i).Result()\n\t\tif err != nil {\n\t\t\terr = errors.New(\"Damaged database. You should write in teminal: \\\"redis-cli\\\", \\\"FLUSHALL\\\" and restart app\")\n\t\t\treturn\n\t\t}\n\t\tif flag == 0 {\n\t\t\treturn\n\t\t}\n\t\tvar letterNames string\n\t\tletterNames, err = redisClient.Get(\"letter:\" + i).Result()\n\t\tlogger.Info(\"\\\"Letter : Names\\\"\", \"Letter\", i, \"Names\", letterNames)\n\t\tnames := strings.Split(letterNames, \"\\n\")\n\t\tfor _, name := range names {\n\t\t\tvar link string\n\t\t\tlink, err = redisClient.Get(\"name:\" + strings.ToLower(name)).Result()\n\t\t\tif err != nil {\n\t\t\t\terr = errors.New(\"Damaged database. 
You should write in teminal: \\\"redis-cli\\\", \\\"FLUSHALL\\\" and restart app\")\n\t\t\t\treturn\n\t\t\t}\n\t\t\tlogger.Info(\"\\\"Name : Link\\\"\", \"Name\", name, \"Link\", link)\n\t\t}\n\t}\n\n\t// Database if ready.\n\tlogger.Info(\"Database is ready\")\n\tok = true\n\treturn\n}", "func (p *PgSQL) Ping() error {\n\treturn p.Connection.Ping()\n}", "func (s *Store) Ping() error {\n\treturn s.db.Ping()\n}", "func (db *Db) CheckReady() error {\n\tif db.session.Closed() {\n\t\treturn errors.New(\"Session is closed\")\n\t}\n\n\tif err := db.session.Query(\"SELECT now() FROM system.local\").Exec(); err != nil {\n\t\treturn fmt.Errorf(\"Health check query failed: %s\", err.Error())\n\t}\n\n\treturn nil\n}", "func (p *RoundRobinPool) HealthCheckUp() {\n\tfor _, b := range p.servers {\n\t\tstatus := \"up\"\n\t\talive := p.isServerAlive(b.URL)\n\t\tb.SetAlive(alive)\n\t\tif !alive {\n\t\t\tstatus = \"down\"\n\t\t}\n\t\tlog.Printf(\"%s [%s]\\n\", b.URL, status)\n\t}\n}", "func PingDB(ctx context.Context, db *sql.DB) error {\n\trows, err := db.QueryContext(ctx, \"select 1\")\n\tif err != nil {\n\t\treturn driver.ErrBadConn // https://golang.org/pkg/database/sql/driver/#Pinger\n\t}\n\tdefer rows.Close()\n\treturn nil\n}", "func (my *MySQL) Ping() (err os.Error) {\n defer my.unlock()\n defer catchOsError(&err)\n my.lock()\n\n if my.conn == nil {\n return NOT_CONN_ERROR\n }\n if my.unreaded_rows {\n return UNREADED_ROWS_ERROR\n }\n\n // Send command\n my.sendCmd(_COM_PING)\n // Get server response\n my.getResult(nil)\n\n return\n}", "func (c *Client) HealthCheck() (*HealthStatus, error) {\n\treq, err := http.NewRequest(\"GET\", c.Host+\"/health\", nil)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\n\tvar status HealthStatus\n\tif err := c.doReq(req, http.StatusOK, &status); err != nil {\n\t\treturn nil, err\n\t}\n\n\treturn &status, nil\n}", "func healthCheckVM(conn net.Conn) {\n\tfor {\n\t\tconn.Write([]byte{0})\n\t\ttime.Sleep(2 * time.Second)\n\t}\n}", "func (c *Canary) HealthChecking(config schemas.Config) error {\n\thealthy := false\n\n\tfor !healthy {\n\t\tc.Logger.Debugf(\"Start Timestamp: %d, timeout: %s\", config.StartTimestamp, config.Timeout)\n\t\tisTimeout, _ := tool.CheckTimeout(config.StartTimestamp, config.Timeout)\n\t\tif isTimeout {\n\t\t\treturn fmt.Errorf(\"timeout has been exceeded : %.0f minutes\", config.Timeout.Minutes())\n\t\t}\n\n\t\tisDone, err := c.Deployer.HealthChecking(config)\n\t\tif err != nil {\n\t\t\treturn errors.New(\"error happened while health checking\")\n\t\t}\n\n\t\tif isDone {\n\t\t\thealthy = true\n\t\t} else {\n\t\t\ttime.Sleep(config.PollingInterval)\n\t\t}\n\t}\n\n\treturn nil\n}", "func (db *DB) heartbeatChecker() {\n\topts := goworkers.Options{Workers: numWorkers}\n\tgw := goworkers.New(opts)\n\tdefer gw.Stop(false)\n\n\tticker := time.NewTicker(heartbeatPeriod)\n\tdefer ticker.Stop()\n\n\tfor {\n\t\tselect {\n\t\tcase <-db.killHealthCheck:\n\t\t\treturn\n\t\tcase <-ticker.C:\n\t\t\tdb.RLock()\n\t\t\tfor i, rR := range db.readReplicas {\n\t\t\t\tidx := uint32(i)\n\t\t\t\treadReplica := rR\n\t\t\t\t// if a DB is unreachable, check if it needs to be quarantined\n\t\t\t\tif err := checkBeat(readReplica); err != nil {\n\t\t\t\t\tgw.Submit(func() {\n\t\t\t\t\t\t// attempt reconnection\n\t\t\t\t\t\t_, err := db.reconnectDB(idx, readReplica)\n\t\t\t\t\t\t// reconnect failure\n\t\t\t\t\t\t// quarantine if not already\n\t\t\t\t\t\tif err != nil 
{\n\t\t\t\t\t\t\tdb.quarantineReadReplica(idx)\n\t\t\t\t\t\t}\n\t\t\t\t\t})\n\t\t\t\t}\n\t\t\t}\n\t\t\tdb.RUnlock()\n\t\t\tgw.Wait(false)\n\n\t\t\tdb.resurrectMu.Lock()\n\t\t\tfor idx, readReplica := range db.resurrect {\n\t\t\t\tdb.Lock()\n\t\t\t\t// replace the old reader with the resurrected one\n\t\t\t\tdb.readReplicas[idx] = readReplica\n\t\t\t\tdb.Unlock()\n\t\t\t\tdelete(db.resurrect, idx)\n\t\t\t}\n\t\t\tdb.resurrectMu.Unlock()\n\t\t}\n\t}\n}", "func (db Database) checkConnection() bool {\n\t// Connect to database\n\tconnection, err := sql.Open(\"postgres\", db.getDBConnectionString())\n\tif err != nil {\n\t\treturn false\n\t}\n\t// Ping database\n\terr = connection.Ping()\n\treturn err == nil\n}", "func ping() error {\n\tfor i := 0; i < 10; i++ {\n\t\t// Ping the server by sending a GET request to `/health`.\n\t\tresp, err := http.Get(\"http://localhost\" + viper.GetString(\"addr\") + \"/\")\n\t\tif err == nil && resp.StatusCode == 200 {\n\t\t\treturn nil\n\t\t}\n\n\t\t// Sleep for a second to continue the next ping.\n\t\tlog.Infoln(\"Waiting for the router, retry in 1 second.\")\n\t\ttime.Sleep(time.Second)\n\t}\n\treturn errors.New(\"app is not working\")\n}", "func Healthcheck(context *gin.Context) {\n\tcontext.JSON(http.StatusOK, gin.H{\"message\": \"App up and running\"})\n}", "func healthcheck(ha *lib.HTTPAdapter) {\n\tr := mux.NewRouter()\n\tr.HandleFunc(\"/health\", ha.HealthHandler).Methods(\"GET\")\n\taddr := fmt.Sprintf(\"127.0.0.1:%v\", serverConfig.HealthcheckHTTPport)\n\tserver := &http.Server{Addr: addr, Handler: r}\n\tlogger.Printf(\"HTTP healthcheck listening on: %v\", addr)\n\tlogger.Println(server.ListenAndServe())\n}", "func (pg *PG) Health(ctx context.Context) error {\n\tvar tmp string\n\treturn pg.db.QueryRow(ctx, \"SELECT version();\").Scan(&tmp)\n}", "func Ping(ctx context.Context) {\n\tctx, cancel := context.WithTimeout(ctx, 1*time.Second)\n\tdefer cancel()\n\n\tif err := pool.PingContext(ctx); err != nil {\n\t\tlog.Fatalf(\"unable to connect to database: %v\", err)\n\t}\n}", "func (r *RedisHealthChecker) CheckHealth() error {\n\treturn r.redisClient.Ping().Err()\n}", "func (m *Monitor) HealthCheck(healthChannel chan bool) {\n\tres, resErr := http.Get(fmt.Sprintf(\"http://%s\", *m.MasterIP))\n\tif resErr != nil {\n\t\tlog.Println(resErr)\n\t\thealthChannel <- false\n\t} else if res.Status != \"200 OK\" {\n\t\tlog.Println(fmt.Errorf(\"healthcheck responded with status other than 200 OK, %s\", res.Status))\n\t\thealthChannel <- false\n\t} else {\n\t\tlog.Println(*m.MasterIP, \"passes health check\")\n\t\thealthChannel <- true\n\t}\n\treturn\n}", "func CheckPing(addr string) bool {\n\tgotit := false\n\tfor {\n\t\tconn, err := net.Dial(\"tcp\", addr+\":22\")\n\t\tif err == nil {\n\t\t\t// log.Fatal(err.Error())\n\t\t\tfmt.Print(\"got conncetion\")\n\t\t\tgotit = true\n\t\t\tconn.Close()\n\t\t\tbreak\n\t\t}\n\t\ttime.Sleep(10 * time.Second)\n\t}\n\treturn gotit\n}", "func (ping) Check() error {\n\treturn nil\n}" ]
[ "0.79334533", "0.74797064", "0.7348943", "0.7328651", "0.7134399", "0.71117795", "0.70077723", "0.69794774", "0.6870542", "0.6815102", "0.68096006", "0.67751133", "0.67025125", "0.66956794", "0.66566247", "0.6622511", "0.66113156", "0.661117", "0.661117", "0.6598455", "0.65929824", "0.6573979", "0.65674406", "0.6561874", "0.65523267", "0.6504659", "0.6487495", "0.64733946", "0.646489", "0.64600825", "0.643079", "0.6414374", "0.63929516", "0.6368408", "0.6350477", "0.6334015", "0.6331389", "0.6318209", "0.63113385", "0.6310024", "0.6301401", "0.62929106", "0.6279629", "0.62584245", "0.62249786", "0.62072533", "0.62054855", "0.6203116", "0.6199575", "0.6194095", "0.61914253", "0.61679965", "0.61641276", "0.6162296", "0.6098726", "0.6098661", "0.6097387", "0.60963744", "0.60917693", "0.60829127", "0.6061669", "0.60589993", "0.6058178", "0.6046444", "0.6025707", "0.6024348", "0.6020799", "0.6020799", "0.60157984", "0.60103405", "0.600531", "0.6001517", "0.59871656", "0.598221", "0.59784627", "0.59609175", "0.59512997", "0.5948351", "0.59392095", "0.593411", "0.59267783", "0.5922266", "0.59155273", "0.59135807", "0.5892386", "0.5886555", "0.588208", "0.5880461", "0.5867734", "0.58548206", "0.58453685", "0.584413", "0.5843579", "0.5841746", "0.5840866", "0.58390373", "0.5828978", "0.5801084", "0.5794739", "0.5785147" ]
0.74543005
2
checkAndEnableProfiling checks for the FABRIC_CA_SERVER_PROFILE_PORT env variable; if it is set, it starts listening for profiling requests at the port specified by the environment variable
func (s *Server) checkAndEnableProfiling() error {
	// Start listening for profile requests
	pport := os.Getenv(fabricCAServerProfilePort)
	if pport != "" {
		iport, err := strconv.Atoi(pport)
		if err != nil || iport < 0 {
			log.Warningf("Profile port specified by the %s environment variable is not a valid port, not enabling profiling", fabricCAServerProfilePort)
		} else {
			addr := net.JoinHostPort(s.Config.Address, pport)
			listener, err1 := net.Listen("tcp", addr)
			log.Infof("Profiling enabled; listening for profile requests on port %s", pport)
			if err1 != nil {
				return err1
			}
			go func() {
				log.Debugf("Profiling enabled; waiting for profile requests on port %s", pport)
				err := http.Serve(listener, nil)
				log.Errorf("Stopped serving for profiling requests on port %s: %s", pport, err)
			}()
		}
	}
	return nil
}
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "func runProfiler(port int) {\n\tgo func() {\n\t\tlog.Info().Err(http.ListenAndServe(fmt.Sprintf(\"localhost:%d\", port), nil)).Send()\n\t}()\n}", "func StartPProfListen(addr string) {\n\tgo func() {\n\t\tlog.Println(\"pprof listening:\", addr)\n\t\tif err := http.ListenAndServe(addr, nil); err != nil {\n\t\t\tlog.Panicln(\"pprof listening:\", err)\n\t\t}\n\t}()\n}", "func StartPProfListen(addr string) {\n\tgo func() {\n\t\tlog.Println(\"pprof listening:\", addr)\n\t\tif err := http.ListenAndServe(addr, nil); err != nil {\n\t\t\tlog.Panicln(\"pprof listening:\", err)\n\t\t}\n\t}()\n}", "func debugpprof() {\n\tif conf.Conf.App.Debug {\n\t\tpprofServer := &http.Server{Addr: conf.Conf.App.Pprof}\n\t\tgo pprofServer.ListenAndServe()\n\t}\n}", "func InitPprof(pprofBind []string) {\n\tpprofServeMux := http.NewServeMux()\n\tpprofServeMux.HandleFunc(\"/debug/pprof/\", pprof.Index)\n\tpprofServeMux.HandleFunc(\"/debug/pprof/cmdline\", pprof.Cmdline)\n\tpprofServeMux.HandleFunc(\"/debug/pprof/profile\", pprof.Profile)\n\tpprofServeMux.HandleFunc(\"/debug/pprof/symbol\", pprof.Symbol)\n\tfor _, addr := range pprofBind {\n\t\tgo func() {\n\t\t\tif err := http.ListenAndServe(addr, pprofServeMux); err != nil {\n\t\t\t\tpanic(err)\n\t\t\t}\n\t\t}()\n\t}\n}", "func setupProfiling() {\n\t//TODO Save it in etcd ?\n\trestful.Filter(func(request *restful.Request, response *restful.Response, chain *restful.FilterChain) {\n\t\tcfg := profile.Config{\n\t\t\tQuiet: false,\n\t\t\tProfilePath: \"./.profile/\" + FormatMethod(request),\n\t\t\tCPUProfile: true,\n\t\t\tMemProfile: true,\n\t\t\tNoShutdownHook: true, // do not hook SIGINT\n\t\t}\n\t\tp := profile.Start(&cfg)\n\t\tdefer p.Stop()\n\n\t\tlog.Infof(\"[global-filter (logger)] Request received: %s %s\\n\", request.Request.Method, request.Request.URL)\n\t\tnow := time.Now()\n\t\tchain.ProcessFilter(request, response)\n\t\tlog.Infof(\"[global-filter (timer)] Request processed in %v\\n\", time.Now().Sub(now))\n\t})\n}", "func init() {\n\truntime.GOMAXPROCS(8)\n\t// Define a profilerURL for your program to use | you might sometime have to use different port if 8080 is not available\n\tprofilerPort := \":8080\"\n\tgo func() {\n\t\terr := http.ListenAndServe(profilerPort, nil)\n\t\tif err != nil {\n\t\t\tfmt.Println(\"Error with ListenAndServe: \", profilerPort, err.Error())\n\t\t\tos.Exit(1)\n\t\t}\n\t}()\n}", "func TestWithProfilingEnabled(t *testing.T) {\n\tmockLogStore := new(MockedLogStore)\n\tserver := NewHTTP(&Config{BindAddress: \"127.0.0.1:8080\", EnableProfiling: true}, mockLogStore)\n\ttestServer := httptest.NewServer(server.server.Handler)\n\tdefer testServer.Close()\n\tclient := testServer.Client()\n\n\tresp, _ := client.Get(testServer.URL + \"/debug/pprof/heap\")\n\trequire.Equalf(t, http.StatusOK, resp.StatusCode, \"unexpected status code\")\n}", "func (a *AppBuilder) Profiling() *AppBuilder {\n\ta.init()\n\n\tprofilingHandler := profiling.NewHandler(a.Logger, false)\n\ta.ProfilingServer = profiling.NewServer(profilingHandler)\n\n\tsharedmain.WatchObservabilityConfigOrDie(a.Context, a.ConfigMapWatcher, profilingHandler, a.Logger, a.Name)\n\n\ta.startFunc = append(a.startFunc, func(ctx context.Context) error {\n\t\treturn a.ProfilingServer.ListenAndServe()\n\t})\n\n\treturn a\n}", "func Start(cfg Config) (*Profiler, error) {\n\tif len(cfg.ProfileTypes) == 0 {\n\t\tcfg.ProfileTypes = types.DefaultProfileTypes\n\t}\n\tif cfg.SampleRate == 0 {\n\t\tcfg.SampleRate = types.DefaultSampleRate\n\t}\n\tif cfg.Logger == nil {\n\t\tcfg.Logger = 
&agent.NoopLogger{}\n\t}\n\n\trc := remote.RemoteConfig{\n\t\tAuthToken: cfg.AuthToken,\n\t\tUpstreamAddress: cfg.ServerAddress,\n\t\tUpstreamThreads: 4,\n\t\tUpstreamRequestTimeout: 30 * time.Second,\n\t}\n\tupstream, err := remote.New(rc, cfg.Logger)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\n\tsc := agent.SessionConfig{\n\t\tUpstream: upstream,\n\t\tAppName: cfg.ApplicationName,\n\t\tProfilingTypes: types.DefaultProfileTypes,\n\t\tDisableGCRuns: cfg.DisableGCRuns,\n\t\tSpyName: types.GoSpy,\n\t\tSampleRate: cfg.SampleRate,\n\t\tUploadRate: 10 * time.Second,\n\t\tPid: 0,\n\t\tWithSubprocesses: false,\n\t}\n\tsession := agent.NewSession(&sc, cfg.Logger)\n\tif err := session.Start(); err != nil {\n\t\treturn nil, fmt.Errorf(\"start session: %v\", err)\n\t}\n\n\treturn &Profiler{\n\t\tsession: session,\n\t}, nil\n}", "func startProfiling(profileMode string) func() {\n\tswitch profileMode {\n\tcase \"cpu\":\n\t\treturn profile.Start(profile.NoShutdownHook, profile.ProfilePath(\".\"), profile.CPUProfile).Stop\n\tcase \"mem\":\n\t\treturn profile.Start(profile.NoShutdownHook, profile.ProfilePath(\".\"), profile.MemProfile).Stop\n\tcase \"mutex\":\n\t\treturn profile.Start(profile.NoShutdownHook, profile.ProfilePath(\".\"), profile.MutexProfile).Stop\n\tcase \"block\":\n\t\treturn profile.Start(profile.NoShutdownHook, profile.ProfilePath(\".\"), profile.BlockProfile).Stop\n\tdefault:\n\t\treturn func() {}\n\t}\n}", "func Start(cfg Config) (*Profiler, error) {\n\tif len(cfg.ProfileTypes) == 0 {\n\t\tcfg.ProfileTypes = types.DefaultProfileTypes\n\t}\n\tif cfg.SampleRate == 0 {\n\t\tcfg.SampleRate = types.DefaultSampleRate\n\t}\n\tif cfg.Logger == nil {\n\t\tcfg.Logger = &agent.NoopLogger{}\n\t}\n\n\trc := remote.RemoteConfig{\n\t\tAuthToken: cfg.AuthToken,\n\t\tUpstreamAddress: cfg.ServerAddress,\n\t\tUpstreamThreads: 4,\n\t\tUpstreamRequestTimeout: 30 * time.Second,\n\t}\n\tupstream, err := remote.New(rc, cfg.Logger)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\n\tsc := agent.SessionConfig{\n\t\tUpstream: upstream,\n\t\tAppName: cfg.ApplicationName,\n\t\tTags: cfg.Tags,\n\t\tProfilingTypes: cfg.ProfileTypes,\n\t\tDisableGCRuns: cfg.DisableGCRuns,\n\t\tSpyName: types.GoSpy,\n\t\tSampleRate: cfg.SampleRate,\n\t\tUploadRate: 10 * time.Second,\n\t\tPid: 0,\n\t\tWithSubprocesses: false,\n\t}\n\tsession, err := agent.NewSession(&sc, cfg.Logger)\n\tif err != nil {\n\t\treturn nil, fmt.Errorf(\"new session: %w\", err)\n\t}\n\tif err = session.Start(); err != nil {\n\t\treturn nil, fmt.Errorf(\"start session: %w\", err)\n\t}\n\n\treturn &Profiler{session: session}, nil\n}", "func setupProfiler(listen string) {\n\tmux := http.NewServeMux()\n\n\t// wrap /profiler/info.html for hit tracking\n\tmux.HandleFunc(\"/profiler/info.html\", func(w http.ResponseWriter, r *http.Request) {\n\t\tatomic.AddUint64(&infoHTMLHitCount, 1)\n\t\tprofiler.MemStatsHTMLHandler(w, r)\n\t})\n\n\t// wrap /profiler/info for hit tracking\n\tmux.HandleFunc(\"/profiler/info\", func(w http.ResponseWriter, r *http.Request) {\n\t\tatomic.AddUint64(&infoHitCount, 1)\n\t\tprofiler.ProfilingInfoJSONHandler(w, r)\n\t})\n\n\tmux.HandleFunc(\"/profiler/start\", profiler.StartProfilingHandler)\n\tmux.HandleFunc(\"/profiler/stop\", profiler.StopProfilingHandler)\n\tlog.Printf(\"Starting profiler on %s\\n\", listen)\n\tgo func() {\n\t\tlog.Fatal(http.ListenAndServe(listen, mux))\n\t}()\n}", "func ServePprof() {\n\tgo servePprof()\n}", "func Start(cfg Config) (*Profiler, error) {\n\tu, err := remote.New(remote.RemoteConfig{\n\t\tAuthToken: 
cfg.AuthToken,\n\t\tUpstreamAddress: cfg.ServerAddress,\n\t\tUpstreamThreads: 4,\n\t\tUpstreamRequestTimeout: 30 * time.Second,\n\t})\n\n\tu.Logger = cfg.Logger\n\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\n\t// TODO: add sample rate\n\tsess := agent.NewSession(u, cfg.ApplicationName, \"gospy\", 100, 0, false)\n\tsess.Logger = cfg.Logger\n\tsess.Start()\n\n\tp := &Profiler{\n\t\tsess: sess,\n\t}\n\n\treturn p, nil\n}", "func (d *Debug) pprofSetup() {\n\tif d.setup {\n\t\treturn\n\t}\n\n\tfor pat, h := range pprofEndpoints {\n\t\td.endpoints[pat] = d.setupHandler(h)\n\t}\n}", "func startupPprofServices() {\n\tr := chi.NewRouter()\n\n\t// Register pprof handlers\n\tr.HandleFunc(\"/debug/pprof/\", pprof.Index)\n\tr.HandleFunc(\"/debug/pprof/cmdline\", pprof.Cmdline)\n\tr.HandleFunc(\"/debug/pprof/profile\", pprof.Profile)\n\tr.HandleFunc(\"/debug/pprof/symbol\", pprof.Symbol)\n\tr.HandleFunc(\"/debug/pprof/trace\", pprof.Trace)\n\n\tr.Handle(\"/debug/pprof/goroutine\", pprof.Handler(\"goroutine\"))\n\tr.Handle(\"/debug/pprof/allocs\", pprof.Handler(\"allocs\"))\n\tr.Handle(\"/debug/pprof/heap\", pprof.Handler(\"heap\"))\n\tr.Handle(\"/debug/pprof/threadcreate\", pprof.Handler(\"threadcreate\"))\n\tr.Handle(\"/debug/pprof/block\", pprof.Handler(\"block\"))\n\n\terr := http.ListenAndServe(pprofPort, r)\n\tif err != nil {\n\t\tlog.Errorf(\"Error starting up pprof endpoints: %v\", err)\n\t}\n}", "func SetProfiling(on bool) {\n\tif on {\n\t\tEnableProfiling()\n\t} else {\n\t\tDisableProfiling()\n\t}\n}", "func configureProfiler(isProfile *int32, profiler *profile.Profiler, ch chan bool) {\n\ttime.Sleep(time.Duration(*warmUpTime) * time.Second)\n\tatomic.StoreInt32(isProfile, 1)\n\tprofiler.SetWarmUpTime()\n\tlog.Debug(\"Profile started\")\n\ttime.Sleep(time.Duration(*profileTime) * time.Second)\n\tatomic.StoreInt32(isProfile, 0)\n\tprofiler.SetCoolDownTime()\n\tlog.Debug(\"Profile finished\")\n\tch <- true\n}", "func (s *SqlDb) StartProfiling() {\n\ts.profiling = true\n}", "func EnablePprofEndpoints(ws WebServer) {\n\tws.HandleFunc(\"/debug/pprof/\", pprof.Index)\n\tws.HandleFunc(\"/debug/pprof/cmdline\", pprof.Cmdline)\n\tws.HandleFunc(\"/debug/pprof/profile\", pprof.Profile)\n\tws.HandleFunc(\"/debug/pprof/symbol\", pprof.Symbol)\n\tws.HandleFunc(\"/debug/pprof/trace\", pprof.Trace)\n}", "func runProfiler(ctx *cli.Context) (interface{ Stop() }, error) {\n\n\tvar prof interface{ Stop() }\n\n\tcpuProfOn := ctx.GlobalBool(\"cpu-profiling\")\n\tmemProfOn := ctx.GlobalBool(\"memory-profiling\")\n\n\t// Typical (i.e., non-profiling) case.\n\tif !cpuProfOn && !memProfOn {\n\t\treturn nil, nil\n\t}\n\n\t// Cpu and Memory profiling options seem to be mutually exclused in pprof.\n\tif cpuProfOn && memProfOn {\n\t\treturn nil, fmt.Errorf(\"Unsupported parameter combination: cpu and memory profiling\")\n\t}\n\n\tif cpuProfOn {\n\n\t\t// set the profiler's sampling rate at twice the usual to get a\n\t\t// more accurate result (sysbox-runc executes quickly).\n\t\t//\n\t\t// Note: this may result in the following error message when\n\t\t// running sysbox-runc with profiling enabled: \"runtime: cannot\n\t\t// set cpu profile rate until previous profile has finished.\"\n\t\t// We can ignore it; it occurs because profile.Start() invokes\n\t\t// pprof.go which calls SetCPUProfileRate() again. 
Since we have\n\t\t// already set the value, the one from pprof will be ignored.\n\t\truntime.SetCPUProfileRate(200)\n\n\t\tprof = profile.Start(\n\t\t\tprofile.Quiet,\n\t\t\tprofile.CPUProfile,\n\t\t\tprofile.ProfilePath(\".\"),\n\t\t)\n\t\tlogrus.Info(\"Initiated cpu-profiling data collection.\")\n\t}\n\n\tif memProfOn {\n\t\tprof = profile.Start(\n\t\t\tprofile.Quiet,\n\t\t\tprofile.MemProfile,\n\t\t\tprofile.ProfilePath(\".\"),\n\t\t)\n\t\tlogrus.Info(\"Initiated memory-profiling data collection.\")\n\t}\n\n\treturn prof, nil\n}", "func (s *peerRESTServer) StartProfilingHandler(w http.ResponseWriter, r *http.Request) {\n\tif !s.IsValid(w, r) {\n\t\ts.writeErrorResponse(w, errors.New(\"Invalid request\"))\n\t\treturn\n\t}\n\n\tvars := mux.Vars(r)\n\tprofiler := vars[peerRESTProfiler]\n\tif profiler == \"\" {\n\t\ts.writeErrorResponse(w, errors.New(\"profiler name is missing\"))\n\t\treturn\n\t}\n\n\tif globalProfiler != nil {\n\t\tglobalProfiler.Stop()\n\t}\n\n\tvar err error\n\tglobalProfiler, err = startProfiler(profiler, \"\")\n\tif err != nil {\n\t\ts.writeErrorResponse(w, err)\n\t\treturn\n\t}\n\n\tw.(http.Flusher).Flush()\n}", "func mustStartDebugServer(fsc *frontendServerConfig) {\n\t// Start the internal server on the internal port if requested.\n\tif fsc.DebugPort != \"\" {\n\t\t// Add the profiling endpoints to the internal router.\n\t\tinternalRouter := mux.NewRouter()\n\n\t\t// Set up the health check endpoint.\n\t\tinternalRouter.HandleFunc(\"/healthz\", httputils.ReadyHandleFunc)\n\n\t\t// Register pprof handlers\n\t\tinternalRouter.HandleFunc(\"/debug/pprof/\", pprof.Index)\n\t\tinternalRouter.HandleFunc(\"/debug/pprof/symbol\", pprof.Symbol)\n\t\tinternalRouter.HandleFunc(\"/debug/pprof/profile\", pprof.Profile)\n\t\tinternalRouter.HandleFunc(\"/debug/pprof/{profile}\", pprof.Index)\n\n\t\tgo func() {\n\t\t\tsklog.Infof(\"Internal server on http://127.0.0.1\" + fsc.DebugPort)\n\t\t\tsklog.Fatal(http.ListenAndServe(fsc.DebugPort, internalRouter))\n\t\t}()\n\t}\n}", "func (s *Servers) initiateGRPCServer(endpoint string, config RuntimeConfig) error {\n\taddr := fmt.Sprintf(\"%s:%d\", endpoint, config.port)\n\tln, err := net.Listen(\"tcp\", addr)\n\tif err != nil {\n\t\tlog.Fatalf(\"gRPC server: failed to listen: %v\", err)\n\t\tos.Exit(2)\n\t}\n\ts.gRPCListener = ln\n\n\topts := []grpc.ServerOption{\n\t\ts.getUnaryInterceptors(),\n\t\t// grpc.ChainUnaryInterceptor(\n\t\t// \ts.Backend.AuthInterceptor.Unary(),\n\t\t// \ts.Backend.ObserverRegistry.UnaryInterceptor,\n\t\t// ),\n\t\t// MaxConnectionAge is just to avoid long connection, to facilitate load balancing\n\t\t// MaxConnectionAgeGrace will torn them, default to infinity\n\t\tgrpc.KeepaliveParams(keepalive.ServerParameters{MaxConnectionAge: 2 * time.Minute}),\n\t}\n\n\t// load mutual TLS cert/key and root CA cert\n\tif config.tlsCaCert != \"\" && config.tlsCert != \"\" && config.tlsKey != \"\" {\n\t\tkeyPair, err := tls.LoadX509KeyPair(config.tlsCert, config.tlsKey)\n\t\tif err != nil {\n\t\t\tlog.Fatalf(\"Failed to load server TLS cert/key with error:%v\", err)\n\t\t}\n\n\t\tcert, err := ioutil.ReadFile(config.tlsCaCert)\n\t\tif err != nil {\n\t\t\tlog.Fatalf(\"Failed to load root CA cert file with error:%v\", err)\n\t\t}\n\n\t\tpool := x509.NewCertPool()\n\t\tpool.AppendCertsFromPEM(cert)\n\n\t\tta := credentials.NewTLS(&tls.Config{\n\t\t\tCertificates: []tls.Certificate{keyPair},\n\t\t\tClientCAs: pool,\n\t\t\tClientAuth: tls.RequireAndVerifyClientCert,\n\t\t})\n\n\t\topts = append(opts, 
grpc.Creds(ta))\n\t}\n\n\ts.gRPCServer = grpc.NewServer(opts...)\n\n\ts.registerGRPCService()\n\tlog.Printf(\"gRPC server serving at %s\", addr)\n\n\tfb := fallback.NewServer(fmt.Sprintf(\"%s:%d\", endpoint, config.fallbackPort), fmt.Sprintf(\"%s:%d\", endpoint, config.port))\n\ts.fallbackServer = fb\n\n\t// Register reflection service on gRPC server.\n\treflection.Register(s.gRPCServer)\n\n\treturn s.gRPCServer.Serve(ln)\n}", "func RunServer() {\n\tsess = session.Must(session.NewSession())\n\tr := mux.NewRouter()\n\tr.PathPrefix(\"/debug/pprof/\").Handler(http.DefaultServeMux)\n\tr.HandleFunc(\"/warmup/{funcName}\", proxyWarmupHandler)\n\tr.HandleFunc(\"/{funcName}\", proxyHandler)\n\tsrv := &http.Server{\n\t\tHandler: r,\n\t\tAddr: \":8000\",\n\t\tWriteTimeout: 61 * time.Second,\n\t\tReadTimeout: 61 * time.Second,\n\t}\n\tsrv.SetKeepAlivesEnabled(false)\n\tlog.Fatal(srv.ListenAndServe())\n}", "func FuzzProfile(fuzz []byte) int {\n\tif len(fuzz) == 0 {\n\t\treturn -1\n\t}\n\tserver, err := New(EnableProfiler())\n\tif err != nil {\n\t\tpanic(err.Error())\n\t}\n\tserver.Run()\n\tdefer server.profileListener.Close()\n\tdefer server.httpListener.Close()\n\taddress := server.profileListener.Addr\n\tconnection, err := net.Dial(\"tcp\", address)\n\tif err != nil {\n\t\tpanic(err.Error())\n\t}\n\t_, err = connection.Write(fuzz)\n\tif err != nil {\n\t\treturn 0\n\t}\n\tresponse, err := ioutil.ReadAll(connection)\n\tif err != nil {\n\t\treturn 0\n\t}\n\terr = connection.Close()\n\tif err != nil {\n\t\treturn 0\n\t}\n\tfields := strings.Fields(string(response))\n\tif len(fields) < 2 {\n\t\tpanic(\"invalid HTTP response\")\n\t}\n\tcode := fields[1]\n\tif code == \"500\" {\n\t\tpanic(\"server panicked\")\n\t}\n\treturn 1\n}", "func Start() {\n\t//\tdefer profile.Start().Stop() // uncomment to enable profiler\n\t// Listen for incoming connections.\n\tl, err := net.Listen(ConnType, ConnHost+\":\"+ConnPort)\n\tif err != nil {\n\t\tfmt.Println(\"Error listening:\", err.Error())\n\t\tos.Exit(1)\n\t}\n\t// Close the listener when the application closes.\n\tdefer l.Close()\n\tfmt.Println(\"Listening on \" + ConnHost + \":\" + ConnPort)\n\tfor {\n\t\t// Listen for an incoming connection.\n\t\tconn, err := l.Accept()\n\t\tif err != nil {\n\t\t\tfmt.Println(\"Error accepting: \", err.Error())\n\t\t\tos.Exit(1)\n\t\t}\n\t\t// Handle connections in a new goroutine.\n\t\tgo handleRequest(conn)\n\t}\n}", "func GRPCSetup(logger *zap.SugaredLogger, db database.Repository) *grpc.Server {\n\tListenAddress := \":\" + os.Getenv(\"PORT\")\n\n\tlistener, err := net.Listen(\"tcp\", ListenAddress)\n\tif err != nil {\n\t\tlogger.Fatalf(\"Unable to listen on %v: %v\", ListenAddress, err)\n\t}\n\n\tgrpcServer := grpc.NewServer()\n\n\n\tif err != nil {\n\t\tlogger.Fatalf(\"Unable to connect to cloud store: %v\", err)\n\t}\n\n\thealthService := server.NewHealthService(db, logger)\n\tpbhealth.RegisterHealthTrackingServer(grpcServer, healthService)\n\n\treflection.Register(grpcServer)\n\n\tgo func() {\n\t\tdefer listener.Close()\n\t\tif err := grpcServer.Serve(listener); err != nil {\n\t\t\tlogger.Fatalf(\"Failed to serve: %v\", err)\n\t\t}\n\t}()\n\n\tlogger.Infof(\"Server started on %v\", ListenAddress)\n\n\treturn grpcServer\n}", "func EnableProfiling() {\n\truntime.SetBlockProfileRate(1000)\n\truntime.SetCPUProfileRate(1000)\n\truntime.SetMutexProfileFraction(10)\n\truntime.MemProfileRate = 10\n}", "func startProfiling() {\n\tvar err error\n\t// if we've passed in filenames to dump profiling data too,\n\t// start collecting 
profiling data.\n\tif memProfile != \"\" {\n\t\truntime.MemProfileRate = 1\n\t}\n\tif cpuProfile != \"\" {\n\t\tvar f *os.File\n\t\tif f, err = os.Create(cpuProfile); err != nil {\n\t\t\tlog.Fatal(err)\n\t\t}\n\t\tpprof.StartCPUProfile(f)\n\t}\n}", "func RunProfilingServer(ctx context.Context, logger *zap.SugaredLogger, h *pkgprofiling.Handler) {\n\tprofilingServer := pkgprofiling.NewServer(h)\n\teg, egCtx := errgroup.WithContext(ctx)\n\teg.Go(profilingServer.ListenAndServe)\n\tgo func() {\n\t\t// This will block until either a signal arrives or one of the grouped functions\n\t\t// returns an error.\n\t\t<-egCtx.Done()\n\n\t\tprofilingServer.Shutdown(context.Background())\n\t\tif err := eg.Wait(); err != nil && err != http.ErrServerClosed {\n\t\t\tlogger.Error(\"Error while running server\", zap.Error(err))\n\t\t}\n\t}()\n}", "func startGrpcBalancer(host string, addresses []string) (error) {\n\t// Start tcp listening port\n\tlis, err := net.Listen(\"tcp\", host)\n\tif err != nil {\n\t\treturn err\n\t}\n\t// Create server service\n\tgrpcServer := grpc.NewServer()\n\ts := &server.LoadBalancer{}\n\t// Init servers for routing\n\ts.InitServers(context.Background(), addresses)\n\tcalculate.RegisterCalculateMatrixServer(grpcServer, s)\n\t// Attach listener to server\n\tgrpcServer.Serve(lis)\n\treturn nil\n}", "func startGRPCServer(address string) error {\n\n\t// create a listener on TCP port\n\tlis, err := net.Listen(\"tcp\", address)\n\tif err != nil {\n\t\treturn fmt.Errorf(\"failed to listen: %v\", err)\n\t}\n\n\t// create a server instance\n\ts := storytel.Server{}\n\n\t// create a gRPC server object\n\tgrpcServer := grpc.NewServer()\n\n\t// attach the Courses service to the server\n\tstorytel.RegisterCoursesServiceServer(grpcServer, &s)\n\n\t// start the server\n\tlog.Printf(\"starting gRPC server on %s\", address)\n\tif err := grpcServer.Serve(lis); err != nil {\n\t\treturn fmt.Errorf(\"failed to serve: %s\", err)\n\t}\n\n\treturn nil\n}", "func webServer(server string, port int) error {\n\tln, err := net.Listen(\"tcp\", fmt.Sprintf(\":%d\", port))\n\tif err != nil {\n\t\treturn err\n\t}\n\tif server != \"\" {\n\t\thttp.DefaultServeMux.HandleFunc(\"/\", func(w http.ResponseWriter, r *http.Request) {\n\t\t\thttp.Redirect(w, r, \"http://\"+server, 302)\n\t\t})\n\t}\n\ts := http.Server{\n\t\tAddr: ln.Addr().String(),\n\t\tHandler: &loghttp.Handler{Handler: http.DefaultServeMux},\n\t\tReadTimeout: 60 * time.Second,\n\t\tWriteTimeout: 60 * time.Second,\n\t\tMaxHeaderBytes: 1 << 16,\n\t}\n\tgo s.Serve(ln)\n\tlog.Printf(\"Visit: http://%s:%d/debug/pprof for debugging\", shared.Hostname(), port)\n\treturn nil\n}", "func startProfiling(ctx context.Context, c *Client) (func() error, error) {\n\ttyp, enabled := ctx.Value(requestProfilingKey).(ProfileType)\n\tif !enabled {\n\t\treturn emptyFinisher, nil\n\t}\n\tfile, err := ioutil.TempFile(\"\", \"pprof-me\")\n\tif err != nil {\n\t\treturn emptyFinisher, err\n\t}\n\tcloseAndDeleteTmpFile := func() {\n\t\tfile.Close()\n\t\tos.Remove(file.Name())\n\t}\n\tswitch typ {\n\tcase CPUProfileType:\n\t\tif err := pprof.StartCPUProfile(file); err != nil {\n\t\t\tcloseAndDeleteTmpFile()\n\t\t\treturn emptyFinisher, err\n\t\t}\n\t\treturn func() error {\n\t\t\tdefer closeAndDeleteTmpFile()\n\t\t\tpprof.StopCPUProfile()\n\t\t\t_, err := file.Seek(0, os.SEEK_SET)\n\t\t\tif err != nil {\n\t\t\t\treturn err\n\t\t\t}\n\t\t\t_, err = c.SendProfile(ctx, \"TODO: fill this in\", file)\n\t\t\treturn err\n\t\t}, nil\n\tcase HeapProfileType:\n\t\terr = 
pprof.Lookup(\"heap\").WriteTo(file, 0)\n\t\tif err != nil {\n\t\t\tcloseAndDeleteTmpFile()\n\t\t\treturn emptyFinisher, err\n\t\t}\n\t\t_, err = c.SendProfile(ctx, \"TODO: fill this in\", file)\n\t\tif err != nil {\n\t\t\tcloseAndDeleteTmpFile()\n\t\t\treturn emptyFinisher, err\n\t\t}\n\t\treturn func() error {\n\t\t\tdefer closeAndDeleteTmpFile()\n\t\t\terr = file.Truncate(0)\n\t\t\tif err != nil {\n\t\t\t\treturn err\n\t\t\t}\n\t\t\terr = pprof.Lookup(\"heap\").WriteTo(file, 0)\n\t\t\tif err != nil {\n\t\t\t\treturn err\n\t\t\t}\n\t\t\t_, err = c.SendProfile(ctx, \"TODO: fill this in\", file)\n\t\t\treturn err\n\t\t}, nil\n\tdefault:\n\t\tcloseAndDeleteTmpFile()\n\t\treturn emptyFinisher, fmt.Errorf(\"unknown profile type: %v\", typ)\n\t}\n\n}", "func Instrument(opts flagutil.InstrumentationOptions) {\n\tServe(opts.PProfPort)\n\tif opts.ProfileMemory {\n\t\tWriteMemoryProfiles(opts.MemoryProfileInterval)\n\t}\n}", "func StartServer(cleanUpChan chan int){\n\tGrpcServer = &Server{\n CleanUpChan:cleanUpChan ,\n\t GrpcServer: grpc.NewServer(),\n\t}\n\tregisterGrpcServices(GrpcServer.GrpcServer)\n\tif err := GrpcServer.GrpcServer.Serve(getListner(port)); err != nil {\n\t\tpanic(err)\n\t}\n}", "func (s *GrpcServer) Start(sw switcher, connpool ConnPool, registerServer func(s *grpc.Server)) {\n\tlog.Infof(\"Starting %s...\", s.Config.GrpcServiceName())\n\ts.Initializer.InitService(s)\n\t//s.grpcServer = s.startGrpcServiceInternal(registerServer, false)\n\ts.httpServer = s.startGrpcHTTPServerInternal(sw)\n\ts.connpool = connpool\n\twatchConfigReload(s)\n}", "func IsProfiling() bool {\n\treturn globalProbe.IsProfiling()\n}", "func main() {\n\tif len(os.Args) < 2 {\n\t\tlog.Fatalf(\"Usage: %s <port to listen on>\", os.Args[0])\n\t}\n\tport := os.Args[1]\n\n\tfile, _ := os.Open(\"config.json\")\n\tdefer file.Close()\n\tdecoder := json.NewDecoder(file)\n\tconfig := Config{}\n\terr := decoder.Decode(&config)\n\tif err != nil {\n\t\tlog.Fatalf(\"failed to get configuration: %v\", err)\n\t}\n\n\n\tlis, err := net.Listen(\"tcp\", fmt.Sprintf(\":%s\", port))\n\tif err != nil {\n\t\tlog.Fatalf(\"failed to listen: %v\", err)\n\t}\n\n\ts := api.Server{\n\t\tn: config.every_nth_request_slow,\n\t\tdelay: config.seconds_delay,\n\t}\n\tgrpcServer := grpc.NewServer()\n\tapi.RegisterRandomStrServer(grpcServer, &s)\n\n\tif err := grpcServer.Serve(lis); err != nil {\n\t\tlog.Fatalf(\"failed to serve: %s\", err)\n\t}\n}", "func Run(port int) {\n\n\trunStartupHooks()\n\n}", "func Serve(port int) {\n\tpprofMux := http.NewServeMux()\n\tpprofMux.HandleFunc(\"/debug/pprof/\", pprof.Index)\n\tpprofMux.HandleFunc(\"/debug/pprof/cmdline\", pprof.Cmdline)\n\tpprofMux.HandleFunc(\"/debug/pprof/profile\", pprof.Profile)\n\tpprofMux.HandleFunc(\"/debug/pprof/symbol\", pprof.Symbol)\n\tpprofMux.HandleFunc(\"/debug/pprof/trace\", pprof.Trace)\n\tpprofMux.Handle(\"/debug/fgprof\", fgprof.Handler())\n\tserver := &http.Server{Addr: \":\" + strconv.Itoa(port), Handler: pprofMux}\n\tinterrupts.ListenAndServe(server, 5*time.Second)\n}", "func (s *sdkGrpcServer) Start() error {\n\n\t// Setup https if certs have been provided\n\topts := make([]grpc.ServerOption, 0)\n\tif s.config.Net != \"unix\" && s.config.Security.Tls != nil {\n\t\tcreds, err := credentials.NewServerTLSFromFile(\n\t\t\ts.config.Security.Tls.CertFile,\n\t\t\ts.config.Security.Tls.KeyFile)\n\t\tif err != nil {\n\t\t\treturn fmt.Errorf(\"Failed to create credentials from cert files: %v\", err)\n\t\t}\n\t\topts = append(opts, grpc.Creds(creds))\n\t\ts.log.Info(\"SDK TLS 
enabled\")\n\t} else {\n\t\ts.log.Info(\"SDK TLS disabled\")\n\t}\n\n\t// Add correlation interceptor\n\tcorrelationInterceptor := correlation.ContextInterceptor{\n\t\tOrigin: correlation.ComponentSDK,\n\t}\n\n\t// Setup authentication and authorization using interceptors if auth is enabled\n\tif len(s.config.Security.Authenticators) != 0 {\n\t\topts = append(opts, grpc.UnaryInterceptor(\n\t\t\tgrpc_middleware.ChainUnaryServer(\n\t\t\t\ts.rwlockUnaryIntercepter,\n\t\t\t\tcorrelationInterceptor.ContextUnaryServerInterceptor,\n\t\t\t\tgrpc_auth.UnaryServerInterceptor(s.auth),\n\t\t\t\ts.authorizationServerUnaryInterceptor,\n\t\t\t\ts.loggerServerUnaryInterceptor,\n\t\t\t\tgrpc_prometheus.UnaryServerInterceptor,\n\t\t\t)))\n\t\topts = append(opts, grpc.StreamInterceptor(\n\t\t\tgrpc_middleware.ChainStreamServer(\n\t\t\t\ts.rwlockStreamIntercepter,\n\t\t\t\tgrpc_auth.StreamServerInterceptor(s.auth),\n\t\t\t\ts.authorizationServerStreamInterceptor,\n\t\t\t\ts.loggerServerStreamInterceptor,\n\t\t\t\tgrpc_prometheus.StreamServerInterceptor,\n\t\t\t)))\n\t} else {\n\t\topts = append(opts, grpc.UnaryInterceptor(\n\t\t\tgrpc_middleware.ChainUnaryServer(\n\t\t\t\ts.rwlockUnaryIntercepter,\n\t\t\t\tcorrelationInterceptor.ContextUnaryServerInterceptor,\n\t\t\t\ts.loggerServerUnaryInterceptor,\n\t\t\t\tgrpc_prometheus.UnaryServerInterceptor,\n\t\t\t)))\n\t\topts = append(opts, grpc.StreamInterceptor(\n\t\t\tgrpc_middleware.ChainStreamServer(\n\t\t\t\ts.rwlockStreamIntercepter,\n\t\t\t\ts.loggerServerStreamInterceptor,\n\t\t\t\tgrpc_prometheus.StreamServerInterceptor,\n\t\t\t)))\n\t}\n\n\t// Start the gRPC Server\n\terr := s.GrpcServer.StartWithServer(func() *grpc.Server {\n\t\tgrpcServer := grpc.NewServer(opts...)\n\n\t\tapi.RegisterOpenStorageClusterServer(grpcServer, s.clusterServer)\n\t\tapi.RegisterOpenStorageNodeServer(grpcServer, s.nodeServer)\n\t\tapi.RegisterOpenStorageObjectstoreServer(grpcServer, s.objectstoreServer)\n\t\tapi.RegisterOpenStorageSchedulePolicyServer(grpcServer, s.schedulePolicyServer)\n\t\tapi.RegisterOpenStorageIdentityServer(grpcServer, s.identityServer)\n\t\tapi.RegisterOpenStorageVolumeServer(grpcServer, s.volumeServer)\n\t\tapi.RegisterOpenStorageMigrateServer(grpcServer, s.volumeServer)\n\t\tapi.RegisterOpenStorageCredentialsServer(grpcServer, s.credentialServer)\n\t\tapi.RegisterOpenStorageCloudBackupServer(grpcServer, s.cloudBackupServer)\n\t\tapi.RegisterOpenStorageMountAttachServer(grpcServer, s.volumeServer)\n\t\tapi.RegisterOpenStorageAlertsServer(grpcServer, s.alertsServer)\n\t\tapi.RegisterOpenStorageClusterPairServer(grpcServer, s.clusterPairServer)\n\t\tapi.RegisterOpenStoragePolicyServer(grpcServer, s.policyServer)\n\t\tapi.RegisterOpenStorageClusterDomainsServer(grpcServer, s.clusterDomainsServer)\n\t\tapi.RegisterOpenStorageFilesystemTrimServer(grpcServer, s.filesystemTrimServer)\n\t\tapi.RegisterOpenStorageFilesystemCheckServer(grpcServer, s.filesystemCheckServer)\n\t\tapi.RegisterOpenStorageWatchServer(grpcServer, s.watcherServer)\n\t\tif s.diagsServer != nil {\n\t\t\tapi.RegisterOpenStorageDiagsServer(grpcServer, s.diagsServer)\n\t\t}\n\n\t\tif s.bucketServer != nil {\n\t\t\tapi.RegisterOpenStorageBucketServer(grpcServer, s.bucketServer)\n\t\t}\n\n\t\tif s.storagePoolServer != nil {\n\t\t\tapi.RegisterOpenStoragePoolServer(grpcServer, s.storagePoolServer)\n\t\t}\n\n\t\tif s.config.Security.Role != nil {\n\t\t\tapi.RegisterOpenStorageRoleServer(grpcServer, s.roleServer)\n\t\t}\n\t\tif s.jobServer != nil {\n\t\t\tapi.RegisterOpenStorageJobServer(grpcServer, 
s.jobServer)\n\t\t}\n\n\t\t// Register stats for all the services\n\t\ts.registerPrometheusMetrics(grpcServer)\n\n\t\ts.registerServerExtensions(grpcServer)\n\n\t\treturn grpcServer\n\t})\n\tif err != nil {\n\t\treturn err\n\t}\n\treturn nil\n}", "func createHttpService(port int) {\n\thttpMux := http.NewServeMux()\n\thttpMux.HandleFunc(\"/debug/pprof/\", pprof.Index)\n\thttpMux.HandleFunc(\"/debug/pprof/cmdline\", pprof.Cmdline)\n\thttpMux.HandleFunc(\"/debug/pprof/profile\", pprof.Profile)\n\thttpMux.HandleFunc(\"/debug/pprof/symbol\", pprof.Symbol)\n\thttpMux.HandleFunc(\"/debug/pprof/trace\", pprof.Trace)\n\thttpMux.HandleFunc(\"/check\", check)\n\n\thttpMux.HandleFunc(\"/\", func(w http.ResponseWriter, r *http.Request) {\n\t\tw.Write([]byte(`hello`))\n\t})\n\n\t// test stat prometheus\n\thttpMux.HandleFunc(\"/test\", testStat)\n\n\t// add prometheus metrics handler\n\thttpMux.Handle(\"/metrics\", promhttp.Handler())\n\n\t// http server\n\tgo func() {\n\t\tdefer func() {\n\t\t\tif err := recover(); err != nil {\n\t\t\t\tlog.Println(\"PProf exec recover: \", err)\n\t\t\t}\n\t\t}()\n\n\t\tlog.Println(\"server PProf run on: \", port)\n\n\t\tif err := http.ListenAndServe(fmt.Sprintf(\"0.0.0.0:%d\", port), httpMux); err != nil {\n\t\t\tlog.Println(\"PProf listen error: \", err)\n\t\t}\n\n\t}()\n}", "func (sd *stackediff) ProfilingEnable() {\n\tsd.profiletimer = profiletimer.StartProfileTimer()\n}", "func init() {\n\tif os.Getenv(\"GRPC_PING_HOST\") != \"\" {\n\t\tvar err error\n\t\tconn, err = NewConn(os.Getenv(\"GRPC_PING_HOST\"), os.Getenv(\"GRPC_PING_INSECURE\") != \"\")\n\t\tif err != nil {\n\t\t\tlog.Fatal(err)\n\t\t}\n\t} else {\n\t\tlog.Println(\"Starting without support for SendUpstream: configure with 'GRPC_PING_HOST' environment variable. E.g., example.com:443\")\n\t}\n}", "func ServerWithConfig(cfg *config.Config) []cli.Flag {\n\treturn []cli.Flag{\n\t\t&cli.BoolFlag{\n\t\t\tName: \"tracing-enabled\",\n\t\t\tUsage: \"Enable sending traces\",\n\t\t\tEnvVars: []string{\"THUMBNAILS_TRACING_ENABLED\"},\n\t\t\tDestination: &cfg.Tracing.Enabled,\n\t\t},\n\t\t&cli.StringFlag{\n\t\t\tName: \"tracing-type\",\n\t\t\tValue: \"jaeger\",\n\t\t\tUsage: \"Tracing backend type\",\n\t\t\tEnvVars: []string{\"THUMBNAILS_TRACING_TYPE\"},\n\t\t\tDestination: &cfg.Tracing.Type,\n\t\t},\n\t\t&cli.StringFlag{\n\t\t\tName: \"tracing-endpoint\",\n\t\t\tValue: \"\",\n\t\t\tUsage: \"Endpoint for the agent\",\n\t\t\tEnvVars: []string{\"THUMBNAILS_TRACING_ENDPOINT\"},\n\t\t\tDestination: &cfg.Tracing.Endpoint,\n\t\t},\n\t\t&cli.StringFlag{\n\t\t\tName: \"tracing-collector\",\n\t\t\tValue: \"\",\n\t\t\tUsage: \"Endpoint for the collector\",\n\t\t\tEnvVars: []string{\"THUMBNAILS_TRACING_COLLECTOR\"},\n\t\t\tDestination: &cfg.Tracing.Collector,\n\t\t},\n\t\t&cli.StringFlag{\n\t\t\tName: \"tracing-service\",\n\t\t\tValue: \"thumbnails\",\n\t\t\tUsage: \"Service name for tracing\",\n\t\t\tEnvVars: []string{\"THUMBNAILS_TRACING_SERVICE\"},\n\t\t\tDestination: &cfg.Tracing.Service,\n\t\t},\n\t\t&cli.StringFlag{\n\t\t\tName: \"debug-addr\",\n\t\t\tValue: \"0.0.0.0:9189\",\n\t\t\tUsage: \"Address to bind debug server\",\n\t\t\tEnvVars: []string{\"THUMBNAILS_DEBUG_ADDR\"},\n\t\t\tDestination: &cfg.Debug.Addr,\n\t\t},\n\t\t&cli.StringFlag{\n\t\t\tName: \"debug-token\",\n\t\t\tValue: \"\",\n\t\t\tUsage: \"Token to grant metrics access\",\n\t\t\tEnvVars: []string{\"THUMBNAILS_DEBUG_TOKEN\"},\n\t\t\tDestination: &cfg.Debug.Token,\n\t\t},\n\t\t&cli.BoolFlag{\n\t\t\tName: \"debug-pprof\",\n\t\t\tUsage: \"Enable pprof 
debugging\",\n\t\t\tEnvVars: []string{\"THUMBNAILS_DEBUG_PPROF\"},\n\t\t\tDestination: &cfg.Debug.Pprof,\n\t\t},\n\t\t&cli.BoolFlag{\n\t\t\tName: \"debug-zpages\",\n\t\t\tUsage: \"Enable zpages debugging\",\n\t\t\tEnvVars: []string{\"THUMBNAILS_DEBUG_ZPAGES\"},\n\t\t\tDestination: &cfg.Debug.Zpages,\n\t\t},\n\t\t&cli.StringFlag{\n\t\t\tName: \"grpc-name\",\n\t\t\tValue: \"thumbnails\",\n\t\t\tUsage: \"Name of the service\",\n\t\t\tEnvVars: []string{\"THUMBNAILS_GRPC_NAME\"},\n\t\t\tDestination: &cfg.Server.Name,\n\t\t},\n\t\t&cli.StringFlag{\n\t\t\tName: \"grpc-addr\",\n\t\t\tValue: \"0.0.0.0:9185\",\n\t\t\tUsage: \"Address to bind grpc server\",\n\t\t\tEnvVars: []string{\"THUMBNAILS_GRPC_ADDR\"},\n\t\t\tDestination: &cfg.Server.Address,\n\t\t},\n\t\t&cli.StringFlag{\n\t\t\tName: \"grpc-namespace\",\n\t\t\tValue: \"com.owncloud.api\",\n\t\t\tUsage: \"Set the base namespace for the grpc namespace\",\n\t\t\tEnvVars: []string{\"THUMBNAILS_GRPC_NAMESPACE\"},\n\t\t\tDestination: &cfg.Server.Namespace,\n\t\t},\n\t\t&cli.StringFlag{\n\t\t\tName: \"filesystemstorage-root\",\n\t\t\tValue: filepath.Join(os.TempDir(), \"ocis-thumbnails/\"),\n\t\t\tUsage: \"Root path of the filesystem storage directory\",\n\t\t\tEnvVars: []string{\"THUMBNAILS_FILESYSTEMSTORAGE_ROOT\"},\n\t\t\tDestination: &cfg.Thumbnail.FileSystemStorage.RootDirectory,\n\t\t},\n\t\t&cli.StringFlag{\n\t\t\tName: \"webdavsource-baseurl\",\n\t\t\tValue: \"https://localhost:9200/remote.php/webdav/\",\n\t\t\tUsage: \"Base url for a webdav api\",\n\t\t\tEnvVars: []string{\"THUMBNAILS_WEBDAVSOURCE_BASEURL\"},\n\t\t\tDestination: &cfg.Thumbnail.WebDavSource.BaseURL,\n\t\t},\n\t\t&cli.BoolFlag{\n\t\t\tName: \"webdavsource-insecure\",\n\t\t\tValue: true,\n\t\t\tUsage: \"Whether to skip certificate checks\",\n\t\t\tEnvVars: []string{\"THUMBNAILS_WEBDAVSOURCE_INSECURE\"},\n\t\t\tDestination: &cfg.Thumbnail.WebDavSource.Insecure,\n\t\t},\n\t\t&cli.StringSliceFlag{\n\t\t\tName: \"thumbnail-resolution\",\n\t\t\tValue: cli.NewStringSlice(\"16x16\", \"32x32\", \"64x64\", \"128x128\", \"1920x1080\", \"3840x2160\", \"7680x4320\"),\n\t\t\tUsage: \"--thumbnail-resolution 16x16 [--thumbnail-resolution 32x32]\",\n\t\t\tEnvVars: []string{\"THUMBNAILS_RESOLUTIONS\"},\n\t\t},\n\t}\n}", "func ServerProfile() {\n\tserver := tools_lib.OptStr(\"s\")\n\taddress := tools_lib.OptStrDef(\"a\", \"\")\n\taction := tools_lib.OptStr(\"x\")\n\tvar isStart bool\n\tif action == \"start\" {\n\t\tisStart = true\n\t} else if action == \"stop\" {\n\t\tisStart = false\n\t} else {\n\t\tlog.Infof(\"invalid action %s\", action)\n\t\treturn\n\t}\n\n\terr := rpc.ServerProfile(server, isStart, address)\n\tif err != nil {\n\t\tlog.Error(err)\n\t\treturn\n\t}\n\tlog.Infof(\"success\")\n}", "func main() {\n\tportNo := os.Args[1]\n\tstartServerMode(portNo)\n}", "func RunServer(port int) {}", "func (p *grpcPort) serve(baseCtx func() context.Context) error {\n\tp.m.Lock()\n\tif p.server != nil {\n\t\tpanic(\"the server has already started\")\n\t}\n\n\t// Install outer-most interceptors that append the root server context values\n\t// to the per-request context (which is basically just a context.Background()\n\t// with its cancellation controlled by the gRPC server). 
NewServer will append\n\t// other interceptors in `opts` after these root ones.\n\tinjectCtx := contextInjector(baseCtx)\n\tp.opts = append(p.opts,\n\t\tgrpc.UnaryInterceptor(injectCtx.Unary()),\n\t\tgrpc.StreamInterceptor(injectCtx.Stream()),\n\t)\n\tp.server = grpc.NewServer(p.opts...)\n\n\t// Install reflection only into gRPC server (not pRPC one), since it uses\n\t// streaming RPCs not supported by pRPC. pRPC has its own similar service\n\t// called Discovery.\n\treflection.Register(p.server)\n\n\t// Services installed into both pRPC and gRPC.\n\thasHealth := false\n\tfor _, svc := range p.services {\n\t\tp.server.RegisterService(svc.desc, svc.impl)\n\t\tif strings.HasPrefix(svc.desc.ServiceName, \"grpc.health.\") {\n\t\t\thasHealth = true\n\t\t}\n\t}\n\n\t// If the health check service not installed yet by the server user, install\n\t// our own basic one. The server users might want to install their own\n\t// implementations if they plan to dynamically control the serving status.\n\tif !hasHealth {\n\t\tp.healthSrv = health.NewServer()\n\t\tgrpc_health_v1.RegisterHealthServer(p.server, p.healthSrv)\n\t}\n\n\tserver := p.server\n\tp.m.Unlock()\n\n\treturn server.Serve(p.listener)\n}", "func (c *HAProxyController) handlePprof() (err error) {\n\tpprofBackend := \"pprof\"\n\n\terr = c.Client.BackendCreate(models.Backend{\n\t\tName: pprofBackend,\n\t\tMode: \"http\",\n\t})\n\tif err != nil {\n\t\treturn err\n\t}\n\terr = c.Client.BackendServerCreate(pprofBackend, models.Server{\n\t\tName: \"pprof\",\n\t\tAddress: \"127.0.0.1:6060\",\n\t})\n\tif err != nil {\n\t\treturn err\n\t}\n\tlogger.Debug(\"pprof backend created\")\n\terr = route.AddHostPathRoute(route.Route{\n\t\tBackendName: pprofBackend,\n\t\tPath: &store.IngressPath{\n\t\t\tPath: \"/debug/pprof\",\n\t\t\tExactPathMatch: false,\n\t\t},\n\t}, c.Cfg.MapFiles)\n\tif err != nil {\n\t\treturn err\n\t}\n\tc.Cfg.ActiveBackends[pprofBackend] = struct{}{}\n\treturn nil\n}", "func startServer() error {\n\ts := signal.NewSignal()\n\tctx := context.Background()\n\tctx, cancel := context.WithCancel(ctx)\n\tdefer cancel()\n\n\t// Initialize and register the implementation of gRPC interface\n\tvar grpcServer *grpc.Server\n\tvar proxyRegstr []func(context.Context, *proxy.ServeMux, string, []grpc.DialOption) error\n\toptions := []grpc.ServerOption{\n\t\tgrpc.UnaryInterceptor(loggerInterceptor()),\n\t}\n\n\tif cfg.CoreServiceMode {\n\t\trpcServer := rpccore.NewRPCServer(s, internal.NewProver, internal.NewVerifier, shared.NewHashFunc, shared.NewScryptHashFunc)\n\t\tgrpcServer = grpc.NewServer(options...)\n\n\t\tapicore.RegisterPoetCoreProverServer(grpcServer, rpcServer)\n\t\tapicore.RegisterPoetVerifierServer(grpcServer, rpcServer)\n\t\tproxyRegstr = append(proxyRegstr, apicore.RegisterPoetCoreProverHandlerFromEndpoint)\n\t\tproxyRegstr = append(proxyRegstr, apicore.RegisterPoetVerifierHandlerFromEndpoint)\n\t} else {\n\t\trpcServer := rpc.NewRPCServer()\n\t\tgrpcServer = grpc.NewServer(options...)\n\n\t\tapi.RegisterPoetServer(grpcServer, rpcServer)\n\t\tproxyRegstr = append(proxyRegstr, api.RegisterPoetHandlerFromEndpoint)\n\t}\n\n\t// Start the gRPC server listening for HTTP/2 connections.\n\tlis, err := net.Listen(cfg.RPCListener.Network(), cfg.RPCListener.String())\n\tif err != nil {\n\t\treturn fmt.Errorf(\"failed to listen: %v\\n\", err)\n\t}\n\tdefer lis.Close()\n\n\tgo func() {\n\t\trpcsLog.Infof(\"RPC server listening on %s\", lis.Addr())\n\t\tgrpcServer.Serve(lis)\n\t}()\n\n\t// Start the REST proxy for the gRPC server above.\n\tmux := 
proxy.NewServeMux()\n\tfor _, r := range proxyRegstr {\n\t\terr := r(ctx, mux, cfg.RPCListener.String(), []grpc.DialOption{grpc.WithInsecure()})\n\t\tif err != nil {\n\t\t\treturn err\n\t\t}\n\t}\n\n\tgo func() {\n\t\trpcsLog.Infof(\"REST proxy start listening on %s\", cfg.RESTListener.String())\n\t\terr := http.ListenAndServe(cfg.RESTListener.String(), mux)\n\t\trpcsLog.Errorf(\"REST proxy failed listening: %s\\n\", err)\n\t}()\n\n\t// Wait for shutdown signal from either a graceful server stop or from\n\t// the interrupt handler.\n\t<-s.ShutdownChannel()\n\treturn nil\n}", "func profile(c echo.Context) error {\n\tpprof.Profile(c.Response().Writer, c.Request())\n\treturn nil\n}", "func EnvProfile(envPrefix string, opts ...EnvProfileOption) EnvProfiler {\n\tvar (\n\t\tprofileEnv = envPrefix + \"PROFILE\"\n\t\tpathEnv = envPrefix + \"PROFILE_PATH\"\n\t\trateEnv = envPrefix + \"PROFILE_RATE\"\n\t\tquietEnv = envPrefix + \"PROFILE_QUIET\"\n\n\t\tprof = os.Getenv(profileEnv)\n\t\tpath = os.Getenv(pathEnv)\n\t\trateStr = os.Getenv(rateEnv)\n\t\tquiet = os.Getenv(quietEnv) != \"\"\n\n\t\text = \".pprof\"\n\t)\n\n\tvar pkind func(*profile.Profile)\n\tvar options []func(p *profile.Profile)\n\tvar rate int64\n\tvar err error\n\n\tif rateStr != \"\" {\n\t\trate, err = strconv.ParseInt(rateStr, 0, 64)\n\t\tif err != nil {\n\t\t\tpanic(fmt.Errorf(\"profiletools: profile rate could not be parsed: %v\", err))\n\t\t}\n\t}\n\tfor _, opt := range opts {\n\t\topt(&options)\n\t}\n\n\tvar applyRate = func() {}\n\n\tswitch prof {\n\tcase \"cpu\":\n\t\tpkind = profile.CPUProfile\n\t\tif rateStr != \"\" {\n\t\t\tapplyRate = func() {\n\t\t\t\truntime.SetCPUProfileRate(int(rate))\n\t\t\t}\n\t\t}\n\n\tcase \"block\":\n\t\tpkind = profile.BlockProfile\n\t\tif rateStr != \"\" {\n\t\t\tapplyRate = func() {\n\t\t\t\truntime.SetBlockProfileRate(int(rate))\n\t\t\t}\n\t\t}\n\n\tcase \"clock\":\n\t\tpkind = profile.ClockProfile\n\n\tcase \"mem\":\n\t\tpkind = profile.MemProfile\n\n\tcase \"trace\":\n\t\tpkind = profile.TraceProfile\n\t\text = \".out\"\n\n\tdefault:\n\t\treturn stopper{}\n\t}\n\n\tprog := filepath.Base(os.Args[0])\n\n\tif path == \"\" {\n\t\tpath = filepath.Join(os.TempDir(), fmt.Sprintf(\"%s-%d\", prog, time.Now().UnixNano()))\n\t}\n\n\t// WARNING: this relies on assumptions about the internals of the profile\n\t// package, which could change without warning.\n\texpectedFile := filepath.Join(path, prof+ext)\n\tlastFile := filepath.Join(os.TempDir(), fmt.Sprintf(\"%s-%s%s\", prog, prof, ext))\n\n\toptions = append(options, pkind, profile.ProfilePath(path))\n\tif quiet {\n\t\toptions = append(options, profile.Quiet)\n\t}\n\n\tapplyRate()\n\tstop := profile.Start(options...)\n\treturn stopper{\n\t\tfunc() {\n\t\t\tstop.Stop()\n\t\t\t_ = copyFile(expectedFile, lastFile)\n\t\t\tif !quiet {\n\t\t\t\tlog.Printf(\"profile: %s available at %s\\n\", prof, lastFile)\n\t\t\t}\n\t\t},\n\t}\n}", "func Start(port string) {\n\tif port == \"\" {\n\t\tport = portDefault\n\t}\n\tlog.Printf(\"Starting new server on port %v.\\n\", port)\n\n\tport = fmt.Sprintf(\":%v\", port)\n\tlis, err := net.Listen(\"tcp\", port)\n\tif err != nil {\n\t\tlog.Fatalf(\"failed to listen: %v\", err)\n\t}\n\ts := grpc.NewServer()\n\tproto.RegisterSumServiceServer(s, &server{})\n\tif err := s.Serve(lis); err != nil {\n\t\tlog.Fatalf(\"failed to serve: %v\", err)\n\t}\n}", "func StartGrpcServer(database *gorm.DB, port int) {\n\n\tgrpcServer := grpc.NewServer()\n\treflection.Register(grpcServer)\n\n\tpixRepository := repository.PixKeyRepositoryDB{DB: 
database}\n\tpixUseCase := usecase.PixUseCase{PixKeyRepository: pixRepository}\n\tpixGrpcService := NewPixGrpcService(pixUseCase)\n\n\tpb.RegisterPixServiceServer(grpcServer, pixGrpcService)\n\n\taddress := fmt.Sprintf(\"0.0.0.0:%d\", port)\n\tlistener, err := net.Listen(\"tcp\", address)\n\n\tif err != nil {\n\t\tlog.Fatal(\"Cannot start the server - \", err)\n\t}\n\n\tlog.Printf(\"gRPC server running on port %d\", port)\n\n\terr = grpcServer.Serve(listener)\n\n\tif err != nil {\n\t\tlog.Fatal(\"Cannot start gRPC server - \", err)\n\t}\n\n}", "func Start() {\n\tflag.Parse()\n\tlis, err := net.Listen(\"tcp\", fmt.Sprintf(\"localhost:%d\", *port))\n\tif err != nil {\n\t\tlog.Fatalf(\"failed to listen: %v\", err)\n\t}\n\tvar opts []grpc.ServerOption\n\tif *tls {\n\t\tif *certFile == \"\" {\n\t\t\t*certFile = testdata.Path(\"server1.pem\")\n\t\t}\n\t\tif *keyFile == \"\" {\n\t\t\t*keyFile = testdata.Path(\"server1.key\")\n\t\t}\n\t\tcreds, err := credentials.NewServerTLSFromFile(*certFile, *keyFile)\n\t\tif err != nil {\n\t\t\tlog.Fatalf(\"Failed to generate credentials %v\", err)\n\t\t}\n\t\topts = []grpc.ServerOption{grpc.Creds(creds)}\n\t}\n\tfmt.Println(\"Running at localhost:\", *port)\n\tgrpcServer := grpc.NewServer(opts...)\n\tpb.RegisterRatingServiceServer(grpcServer, newServer())\n\tgrpcServer.Serve(lis)\n}", "func (a *Admin) StartCPUProfiler(_ *http.Request, _ *struct{}, _ *api.EmptyReply) error {\n\ta.Log.Debug(\"API called\",\n\t\tzap.String(\"service\", \"admin\"),\n\t\tzap.String(\"method\", \"startCPUProfiler\"),\n\t)\n\n\treturn a.profiler.StartCPUProfiler()\n}", "func startHTTPListener() {\n\thttp.ListenAndServe(\":\"+GetConfig().Port, nil)\n}", "func main() {\n\t//init()\n\thostname, err := os.Hostname()\n\tif err != nil {\n\t\thostname = \"MISSING\"\n\t}\n\n\tinnerPort := os.Getenv(\"BACKEND_PORT\")\n\tif innerPort == \"\" {\n\t\tlog.Printf(\"Running on %s:5001\", hostname)\n\t\tlog.Fatal(http.ListenAndServe(\":5001\", nil))\n\t} else {\n\t\tlog.Printf(\"Running on %s:%s\", hostname, innerPort)\n\t\tlog.Fatal(http.ListenAndServe(fmt.Sprintf(\":%s\", innerPort), nil))\n\t}\n}", "func (r *runtime) startGRPCServer() {\n\tr.logger.Info(\"starting GRPC server\")\n\tr.grpcServer = newGRPCServer(r.config.BrokerBase.GRPC, linmetric.BrokerRegistry)\n\n\t// bind grpc handlers\n\tr.rpcHandler = &rpcHandler{\n\t\thandler: query.NewTaskHandler(\n\t\t\tr.config.Query,\n\t\t\tr.factory.taskServer,\n\t\t\tquery.NewIntermediateTaskProcessor(*r.node, r.config.Query.Timeout.Duration(),\n\t\t\t\tr.stateMgr, r.srv.taskManager, r.srv.transportManager),\n\t\t\tr.queryPool,\n\t\t),\n\t}\n\n\tprotoCommonV1.RegisterTaskServiceServer(r.grpcServer.GetServer(), r.rpcHandler.handler)\n\n\tgo serveGRPCFn(r.grpcServer)\n}", "func startCacheServer(cfg upspin.Config) (*upspin.Endpoint, error) {\n\tcfg = setCertPool(cfg)\n\n\t// Find a free port.\n\tport, err := testutil.PickPort()\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\taddr := fmt.Sprintf(\"localhost:%s\", port)\n\tep, _ := upspin.ParseEndpoint(\"remote,\" + addr)\n\n\t// Create a directory for the cacheserver's log and data.\n\tflags.CacheDir, err = ioutil.TempDir(\"\", \"cacheserver\")\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\n\tif _, err = serve(cfg, addr); err != nil {\n\t\tos.RemoveAll(flags.CacheDir)\n\t\treturn nil, err\n\t}\n\treturn ep, nil\n}", "func StartListen(request *restful.Request, response *restful.Response) {\n\tportstring := request.PathParameter(\"port-id\")\n\tglog.Info(\"get the port number\", portstring)\n\tportint, err 
:= strconv.Atoi(portstring)\n\tif err != nil {\n\t\tresponse.WriteError(500, err)\n\t\treturn\n\t}\n\tpid, pname, err := lib.Getinfofromport(portint)\n\n\tif pid == -1 {\n\t\tresponse.WriteError(500, errors.New(\"the port is not be listend in this machine ( /proc/net/tcp and /proc/net/tcp6)\"))\n\t\treturn\n\t}\n\n\tif err != nil {\n\t\tresponse.WriteError(500, err)\n\t\treturn\n\n\t}\n\tglog.Info(pname, pid)\n\n\t//create the process instance and get the detail info of specified pid\n\tPdetail := &model.ProcessDetail{\n\t\tProcess: &process.Process{Pid: 22637},\n\t}\n\tcmd, err := Pdetail.Cmdinfo()\n\tif err != nil {\n\t\tglog.Info(err)\n\t}\n\tglog.Info(cmd)\n\t//TODO get more info of this instance\n\n\t//start listen to specific ip:port for 60s and send the data to es\n\ttimesignal := time.After(time.Second * 30)\n\n\t//start collect and check the timesignal every one minutes\n\tgo lib.Startcollect(portint, device, timesignal)\n\n\tresponse.Write([]byte(\"activated\"))\n\n}", "func Start(profileName string, pathParam string) {\n\n\tif strings.TrimSpace(profileName) == \"\" {\n\t\tlog.Fatal(\"Missing the name of the profile to use\")\n\t}\n\t// Check if profile exists and load it\n\tif !HasProfileFile(profileName) {\n\t\tlog.Fatal(\"The requested profile \" + profileName + \" does not exists\")\n\t}\n\n\tprofileData := getProfileFactory(getProfilesPath(), profileName)\n\n\t// Add Pathparam to profileData model\n\tprofileData.PathParam = pathParam\n\n\tCheckAndResolveProfileConflicts(profileData)\n\n\t// Check if a profile with same alias is already runnin\n\tif docker.IsProfileStopped(docker.GetAlias(profileData)) {\n\t\t// Then remove the profile using a simple docker rm\n\t\tfmt.Println(\"Profile [\" + profileName + \"] is stopped, removing...\")\n\t\tdocker.RemoveProfile(profileData)\n\t} else {\n\t\tfmt.Println(\"profile is not stopped\")\n\t}\n\tfmt.Println(\"profile stopped\")\n\tdocker.StartProfile(profileData)\n\n}", "func TestPortVariable(t *testing.T) {\n\tpath := filepath.Join(basePath, \"0-basic\", \"test-service\")\n\tgrpcPort := strconv.Itoa(FindFreePort())\n\thttpPort := strconv.Itoa(FindFreePort())\n\tdebugPort := strconv.Itoa(FindFreePort())\n\n\t// Set environment variables\n\tdefer os.Unsetenv(\"PORT\")\n\tif err := os.Setenv(\"PORT\", httpPort); err != nil {\n\t\tt.Fatal(err)\n\t}\n\n\t// launch long running server\n\tserver, srvrOut, errc := runServer(path,\n\t\t\"-grpc.addr\", \":\"+grpcPort,\n\t\t\"-debug.addr\", \":\"+debugPort)\n\n\terr := reapServer(server, errc)\n\tif err != nil {\n\t\tt.Error(srvrOut.String())\n\t\tt.Fatalf(\"cannot reap server: %v\", err)\n\t}\n}", "func (s *Server) Run() error {\n\tlogger, err := NewDefaultLogger()\n\tif err != nil {\n\t\treturn errors.Wrapf(err, \"failed to create logger\")\n\t}\n\n\tl, err := net.Listen(\"tcp\", s.address)\n\tif err != nil {\n\t\treturn errors.Wrapf(err, \"failed to listen on %s\", s.address)\n\t}\n\n\tgrpc_zap.ReplaceGrpcLogger(logger)\n\tgrpc_prometheus.EnableHandlingTimeHistogram()\n\n\ts.grpc = grpc.NewServer(\n\t\tgrpc.UnaryInterceptor(\n\t\t\tgrpc_middleware.ChainUnaryServer(\n\t\t\t\tgrpc_validator.UnaryServerInterceptor(),\n\t\t\t\tgrpc_prometheus.UnaryServerInterceptor,\n\t\t\t\tgrpc_zap.UnaryServerInterceptor(logger),\n\t\t\t\tgrpc_recovery.UnaryServerInterceptor(),\n\t\t\t),\n\t\t),\n\t)\n\n\tapi.RegisterDeployServiceServer(s.grpc, s)\n\n\t// not exactly sure what this is used for, but examples\n\t// always do it:\n\t// 
https://godoc.org/google.golang.org/grpc/reflection\n\treflection.Register(s.grpc)\n\n\tgwmux := runtime.NewServeMux()\n\n\t_, port, err := net.SplitHostPort(s.address)\n\tif err != nil {\n\t\treturn errors.Wrapf(err, \"invalid address %s\", s.address)\n\t}\n\n\t// TODO: need to determine if we can actually connect to localhost\n\tif err := api.RegisterDeployServiceHandlerFromEndpoint(s.ctx, gwmux, net.JoinHostPort(\"127.0.0.1\", port), []grpc.DialOption{grpc.WithInsecure()}); err != nil {\n\t\treturn errors.Wrap(err, \"failed to register grpc gateway\")\n\t}\n\n\tmux := http.NewServeMux()\n\tmux.Handle(\"/metrics\", promhttp.Handler())\n\tmux.Handle(\"/\", gwmux)\n\n\ts.server = &http.Server{\n\t\tHandler: h2c.Server{\n\t\t\tHandler: http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {\n\t\t\t\tif r.ProtoMajor == 2 &&\n\t\t\t\t\tstrings.Contains(r.Header.Get(\"Content-Type\"), \"application/grpc\") {\n\t\t\t\t\ts.grpc.ServeHTTP(w, r)\n\t\t\t\t} else {\n\t\t\t\t\tmux.ServeHTTP(w, r)\n\t\t\t\t}\n\t\t\t}),\n\t\t},\n\t}\n\n\tif err := s.server.Serve(l); err != nil {\n\t\tif err != http.ErrServerClosed {\n\t\t\treturn errors.Wrap(err, \"failed to start http server\")\n\t\t}\n\t}\n\n\treturn nil\n}", "func (cfg *ConfigServer) Start(port int) {\n\tportString := fmt.Sprintf(\":%d\", port)\n\thttp.ListenAndServe(portString, nil)\n}", "func (ga4ghServer *GA4GHServer) Start(hostPort string) {\n\tlis, err := net.Listen(\"tcp\", \":\"+hostPort)\n\tif err != nil {\n\t\tpanic(\"Cannot open port\")\n\t}\n\tgrpcServer := grpc.NewServer()\n\n\tif ga4ghServer.task != nil {\n\t\tga4gh_task_exec.RegisterTaskServiceServer(grpcServer, ga4ghServer.task)\n\t}\n\tif ga4ghServer.sched != nil {\n\t\tga4gh_task_ref.RegisterSchedulerServer(grpcServer, ga4ghServer.sched)\n\t}\n\n\tlog.Info(\"RPC server listening\", \"port\", hostPort)\n\tgrpcServer.Serve(lis)\n}", "func (l *Listen) Run(args []string) int {\n\tconfig.ListenContext = true\n\n\tvar listener, address, cpuprofile string\n\tcmdFlags := flag.NewFlagSet(\"listen\", flag.ContinueOnError)\n\tcmdFlags.Usage = func() { l.UI.Error(l.Help()) }\n\tcmdFlags.StringVar(&listener, \"ln\", \"\", \"\")\n\tcmdFlags.StringVar(&address, \"address\", \"\", \"\")\n\tcmdFlags.StringVar(&cpuprofile, \"cpuprofile\", \"\", \"write cpu profile to file\")\n\tif err := cmdFlags.Parse(args); err != nil {\n\t\treturn 1\n\t}\n\n\targErr := false\n\n\t//Check listener\n\tif listener == \"\" {\n\t\tl.UI.Error(\"Listener name must be specified\")\n\t\targErr = true\n\t}\n\n\t//Check address\n\tif address == \"\" {\n\t\tl.UI.Error(\"Address must be specified\")\n\t\targErr = true\n\t}\n\n\tif argErr {\n\t\tl.UI.Error(\"\")\n\t\tl.UI.Error(l.Help())\n\t\treturn 1\n\t}\n\n\tif cpuprofile != \"\" {\n\t\tf, err := os.Create(cpuprofile)\n\t\tif err != nil {\n\t\t\tl.UI.Error(err.Error())\n\t\t\treturn 1\n\t\t}\n\t\tpprof.StartCPUProfile(f)\n\t\tdefer pprof.StopCPUProfile()\n\t}\n\n\t//Read and record the listener config so it is available to the plugin chain\n\tserviceConfig, err := config.ReadServiceConfig(listener, l.KVStore)\n\tif err != nil {\n\t\tl.UI.Error(err.Error())\n\t\treturn 1\n\t}\n\n\tconfig.RecordActiveConfig(serviceConfig)\n\n\t//Build the service for the named listener\n\ts, err := service.BuildServiceForListener(listener, address, l.KVStore)\n\tif err != nil {\n\t\tl.UI.Error(err.Error())\n\t\treturn 1\n\t}\n\tl.UI.Info(fmt.Sprintf(\"***Service:\\n%s\", s))\n\n\t//Build health check context for the named listerner\n\thcc, err := 
service.BuildHealthContextForListener(listener, l.KVStore)\n\tif err != nil {\n\t\tl.UI.Error(err.Error())\n\t\treturn 1\n\t}\n\tservice.RecordActiveHealthCheckContext(hcc)\n\n\texitChannel := make(chan int)\n\tsignalChannel := make(chan os.Signal, 1)\n\tsignal.Notify(signalChannel, os.Interrupt)\n\n\tgo func() {\n\t\tfor _ = range signalChannel {\n\t\t\texitChannel <- 0\n\t\t}\n\t}()\n\n\tgo func(service service.Service) {\n\t\tservice.Run()\n\t\t//Run can return if it can't open ports, etc.\n\t\texitChannel <- 1\n\t}(s)\n\n\texitStatus := <-exitChannel\n\tfmt.Printf(\"exiting with status %d\\n\", exitStatus)\n\treturn exitStatus\n}", "func RegisterGRPCFlags() {\n\tGRPCPort = flag.Int(\"grpc_port\", 0, \"Port to listen on for gRPC calls\")\n}", "func WithPprof(val bool) Option {\n\treturn func(d *debug) {\n\t\td.pprof = val\n\t}\n}", "func main() {\n if len(os.Args) != 2 {\n log.Panic(\"args:\", \"<port>\")\n }\n port := os.Args[1]\n startServer(port)\n}", "func (pr *Prober) start(ctx context.Context) {\n\t// Start the default server\n\tsrv := &http.Server{}\n\tgo func() {\n\t\t<-ctx.Done()\n\t\tsrv.Close()\n\t}()\n\tgo func() {\n\t\tsrv.Serve(pr.serverListener)\n\t\tos.Exit(1)\n\t}()\n\n\tdataChan := make(chan *metrics.EventMetrics, 1000)\n\n\tgo func() {\n\t\tvar em *metrics.EventMetrics\n\t\tfor {\n\t\t\tem = <-dataChan\n\t\t\tvar s = em.String()\n\t\t\tif len(s) > logger.MaxLogEntrySize {\n\t\t\t\tglog.Warningf(\"Metric entry for timestamp %v dropped due to large size: %d\", em.Timestamp, len(s))\n\t\t\t\tcontinue\n\t\t\t}\n\n\t\t\t// Replicate the surfacer message to every surfacer we have\n\t\t\t// registered. Note that s.Write() is expected to be\n\t\t\t// non-blocking to avoid blocking of EventMetrics message\n\t\t\t// processing.\n\t\t\tfor _, surfacer := range pr.surfacers {\n\t\t\t\tsurfacer.Write(context.Background(), em)\n\t\t\t}\n\t\t}\n\t}()\n\n\t// Start a goroutine to export system variables\n\tgo sysvars.Start(ctx, dataChan, time.Millisecond*time.Duration(pr.c.GetSysvarsIntervalMsec()), pr.c.GetSysvarsEnvVar())\n\n\t// Start servers, each in its own goroutine\n\tfor _, s := range pr.Servers {\n\t\tgo s.Start(ctx, dataChan)\n\t}\n\n\t// Start RDS server if configured.\n\tif pr.rdsServer != nil {\n\t\tgo pr.rdsServer.Start(ctx, dataChan)\n\t}\n\n\t// Start RTC reporter if configured.\n\tif pr.rtcReporter != nil {\n\t\tgo pr.rtcReporter.Start(ctx)\n\t}\n\n\tif pr.c.GetDisableJitter() {\n\t\tfor _, p := range pr.Probes {\n\t\t\tgo p.Start(ctx, dataChan)\n\t\t}\n\t\treturn\n\t}\n\tpr.startProbesWithJitter(ctx, dataChan)\n}", "func startServer(grpcServer *grpc.Server, listener net.Listener) {\n\tif err := grpcServer.Serve(listener); err != nil {\n\t\tlog.Fatalf(\"Error running server: %server\", err)\n\t}\n}", "func Start(port string) {\n\tlis, err := net.Listen(\"tcp\", port)\n\tif err != nil {\n\t\tlog.Fatalf(\"failed to listen: %v\", err)\n\t}\n\ts := grpc.NewServer()\n\tproject.RegisterProjectServer(s, &projectService{})\n\tservice.RegisterServiceServer(s, &serviceService{})\n\ts.Serve(lis)\n}", "func startChatServer(opts ...grpc.ServerOption) *grpc.Server {\n\t// 1: Create listener with port 8080\n\tlis, err := net.Listen(\"tcp\", \":8080\")\n\tif err != nil {\n\t\tlog.Fatalf(\"Failed to listen: %v\", err)\n\t}\n\n\t// 2: Create grpc server with grpc.ServerOption\n\tserver := grpc.NewServer(opts...)\n\n\t// 3: Register server to proto\n\tapi.RegisterChatServer(server, &ChatServer{})\n\n\t// 4: Start server\n\tgo func() {\n\t\tif err := server.Serve(lis); err != nil 
{\n\t\t\tlog.Fatalf(\"Failed to serve: %v\", err)\n\t\t}\n\t}()\n\n\treturn server\n}", "func startServer(config types.Config, v *Vibranium) *grpc.Server {\n\ts, err := net.Listen(\"tcp\", config.Bind)\n\tif err != nil {\n\t\tlog.Fatal(err)\n\t}\n\n\topts := []grpc.ServerOption{grpc.MaxConcurrentStreams(100)}\n\tgrpcServer := grpc.NewServer(opts...)\n\tpb.RegisterCoreRPCServer(grpcServer, v)\n\tgo grpcServer.Serve(s)\n\tlog.Info(\"Cluster started successfully.\")\n\treturn grpcServer\n}", "func StartGRPC(ctx context.Context, config *Config) error {\n\tg := NewGRPC(ctx, config)\n\n\t// Create our listener channel\n\tlistener, err := net.Listen(\"tcp\", config.ListenAddress())\n\tif err != nil {\n\t\tlog.WithFields(log.Fields{\n\t\t\t\"err\": err,\n\t\t}).Fatal(fmt.Sprintf(\"could not listen on address: %s\", config.ListenAddress()))\n\t\treturn errors.Wrap(err, \"listen\")\n\t}\n\n\treturn g.Serve(listener)\n}", "func StartGRPC(ctx context.Context, bind string, logger *zap.Logger) error {\n\ts := grpc.NewServer(\n\t\tgrpc.StreamInterceptor(grpc_middleware.ChainStreamServer(\n\t\t\tgrpc_zap.StreamServerInterceptor(logger),\n\t\t\tgrpc_recovery.StreamServerInterceptor(),\n\t\t\tgrpc_prometheus.StreamServerInterceptor,\n\t\t)),\n\t\tgrpc.UnaryInterceptor(grpc_middleware.ChainUnaryServer(\n\t\t\tgrpc_zap.UnaryServerInterceptor(logger),\n\t\t\tgrpc_recovery.UnaryServerInterceptor(),\n\t\t\tgrpc_prometheus.UnaryServerInterceptor,\n\t\t)),\n\t)\n\tdefer logger.Sync()\n\tapiServer := &server{\n\t\tquit: ctx.Done(),\n\t}\n\tapi.RegisterPhraserServer(s, apiServer)\n\tgrpc_prometheus.Register(s)\n\treflection.Register(s)\n\n\tlistener, err := net.Listen(\"tcp\", bind)\n\tif err != nil {\n\t\treturn err\n\t}\n\n\tgo func() {\n\t\t<-ctx.Done()\n\t\tlog.Println(\"grpc service: stopping\")\n\t\ts.GracefulStop()\n\t\tlog.Println(\"grpc service: stopped\")\n\t\tlogger.Sync()\n\t}()\n\n\treturn s.Serve(listener)\n}", "func Test100_01SetupApi(t *testing.T) {\n\n\tt.Run(\"ConnectToBackend\", func(t *testing.T) {\n\t\tvar err error\n\t\tt.Logf(\"Listen address: %v\", accaGrpcAddr)\n\t\tConn, err = grpc.Dial(accaGrpcAddr, grpc.WithInsecure(), grpc.WithBlock())\n\t\tif err != nil {\n\t\t\tt.Fatal(err)\n\t\t}\n\t})\n\n}", "func main() {\n\t// Listen an actual port.\n\tlis, err := net.Listen(\"tcp\", fmt.Sprintf(\":%d\", 9093))\n\tif err != nil {\n\t\tlog.Fatalf(\"failed to listen: %v\", err)\n\t}\n\tdefer lis.Close()\n\n\t// Create a HTTP server for prometheus.\n\thttpServer := &http.Server{Handler: promhttp.HandlerFor(reg, promhttp.HandlerOpts{}), Addr: fmt.Sprintf(\"0.0.0.0:%d\", 9092)}\n\n\t// Create a gRPC Server with gRPC interceptor.\n\tgrpcServer := grpc.NewServer(\n\t\tgrpc.StreamInterceptor(grpcMetrics.StreamServerInterceptor()),\n\t\tgrpc.UnaryInterceptor(grpcMetrics.UnaryServerInterceptor()),\n\t)\n\n\t// Create a new api server.\n\tdemoServer := newDemoServer()\n\n\t// Register your service.\n\tpb.RegisterDemoServiceServer(grpcServer, demoServer)\n\n\t// Initialize all metrics.\n\tgrpcMetrics.InitializeMetrics(grpcServer)\n\n\t// Start your http server for prometheus.\n\tgo func() {\n\t\tif err := httpServer.ListenAndServe(); err != nil {\n\t\t\tlog.Fatal(\"Unable to start a http server.\")\n\t\t}\n\t}()\n\n\t// Start your gRPC server.\n\tlog.Fatal(grpcServer.Serve(lis))\n}", "func main() {\n logging.SetLevel(logging.DEBUG)\n\n no_tls := flag.Bool(\"no-tls\", false, \"Whether to use TLS, default false\")\n crt_path := flag.String(\"crt\", \"cert.crt\", \"Path to the TLS certificate, default 
`cert.crt`\")\n key_path := flag.String(\"key\", \"key.key\", \"Path to the TLS key, default `key.key`\")\n sname := flag.String(\"srv-name\", \"mycroft\", \"This server's name for SNI\")\n\n flag.Parse()\n\n logging.Info(\"Starting Server ...\")\n\n if *no_tls {\n logging.Warning(\"not using TLS\")\n err := srv.StartListen(1847, false, \"\", \"\", \"\")\n if err != nil {\n logging.Fatal(\"Could not start server: \", err.Error())\n }\n } else {\n err := srv.StartListen(1847, true, *crt_path, *key_path, *sname)\n if err != nil {\n logging.Fatal(\"Could not start server: \", err.Error())\n }\n }\n}", "func StartListen(request *restful.Request, response *restful.Response) {\n\tportstring := request.PathParameter(\"port-id\")\n\tglog.Info(\"get the port number\", portstring)\n\tportint, err := strconv.Atoi(portstring)\n\tif err != nil {\n\t\tresponse.WriteError(500, err)\n\t\treturn\n\t}\n\tpid, _, err := lib.Getinfofromport(portint)\n\n\tif pid == -1 {\n\t\tresponse.WriteError(500, errors.New(\"the port is not be listend in this machine ( /proc/net/tcp and /proc/net/tcp6)\"))\n\t\treturn\n\t}\n\tif err != nil {\n\t\tresponse.WriteError(500, err)\n\t\treturn\n\t}\n\t//start listen to specific ip:port for 60s and send the data to es\n\ttimesignal := time.After(time.Second * Defaulttime)\n\t//start collect and check the timesignal every one minutes\n\tif !lib.Activeflag {\n\t\tgo lib.Startcollect(portint, Device, timesignal)\n\t\tlib.Flagmutex.Lock()\n\t\tlib.Activeflag = true\n\t\tresponse.Write([]byte(\"activated\"))\n\t\tlib.Flagmutex.Unlock()\n\t} else {\n\t\tresponse.Write([]byte(\"the server is already been activatied\"))\n\t}\n}", "func main() {\n\tservice.StartWebServer(\"8081\")\n}", "func startServer(port string, handler http.Handler) {\n\terr := http.ListenAndServe(port, handler)\n\tif err != nil {\n\t\tlogger.Fatal(\"ListenAndServe: \", err)\n\t}\n}", "func ListenAndServe(ctx context.Context, bin, address, port string) {\n\tfmt.Println(`\n\n███████╗███████╗██╗ ███████╗ ███████╗███████╗████████╗███████╗███████╗███╗ ███╗\n██╔════╝██╔════╝██║ ██╔════╝ ██╔════╝██╔════╝╚══██╔══╝██╔════╝██╔════╝████╗ ████║\n███████╗█████╗ ██║ █████╗█████╗█████╗ ███████╗ ██║ █████╗ █████╗ ██╔████╔██║\n╚════██║██╔══╝ ██║ ██╔══╝╚════╝██╔══╝ ╚════██║ ██║ ██╔══╝ ██╔══╝ ██║╚██╔╝██║\n███████║███████╗███████╗██║ ███████╗███████║ ██║ ███████╗███████╗██║ ╚═╝ ██║\n╚══════╝╚══════╝╚══════╝╚═╝ ╚══════╝╚══════╝ ╚═╝ ╚══════╝╚══════╝╚═╝ ╚═╝`)\n\tlog.Info(ctx, \"server listening\", \"bin\", bin, \"address\", address, \"port\", port)\n\thttp.ListenAndServe(fmt.Sprintf(\"%s:%s\", address, port), mux)\n}", "func startProfiling(name string) (*os.File, error) {\n\tcreateFile := func(name string) *os.File {\n\t\tpprofFile, err := os.Create(profFile(name))\n\t\tif err != nil {\n\t\t\tlog.Errorf(\"Could not create file for profiling %s\", name)\n\t\t\treturn nil\n\t\t}\n\n\t\tlog.WithFields(log.Fields{\n\t\t\t\"process\": \"profile\",\n\t\t\t\"file\": pprofFile.Name(),\n\t\t}).Infof(\"%s profile starting\", name)\n\n\t\treturn pprofFile\n\t}\n\n\t// Perform different initializing methods according to the type of profile\n\tswitch name {\n\tcase \"mutex\":\n\t\truntime.SetMutexProfileFraction(mutexProfileRate)\n\t\treturn createFile(name), nil\n\tcase \"block\":\n\t\truntime.SetBlockProfileRate(blockProfileRate)\n\t\treturn createFile(name), nil\n\tcase \"heap\", \"goroutine\":\n\t\treturn createFile(name), nil\n\tcase \"memstats\":\n\t\t// No file needed for custom sampling that records into logger\n\t\tlogMemstatsSample()\n\tcase 
\"cpu\":\n\t\tf := createFile(name)\n\t\tif err := pprof.StartCPUProfile(f); err != nil {\n\t\t\tlog.Error(\"Could not start CPU profile: \", err)\n\t\t}\n\n\t\treturn f, nil\n\tdefault:\n\t\terr := fmt.Errorf(\"Unsupported profile name %s\", name)\n\t\tlog.Error(err)\n\n\t\treturn nil, err\n\t}\n\n\treturn nil, nil\n}", "func main() {\n\n\targs := os.Args[1]\n\tfmt.Print(args)\n\treadConfig(args)\n\tfmt.Println(configuration.Pss)\n\n\t// start server pass connection\n\tpsserverCtx, err := newClientContext(configuration.Pss[0])\n\tif err != nil {\n\t\tlog.Fatal(err)\n\t}\n\ts1 := &clientPassword{psserverCtx}\n\tpsCon = *s1\n\n\t//start db client\n\tdbserverCtx, err := newDBContext(configuration.Dbs[0])\n\tif err != nil {\n\t\tlog.Fatal(err)\n\t}\n\ts2 := &clientDB{dbserverCtx}\n\tdbConn = *s2\n\n\t//start load balance connection\n\tdbserverCtxLB, err := newDBContextLoadBalancing()\n\tif err != nil {\n\t\tlog.Fatal(err)\n\t}\n\ts3 := &clientDBLoadBalancing{dbserverCtxLB}\n\tdbConnLB = *s3\n\n\t// conect to profiles\n\n\tprofilesCtx, err := newProfilesServiceContext(configuration.Prof[0])\n\tif err != nil {\n\t\tlog.Fatal(err)\n\t}\n\ts4 := &profileServer{profilesCtx}\n\tprofSerConn = *s4\n\n\n\tCreateUser(\"use45\",\"token\",\"name\",\"description\");\n\t//fmt.Print(\"Service started\")\n\tlog.Printf(\"Started: %v\", \" service\")\n\n\tlis, err := net.Listen(\"tcp\", port)\n\tif err != nil {\n\t\tlog.Fatalf(\"failed to listen: %v\", err)\n\t}\n\ts := grpc.NewServer()\n\tpb.RegisterUserAuthenticationServer(s, &server{})\n\tif err := s.Serve(lis); err != nil {\n\t\tlog.Fatalf(\"failed to serve: %v\", err)\n\t}\n}", "func (s *Server) startGrpcLoop(grpcPort int) {\n\tdefer s.wg.Done()\n\tParams := &paramtable.Get().QueryNodeGrpcServerCfg\n\tvar kaep = keepalive.EnforcementPolicy{\n\t\tMinTime: 5 * time.Second, // If a client pings more than once every 5 seconds, terminate the connection\n\t\tPermitWithoutStream: true, // Allow pings even when there are no active streams\n\t}\n\n\tvar kasp = keepalive.ServerParameters{\n\t\tTime: 60 * time.Second, // Ping the client if it is idle for 60 seconds to ensure the connection is still active\n\t\tTimeout: 10 * time.Second, // Wait 10 second for the ping ack before assuming the connection is dead\n\t}\n\tvar lis net.Listener\n\tvar err error\n\terr = retry.Do(s.ctx, func() error {\n\t\taddr := \":\" + strconv.Itoa(grpcPort)\n\t\tlis, err = net.Listen(\"tcp\", addr)\n\t\tif err == nil {\n\t\t\ts.querynode.SetAddress(fmt.Sprintf(\"%s:%d\", Params.IP, lis.Addr().(*net.TCPAddr).Port))\n\t\t} else {\n\t\t\t// set port=0 to get next available port\n\t\t\tgrpcPort = 0\n\t\t}\n\t\treturn err\n\t}, retry.Attempts(10))\n\tif err != nil {\n\t\tlog.Error(\"QueryNode GrpcServer:failed to listen\", zap.Error(err))\n\t\ts.grpcErrChan <- err\n\t\treturn\n\t}\n\n\topts := tracer.GetInterceptorOpts()\n\ts.grpcServer = grpc.NewServer(\n\t\tgrpc.KeepaliveEnforcementPolicy(kaep),\n\t\tgrpc.KeepaliveParams(kasp),\n\t\tgrpc.MaxRecvMsgSize(Params.ServerMaxRecvSize.GetAsInt()),\n\t\tgrpc.MaxSendMsgSize(Params.ServerMaxSendSize.GetAsInt()),\n\t\tgrpc.UnaryInterceptor(grpc_middleware.ChainUnaryServer(\n\t\t\totelgrpc.UnaryServerInterceptor(opts...),\n\t\t\tlogutil.UnaryTraceLoggerInterceptor,\n\t\t\tinterceptor.ClusterValidationUnaryServerInterceptor(),\n\t\t\tinterceptor.ServerIDValidationUnaryServerInterceptor(func() int64 {\n\t\t\t\tif s.serverID.Load() == 0 {\n\t\t\t\t\ts.serverID.Store(paramtable.GetNodeID())\n\t\t\t\t}\n\t\t\t\treturn 
s.serverID.Load()\n\t\t\t}),\n\t\t)),\n\t\tgrpc.StreamInterceptor(grpc_middleware.ChainStreamServer(\n\t\t\totelgrpc.StreamServerInterceptor(opts...),\n\t\t\tlogutil.StreamTraceLoggerInterceptor,\n\t\t\tinterceptor.ClusterValidationStreamServerInterceptor(),\n\t\t\tinterceptor.ServerIDValidationStreamServerInterceptor(func() int64 {\n\t\t\t\tif s.serverID.Load() == 0 {\n\t\t\t\t\ts.serverID.Store(paramtable.GetNodeID())\n\t\t\t\t}\n\t\t\t\treturn s.serverID.Load()\n\t\t\t}),\n\t\t)))\n\tquerypb.RegisterQueryNodeServer(s.grpcServer, s)\n\n\tctx, cancel := context.WithCancel(s.ctx)\n\tdefer cancel()\n\n\tgo funcutil.CheckGrpcReady(ctx, s.grpcErrChan)\n\tif err := s.grpcServer.Serve(lis); err != nil {\n\t\tlog.Debug(\"QueryNode Start Grpc Failed!!!!\")\n\t\ts.grpcErrChan <- err\n\t}\n\n}", "func Run(config *Config, storage *server.Storager) {\n\tnetwork, address := networkAddresFromString(config.Address)\n\n\tconn, err := net.Listen(network, address)\n\tif err != nil {\n\t\tlog.Fatalln(\"could not listen to address\", config.Address)\n\t}\n\n\tlog.Printf(\"starting grpc server on %s\", address)\n\n\tvar (\n\t\tsrv *http.Server\n\t\tgrpcServer *grpc.Server\n\t\trestMux *runtime.ServeMux\n\t\ttlsConfig *tls.Config\n\t\tctx = context.Background()\n\t\thttpMux = http.NewServeMux()\n\t)\n\n\ttlsConfig, err = tlsClientConfig(config.CAFile, config.CertFile, config.KeyFile, config.ServerName)\n\tif err != nil {\n\t\tlog.Fatal(\"Failed to create tls config\", err)\n\t}\n\n\tdialOptions := getDialOptions(tlsConfig)\n\tserverOptions := getServerOptions(tlsConfig)\n\n\tgrpcServer = newGrpcServer(storage, serverOptions...)\n\trestMux, _ = newRestMux(ctx, address, dialOptions...)\n\n\thttpMux.Handle(\"/\", restMux)\n\n\tmergeHandler := grpcHandlerFunc(grpcServer, httpMux)\n\n\t// Setup the CORS middleware. 
If `config.CORSAllowedOrigins` is empty, no CORS\n\t// Origins will be allowed through.\n\tcors := cors.New(cors.Options{\n\t\tAllowedOrigins: config.CORSAllowedOrigins,\n\t})\n\n\tsrv = &http.Server{\n\t\tHandler: cors.Handler(h2c.NewHandler(mergeHandler, &http2.Server{})),\n\t\tTLSConfig: tlsConfig,\n\t}\n\n\t// blocking call\n\tif tlsConfig != nil {\n\t\terr = srv.Serve(tls.NewListener(conn, srv.TLSConfig))\n\t} else {\n\t\terr = srv.Serve(conn)\n\t}\n\thandleShutdown(err)\n\tlog.Println(\"Grpc API stopped\")\n}", "func (s *Server) Start() error {\n\tconfig, err := rest.InClusterConfig()\n\tif err != nil {\n\t\treturn trace.Wrap(err)\n\t}\n\ts.client, err = kubernetes.NewForConfig(config)\n\tif err != nil {\n\t\treturn trace.Wrap(err)\n\t}\n\n\ts.conn, err = icmp.ListenPacket(\"ip4:icmp\", \"0.0.0.0\")\n\tif err != nil {\n\t\treturn trace.Wrap(err)\n\t}\n\n\ts.clock = clockwork.NewRealClock()\n\tgo s.loop()\n\tgo s.loopServiceDiscovery()\n\tgo s.serve()\n\n\tmux := http.ServeMux{}\n\tmux.Handle(\"/metrics\", promhttp.Handler())\n\ts.httpServer = &http.Server{Addr: fmt.Sprint(\":\", s.config.PrometheusPort), Handler: &mux}\n\n\t// Workaround for https://github.com/gravitational/gravity/issues/2320\n\t// Disable keep-alives to avoid the client/server hanging unix domain sockets that don't get cleaned up.\n\ts.httpServer.SetKeepAlivesEnabled(false)\n\n\tgo func() {\n\t\tif err := s.httpServer.ListenAndServe(); err != http.ErrServerClosed {\n\t\t\ts.Fatalf(\"ListenAndServe(): %s\", err)\n\t\t}\n\t}()\n\n\tif s.config.PrometheusSocket != \"\" {\n\t\t_ = os.Remove(s.config.PrometheusSocket)\n\n\t\tunixListener, err := net.Listen(\"unix\", s.config.PrometheusSocket)\n\t\tif err != nil {\n\t\t\treturn trace.Wrap(err)\n\t\t}\n\n\t\tgo func() {\n\t\t\tif err := s.httpServer.Serve(unixListener); err != http.ErrServerClosed {\n\t\t\t\ts.Fatalf(\"Unix Listen(): %s\", err)\n\t\t\t}\n\t\t}()\n\t}\n\n\ts.Info(\"Started nethealth with config:\")\n\ts.Info(\" PrometheusSocket: \", s.config.PrometheusSocket)\n\ts.Info(\" PrometheusPort: \", s.config.PrometheusPort)\n\ts.Info(\" Namespace: \", s.config.Namespace)\n\ts.Info(\" NodeName: \", s.config.NodeName)\n\ts.Info(\" Selector: \", s.selector)\n\ts.Info(\" ServiceDiscoveryQuery: \", s.config.ServiceDiscoveryQuery)\n\n\treturn nil\n}", "func Start(e *echo.Echo) {\n Connect()\n e.Logger.Fatal(e.Start(\":\" + os.Getenv(\"PORT\")))\n}", "func (node *RaftNode) StartGRPCServer(ctx context.Context, grpc_address string, listener *net.TCPListener, testing bool) {\n\n\t// Shut down the gRPC server if the context is cancelled\n\tgo func() {\n\n\t\t// Block till the context is cancelled\n\t\t<-ctx.Done()\n\n\t\t// Stop the server\n\t\tnode.Meta.grpc_server.GracefulStop()\n\n\t\tif !testing {\n\t\t\tnode.Meta.shutdown_chan <- \"gRPC server shutdown successful.\"\n\t\t}\n\n\t}()\n\n\t// Start the server\n\tlog.Printf(\"\\nStarting gRPC server at address %v...\\n\", grpc_address)\n\terr := node.Meta.grpc_server.Serve(listener) // Serve will return a non-nil error unless Stop or GracefulStop is called.\n\n\tCheckErrorFatal(err)\n}", "func (s *Server) SetupProfile(profile *Profile) {\n\tif s.proxy == nil {\n\t\ts.proxy = newProxy()\n\t}\n\ts.proxy.SetProfile(profile)\n}", "func main() {\n entry_point := \"/cpf_dv/\"\n os.Setenv(\"ENTRY_POINT\", entry_point)\n http.HandleFunc(entry_point, fun.CpfFull)\n log.Fatal(http.ListenAndServe(\":\"+os.Getenv(\"PORT\"), nil))\n}", "func startLocalHttpServer(ch chan OAuthSecurity) (port int, err error) {\n\tlistener, err := 
net.Listen(\"tcp\", \":\"+PORT)\n\tif err != nil {\n\t\treturn\n\t}\n\tport = listener.Addr().(*net.TCPAddr).Port\n\th := http.NewServeMux()\n\th.HandleFunc(\"/\", func(w http.ResponseWriter, r *http.Request) {\n\t\tquery := r.URL.Query()\n\t\tvar oa OAuthSecurity\n\t\toa.AuthCode = query.Get(\"code\")\n\t\tch <- oa\n\t\tif _, ok := r.Header[\"X-Requested-With\"]; ok == false {\n\t\t\tPrintSuccess(w, \"You can close your browser window now\")\n\t\t}\n\t\tlistener.Close()\n\t})\n\tgo http.Serve(listener, h)\n\treturn\n}", "func (s *Server) Start(ctx context.Context, listenPort uint) error {\n\tif err := s.prepare(ctx, listenPort); err != nil {\n\t\treturn err\n\t}\n\n\tif s.enableAPI {\n\t\treturn s.srv.ListenAndServe()\n\t}\n\n\treturn nil\n}", "func StartAndListen(network, address string, port, bufferSize int) (Collector, error) {\n\tswitch network {\n\tcase \"udp\":\n\t\treturn StartAndListenUDP(address, port, bufferSize)\n\tdefault:\n\t\treturn nil, notImplementedNetworkError{network: network}\n\t}\n}" ]
[ "0.66272086", "0.6347573", "0.6347573", "0.62863874", "0.6039441", "0.5928889", "0.5855138", "0.5710872", "0.5701532", "0.56342703", "0.5629403", "0.55609417", "0.5488648", "0.5400785", "0.5398714", "0.5365184", "0.5349471", "0.5321848", "0.53112197", "0.5267711", "0.52288973", "0.51994056", "0.5184464", "0.51715636", "0.5152233", "0.50877446", "0.5087053", "0.5084333", "0.50729996", "0.49949333", "0.49712482", "0.49705106", "0.49573064", "0.49444067", "0.4944241", "0.4942405", "0.49208236", "0.49072176", "0.49013627", "0.4887679", "0.48774025", "0.4877392", "0.48765936", "0.48695886", "0.4855057", "0.48507416", "0.48456448", "0.48436126", "0.48357928", "0.48247498", "0.48243752", "0.48215625", "0.4815323", "0.48115754", "0.4811062", "0.47987896", "0.47880763", "0.47708374", "0.47695473", "0.4769036", "0.47613138", "0.47571632", "0.475691", "0.47547808", "0.47374898", "0.47347876", "0.47243774", "0.471819", "0.47133157", "0.47025874", "0.46986565", "0.4697954", "0.46935615", "0.46880463", "0.46825442", "0.46807966", "0.4680711", "0.46754333", "0.46735516", "0.46734792", "0.46707866", "0.4665533", "0.46637854", "0.46637455", "0.46613827", "0.4659508", "0.4652955", "0.4645251", "0.46443114", "0.4643669", "0.46432838", "0.46362352", "0.46335167", "0.4630375", "0.46269557", "0.46246493", "0.46212152", "0.4619892", "0.46155623", "0.46150547" ]
0.79906243
0
Make all file names in the config absolute
func (s *Server) makeFileNamesAbsolute() error {
	log.Debug("Making server filenames absolute")
	err := stls.AbsTLSServer(&s.Config.TLS, s.HomeDir)
	if err != nil {
		return err
	}
	return nil
}
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "func mkAbsolutePaths(config *ConfigurationOptions) {\n\tconfig.NodeAgentWorkloadHomeDir = filepath.Join(config.NodeAgentManagementHomeDir, config.NodeAgentWorkloadHomeDir)\n\tconfig.NodeAgentCredentialsHomeDir = filepath.Join(config.NodeAgentManagementHomeDir, config.NodeAgentCredentialsHomeDir)\n\tconfig.NodeAgentManagementAPI = filepath.Join(config.NodeAgentManagementHomeDir, config.NodeAgentManagementAPI)\n}", "func (r *Repo) MakeAbs(pkgs pacman.Packages) {\n\tfor _, p := range pkgs {\n\t\tfilepath := path.Join(r.Directory, path.Base(p.Filename))\n\t\tif p.Filename != filepath {\n\t\t\tterm.Debugf(\"Note: package filename data incorrect: %s\\n\", p.Filename)\n\t\t}\n\t\tp.Filename = filepath\n\t}\n}", "func absoluteConfigPath(configPath string) string {\n\tif path.IsAbs(configPath) {\n\t\treturn configPath\n\t}\n\treturn path.Join(\"/usr/local/etc/virtual-usb-printer/\", configPath)\n}", "func DefaultConfigFileAbsolute() string {\n\treturn fmt.Sprintf(\"%s/%s\", DefaultDir, Filename)\n}", "func (w *Workspace) SetAbsoluteFilepaths(path string) {\n\t// itterate over all elements\n\tfor pi, p := range w.Projects {\n\t\tfor si, s := range p.Systems {\n\t\t\tfor fi, f := range s.Files {\n\t\t\t\tif !filepath.IsAbs(f.FilePathName) {\n\t\t\t\t\tw.Projects[pi].Systems[si].Files[fi].FilePathName = filepath.Join(path, f.FilePathName)\n\t\t\t\t}\n\t\t\t}\n\t\t}\n\t}\n}", "func convertPathToAbs(outfile string, target map[string]string) string {\n\towd := target[\"owd\"]\n\tif !filepath.IsAbs(outfile) {\n\t\toutfile = filepath.Join(owd, outfile)\n\t}\n\treturn outfile\n}", "func MakeFileNamesAbsolute(files []*string, home string) error {\n\tfor _, filePtr := range files {\n\t\tabs, err := MakeFileAbs(*filePtr, home)\n\t\tif err != nil {\n\t\t\treturn err\n\t\t}\n\t\t*filePtr = abs\n\t}\n\treturn nil\n}", "func (l *tDjangoAssetLoader) Abs(base, name string) string {\n\tif stdPath.IsAbs(name) {\n\t\treturn name\n\t}\n\n\treturn stdPath.Join(l.rootDir, name)\n}", "func absolutizeClasspathEntries(workingDir string, relativeClasspathEntries []string) []string {\n\tabsoluteClasspathEntries := make([]string, len(relativeClasspathEntries))\n\tfor i, entry := range relativeClasspathEntries {\n\t\tabsoluteClasspathEntries[i] = path.Join(workingDir, entry)\n\t}\n\treturn absoluteClasspathEntries\n}", "func absPaths(base string, paths []string) []string {\n\tp := make([]string, len(paths))\n\tfor ix, v := range paths {\n\t\tp[ix] = filepath.Join(base, v)\n\t}\n\treturn p\n}", "func MakeAbsolute(path string) string {\n\tif !filepath.IsAbs(path) {\n\t\twd, err := os.Getwd()\n\t\tif err != nil {\n\t\t\tpanic(err)\n\t\t}\n\t\tpath = filepath.Join(wd, path)\n\t}\n\tif !Exists(path) {\n\t\tpanic(fmt.Sprintf(\"%s: File not found\", path))\n\t}\n\treturn path\n}", "func (s *GitTestHelper) toAbsPath(name string) string {\n\tif path.IsAbs(name) {\n\t\treturn name\n\t}\n\twd := s.Getwd()\n\tif wd == \"\" {\n\t\twd, _ = os.Getwd()\n\t\tif wd == \"\" {\n\t\t\twd = os.TempDir()\n\t\t}\n\t}\n\n\treturn path.Join(wd, name)\n}", "func rewriteAbsoluteURLs(n *html.Node, base *url.URL, tagAttrs []string) {\n\tfor _, attr := range tagAttrs {\n\t\tif v, ok := htmlnode.GetAttributeVal(n, attr); ok {\n\t\t\thtmlnode.SetAttribute(n, \"\", attr, amphtml.RewriteAbsoluteURL(base, v))\n\t\t}\n\t}\n}", "func ToAbsolutePath(base, path string) string {\n\tif filepath.IsAbs(path) {\n\t\treturn path\n\t}\n\treturn filepath.Clean(filepath.Join(base, path))\n}", "func ensureAbs(path string) string {\n\tif filepath.IsAbs(path) {\n\t\treturn 
path\n\t}\n\treturn filepath.Join(WorkDir(), path)\n}", "func RelativizeAuthInfoLocalPaths(authInfo *clientcmdapi.AuthInfo) error {\n\tif len(authInfo.LocationOfOrigin) == 0 {\n\t\treturn fmt.Errorf(\"no location of origin for %v\", authInfo)\n\t}\n\tbase, err := filepath.Abs(filepath.Dir(authInfo.LocationOfOrigin))\n\tif err != nil {\n\t\treturn fmt.Errorf(\"could not determine the absolute path of config file %s: %v\", authInfo.LocationOfOrigin, err)\n\t}\n\n\tif err := ResolvePaths(GetAuthInfoFileReferences(authInfo), base); err != nil {\n\t\treturn err\n\t}\n\tif err := RelativizePathWithNoBacksteps(GetAuthInfoFileReferences(authInfo), base); err != nil {\n\t\treturn err\n\t}\n\n\treturn nil\n}", "func normPath(bases []string, abspath string) (string, error) {\n\tfor _, base := range bases {\n\t\tabsbase, err := filepath.Abs(base)\n\t\tif isUnder(absbase, abspath) {\n\t\t\tif err != nil {\n\t\t\t\treturn \"\", err\n\t\t\t}\n\t\t\trelpath, err := filepath.Rel(absbase, abspath)\n\t\t\tif err != nil {\n\t\t\t\treturn \"\", err\n\t\t\t}\n\t\t\treturn filepath.Join(base, relpath), nil\n\t\t}\n\t}\n\treturn abspath, nil\n}", "func sanitizeFileName(v string) string {\n\treturn path.Clean(strings.ReplaceAll(v, \"../\", \"\"))\n}", "func ToAbs(p string) string {\n\tvar err error\n\tp, err = filepath.Abs(p)\n\tif err != nil {\n\t\tpanic(err)\n\t}\n\treturn p\n}", "func shortNameAliasesConfPath(ctx *types.SystemContext) (string, error) {\n\tif ctx != nil && len(ctx.UserShortNameAliasConfPath) > 0 {\n\t\treturn ctx.UserShortNameAliasConfPath, nil\n\t}\n\n\tif rootless.GetRootlessEUID() == 0 {\n\t\t// Root user or in a non-conforming user NS\n\t\treturn filepath.Join(\"/var/cache\", userShortNamesFile), nil\n\t}\n\n\t// Rootless user\n\tcacheRoot, err := homedir.GetCacheHome()\n\tif err != nil {\n\t\treturn \"\", err\n\t}\n\n\treturn filepath.Join(cacheRoot, userShortNamesFile), nil\n}", "func ExpandAbs(path string) string {\n\tpath, _ = filepath.Abs(ExpandHome(path))\n\treturn path\n}", "func forceAbsolute(base, maybeRel string) (string, error) {\n\tmaybeRelU, err := url.Parse(maybeRel)\n\tif err != nil {\n\t\treturn \"\", err\n\t}\n\tbaseU, err := url.Parse(base)\n\tif err != nil {\n\t\treturn \"\", err\n\t}\n\n\tactualU := baseU.ResolveReference(maybeRelU)\n\treturn actualU.String(), nil\n}", "func Abs(filename string) string {\n\tpath, err := filepath.Abs(filename)\n\tif err != nil {\n\t\treturn filename\n\t}\n\treturn path\n}", "func ToAbsPath(path, root string) (string, error) {\n\tif filepath.IsAbs(path) {\n\t\treturn path, nil\n\t}\n\treturn filepath.Join(root, path), nil\n}", "func AbsoluteBazelTarget(bld *build.File, ruleName string) string {\n\tif strings.HasPrefix(ruleName, \"//\") {\n\t\t// already absolute\n\t\tif colonIdx := strings.LastIndex(ruleName, \":\"); colonIdx == -1 {\n\t\t\t// expand shorthand syntax\n\t\t\treturn ruleName + \":\" + ruleName[strings.LastIndex(ruleName, \"/\")+1:]\n\t\t}\n\t\treturn ruleName\n\t}\n\tpkg := platform.Normalize(filepath.Dir(bld.Path))\n\treturn fmt.Sprintf(\"//%s:%s\", pkg, strings.TrimPrefix(ruleName, \":\"))\n}", "func Abs(name string) (string, error) {\n\t// Check in case of paths like \"/something/~/something/\"\n\tif len(name) >2 && name[:2] == \"~/\" {\n\t\tusr, err := user.Current()\n\t\tif err != nil {\n\t\t\treturn \"\", err\n\t\t}\n\t\tdir := usr.HomeDir\n\t\tname = filepath.Join(dir, name[2:])\n\t}\n\tif path.IsAbs(name) {\n\t\treturn path.Clean(name), nil\n\t}\n\twd, err := os.Getwd()\n\treturn path.Clean(path.Join(wd, name)), 
err\n}", "func (c Config) shouldExamine(root, absPath string) bool {\n\troot = filepath.ToSlash(root) // Canonicalize\n\tabsPath = filepath.ToSlash(absPath) // Canonicalize\n\trelPath, err := filepath.Rel(root, absPath)\n\tif err != nil {\n\t\treturn false\n\t}\n\trelPath = filepath.ToSlash(relPath) // Canonicalize\n\n\tres := false\n\tfor _, rule := range c.Paths {\n\t\tres = rule(relPath, res)\n\t}\n\n\treturn res\n}", "func resolveAndGlobFilepaths(baseDir string, utf *unitTestFile) error {\n\tfor i, rf := range utf.RuleFiles {\n\t\tif rf != \"\" && !filepath.IsAbs(rf) {\n\t\t\tutf.RuleFiles[i] = filepath.Join(baseDir, rf)\n\t\t}\n\t}\n\n\tvar globbedFiles []string\n\tfor _, rf := range utf.RuleFiles {\n\t\tm, err := filepath.Glob(rf)\n\t\tif err != nil {\n\t\t\treturn err\n\t\t}\n\t\tif len(m) == 0 {\n\t\t\tfmt.Fprintln(os.Stderr, \" WARNING: no file match pattern\", rf)\n\t\t}\n\t\tglobbedFiles = append(globbedFiles, m...)\n\t}\n\tutf.RuleFiles = globbedFiles\n\treturn nil\n}", "func fullPath(path string) (string, error) {\n\tpath = strings.Replace(path, \"~\", os.Getenv(\"HOME\"), 1)\n\n\treturn filepath.Abs(path)\n}", "func (c Corpus) Abs(rel string) string {\n\tif string(c) == \"\" {\n\t\treturn \"\"\n\t}\n\treturn filepath.Join(string(c), rel)\n}", "func addRonYamlFile(oYamlPath string, configs *[]*RawConfig) (string, error) {\n\tvar err error\n\tfoundConfigDir := \"\"\n\tif oYamlPath == \"\" {\n\t\toYamlPath, err = findConfigFile()\n\t\tif err != nil {\n\t\t\treturn \"\", err\n\t\t}\n\t\tfoundConfigDir = filepath.Dir(oYamlPath)\n\t}\n\n\tif oYamlPath != \"\" {\n\t\toConfig, err := LoadConfigFile(oYamlPath)\n\t\tif err != nil {\n\t\t\tfmt.Println(color.Red(err.Error()))\n\t\t\treturn \"\", err\n\t\t}\n\t\toConfig.Filepath = oYamlPath\n\t\toConfig.Envs = strings.TrimSpace(oConfig.Envs)\n\t\toConfig.Remotes = strings.TrimSpace(oConfig.Remotes)\n\t\toConfig.Targets = strings.TrimSpace(oConfig.Targets)\n\t\t// prepend the override config\n\t\t*configs = append([]*RawConfig{oConfig}, *configs...)\n\t}\n\n\treturn foundConfigDir, err\n}", "func getAbsPath(givenPath string) string {\n\tmanifestPath, err := filepath.Abs(givenPath)\n\tif err != nil {\n\t\tlog.Fatal().Err(err).Msgf(\"Unable to convert to Absolute file path.\")\n\t}\n\treturn manifestPath\n}", "func ResolveLocalPaths(config *clientcmdapi.Config) error {\n\tfor _, cluster := range config.Clusters {\n\t\tif len(cluster.LocationOfOrigin) == 0 {\n\t\t\tcontinue\n\t\t}\n\t\tbase, err := filepath.Abs(filepath.Dir(cluster.LocationOfOrigin))\n\t\tif err != nil {\n\t\t\treturn fmt.Errorf(\"could not determine the absolute path of config file %s: %v\", cluster.LocationOfOrigin, err)\n\t\t}\n\n\t\tif err := ResolvePaths(GetClusterFileReferences(cluster), base); err != nil {\n\t\t\treturn err\n\t\t}\n\t}\n\tfor _, authInfo := range config.AuthInfos {\n\t\tif len(authInfo.LocationOfOrigin) == 0 {\n\t\t\tcontinue\n\t\t}\n\t\tbase, err := filepath.Abs(filepath.Dir(authInfo.LocationOfOrigin))\n\t\tif err != nil {\n\t\t\treturn fmt.Errorf(\"could not determine the absolute path of config file %s: %v\", authInfo.LocationOfOrigin, err)\n\t\t}\n\n\t\tif err := ResolvePaths(GetAuthInfoFileReferences(authInfo), base); err != nil {\n\t\t\treturn err\n\t\t}\n\t}\n\n\treturn nil\n}", "func MakeFileAbs(file, dir string) (string, error) {\n\tif file == \"\" {\n\t\treturn \"\", nil\n\t}\n\tif filepath.IsAbs(file) {\n\t\treturn file, nil\n\t}\n\tpath, err := filepath.Abs(filepath.Join(dir, file))\n\tif err != nil {\n\t\treturn \"\", fmt.Errorf(\"Failed 
making '%s' absolute based on '%s'\", file, dir)\n\t}\n\treturn path, nil\n}", "func AbsPath(elem ...string) string {\r\n\tp := path.Join(elem...)\r\n\tif filepath.IsAbs(p) {\r\n\t\treturn p\r\n\t}\r\n\treturn path.Join(workDir, p)\r\n}", "func makeAbsoluteHref(baseURL string, href string) string {\n\tif strings.HasPrefix(href, \"http\") {\n\t\treturn href\n\t} else {\n\t\treturn baseURL + href\n\t}\n}", "func absJoin(dir, name string) (string, error) {\n\tif name == \"\" {\n\t\treturn filepath.Abs(dir)\n\t}\n\n\tif filepath.IsAbs(name) {\n\t\treturn name, nil\n\t}\n\n\tif len(name) > 0 && name[0] == '~' && (len(name) == 1 || name[1] == '/' || name[1] == '\\\\') {\n\t\treturn expendHome(name)\n\t}\n\n\treturn absPath(filepath.Join(dir, name))\n}", "func MakeFileAbs(file, dir string) (string, error) {\n\tif file == \"\" {\n\t\treturn \"\", nil\n\t}\n\tif filepath.IsAbs(file) {\n\t\treturn file, nil\n\t}\n\tpath, err := filepath.Abs(filepath.Join(dir, file))\n\tif err != nil {\n\t\treturn \"\", errors.Wrapf(err, \"Failed making '%s' absolute based on '%s'\", file, dir)\n\t}\n\treturn path, nil\n}", "func (c Config) shouldExamine(root, absPath string) bool {\n\troot = filepath.ToSlash(root) // Canonicalize\n\tabsPath = filepath.ToSlash(absPath) // Canonicalize\n\trelPath, err := filepath.Rel(root, absPath)\n\tif err != nil {\n\t\treturn false\n\t}\n\n\tres := true\n\tfor _, rule := range c.Paths {\n\t\tres = rule(relPath, res)\n\t}\n\n\treturn res\n}", "func (this *File) AbsolutePath() string {\n\treturn path.Join(this.Parent.AbsolutePath(), this.Attrs.Name)\n}", "func AbsPathify(workingDir, inPath string) string {\n\tif filepath.IsAbs(inPath) {\n\t\treturn filepath.Clean(inPath)\n\t}\n\treturn filepath.Join(workingDir, inPath)\n}", "func GetPathRelativeToConfig(configuration *viper.Viper, key string) string {\n\tconfigFile := configuration.ConfigFileUsed()\n\tp := configuration.GetString(key)\n\tif p == \"\" || filepath.IsAbs(p) {\n\t\treturn p\n\t}\n\treturn filepath.Clean(filepath.Join(filepath.Dir(configFile), p))\n}", "func (p *Project) normalizeConfigPath(configPath string) string {\n\tbaseName := path.Base(configPath)\n\tif baseName == \"dagger.json\" {\n\t\treturn configPath\n\t}\n\treturn path.Join(configPath, \"dagger.json\")\n}", "func (c *Collection) AbsDir() string {\n\treturn filepath.Join(c.cfg.SourceDir(), c.PathPrefix())\n}", "func ConfigPaths(component string) []string {\n\treturn []string{\n\t\tcomponent + \".yaml\",\n\t\tfmt.Sprintf(\"%s/%s.yaml\", ConfigDir, component),\n\t}\n}", "func makeAbsURL(host, path string) string {\n\tpathObj, err := url.Parse(path)\n\tif err != nil {\n\t\treturn host\n\t}\n\tif pathObj.IsAbs() {\n\t\treturn pathObj.String()\n\t}\n\thostObj, err := url.Parse(host)\n\tif err == nil {\n\t\tpathObj.Scheme = hostObj.Scheme\n\t\tpathObj.Host = hostObj.Host\n\t}\n\treturn pathObj.String()\n}", "func (lbase *Logbase) MakeIndexfileRelPath(fnum LBUINT) string {\n\treturn MakeIndexfileName(fnum, lbase.config.INDEXFILE_NAME_EXTENSION)\n}", "func (p *PathSpec) AbsURL(in string, addLanguage bool) string {\n\tisAbs, err := p.IsAbsURL(in)\n\tif err != nil {\n\t\treturn in\n\t}\n\tif isAbs || strings.HasPrefix(in, \"//\") {\n\t\t// It is already absolute, return it as is.\n\t\treturn in\n\t}\n\n\tbaseURL := p.getBaseURLRoot(in)\n\n\tif addLanguage {\n\t\tprefix := p.GetLanguagePrefix()\n\t\tif prefix != \"\" {\n\t\t\thasPrefix := false\n\t\t\t// avoid adding language prefix if already present\n\t\t\tin2 := in\n\t\t\tif strings.HasPrefix(in, \"/\") {\n\t\t\t\tin2 
= in[1:]\n\t\t\t}\n\t\t\tif in2 == prefix {\n\t\t\t\thasPrefix = true\n\t\t\t} else {\n\t\t\t\thasPrefix = strings.HasPrefix(in2, prefix+\"/\")\n\t\t\t}\n\n\t\t\tif !hasPrefix {\n\t\t\t\taddSlash := in == \"\" || strings.HasSuffix(in, \"/\")\n\t\t\t\tin = path.Join(prefix, in)\n\n\t\t\t\tif addSlash {\n\t\t\t\t\tin += \"/\"\n\t\t\t\t}\n\t\t\t}\n\t\t}\n\t}\n\n\treturn paths.MakePermalink(baseURL, in).String()\n}", "func (s SourceFilesystems) MakeStaticPathRelative(filename string) string {\n\tfor _, staticFs := range s.Static {\n\t\trel, _ := staticFs.MakePathRelative(filename)\n\t\tif rel != \"\" {\n\t\t\treturn rel\n\t\t}\n\t}\n\treturn \"\"\n}", "func (f *File) AbsolutePath() string {\n\tif f.State == Uploaded {\n\t\treturn config.rootPath + \"/db/temp/\" + f.UploaderUsername + \"/\" + f.UUID + \".\" + f.Extension\n\t}\n\treturn config.rootPath + \"/static/content/\" + f.UUID + \".\" + f.Extension\n}", "func PathAbs(rootdir, path string) string {\n\tif filepath.IsAbs(path) {\n\t\treturn path\n\t}\n\treturn filepath.Join(rootdir, path)\n}", "func DefaultPersisConfigPath(v *viper.Viper) string {\n\t// persist using config\n\tf := v.ConfigFileUsed() // ./conf/.sole.yaml\n\tif f == \"\" {\n\t\treturn \".use.yaml\"\n\t}\n\tdir := filepath.Dir(f)\n\tbase := filepath.Base(f)\n\text := filepath.Ext(f)\n\tname := strings.TrimPrefix(strings.TrimSuffix(base, ext), \".\")\n\n\treturn filepath_.Pathify(filepath.Join(dir, \".use.\"+name+\".yaml\")) // /root/.use.sole.yaml\n}", "func Abs(path string) string {\n\tabspath, err := filepath.Abs(Expand(path))\n\tif err != nil {\n\t\treturn path\n\t}\n\treturn abspath\n}", "func (context *Context) AbsGlob(value string) (abs string, absprefix string, err error) {\n\texpanded, err := context.ExpandEnv(value)\n\tif err != nil {\n\t\treturn \"\", \"\", err\n\t}\n\n\tabs, err = filepath.Abs(expanded)\n\tif err != nil {\n\t\treturn \"\", \"\", err\n\t}\n\n\tabsprefix = extractGlobPrefix(abs)\n\n\t// TODO: verify absprefix\n\n\treturn abs, absprefix, nil\n}", "func PathJoin(incoming []string) string { return filesys.PathJoin(incoming) }", "func dirpathAssets(config *MainConfig) string {\n\tpath := filepath.Join(config.Outpath, DIR_NAME_ASSETS)\n\n\tdirpathMkall(path)\n\n\treturn path\n}", "func MakeAbsolutePath(goos, path string) string {\n\tif goos != \"windows\" {\n\t\treturn filepath.Clean(\"/\" + path)\n\t}\n\t// These are all for windows\n\t// If there is a colon, give up.\n\tif strings.Contains(path, \":\") {\n\t\treturn path\n\t}\n\t// If there is a slash, but no drive, add 'c:'\n\tif strings.HasPrefix(path, \"/\") || strings.HasPrefix(path, \"\\\\\") {\n\t\treturn \"c:\" + path\n\t}\n\t// Otherwise, add 'c:\\'\n\treturn \"c:\\\\\" + path\n}", "func RelativePathBaseOn(basePath, filePath string) string {\n\tif filepath.IsAbs(filePath) {\n\t\treturn filePath\n\t}\n\treturn filepath.Join(basePath, filePath)\n}", "func RelativizeClusterLocalPaths(cluster *clientcmdapi.Cluster) error {\n\tif len(cluster.LocationOfOrigin) == 0 {\n\t\treturn fmt.Errorf(\"no location of origin for %s\", cluster.Server)\n\t}\n\tbase, err := filepath.Abs(filepath.Dir(cluster.LocationOfOrigin))\n\tif err != nil {\n\t\treturn fmt.Errorf(\"could not determine the absolute path of config file %s: %v\", cluster.LocationOfOrigin, err)\n\t}\n\n\tif err := ResolvePaths(GetClusterFileReferences(cluster), base); err != nil {\n\t\treturn err\n\t}\n\tif err := RelativizePathWithNoBacksteps(GetClusterFileReferences(cluster), base); err != nil {\n\t\treturn err\n\t}\n\n\treturn nil\n}", "func 
Abs(path string) string {\n\tabsPath, err := filepath.Abs(path)\n\tif err != nil {\n\t\treturn path\n\t}\n\treturn absPath\n}", "func resolvConfForRoot() {\n\tif _, err := os.Stat(\"/var/lib/istio/resolv.conf\"); !os.IsNotExist(err) {\n\t\tlog.Println(\"Alternate resolv.conf exists\")\n\t\treturn\n\t}\n\n\tos.MkdirAll(\"/var/lib/istio\", 0755)\n\tdata, err := os.ReadFile(\"/etc/resolv.conf\")\n\tif err != nil {\n\t\tlog.Println(\"Failed to read resolv.conf, DNS interception will fail \", err)\n\t\treturn\n\t}\n\terr = os.WriteFile(\"/var/lib/istio/resolv.conf\", data, 0755)\n\tif err != nil {\n\t\tlog.Println(\"Failed to create alternate resolv.conf, DNS interception will fail \", err)\n\t\treturn\n\t}\n\terr = os.WriteFile(\"/etc/resolv.conf\", []byte(`nameserver: 127.0.0.1\\nsearch: google.internal.`), 755)\n\tif err != nil {\n\t\tlog.Println(\"Failed to create resolv.conf, DNS interception will fail \", err)\n\t\treturn\n\t}\n\tlog.Println(\"Adjusted resolv.conf\")\n}", "func getConfFilePath(root, clusterName string) string {\n\treturn fmt.Sprintf(\"%s/%s.config\", root, clusterName)\n}", "func (c *Config) GetAbsPath(relPath string) string {\n\treturn path.Join(c.workinkDir, relPath)\n}", "func (lbase *Logbase) MakeLogfileRelPath(fnum LBUINT) string {\n\treturn MakeLogfileName(fnum, lbase.config.LOGFILE_NAME_EXTENSION)\n}", "func UserConfigDir() (string, error)", "func expandFilePath(filePath string, baseDir string) string {\n\tif !strings.HasPrefix(filePath, \"/\") {\n\t\tfilePath = path.Join(baseDir, filePath)\n\t}\n\treturn filePath\n}", "func sanitize(gopath, filename string) string {\n\tprefix := gopath + string(os.PathSeparator) + \"src\" + string(os.PathSeparator)\n\treturn filepath.ToSlash(strings.TrimPrefix(filename, prefix))\n}", "func resolveFilePath(rawUrl string) (string, error) {\n\tu, err := url.Parse(rawUrl)\n\tif err != nil {\n\t\treturn \"\", err\n\t}\n\treturn filepath.Abs(filepath.Join(u.Host, u.Path))\n}", "func makeRelative(child, parent string) string {\n\treturn strings.TrimPrefix(child, dirName(parent))\n}", "func ConfigFile(inp string) string {\n\tif inp != \"\" {\n\t\tpath := ExpandUser(inp)\n\t\tif FileExists(path) {\n\t\t\treturn path\n\t\t}\n\t}\n\n\tif env := os.Getenv(\"DOLA_CONFIG\"); env != \"\" {\n\t\tpath := ExpandUser(env)\n\t\tif FileExists(path) {\n\t\t\treturn path\n\t\t}\n\t}\n\n\tif path := ExpandUser(\"~/.dola/config.json\"); FileExists(path) {\n\t\treturn path\n\t}\n\n\treturn \"\"\n}", "func (c Calendars) configPath() (string, error) {\n\tconfDir, err := configDirectory()\n\tif err != nil {\n\t\treturn \"\", err\n\t}\n\n\treturn filepath.Join(confDir, \"calendars.txt\"), nil\n}", "func AbsolutePath(value string) (string, error) {\n\treTilde := regexp.MustCompile(`^~`)\n\tvalue = reTilde.ReplaceAllString(value, os.Getenv(\"HOME\"))\n\tfilename, err := filepath.Abs(value)\n\tif err != nil {\n\t\treturn \"\", errors.Wrapf(err, \"error getting absolute path for %s\", value)\n\t}\n\treturn filename, nil\n}", "func FullPath(path string) string {\n\treturn filepath.Clean(Config.Get(\"configuration\", \"local_path\") + SLASH + path)\n}", "func (r Ref) AbsPath(rootDir string) string {\n\tpath := filepath.Join(rootDir, r.GetPath())\n\tpath, _ = filepath.Abs(path)\n\tpath = filepath.ToSlash(path)\n\treturn path\n}", "func baseDir(filename string) string {\n\treturn filepath.Clean(filepath.Dir(filename))\n}", "func changeFileOwnership(config *Configuration) error {\n\tif config.UserName == \"\" {\n\t\treturn errors.New(\"no username supplied in 
config\")\n\t}\n\n\tusr, err := config.ext.lookupUser(config.UserName)\n\tif err != nil {\n\t\treturn errors.Wrap(err, \"user lookup\")\n\t}\n\n\tgrp, err := getGroup(config.ext, usr, config.GroupName)\n\tif err != nil {\n\t\treturn errors.WithMessage(err, \"group lookup\")\n\t}\n\n\tif err := chownAll(config, usr, grp); err != nil {\n\t\treturn err\n\t}\n\n\treturn nil\n}", "func (t *Team) AbsoluteURL(path string) string {\n\treturn fmt.Sprintf(\"%s%s\", t.teamConfig.HTTPURL, path)\n}", "func appConfigFilePaths(ctx context.Context) (paths []string) {\n\tif p := flag.GetAppConfigFilePath(ctx); p != \"\" {\n\t\tpaths = append(paths, p, filepath.Join(p, app.DefaultConfigFileName))\n\n\t\treturn\n\t}\n\n\twd := state.WorkingDirectory(ctx)\n\tpaths = append(paths, filepath.Join(wd, app.DefaultConfigFileName))\n\n\treturn\n}", "func (rp RelPath) Abs(x *X) string {\n\treturn filepath.Join(x.Root, string(rp))\n}", "func autoAppDir(prefix, suffix string) string {\n\tvar dir string\n\tsuffix = SafeFileName(suffix)\n\tprefix = SafeFileName(prefix)\n\tif prefix == \"\" {\n\t\tfpath := options.GetExecFileByPid(os.Getpid())\n\t\tpwd := fpath\n\t\tfpath = path.Base(path.Dir(strings.TrimRight(pwd, \"/\")))\n\t\tif fpath == \"bin\" || fpath == \"sbin\" {\n\t\t\tdir = pwd + \"/../\" + suffix + \"/\"\n\t\t} else {\n\t\t\tdir = pwd + \"/./\" + suffix + \"/\"\n\t\t}\n\t} else {\n\t\tdir = prefix + \"/./\" + suffix + \"/\"\n\t}\n\treturn path.Clean(dir)\n}", "func makeUrlAbsolute(imgSrc, baseUrl string) (string, error) {\n\n\timgUrl, err := url.Parse(imgSrc)\n\n\tif err != nil {\n\t//\tprf(\"Error parsing URL: %s %v %s\", imgSrc, imgUrl, err)\n\t\treturn \"\", err\n\t}\n\n\tif !imgUrl.IsAbs() {\n\t//\tpr(\"Image URL is not absolute\")\n\n\t\tbaseUrl, err := url.Parse(baseUrl)\n\t\tif err != nil {\n\t//\t\tprf(\"Error parsing base URL: %s %s\", linkUrl.Url, err)\n\t\t\treturn \"\", err\n\t\t}\n\n\t\timgUrl := baseUrl.ResolveReference(imgUrl)\n\n\t\t//prVal(\"Fixed Image Url:\", imgUrl)\n\n\t\timgSrc = imgUrl.String()\n\n\t\t//prVal(\"Fixed imgSrc:\", imgSrc)\n\t}\n\n\treturn imgSrc, nil\n}", "func absPath(name string) (string, error) {\n\tif name == \"\" {\n\t\treturn os.Getwd()\n\t}\n\n\tif filepath.IsAbs(name) {\n\t\treturn name, nil\n\t}\n\n\tif len(name) > 0 && name[0] == '~' && (len(name) == 1 || name[1] == '/' || name[1] == '\\\\') {\n\t\treturn expendHome(name)\n\t}\n\n\treturn filepath.Abs(name)\n}", "func makeAbsPath(val, pageURL string) (string, error) {\n\tif path.IsAbs(val) {\n\t\tparsedURL, err := url.Parse(val)\n\t\tif err != nil {\n\t\t\treturn \"\", fmt.Errorf(\"error parsing path: %v\\n\", err)\n\t\t}\n\t\tbase, err := url.Parse(pageURL)\n\t\tif err != nil {\n\t\t\treturn \"\", fmt.Errorf(\"error parsing URL: %v\\n\", err)\n\t\t}\n\t\treturn base.ResolveReference(parsedURL).String(), nil\n\t}\n\treturn val, nil\n}", "func fileInfosToAbs(t *testing.T, fileInfos []bufmoduleref.FileInfo) []bufmoduleref.FileInfo {\n\tif fileInfos == nil {\n\t\treturn nil\n\t}\n\tnewFileInfos := make([]bufmoduleref.FileInfo, len(fileInfos))\n\tfor i, fileInfo := range fileInfos {\n\t\tabsExternalPath, err := normalpath.NormalizeAndAbsolute(fileInfo.ExternalPath())\n\t\trequire.NoError(t, err)\n\t\tnewFileInfo := bufmoduletesting.NewFileInfo(\n\t\t\tt,\n\t\t\tfileInfo.Path(),\n\t\t\tabsExternalPath,\n\t\t\tfileInfo.IsImport(),\n\t\t\tnil,\n\t\t\t\"\",\n\t\t)\n\t\tnewFileInfos[i] = newFileInfo\n\t}\n\treturn newFileInfos\n}", "func (fs *BaseFs) MakePathRelative(filename string) (string, string) {\n\tfor _, sfs := 
range fs.FileSystems() {\n\t\tif sfs.Contains(filename) {\n\t\t\tif s, found := sfs.MakePathRelative(filename); found {\n\t\t\t\treturn sfs.Name, s\n\t\t\t}\n\t\t}\n\t}\n\t// May be a static file.\n\tif s := fs.MakeStaticPathRelative(filename); s != \"\" {\n\t\treturn files.ComponentFolderStatic, s\n\t}\n\t// Fall back to relative to the working dir.\n\tif strings.HasPrefix(filename, fs.workingDir) {\n\t\treturn \"\", strings.TrimPrefix(filename, fs.workingDir)\n\t}\n\treturn \"\", \"\"\n}", "func defaultPath(conf *string, dir, file string) {\n\tif *conf == \"\" {\n\t\t*conf = path.Join(dir, file)\n\t}\n}", "func (c *Config) UpdateRelativePaths() {\n\tconfigDir := filepath.Dir(c.Path)\n\n\tfor name := range c.VM {\n\t\tc.VM[name].updateRelativePaths(configDir, name)\n\t}\n}", "func IsAbsolute(pth string) bool {\n\tif len(pth) > 0 {\n\t\tif pth[0] == '/' {\n\t\t\treturn true\n\t\t}\n\t\tif runtime.GOOS == \"windows\" && len(pth) > 2 {\n\t\t\tif pth[1] == ':' {\n\t\t\t\tif pth[2] == '/' || pth[2] == '\\\\' {\n\t\t\t\t\treturn true\n\t\t\t\t}\n\t\t\t}\n\t\t}\n\t}\n\treturn false\n}", "func NormalizeAndAbsolute(path string) (string, error) {\n\tabsPath, err := filepath.Abs(Unnormalize(path))\n\tif err != nil {\n\t\treturn \"\", err\n\t}\n\treturn Normalize(absPath), nil\n}", "func AbsPath(input string) string {\n\tp := input\n\tif p == \"\" {\n\t\treturn \"\"\n\t}\n\tif p[0] == '~' {\n\t\tp = path.Join(homeDir, p[1:])\n\t}\n\tmatches, _ := filepath.Glob(p)\n\tif len(matches) != 0 {\n\t\tp = matches[0]\n\t}\n\tabs, _ := filepath.Abs(p)\n\treturn abs\n}", "func (conf FileConfiguration) BaseURL() string {\n\treturn conf.parsedYaml.BaseURL\n}", "func getCfgPath(name string) string {\n\treturn configDir + \"/\" + name + \".conf\"\n}", "func ExampleGetAbsPath() {\n\tpathArr := []string{\"/\", \"/var/log/boot.log\", \"config.json\", \"./example/test.go\"}\n\n\tfor _, v := range pathArr {\n\t\tif p, err := pathhelper.GetAbsPath(v); err != nil {\n\t\t\tfmt.Fprintf(os.Stderr, \"GetAbsPath(%v) error: %v\\n\", v, err)\n\t\t} else {\n\t\t\tfmt.Fprintf(os.Stderr, \"%v\\n\", p)\n\t\t}\n\t}\n\t// Output:\n}", "func sanitizedName(filename string) string {\n\tif len(filename) > 1 && filename[1] == ':' &&\n\t\truntime.GOOS == \"windows\" {\n\t\tfilename = filename[2:]\n\t}\n\tfilename = filepath.ToSlash(filename)\n\tfilename = strings.TrimLeft(filename, \"/.\")\n\treturn strings.Replace(filename, \"../\", \"\", -1)\n}", "func Sanitize(cfg *config.Config) {\n\t// sanitize config\n\tif cfg.HTTP.Root != \"/\" {\n\t\tcfg.HTTP.Root = strings.TrimSuffix(cfg.HTTP.Root, \"/\")\n\t}\n}", "func Abs(path string) string {\n\t_, filename, _, ok := runtime.Caller(1)\n\n\tif !ok {\n\t\treturn \"\"\n\t}\n\n\treturn filepath.Join(filepath.Dir(filename), path)\n}", "func toAbsoluteURI(uri string, base *nurl.URL) string {\n\tif uri == \"\" || base == nil {\n\t\treturn \"\"\n\t}\n\n\t// If it is hash tag, return as it is\n\tif uri[0:1] == \"#\" {\n\t\treturn uri\n\t}\n\n\t// If it is already an absolute URL, return as it is\n\ttempURI, err := nurl.ParseRequestURI(uri)\n\tif err == nil && len(tempURI.Scheme) > 0 {\n\t\treturn uri\n\t}\n\n\t// Otherwise, put it as path of base URL\n\tnewURI := nurl.URL(*base)\n\tnewURI.Path = uri\n\n\treturn newURI.String()\n}", "func (ie *InitEnvironment) GetAbsFilePath(filename string) string {\n\t// Here IsAbs should be enough but unfortunately it doesn't handle absolute paths starting from\n\t// the current drive on windows like `\\users\\noname\\...`. 
Also it makes it more easy to test and\n\t// will probably be need for archive execution under windows if always consider '/...' as an\n\t// absolute path.\n\tif filename[0] != '/' && filename[0] != '\\\\' && !filepath.IsAbs(filename) {\n\t\tfilename = filepath.Join(ie.CWD.Path, filename)\n\t}\n\tfilename = filepath.Clean(filename)\n\tif filename[0:1] != afero.FilePathSeparator {\n\t\tfilename = afero.FilePathSeparator + filename\n\t}\n\treturn filename\n}", "func homeConfigurationFilename(ext string) string {\n\treturn filepath.Join(homeDir(), \"iris\"+ext)\n}", "func ExpandFiles(args []string) {\n\twd, err := os.Getwd()\n\thelheim.Check(err)\n\tfor i, arg := range args {\n\t\tif !path.IsAbs(arg) && FileExists(arg) {\n\t\t\targs[i] = path.Clean(wd + \"/\" + arg)\n\t\t}\n\t}\n}" ]
[ "0.6721338", "0.61070424", "0.60937524", "0.5970152", "0.5803267", "0.57943124", "0.5765189", "0.5707019", "0.55815166", "0.5573749", "0.5568223", "0.5561989", "0.5475165", "0.54627305", "0.5397373", "0.5357626", "0.5349042", "0.527192", "0.5239704", "0.5228473", "0.52187955", "0.51792073", "0.51405805", "0.513679", "0.51333004", "0.512466", "0.5119571", "0.5111594", "0.50961244", "0.50958204", "0.5059048", "0.5058711", "0.50474584", "0.50401884", "0.5039621", "0.50329036", "0.5025394", "0.50103515", "0.5008071", "0.5000273", "0.49978408", "0.49501958", "0.49491608", "0.49414173", "0.49317688", "0.49316296", "0.49244398", "0.49228123", "0.49174875", "0.4899691", "0.48987046", "0.48903412", "0.48760328", "0.48711258", "0.4862806", "0.48618016", "0.4852492", "0.48511285", "0.4849313", "0.48432466", "0.47993994", "0.47985584", "0.47811997", "0.47773534", "0.47749165", "0.47631195", "0.4760218", "0.47464183", "0.474234", "0.47381538", "0.47355026", "0.47310796", "0.4727829", "0.47232053", "0.4723023", "0.47206697", "0.4719029", "0.47037813", "0.4699391", "0.46953356", "0.4692754", "0.46868122", "0.4676771", "0.46687895", "0.4668147", "0.46672955", "0.46651128", "0.46635017", "0.46528023", "0.46507058", "0.46463293", "0.46455505", "0.4626619", "0.46141228", "0.4609257", "0.4603215", "0.46006796", "0.4600485", "0.45904532", "0.4587329" ]
0.6563512
1
closeListener closes the listening endpoint
func (s *Server) closeListener() error {
	s.mutex.Lock()
	defer s.mutex.Unlock()
	if s.listener == nil {
		msg := "Stop: listener was already closed"
		log.Debugf(msg)
		return errors.New(msg)
	}
	_, port, err := net.SplitHostPort(s.listener.Addr().String())
	if err != nil {
		return err
	}
	err = s.listener.Close()
	if err != nil {
		log.Debugf("Stop: failed to close listener on port %s: %s", port, err)
		return err
	}
	log.Debugf("Stop: successfully closed listener on port %s", port)
	s.listener = nil
	return nil
}
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "func (p *Listener) Close() error { return p.Listener.Close() }", "func (l *listener) Close() error {\n\tl.cancel()\n\tl.host.RemoveStreamHandler(l.tag)\n\treturn nil\n}", "func (l *Listener) Close() error {\n\treturn nil\n}", "func (l *Listener) Close() error {\n\tif err := l.lowerListener.Close(); err != nil {\n\t\treturn err\n\t}\n\n\tl.endpoint = \"\"\n\tl.rcvBufSize = 0\n\tl.sndBufSize = 0\n\treturn nil\n}", "func (r *RPCIngressGateway) CloseListener(lisID *uint16, _ *struct{}) (err error) {\n\tdefer rpcutil.LogCall(r.log, \"CloseListener\", lisID)(nil, &err)\n\n\tlis, err := r.popListener(*lisID)\n\tif err != nil {\n\t\treturn err\n\t}\n\n\treturn lis.Close()\n}", "func (s *Listener) Close() error {\n\treturn s.Listener.Close()\n}", "func (tl *testListener) Close() error {\n\treturn nil\n}", "func (p *Listener) Close() error {\n\treturn p.Listener.Close()\n}", "func (l *Listener) Close() error {\n\treturn l.Listener.Close()\n}", "func (l *grpcListener) Close() error {\n\tl.listenerCtxCancel()\n\treturn nil\n}", "func (this *udp_listener) Close() error {\n\t/*\n\t\tif err := this.conn.Close(); nil != err {\n\t\t\treturn err\n\t\t}\n\t\tclose(this.signal)\n\t*/\n\tthis.stoped = true\n\treturn nil\n}", "func (s *SimpleServer) Close() error {\n\tif s.Listener == nil {\n\t\treturn nil\n\t}\n\treturn s.Listener.Close()\n}", "func (s *server) Close() error {\n\treturn s.listener.Close()\n}", "func (l *Listener) Close() {\n\tclose(l.Shutdown)\n\tl.Shutdown = nil\n}", "func (listener *Listener) Close() error {\n\treturn listener.base.Close()\n}", "func (s *Server) Close() error {\n\tif s.Listener != nil {\n\t\terr := s.Listener.Close()\n\t\ts.Listener = nil\n\t\treturn err\n\t}\n\treturn nil\n}", "func (l *Listener) Close() {\n\tclose(l.shutdownChan)\n\tl.tcpListener.Close()\n}", "func (proxy *UnixProxy) Close() { proxy.listener.Close() }", "func (l *MockListener) Close() error {\n\treturn nil\n}", "func (l *listener) Close() error {\n\tl.L.Lock()\n\terr := l.err\n\tif err == nil {\n\t\tl.err = ErrListenerClosed\n\t}\n\tl.L.Unlock()\n\tl.Broadcast()\n\n\tif err == nil {\n\t\tl.stopListening()\n\t}\n\n\treturn errors.New(\"Not implemented\")\n}", "func (r *Listener) Close() error {\n\tr.portsLk.Lock()\n\tdefer r.portsLk.Unlock()\n\n\tfor n, port := range r.ports {\n\t\tif err := port.Close(); err != nil {\n\t\t\tr.ports = r.ports[n:] // drop any port that was successfully closed\n\t\t\treturn err\n\t\t}\n\t}\n\tr.ports = nil\n\treturn nil\n}", "func (s *Server) Close() {\n\ts.running = false\n\tif s.listener != nil {\n\t\ts.listener.Close()\n\t\ts.listener = nil\n\t}\n}", "func (g *Gateway) Close() error {\n\tlog.Infof(\"server at %s terminating...\", g.listener.Addr())\n\treturn g.listener.Close()\n}", "func (v *vsockListener) Close() error {\n\t// Note this won't cause the Accept to unblock.\n\treturn unix.Close(v.fd)\n}", "func (ln *Listener) Close() error {\n\treturn ln.tcpLn.Close()\n}", "func (l *SensorListener) Close() error {\n return l.conn.Close()\n}", "func (l *listener) Close() error {\n\tl.once.Do(func() {\n\t\tl.lock.Lock()\n\t\tl.closed = true\n\t\tl.lock.Unlock()\n\t\tif l.listener != nil {\n\t\t\t_ = l.listener.Close()\n\t\t}\n\t\tl.hs.Close()\n\t})\n\treturn nil\n}", "func (s *TCPServer) Close() {\n\ts.listener.Close()\n}", "func (s *Server) Close() error {\n\treturn s.listener.Close()\n}", "func (s *Server) Close() error {\n\treturn s.listener.Close()\n}", "func (s *HTTPSink) Close() error {\n\treturn s.Listener.Close()\n}", "func (server *TCPServer) Close() {\n\tif 
server.listener != nil {\n\t\tlistener := server.listener\n\t\tserver.listener = nil\n\t\tlistener.Close()\n\t}\n}", "func (p *Proxy) Close() error {\n\treturn p.listener.Close()\n}", "func (l *Listener) Close() error {\n\t// XXX - should close on M3UA layer.\n\treturn l.sctpListener.Close()\n}", "func (bn *BasicNotifiee) ListenClose(n net.Network, addr ma.Multiaddr) {\n\tglog.V(4).Infof(\"Notifiee - Close: %v\", addr)\n}", "func (l *WebListener) Close() error {\n\tdefer l.cancel()\n\treturn l.cfg.Listener.Close()\n}", "func (listener *Listener) Close() error {\n\tlistener.close()\n\n\tvar err error\n\tlistener.connections.Range(func(key, value interface{}) bool {\n\t\tconn := value.(*Conn)\n\t\tif closeErr := conn.Close(); err != nil {\n\t\t\terr = fmt.Errorf(\"error closing conn %v: %v\", conn.addr, closeErr)\n\t\t}\n\t\treturn true\n\t})\n\tif err != nil {\n\t\treturn err\n\t}\n\tif err := listener.conn.Close(); err != nil {\n\t\treturn fmt.Errorf(\"error closing UDP listener: %v\", err)\n\t}\n\treturn nil\n}", "func (notifee *Notifee) ListenClose(network.Network, multiaddr.Multiaddr) {}", "func (l *Listener) Close() error {\n\tif l.close() {\n\t\treturn nil\n\t}\n\treturn ErrClientClosed\n}", "func (streamLayer *StreamLayer) Close() error {\n\treturn streamLayer.listener.Close()\n}", "func (l *Listener) Close() error {\n\tremoveListener(l)\n\n\tl.Lock()\n\tdefer l.Unlock()\n\n\tif l.connections == nil {\n\t\treturn &net.OpError{\n\t\t\tOp: \"close\",\n\t\t\tNet: \"fake\",\n\t\t\tSource: l.Addr(),\n\t\t\tErr: fmt.Errorf(\"closed already\"),\n\t\t}\n\t}\n\n\tclose(l.connections)\n\tl.connections = nil\n\treturn nil\n}", "func (s *Server) Close() {\n\ts.rtspListener.Close()\n\tclose(s.shutdown)\n}", "func (s *Server) Close() {\n\ts.rwlock.Lock()\n\tdefer s.rwlock.Unlock()\n\n\tif s.listener != nil {\n\t\terr := s.listener.Close()\n\t\tterror.Log(errors.Trace(err))\n\t\ts.listener = nil\n\t}\n\tmetrics.ServerEventCounter.WithLabelValues(metrics.EventClose).Inc()\n}", "func (lsn *listenerV2) Close() error {\n\treturn lsn.StopOnce(\"VRFListenerV2\", func() error {\n\t\tclose(lsn.chStop)\n\t\t// wait on the request handler, log listener, and head listener to stop\n\t\tlsn.wg.Wait()\n\t\treturn lsn.reqLogs.Close()\n\t})\n}", "func (l *Listener) Close() error {\n\tdefer log.Debugf(\"trafficshape: closed read/write buckets and connection\")\n\n\tl.rb.Close()\n\tl.wb.Close()\n\n\treturn l.Listener.Close()\n}", "func (ml *ManagedListener) Close() {\n\tif ml != nil {\n\t\tdefer trace.Tracer.ScopedTrace()()\n\t\tdefer ml.Monitor()()\n\t\tif ml.Canceled != nil {\n\t\t\tclose(ml.Canceled)\n\t\t\tml.Canceled = make(chan struct{})\n\t\t}\n\t\tif ml.Listener != nil {\n\t\t\tif err := ml.Listener.Close(); err != nil {\n\t\t\t\tlog.Println(\"Error closing listener\", ml.Listener)\n\t\t\t}\n\t\t\tvar pipes = []*pipe.Pipe{}\n\t\t\tfor pipe := range ml.Pipes {\n\t\t\t\tgo pipe.Close()\n\t\t\t\tpipes = append(pipes, pipe)\n\t\t\t}\n\t\t\tfor _, pipe := range pipes {\n\t\t\t\tdelete(ml.Pipes, pipe)\n\t\t\t}\n\t\t}\n\t\tml.RemoveExternalIP()\n\t}\n}", "func (i *invoker) Close() error {\n\ti.cancelPF()\n\n\t// Closing the local listener effectively closes the gRPC connection\n\tif err := i.listener.Close(); err != nil {\n\t\ti.conn.Close() // If we fail to close the listener, explicitly close the gRPC connection and ignore any error\n\t\treturn fmt.Errorf(\"failed to close local tcp port listener: %w\", err)\n\t}\n\n\treturn nil\n}", "func Close() {\n\tif quit != nil {\n\t\t// must close quit 
first\n\t\tclose(quit)\n\t\tlistener.Close()\n\t\tlistener = nil\n\t}\n}", "func (ln *StoppableListener) Close() error {\n\tfor _, v := range ln.listeners {\n\t\tif err := v.Close(); err != nil {\n\t\t\treturn err\n\t\t}\n\t}\n\treturn nil\n}", "func (server *SServer) Close() error {\n\tvar err error\n\tfor _, listener := range server.listeners {\n\t\tif err_ := listener.Close(); err_ != nil && err == nil {\n\t\t\terr = err_\n\t\t}\n\t}\n\treturn err\n}", "func (s *MaplAdapter) Close() error {\n\tif s.server != nil {\n\t\ts.server.GracefulStop()\n\t}\n\n\tif s.listener != nil {\n\t\t_ = s.listener.Close()\n\t}\n\n\treturn nil\n}", "func (s *testDoQServer) Shutdown() {\n\t_ = s.listener.Close()\n}", "func (n Node) Close() error {\n\treturn n.listener.Close()\n}", "func (s *TCPServer) Close() error {\n\treturn s.listener.Close()\n}", "func (s *Server) Close() error {\n\ts.mtx.Lock()\n\tdefer s.mtx.Unlock()\n\treturn s.listener.Close()\n}", "func (s *Server) Close() error {\n\ts.cancelMx.Lock()\n\tif s.cancel != nil {\n\t\ts.cancel()\n\t}\n\ts.cancelMx.Unlock()\n\n\treturn s.listener.Close()\n}", "func (t *TCPStreamLayer) Close() (err error) {\n\treturn t.listener.Close()\n}", "func (l *TCPListener) Close() error {\n\tif l == nil || l.fd == nil {\n\t\treturn syscall.EINVAL\n\t}\n\treturn l.fd.Close()\n}", "func (l *listener) Close() error {\n\tif l.unsubscribeLogs != nil {\n\t\tl.unsubscribeLogs()\n\t}\n\tl.runs.Range(func(key, runCloserChannelIf interface{}) bool {\n\t\trunCloserChannel, _ := runCloserChannelIf.(chan struct{})\n\t\tclose(runCloserChannel)\n\t\treturn true\n\t})\n\tl.runs = sync.Map{}\n\tl.shutdownWaitGroup.Wait()\n\treturn nil\n}", "func (bs *Listener) Close() {\n\tbs.mu.Lock()\n\n\tif bs.isClosed {\n\t\tbs.mu.Unlock()\n\t\treturn\n\t}\n\n\tclose(bs.newDataNotifications)\n\tbs.isClosed = true\n\n\tbs.mu.Unlock()\n}", "func (t *raftLayer) Close() (err error) {\n\treturn t.listener.Close()\n}", "func (s *Server) Close() error {\n\ts.mutex.Lock()\n\tdefer s.mutex.Unlock()\n\n\ts.closed = true\n\n\tvar err error\n\tfor ln := range s.listeners {\n\t\tif cerr := (*ln).Close(); cerr != nil && err == nil {\n\t\t\terr = cerr\n\t\t}\n\t}\n\treturn err\n}", "func (s *MockServer) Close() {\n\ts.Listener.Close()\n\ts.CloseClientConnections()\n}", "func (s *Socket) Close() error {\n\terr := s.listener.Close()\n\tif err != nil {\n\t\ts.Logger.Printf(\"cannot close socket: %s\\n\", err.Error())\n\t}\n\treturn os.Remove(s.socketPath)\n}", "func (l *EventListener) Close() {\n\tl.unregister <- l\n}", "func (s *Server) Close() error {\n\tctx, cancel := context.WithTimeout(context.Background(), 5*time.Second)\n\tdefer cancel()\n\tvar wg sync.WaitGroup\n\twg.Add(1)\n\tgo func() {\n\t\tdefer wg.Done()\n\t\t// Perform a best-effort final flush.\n\t\tif s.logtail != nil {\n\t\t\ts.logtail.Shutdown(ctx)\n\t\t}\n\t\tif s.logbuffer != nil {\n\t\t\ts.logbuffer.Close()\n\t\t}\n\t}()\n\n\tif _, isMemStore := s.Store.(*mem.Store); isMemStore && s.Ephemeral && s.lb != nil {\n\t\twg.Add(1)\n\t\tgo func() {\n\t\t\tdefer wg.Done()\n\t\t\t// Perform a best-effort logout.\n\t\t\ts.lb.LogoutSync(ctx)\n\t\t}()\n\t}\n\n\tif s.netstack != nil {\n\t\ts.netstack.Close()\n\t\ts.netstack = nil\n\t}\n\tif s.shutdownCancel != nil {\n\t\ts.shutdownCancel()\n\t}\n\tif s.lb != nil {\n\t\ts.lb.Shutdown()\n\t}\n\tif s.linkMon != nil {\n\t\ts.linkMon.Close()\n\t}\n\tif s.dialer != nil {\n\t\ts.dialer.Close()\n\t}\n\tif s.localAPIListener != nil {\n\t\ts.localAPIListener.Close()\n\t}\n\tif s.localAPITCPListener != nil 
{\n\t\ts.localAPITCPListener.Close()\n\t}\n\n\ts.mu.Lock()\n\tdefer s.mu.Unlock()\n\tfor _, ln := range s.listeners {\n\t\tln.Close()\n\t}\n\ts.listeners = nil\n\n\twg.Wait()\n\treturn nil\n}", "func (server *TaggedServer) Close() {\n\tserver.mutex.Lock()\n\tdefer server.mutex.Unlock()\n\tserver.done = true\n\tserver.listener.Close()\n}", "func (l *pipeListener) Close() error {\n\tclose(l.closer)\n\n\treturn nil\n}", "func (l *listener) Close() error {\n\tvar err error\n\tl.doneOnce.Do(func() {\n\t\tl.accepting.Store(false)\n\t\tclose(l.doneCh)\n\n\t\tl.connLock.Lock()\n\t\t// Close unaccepted connections\n\tlclose:\n\t\tfor {\n\t\t\tselect {\n\t\t\tcase c := <-l.acceptCh:\n\t\t\t\tclose(c.doneCh)\n\t\t\t\t// If we have an alternate identifier, remove it from the connection\n\t\t\t\t// map.\n\t\t\t\tif id := c.id.Load(); id != nil {\n\t\t\t\t\tdelete(l.conns, id.(string)) //nolint:forcetypeassert\n\t\t\t\t}\n\t\t\t\t// If we haven't already removed the remote address, remove it\n\t\t\t\t// from the connection map.\n\t\t\t\tif !c.rmraddr.Load() {\n\t\t\t\t\tdelete(l.conns, c.raddr.String())\n\t\t\t\t\tc.rmraddr.Store(true)\n\t\t\t\t}\n\t\t\tdefault:\n\t\t\t\tbreak lclose\n\t\t\t}\n\t\t}\n\t\tnConns := len(l.conns)\n\t\tl.connLock.Unlock()\n\n\t\tl.connWG.Done()\n\n\t\tif nConns == 0 {\n\t\t\t// Wait if this is the final connection.\n\t\t\tl.readWG.Wait()\n\t\t\tif errClose, ok := l.errClose.Load().(error); ok {\n\t\t\t\terr = errClose\n\t\t\t}\n\t\t} else {\n\t\t\terr = nil\n\t\t}\n\t})\n\n\treturn err\n}", "func (w *BaseWebsocketClient) OnClose(wasClean bool, code int, reason string) {}", "func (cli *Client) Close() {\n\tcli.ref.Call(\"close\")\n\tcli.listener.Release()\n}", "func (d *destination) listenForClose() {\n\tvar empty empty.Empty\n\terr := d.client.RecvMsg(&empty)\n\tif err == nil || err == io.EOF {\n\t\td.logger.Debug(\"disconnected from destination\")\n\t\td.statsd.Count(\n\t\t\t\"veneur_proxy.forward.disconnect\", 1, []string{\"error:false\"}, 1.0)\n\t} else {\n\t\td.logger.WithError(err).Error(\"disconnected from destination\")\n\t\td.statsd.Count(\n\t\t\t\"veneur_proxy.forward.disconnect\", 1, []string{\"error:true\"}, 1.0)\n\t}\n\n\td.cancel()\n}", "func (socket *SocketTLS) Close() {\n\t// Fire closing event\n\tlog.Noteln(socket.name + \" closing. 
Port \" + socket.port)\n\tsocket.eventChan <- SocketEvent{\n\t\tName: \"close\",\n\t\tData: nil,\n\t}\n\n\t// Close socket\n\tsocket.listen.Close()\n}", "func (w *Watcher) Close() {\n\t_ = w.Client.Close()\n}", "func cleanup(listeners []net.Listener, wg *sync.WaitGroup) error {\n\tlog.Println(\"Cleaning up.....\")\n\tfor _, listener := range listeners {\n\t\tlog.Printf(\"closing server at address %s ....\", listener.Addr().String())\n\t\tif err := listener.Close(); err != nil {\n\t\t\treturn err\n\t\t}\n\t\tlog.Printf(\"closed server at address %s\", listener.Addr().String())\n\t\twg.Done()\n\t}\n\treturn nil\n}", "func (l *LifeCycle) Close() error {\n\tl.closeMu.Lock()\n\tif l.closed {\n\t\tl.closeMu.Unlock()\n\t\treturn errAlreadyClosed\n\t}\n\n\tl.closed = true\n\tc := make(chan struct{})\n\tl.closeChan <- c\n\tl.closeMu.Unlock()\n\n\tl.log.Info(\"waiting for listener to exit\")\n\t<-c\n\treturn nil\n}", "func (c *ClientConn) Close() error {\n\tif c.state == clientConnStatePlay || c.state == clientConnStateRecord {\n\t\tclose(c.backgroundTerminate)\n\t\t<-c.backgroundDone\n\n\t\tc.Do(&base.Request{\n\t\t\tMethod: base.Teardown,\n\t\t\tURL: c.streamURL,\n\t\t\tSkipResponse: true,\n\t\t})\n\t}\n\n\tfor _, l := range c.udpRTPListeners {\n\t\tl.close()\n\t}\n\n\tfor _, l := range c.udpRTCPListeners {\n\t\tl.close()\n\t}\n\n\terr := c.nconn.Close()\n\treturn err\n}", "func (sl *StoppableListener) Close() error {\n\tselect {\n\tcase <-sl.stop:\n\tdefault:\n\t\tclose(sl.stop)\n\t}\n\treturn nil\n}", "func (_m *WebSocketServer) Close() {\n\t_m.Called()\n}", "func (a *TCPAcceptor) Stop() {\n\ta.running = false\n\ta.listener.Close()\n}", "func (a *API) Close() error {\n\tlog.Debug().Msg(\"shutting down server\")\n\treturn a.e.Close()\n}", "func (e *EthernetInterface) Close() {\n\tsyscall.Close(e.socket)\n}", "func (a *AbstractSessionChannelHandler) OnClose() {}", "func (ml *MockMonitorListener) Close() {\n}", "func (s *ListenerServerStream) Close() error {\n\tvar err error\n\tif s.conn == nil {\n\t\treturn nil\n\t}\n\tif err = s.conn.WriteControl(\n\t\twebsocket.CloseMessage,\n\t\twebsocket.FormatCloseMessage(websocket.CloseNormalClosure, \"server closing connection\"),\n\t\ttime.Now().Add(time.Second),\n\t); err != nil {\n\t\treturn err\n\t}\n\treturn s.conn.Close()\n}", "func (s *Service) Close() error {\n\tdefer s.diag.StoppedService()\n\ts.mu.Lock()\n\tdefer s.mu.Unlock()\n\t// If server is not set we were never started\n\tif s.server == nil {\n\t\treturn nil\n\t}\n\t// First turn off KeepAlives so that new connections will not become idle\n\ts.server.SetKeepAlivesEnabled(false)\n\t// Signal to manage loop we are stopping\n\tstopping := make(chan struct{})\n\ts.stop <- stopping\n\n\t// Next close the listener so no new connections can be made\n\terr := s.ln.Close()\n\tif err != nil {\n\t\treturn err\n\t}\n\n\t<-stopping\n\ts.wg.Wait()\n\ts.server = nil\n\treturn nil\n}", "func (s *Server) Close() {\n\ts.mu.Lock()\n\tdefer s.mu.Unlock()\n\n\tif s.closed {\n\t\treturn\n\t}\n\n\ts.closed = true\n\ts.Listener.Close()\n\ts.CloseClientConnections()\n}", "func (s *Server) Close() {\n\ts.mu.Lock()\n\tif !s.closed {\n\t\ts.closed = true\n\t\ts.Listener.Close()\n\t\tfor c, _ := range s.conns {\n\t\t\tc.Close()\n\t\t}\n\t}\n\ts.mu.Unlock()\n}", "func (s *Service) Close() error {\n\tif wait, err := func() (bool, error) {\n\t\ts.mu.Lock()\n\t\tdefer s.mu.Unlock()\n\n\t\tif s.closed() {\n\t\t\treturn false, nil // Already closed.\n\t\t}\n\t\tclose(s.done)\n\n\t\t// Close the listeners.\n\t\tif err := 
s.ln.Close(); err != nil {\n\t\t\treturn false, err\n\t\t}\n\t\tif err := s.httpln.Close(); err != nil {\n\t\t\treturn false, err\n\t\t}\n\n\t\tif s.batcher != nil {\n\t\t\ts.batcher.Stop()\n\t\t}\n\t\treturn true, nil\n\t}(); err != nil {\n\t\treturn err\n\t} else if !wait {\n\t\treturn nil\n\t}\n\ts.wg.Wait()\n\n\ts.mu.Lock()\n\ts.done = nil\n\ts.mu.Unlock()\n\n\treturn nil\n}", "func (l *Listener) Stop() {\n\tl.probes.SetReady(false)\n\tl.probes.SetAlive(false)\n\n\tclose(l.stop)\n\tl.wg.Wait()\n\tl.nc.Close()\n\tl.probes.Close()\n}", "func (r *router) Close() {\n\tlog.Info(\"Got OS shutdown signal, shutting down NSE webhook injector gracefully...\")\n\n\tif err := r.server.Shutdown(context.Background()); err != nil {\n\t\tlog.WithError(err).Error(\"Failed to shutting down the webhook server\")\n\t}\n}", "func (s *Server) Stop() {\n\tif s.listener != nil {\n\t\ts.listener.Close()\n\t}\n}", "func (s *Server) Stop() {\n\tif s.listener != nil {\n\t\ts.listener.Close()\n\t}\n}", "func (e *endpoint) Close() {\n\t// Tell dispatch goroutine to stop, then write to the eventfd so that\n\t// it wakes up in case it's sleeping.\n\tatomic.StoreUint32(&e.stopRequested, 1)\n\tsyscall.Write(e.rx.eventFD, []byte{1, 0, 0, 0, 0, 0, 0, 0})\n\n\t// Cleanup the queues inline if the worker hasn't started yet; we also\n\t// know it won't start from now on because stopRequested is set to 1.\n\te.mu.Lock()\n\tworkerPresent := e.workerStarted\n\te.mu.Unlock()\n\n\tif !workerPresent {\n\t\te.tx.cleanup()\n\t\te.rx.cleanup()\n\t}\n}", "func (s *Servers) Close() {\n\tfor _, srv := range s.listeners {\n\t\tif srv != nil {\n\t\t\tsrv.Close()\n\t\t}\n\t}\n}", "func (sw *Switcher) Close() {\n\tsw.Server.Close()\n}", "func (e *endpoint) Close() {\r\n\tclose(e.echoRequests)\r\n}", "func (_m *Socket) Close() {\n\t_m.Called()\n}", "func (l FakeListener) Close() error {\n\t// Highly unusual to close on the receiving side of a channel.\n\tfor {\n\t\tselect {\n\t\tcase _, open := <-l:\n\t\t\tif !open {\n\t\t\t\t// channel is already closed.\n\t\t\t\treturn &net.OpError{} // FIXME make use of closed Conn error\n\t\t\t}\n\t\tdefault:\n\t\t\t// channel should now be drained (any listeners unblocked)\n\t\t\tclose(l)\n\t\t\treturn nil\n\t\t}\n\t}\n\n}", "func (s *Sniffer) close() error {\n\tif err := unix.Close(s.fd); err != nil {\n\t\treturn fmt.Errorf(\"can't close sniffer socket: %w\", err)\n\t}\n\ts.fd = -1\n\treturn nil\n}" ]
[ "0.76574886", "0.7436512", "0.7430331", "0.74056816", "0.72484905", "0.7158901", "0.7139779", "0.71393967", "0.7135879", "0.70581454", "0.7038202", "0.6962311", "0.69576836", "0.6953876", "0.69376695", "0.6923832", "0.68982124", "0.689492", "0.6891801", "0.6855057", "0.6852129", "0.6848301", "0.68245304", "0.6815147", "0.6812792", "0.6797862", "0.6779625", "0.6775764", "0.6771828", "0.6771828", "0.6770749", "0.6767508", "0.67657197", "0.6729553", "0.6728536", "0.6706244", "0.66944", "0.66861606", "0.6656724", "0.6650381", "0.6639048", "0.66209334", "0.66200286", "0.66115963", "0.6600198", "0.6599608", "0.65925545", "0.654249", "0.6536248", "0.6528293", "0.6528147", "0.65084773", "0.6498603", "0.6487386", "0.6475951", "0.64540875", "0.64444685", "0.63737845", "0.6367251", "0.63489836", "0.6345662", "0.62862897", "0.6263072", "0.62557465", "0.6244995", "0.62322354", "0.6231881", "0.62198764", "0.6213221", "0.620868", "0.6171555", "0.6156007", "0.6120758", "0.6120663", "0.6087004", "0.606533", "0.6061623", "0.6044097", "0.6035236", "0.60351706", "0.60033405", "0.5980373", "0.5974285", "0.59679353", "0.59584606", "0.5950761", "0.59460866", "0.5935354", "0.5923925", "0.5902692", "0.59020644", "0.589793", "0.589793", "0.5886246", "0.5877957", "0.5871382", "0.58691615", "0.58529276", "0.5849506", "0.5846375" ]
0.7354897
4
Read the CRL from the body of an HTTP response
func (s *Server) fetchCRL(r io.Reader) ([]byte, error) {
	crlSizeLimit := s.Config.CRLSizeLimit
	log.Debugf("CRL size limit is %d bytes", crlSizeLimit)
	crl := make([]byte, crlSizeLimit)
	crl, err := util.Read(r, crl)
	if err != nil {
		return nil, errors.WithMessage(err, fmt.Sprintf("Error reading CRL with max buffer size of %d", crlSizeLimit))
	}
	return crl, nil
}
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "func CRL(w http.ResponseWriter, r *http.Request) {\n\tcrlBytes, err := mustAuthority(r.Context()).GetCertificateRevocationList()\n\tif err != nil {\n\t\trender.Error(w, err)\n\t\treturn\n\t}\n\n\t_, formatAsPEM := r.URL.Query()[\"pem\"]\n\tif formatAsPEM {\n\t\tw.Header().Add(\"Content-Type\", \"application/x-pem-file\")\n\t\tw.Header().Add(\"Content-Disposition\", \"attachment; filename=\\\"crl.pem\\\"\")\n\n\t\t_ = pem.Encode(w, &pem.Block{\n\t\t\tType: \"X509 CRL\",\n\t\t\tBytes: crlBytes,\n\t\t})\n\t} else {\n\t\tw.Header().Add(\"Content-Type\", \"application/pkix-crl\")\n\t\tw.Header().Add(\"Content-Disposition\", \"attachment; filename=\\\"crl.der\\\"\")\n\t\tw.Write(crlBytes)\n\t}\n}", "func getRTVBody(c http.Client, url string) ([]byte, error) {\n\tlog.Printf(\"Fetching URL: %q\\n\", url)\n\tresp, err := c.Get(url)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\tdefer resp.Body.Close()\n\n\tif resp.StatusCode != 200 {\n\t\treturn nil, errors.Errorf(\"Non-200 response fetching %s, %+v\", url, resp)\n\t}\n\n\tbody, err := ioutil.ReadAll(resp.Body)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\treturn body, nil\n}", "func serveCrl() {\n\thttp.HandleFunc(\"/crl\", func(w http.ResponseWriter, req *http.Request) {\n\t\tlog.Println(\"crl request come \", req.RemoteAddr)\n\t\tw.Header().Add(\"Content-Disposition\", \"attachment; filename=site.crl\")\n\t\tcrlBytes, err := CrlBytes()\n\t\tif err != nil {\n\t\t\tlog.Println(err)\n\t\t}\n\t\tw.Write(crlBytes)\n\t})\n\thttp.ListenAndServe(\":80\", nil)\n}", "func (c *CertInfo) CRL() *pkix.CertificateList {\n\treturn c.crl\n}", "func pathFetchCRL(b *backend) *framework.Path {\n\treturn &framework.Path{\n\t\tPattern: `crl(/pem)?`,\n\n\t\tCallbacks: map[logical.Operation]framework.OperationFunc{\n\t\t\tlogical.ReadOperation: b.pathFetchRead,\n\t\t},\n\n\t\tHelpSynopsis: pathFetchHelpSyn,\n\t\tHelpDescription: pathFetchHelpDesc,\n\t}\n}", "func ConnOCSPResponse(c *tls.Conn,) []byte", "func (reqParams *ReqParams) readValidate(resp *http.Response, w io.Writer) (*wrappedResp, error) {\n\tvar (\n\t\twresp = &wrappedResp{Response: resp, n: resp.ContentLength}\n\t\tcksumType = resp.Header.Get(apc.HdrObjCksumType)\n\t)\n\tif err := reqParams.checkResp(resp); err != nil {\n\t\treturn nil, err\n\t}\n\tn, cksum, err := cos.CopyAndChecksum(w, resp.Body, nil, cksumType)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\tif n != resp.ContentLength {\n\t\treturn nil, fmt.Errorf(\"read length (%d) != (%d) content-length\", n, resp.ContentLength)\n\t}\n\tif cksum == nil {\n\t\tif cksumType == \"\" {\n\t\t\treturn nil, errors.New(errNilCksum) // e.g., after fast-appending to a TAR\n\t\t}\n\t\treturn nil, fmt.Errorf(errNilCksumType, cksumType)\n\t}\n\n\t// compare\n\twresp.cksumValue = cksum.Value()\n\thdrCksumValue := wresp.Header.Get(apc.HdrObjCksumVal)\n\tif wresp.cksumValue != hdrCksumValue {\n\t\treturn nil, cmn.NewErrInvalidCksum(hdrCksumValue, wresp.cksumValue)\n\t}\n\treturn wresp, nil\n}", "func readBody(response *http.Response) ([]byte, error) {\n\tdefer response.Body.Close()\n\trtn, readErr := ioutil.ReadAll(response.Body)\n\n\treturn rtn, readErr\n}", "func ReadBody(resp *http.Response) (result []byte, err error) {\n\tdefer fs.CheckClose(resp.Body, &err)\n\treturn ioutil.ReadAll(resp.Body)\n}", "func (c *Client) readBody(resp *http.Response) ([]byte, error) {\n\tvar reader io.Reader = resp.Body\n\tswitch resp.Header.Get(\"Content-Encoding\") {\n\tcase \"\":\n\t\t// Do nothing\n\tcase \"gzip\":\n\t\treader = 
gzipDecompress(resp.Body)\n\tdefault:\n\t\treturn nil, fmt.Errorf(\"bug: comm.Client.JSONCall(): content was send with unsupported content-encoding %s\", resp.Header.Get(\"Content-Encoding\"))\n\t}\n\treturn io.ReadAll(reader)\n}", "func getHttpResp(artDetails *jfauth.ServiceDetails, uri string) ([]byte, error) {\n\trtURL := (*artDetails).GetUrl() + uri\n\tjflog.Debug(\"Getting '\" + rtURL + \"' details ...\")\n\t//fmt.Printf(\"Fetching : %s\\n\", rtURL)\n\treq, err := http.NewRequest(\"GET\", rtURL, nil)\n\tif err != nil {\n\t\tjflog.Error(\"http.NewRequest failed\")\n\t}\n\treq.SetBasicAuth((*artDetails).GetUser(), (*artDetails).GetApiKey())\n\n\tresp, err := http.DefaultClient.Do(req)\n\tif err != nil {\n\t\tjflog.Error(\"http.DefaultClient.Do failed\")\n\t}\n\tdefer resp.Body.Close()\n\n\tbody, err := ioutil.ReadAll(resp.Body)\n\tif err != nil {\n\t\tjflog.Error(\"ioutil.ReadAll call failed\")\n\t}\n\t//fmt.Printf(\"Fetching completed : %s\\n\", rtURL)\n\treturn body, err\n}", "func getBody(resp *http.Response) ([]byte, error) {\n\tbody, err := ioutil.ReadAll(resp.Body)\n\n\treturn body, err\n}", "func getBody(resp *http.Response) ([]byte, error) {\n\tbody, err := ioutil.ReadAll(resp.Body)\n\treturn body, err\n}", "func (*CaCrls) GetPath() string { return \"/api/objects/ca/crl/\" }", "func readBodyContent(r *ResponseReader, res *Response) (string, error) {\n\t// 1. Any response to a HEAD request and any response with a 1xx\n\t// (Informational), 204 (No Content), or 304 (Not Modified) status\n\t// code always has empty body\n\tif (res.StatusCode/100 == 1) || (res.StatusCode == 204) || (res.StatusCode == 304) ||\n\t\t(res.Request != nil && res.Request.Method == \"HEAD\") {\n\t\tres.Body = \"\"\n\t\treturn \"\", nil\n\t}\n\n\tcontentLen := res.GetContentLength()\n\ttransferEnc := res.GetTransferEncoding()\n\t// fmt.Printf(\"Len: %v, Encoding: %v\", contentLen, transferEnc)\n\n\tbody := \"\"\n\tvar err error\n\tif transferEnc == \"chunked\" || contentLen == -1 {\n\t\tbody, err = readChunkedBodyContent(r, res)\n\t} else {\n\t\tbody, err = readLimitedBodyContent(r, res, contentLen)\n\t}\n\n\tres.Body = body\n\tres.AppendRaw(body, false)\n\treturn body, err\n}", "func (dr downloadResponse) Body() io.ReadCloser {\n\treturn dr.rawResponse.Body\n}", "func body(resp *http.Response) ([]byte, error) {\n\tvar buf bytes.Buffer\n\t_, err := buf.ReadFrom(resp.Body)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\treturn buf.Bytes(), nil\n}", "func ReadContentAndCloseBody(resp *http.Response) ([]byte, error) {\n\tdefer resp.Body.Close()\n\tdata, err := ioutil.ReadAll(resp.Body)\n\treturn data, err\n}", "func (c *Conn) readAuthResp() string {\n\tc.conn.SetReadDeadline(time.Now().Add(c.Config.Limits.CmdInput))\n\tc.lr.N = c.Config.Limits.AuthInput\n\tline, err := c.rdr.ReadLine()\n\tif err != nil || c.lr.N == 0 {\n\t\tc.state = sAbort\n\t\tc.log(\n\t\t\t\"!\",\n\t\t\t\"auth input abort %s err: %v\",\n\t\t\tfmtBytesLeft(\n\t\t\t\tc.Config.Limits.AuthInput,\n\t\t\t\tc.lr.N,\n\t\t\t),\n\t\t\terr,\n\t\t)\n\t\treturn \"\"\n\t}\n\tc.log(\"r\", line)\n\treturn line\n}", "func (s *SmartContract) getLC(APIstub shim.ChaincodeStubInterface, args []string) sc.Response {\n \n\t lcId := args[0];\n\t \n\t // if err != nil {\n\t // \treturn shim.Error(\"No Amount\")\n\t // }\n \n\t LCAsBytes, _ := APIstub.GetState(lcId)\n\t return shim.Success(LCAsBytes)\n }", "func (rpr ReadPathResponse) Body() io.ReadCloser {\n\treturn rpr.rawResponse.Body\n}", "func getRead(url string) ([]byte, error) {\n\tresp, err := 
http.Get(url)\n\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\n\tdata, err := ioutil.ReadAll(resp.Body)\n\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\n\treturn data, resp.Body.Close()\n}", "func (self *Client) body(res *http.Response) (io.ReadCloser, error) {\n\tvar body io.ReadCloser\n\tvar err error\n\n\tif res.Header.Get(\"Content-Encoding\") == \"gzip\" {\n\t\tif body, err = gzip.NewReader(res.Body); err != nil {\n\t\t\treturn nil, err\n\t\t}\n\t} else {\n\t\tbody = res.Body\n\t}\n\n\treturn body, nil\n}", "func (o *DownloadTLSCertificateReader) ReadResponse(response runtime.ClientResponse, consumer runtime.Consumer) (interface{}, error) {\n\tswitch response.Code() {\n\tcase 200:\n\t\tresult := NewDownloadTLSCertificateOK()\n\t\tif err := result.readResponse(response, consumer, o.formats); err != nil {\n\t\t\treturn nil, err\n\t\t}\n\t\treturn result, nil\n\tcase 401:\n\t\tresult := NewDownloadTLSCertificateUnauthorized()\n\t\tif err := result.readResponse(response, consumer, o.formats); err != nil {\n\t\t\treturn nil, err\n\t\t}\n\t\treturn nil, result\n\tcase 403:\n\t\tresult := NewDownloadTLSCertificateForbidden()\n\t\tif err := result.readResponse(response, consumer, o.formats); err != nil {\n\t\t\treturn nil, err\n\t\t}\n\t\treturn nil, result\n\n\tdefault:\n\t\treturn nil, runtime.NewAPIError(\"unknown error\", response, response.Code())\n\t}\n}", "func Read(urlPath string) (*bytes.Buffer, error) {\n\n\tresponse, err := coreHTTP.Get(urlPath) // nolint:gosec\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\tif response != nil {\n\t\tdefer response.Body.Close()\n\t}\n\tvar buf bytes.Buffer\n\t_, err = io.Copy(&buf, response.Body)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\n\treturn &buf, nil\n}", "func readResponseBody(resp *http.Response) (content []byte, err error) {\n\t// Get the content\n\tcontent, err = ioutil.ReadAll(resp.Body)\n\n\t// Reset the response body\n\trCloser := ioutil.NopCloser(bytes.NewBuffer(content))\n\tresp.Body = rCloser\n\n\treturn\n}", "func _getReal(url string) (content []byte, err error) {\n\tvar req *http.Request\n\treq, err = http.NewRequest(\"GET\", url, nil)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\tApplyHeaders(req, global.Conf.TranslatorAPIHeaders)\n\n\tvar resp *http.Response\n\tresp, err = http.DefaultClient.Do(req)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\tdefer resp.Body.Close()\n\n\treturn ioutil.ReadAll(resp.Body)\n}", "func readDiscardBody(req *http.Request, resp *http.Response) int64 {\n\tif req.Method == http.MethodHead {\n\t\treturn 0\n\t}\n\n\tw := ioutil.Discard\n\tbytes, err := io.Copy(w, resp.Body)\n\tif err != nil {\n\t\tlog.Printf(\"reading HTTP response body: %v\", err)\n\t}\n\treturn bytes\n}", "func getHttpRespData(target string) ([]byte, error) {\n\tresp, err := http.Get(target)\n\tif err != nil {\n\t\treturn nil, fmt.Errorf(\"failed to get from %s: %v\", target, err)\n\t}\n\n\tdata, err := ioutil.ReadAll(resp.Body)\n\tif err != nil {\n\t\treturn nil, fmt.Errorf(\"failed to read response body: %v\", err)\n\t}\n\n\treturn data, nil\n}", "func readPingResp(req *http.Request, resp *http.Response) (location string, bytes int64) {\n\tif req.Method == http.MethodHead {\n\t\tlog.Printf(\"no HTTP response body in a HEAD\")\n\t\treturn\n\t}\n\n\tbody, err := ioutil.ReadAll(resp.Body)\n\tbytes = int64(len(body))\n\tif err != nil {\n\t\tlog.Println(\"readPingResp:\", err)\n\t\treturn\n\t}\n\n\tsb := string(body)\n\n\tparseCases := []struct {\n\t\tprefix, suffix, encoding string\n\t}{\n\t\t{SrvLocPrefix, SrvLocSuffix, 
\"JSON\"},\n\t\t{ServedFromPrefix, ServedFromSuffix, \"HTML\"},\n\t}\n\n\tfor _, pse := range parseCases {\n\t\tif locStart := strings.Index(sb, pse.prefix); locStart > 0 {\n\t\t\tlocStart += len(pse.prefix) // start of the text\n\t\t\tlocEnd := strings.Index(sb[locStart:], pse.suffix)\n\t\t\tif locEnd > 0 {\n\t\t\t\tlocation = sb[locStart : locStart+locEnd]\n\t\t\t} else {\n\t\t\t\tlog.Println(\"Found location prefix, but no suffix in\", pse.encoding, \"body\")\n\t\t\t}\n\t\t}\n\t}\n\n\treturn\n}", "func (*CaMetaCrls) GetPath() string { return \"/api/objects/ca/meta_crl/\" }", "func handleReadRequest(url string, httpMethod string, JWT_Token string) (response []byte, err error) {\n\thttpClient := &http.Client{}\n\t\n\tvar req *http.Request\n\treq, err = http.NewRequest(httpMethod, url, nil)\n\tif err != nil {\n\t\treturn \n\t}\n\n\treq.Header.Add(\"Authorization\", \"Bearer \"+JWT_Token)\n\treq.Header.Add(\"Content-Type\", \"application/json\")\n\n\tresp, err := httpClient.Do(req)\n\tif err != nil {\n\t\treturn\n\t}\n\n\tresponse, err = ioutil.ReadAll(resp.Body)\n\tif err != nil {\n\t\treturn\n\t}\n\n\treturn\n\n}", "func (res *ClientHTTPResponse) ReadAll() ([]byte, error) {\n\trawBody, err := ioutil.ReadAll(res.rawResponse.Body)\n\n\tcerr := res.rawResponse.Body.Close()\n\tif cerr != nil {\n\t\t/* coverage ignore next line */\n\t\tres.req.Logger.Error(\"Could not close client resp body\",\n\t\t\tzap.String(\"error\", err.Error()),\n\t\t)\n\t}\n\n\tif err != nil {\n\t\tres.req.Logger.Error(\"Could not ReadAll() client body\",\n\t\t\tzap.String(\"error\", err.Error()),\n\t\t)\n\t\treturn nil, errors.Wrapf(\n\t\t\terr,\n\t\t\t\"Could not read client(%s) response body\",\n\t\t\tres.req.ClientID,\n\t\t)\n\t}\n\n\tres.rawResponseBytes = rawBody\n\treturn rawBody, nil\n}", "func readBody(c *Connection, l int) ([]uint8, error) {\n\tb := make([]byte, l)\n\tc.setReadDeadline()\n\tn, e := io.ReadFull(c.rdr, b)\n\tif n < l && n != 0 { // Short read, e is ErrUnexpectedEOF\n\t\tc.log(\"SHORT READ\", n, l, e)\n\t\treturn b[0 : n-1], e\n\t}\n\tif c.checkReadError(e) != nil { // Other erors\n\t\treturn b, e\n\t}\n\tc.setReadDeadline()\n\t_, _ = c.rdr.ReadByte() // trailing NUL\n\tif c.checkReadError(e) != nil { // Other erors\n\t\treturn b, e\n\t}\n\treturn b, e\n}", "func readHeaders(r *ResponseReader, res *Response) (err error) {\n\tfor {\n\t\tl, _, err := r.Reader.ReadLine()\n\t\tif err == nil {\n\t\t\tline := string(l)\n\t\t\tres.AppendRaw(line, true)\n\t\t\tif line == \"\" {\n\t\t\t\tbreak\n\t\t\t} else {\n\t\t\t\t_, err := res.Headers.AddLineString(line)\n\t\t\t\tif err != nil {\n\t\t\t\t\tbreak\n\t\t\t\t}\n\t\t\t}\n\t\t}\n\t}\n\treturn\n}", "func (brlr BlobsRenewLeaseResponse) Response() *http.Response {\n\treturn brlr.rawResponse\n}", "func Body(resp *http.Response) (string, error) {\n\tdefer resp.Body.Close()\n\tb, e := ioutil.ReadAll(resp.Body)\n\treturn string(b), e\n}", "func RCRL(ci, mr operand.Op) { ctx.RCRL(ci, mr) }", "func (c *VFSContext) readHttpLocation(httpURL string, httpHeaders map[string]string) ([]byte, error) {\n\treq, err := http.NewRequest(\"GET\", httpURL, nil)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\tfor k, v := range httpHeaders {\n\t\treq.Header.Add(k, v)\n\t}\n\tresponse, err := http.DefaultClient.Do(req)\n\tif response != nil {\n\t\tdefer response.Body.Close()\n\t}\n\tif err != nil {\n\t\treturn nil, fmt.Errorf(\"error fetching %q: %v\", httpURL, err)\n\t}\n\tbody, err := ioutil.ReadAll(response.Body)\n\tif err != nil {\n\t\treturn nil, fmt.Errorf(\"error reading 
response for %q: %v\", httpURL, err)\n\t}\n\tif response.StatusCode == 404 {\n\t\treturn nil, os.ErrNotExist\n\t}\n\tif response.StatusCode != 200 {\n\t\treturn nil, fmt.Errorf(\"unexpected response code %q for %q: %v\", response.Status, httpURL, string(body))\n\t}\n\treturn body, nil\n}", "func (c *client) httpRequest(ctx context.Context, url string, timeout time.Duration) ([]byte, error) {\n\tlog.Debugf(\"httpRequest: %v\", url)\n\n\treq, err := http.NewRequestWithContext(ctx, http.MethodGet, url, nil)\n\tif err != nil {\n\t\treturn nil, fmt.Errorf(\"unable to create request: %v\", err)\n\t}\n\n\tclient := &http.Client{\n\t\tTimeout: timeout * time.Second,\n\t}\n\tresponse, err := client.Do(req)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\tdefer response.Body.Close()\n\n\tif response.StatusCode != http.StatusOK {\n\t\tbody, err := ioutil.ReadAll(response.Body)\n\t\tif err != nil {\n\t\t\treturn nil, fmt.Errorf(\"dcrdata error: %v %v %v\",\n\t\t\t\tresponse.StatusCode, url, err)\n\t\t}\n\t\treturn nil, fmt.Errorf(\"dcrdata error: %v %v %s\",\n\t\t\tresponse.StatusCode, url, body)\n\t}\n\n\treturn ioutil.ReadAll(response.Body)\n}", "func (crlr ContainersRenewLeaseResponse) Response() *http.Response {\n\treturn crlr.rawResponse\n}", "func GetChainInfo_HTTP(url string) (respBody string, respStatus string) {\n\t//TODO : define a logger\n\t//fmt.Println(\"GetChainInfo_HTTP :\", url)\n\n\thttpclient := &http.Client{ Timeout: time.Second * waitSecs }\n\tresponse, err := httpclient.Get(url)\n\n\tif err != nil {\n\t\tfmt.Println(\"Error from httpclient.GET request, url, response: \", url, response)\n\t\tfmt.Println(\"Error from httpclient.GET request, err: \", err)\n\t\treturn err.Error(), \"Error from httpclient.GET request\"\n\t} else {\n\t\tdefer response.Body.Close()\n\t\tcontents, err := ioutil.ReadAll(response.Body)\n\t\tif err != nil {\n\t\t\tfmt.Println(\"Error from ioutil.ReadAll during GET request, url, response, contents: \", url, response, contents)\n\t\t\tfmt.Println(\"Error from ioutil.ReadAll during GET request, err: \", err)\n\t\t\treturn err.Error(), \"Error from ioutil.ReadAll during GET request\"\n\t\t}\n\t\treturn string(contents), response.Status\n\t}\n}", "func pathFetchCRLViaCertPath(b *backend) *framework.Path {\n\treturn &framework.Path{\n\t\tPattern: `cert/crl`,\n\n\t\tCallbacks: map[logical.Operation]framework.OperationFunc{\n\t\t\tlogical.ReadOperation: b.pathFetchRead,\n\t\t},\n\n\t\tHelpSynopsis: pathFetchHelpSyn,\n\t\tHelpDescription: pathFetchHelpDesc,\n\t}\n}", "func (rpr ReadPathResponse) CacheControl() string {\n\treturn rpr.rawResponse.Header.Get(\"Cache-Control\")\n}", "func readFirstLine(r *ResponseReader, res *Response) (status int, err error) {\n\tl, _, err := r.Reader.ReadLine()\n\tif err == nil {\n\t\tline := string(l)\n\t\tproto, err := GetProtolVersionFromText(line[:8])\n\t\tif err == nil {\n\t\t\tstatus, err := strconv.Atoi(line[9:12])\n\t\t\tif err == nil {\n\t\t\t\tres.ProtoVersion = proto\n\t\t\t\tres.StatusCode = int16(status)\n\t\t\t\tres.StatusText = line[13:]\n\t\t\t\tres.AppendRaw(line, true)\n\t\t\t\treturn status, nil\n\t\t\t}\n\t\t}\n\t}\n\treturn\n}", "func (c *Conn) readResponse(res *response_) error {\n\terr := c.readDataUnit()\n\tif err != nil {\n\t\treturn err\n\t}\n\terr = IgnoreEOF(scanResponse.Scan(c.decoder, res))\n\tif err != nil {\n\t\treturn err\n\t}\n\tif res.Result.IsError() {\n\t\treturn res.Result\n\t}\n\treturn nil\n}", "func ReadResponse(r *bfe_bufio.Reader, req *Request) (*Response, error) {\n\ttp := 
textproto.NewReader(r)\n\tresp := &Response{\n\t\tRequest: req,\n\t}\n\n\t// Parse the first line of the response.\n\tline, err := tp.ReadLine()\n\tif err != nil {\n\t\tif err == io.EOF {\n\t\t\terr = io.ErrUnexpectedEOF\n\t\t}\n\t\treturn nil, err\n\t}\n\tf := strings.SplitN(line, \" \", 3)\n\tif len(f) < 2 {\n\t\treturn nil, &badStringError{\"malformed HTTP response\", line}\n\t}\n\treasonPhrase := \"\"\n\tif len(f) > 2 {\n\t\treasonPhrase = f[2]\n\t}\n\tresp.Status = f[1] + \" \" + reasonPhrase\n\tresp.StatusCode, err = strconv.Atoi(f[1])\n\tif err != nil {\n\t\treturn nil, &badStringError{\"malformed HTTP status code\", f[1]}\n\t}\n\n\tresp.Proto = f[0]\n\tvar ok bool\n\tif resp.ProtoMajor, resp.ProtoMinor, ok = ParseHTTPVersion(resp.Proto); !ok {\n\t\treturn nil, &badStringError{\"malformed HTTP version\", resp.Proto}\n\t}\n\n\t// Parse the response headers.\n\tmimeHeader, err := tp.ReadMIMEHeader()\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\tresp.Header = Header(mimeHeader)\n\n\tfixPragmaCacheControl(resp.Header)\n\n\terr = readTransfer(resp, r)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\n\treturn resp, nil\n}", "func getResponseBody(r io.Reader) ([]byte, error) {\n\n\tbody, err := ioutil.ReadAll(r)\n\tif err != nil {\n\t\tlog.Println(err)\n\t\treturn nil, err\n\t}\n\treturn body, nil\n}", "func GetChainInfo_HTTPS(url string) (respBody string, respStatus string) {\n\t//TODO : define a logger\n\t//fmt.Println(\"GetChainInfo_HTTPS :\", url)\n\n tr := &http.Transport{\n\t TLSClientConfig: &tls.Config{RootCAs: nil},\n\t DisableCompression: true,\n }\n httpsclient := &http.Client{ Timeout: time.Second * waitSecs, Transport: tr }\n response, err := httpsclient.Get(url)\n\tif err != nil {\n\t\t\tfmt.Println(\"ERROR from httpsclient.GET request, url, response: \", url, response)\n\t\t\tfmt.Println(\"ERROR from httpsclient.GET request, err: \", err)\n\t\t\treturn err.Error(), \"ERROR from httpsclient.GET request\"\n\t} else {\n\t\tdefer response.Body.Close()\n\t\tcontents, err := ioutil.ReadAll(response.Body)\n\t if err != nil {\n\t\t\tfmt.Println(\"ERROR from https ioutil.ReadAll during GET request, url, response, contents: \", url, response, contents)\n\t\t\tfmt.Println(\"ERROR from https ioutil.ReadAll during GET request, err: \", err)\n\t\t\treturn err.Error(), \"ERROR from https ioutil.ReadAll during GET request\"\n\t\t}\n\t\treturn string(contents), response.Status\n\t}\n}", "func getI2Cread(w http.ResponseWriter, r *http.Request) {\n\tvar reg int64\n\tsReg := r.URL.Query().Get(\"reg\")\n\tif sReg != \"\" {\n\t\treg, _ = strconv.ParseInt(sReg, 0, 8)\n\t} else {\n\t\treg = 0x1a\n\t}\n\tsensor, _ := strconv.ParseInt(r.URL.Query().Get(\"sensor\"), 0, 8)\n\ts, err := ltc.ReadI2CWord(int(sensor), LTC6813.LTC2944Address, uint8(reg))\n\tw.Header().Set(\"Access-Control-Allow-Origin\", \"*\")\n\tfmt.Fprint(w, \"Request = \", r.URL.Query().Get(\"reg\"), \"\\n\")\n\tif err != nil {\n\t\tfmt.Fprint(w, \"Error - \", err)\n\t} else {\n\t\tfmt.Fprintf(w, s)\n\t}\n}", "func (c *caCache) getCRL(url string, cert *x509.Certificate) (*pkix.CertificateList, error) {\n\t// See comments in getCert for explanation of locking pattern.\n\t//\n\t// getCRL also ignores cached CRLs if they are older than the threshold.\n\tc.mu.RLock()\n\tcrl, ok := c.crls[url]\n\tc.mu.RUnlock()\n\tif ok && time.Since(crl.fetchedAt) < crlCacheDuration {\n\t\treturn crl.crl, nil\n\t}\n\n\tc.mu.Lock()\n\tdefer c.mu.Unlock()\n\tcrl, ok = c.crls[url]\n\tif ok && time.Since(crl.fetchedAt) < crlCacheDuration {\n\t\treturn crl.crl, 
nil\n\t}\n\n\tcrlRaw, err := fetchCRL(url)\n\tif err != nil {\n\t\treturn nil, fmt.Errorf(\"fetching %q: %v\", url, err)\n\t}\n\tif err := cert.CheckCRLSignature(crlRaw); err != nil {\n\t\treturn nil, fmt.Errorf(\"verifying CRL signature for %q: %v\", url, err)\n\t}\n\tc.crls[url] = &cachedCRL{crl: crlRaw, fetchedAt: time.Now()}\n\treturn crlRaw, nil\n}", "func (resp *Response) ReadBody() []byte {\n\tif resp.Resp != nil && resp.Resp.Body != nil {\n\t\tbody, err := ioutil.ReadAll(resp.Resp.Body)\n\t\tif err != nil {\n\t\t\treturn nil\n\t\t}\n\t\treturn body\n\t}\n\treturn nil\n}", "func (c *Client) getLbResponse(verb, path string, reqModel, resModel interface{}) error {\n\treturn c.getResponse(lbaasBaseUrl+path, verb, reqModel, resModel)\n}", "func GetResponse(url string) []byte {\n resp, err := http.Get(url)\n\n // handle the error if there is one\n if err != nil {\n panic(err)\n }\n\n // do this now so it won't be forgotten\n defer resp.Body.Close()\n\n // reads html as a slice of bytes\n html, err := ioutil.ReadAll(resp.Body)\n if err != nil {\n panic(err)\n }\n\n return html\n}", "func (c *Client) RenterDownloadHTTPResponseGet(siaPath string, offset, length uint64) (resp []byte, err error) {\n\tsiaPath = strings.TrimPrefix(siaPath, \"/\")\n\tquery := fmt.Sprintf(\"%s?offset=%d&length=%d&httpresp=true\", siaPath, offset, length)\n\tresp, err = c.getRawResponse(\"/renter/download/\" + query)\n\treturn\n}", "func readUntilCRLF(buf []byte, r io.Reader, idx int) ([]byte, error) {\n\t// Read until we find the cRLF or we hit our max possible header length\n\tfor idx < 107 {\n\t\tc, err := r.Read(buf[idx : idx+1])\n\t\tif c != 1 {\n\t\t\treturn nil, errors.New(\"expected to read more bytes, but got none\")\n\t\t}\n\t\tif err != nil {\n\t\t\treturn nil, err\n\t\t}\n\t\tif bytes.Equal(buf[idx-1:idx+1], []byte(cRLF)) {\n\t\t\treturn buf[0 : idx-1], nil\n\t\t}\n\t\tidx++\n\t}\n\treturn nil, errors.New(\"gave up after 107 bytes\")\n}", "func printResponseBody(res *http.Response) {\n\tdefer res.Body.Close()\n\tresponseBodyBytes, err := ioutil.ReadAll(res.Body)\n\tif err != nil {\n\t\tlog.Fatal(err)\n\t}\n\tresponseBody := string(responseBodyBytes)\n\tfmt.Println(responseBody)\n}", "func GetBody(w http.ResponseWriter, url string) []byte{\n\t\n\tres, err := http.Get(url)\n\tif err != nil{\n\t\tfmt.Fprintln(w, \"error or something\")\n\t\t// must break out \n\t}\n\tbody, err := ioutil.ReadAll(res.Body)\n\tif err != nil{\n\t\tfmt.Fprintln(w, \"could not get content\")\n\t\t // must break out\n\t}\n\treturn body\n}", "func readBody(t *testing.T, r io.ReadCloser) *bytes.Buffer {\n\tdefer r.Close()\n\n\tvar b []byte\n\tbuf := bytes.NewBuffer(b)\n\t_, err := buf.ReadFrom(r)\n\tcheck(t, err)\n\n\treturn buf\n}", "func (f Fetcher) processBody() []byte {\n\tb, err := io.ReadAll(f.resp.Body)\n\tf.resp.Body.Close()\n\tif f.resp.StatusCode >= 300 {\n\t\tlog.Fatalf(\"Response failed with status code: %d\\n and body: %s\\n\", f.resp.StatusCode, b)\n\t}\n\tcheck(err)\n\treturn b\n}", "func (s *ResponseModifier) ReadBytes() ([]byte, error) {\n\tbuf, err := ioutil.ReadAll(s.Response.Body)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\ts.Bytes(buf)\n\treturn buf, nil\n}", "func ConnRead(c *tls.Conn, b []byte) (int, error)", "func (a *CallRoutingApiService) ReadIVRPromptContent(ctx context.Context, accountId string, promptId string) (*http.Response, error) {\n\tvar (\n\t\tlocalVarHttpMethod = strings.ToUpper(\"Get\")\n\t\tlocalVarPostBody interface{}\n\t\tlocalVarFormFileName string\n\t\tlocalVarFileName 
string\n\t\tlocalVarFileBytes []byte\n\t)\n\n\t// create path and map variables\n\tlocalVarPath := a.client.cfg.BasePath + \"/restapi/v1.0/account/{accountId}/ivr-prompts/{promptId}/content\"\n\tlocalVarPath = strings.Replace(localVarPath, \"{\"+\"accountId\"+\"}\", fmt.Sprintf(\"%v\", accountId), -1)\n\tlocalVarPath = strings.Replace(localVarPath, \"{\"+\"promptId\"+\"}\", fmt.Sprintf(\"%v\", promptId), -1)\n\n\tlocalVarHeaderParams := make(map[string]string)\n\tlocalVarQueryParams := url.Values{}\n\tlocalVarFormParams := url.Values{}\n\n\t// to determine the Content-Type header\n\tlocalVarHttpContentTypes := []string{}\n\n\t// set Content-Type header\n\tlocalVarHttpContentType := selectHeaderContentType(localVarHttpContentTypes)\n\tif localVarHttpContentType != \"\" {\n\t\tlocalVarHeaderParams[\"Content-Type\"] = localVarHttpContentType\n\t}\n\n\t// to determine the Accept header\n\tlocalVarHttpHeaderAccepts := []string{}\n\n\t// set Accept header\n\tlocalVarHttpHeaderAccept := selectHeaderAccept(localVarHttpHeaderAccepts)\n\tif localVarHttpHeaderAccept != \"\" {\n\t\tlocalVarHeaderParams[\"Accept\"] = localVarHttpHeaderAccept\n\t}\n\tr, err := a.client.prepareRequest(ctx, localVarPath, localVarHttpMethod, localVarPostBody, localVarHeaderParams, localVarQueryParams, localVarFormParams, localVarFormFileName, localVarFileName, localVarFileBytes)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\n\tlocalVarHttpResponse, err := a.client.callAPI(r)\n\tif err != nil || localVarHttpResponse == nil {\n\t\treturn localVarHttpResponse, err\n\t}\n\n\tlocalVarBody, err := ioutil.ReadAll(localVarHttpResponse.Body)\n\tlocalVarHttpResponse.Body.Close()\n\tif err != nil {\n\t\treturn localVarHttpResponse, err\n\t}\n\n\tif localVarHttpResponse.StatusCode >= 300 {\n\t\tnewErr := GenericOpenAPIError{\n\t\t\tbody: localVarBody,\n\t\t\terror: localVarHttpResponse.Status,\n\t\t}\n\t\treturn localVarHttpResponse, newErr\n\t}\n\n\treturn localVarHttpResponse, nil\n}", "func main() {\n\n\tlink := \"https://talaikis.com/api/quotes/random/\"\n\n\t// skip TLS check\n\ttr := &http.Transport{\n\t\tTLSClientConfig: &tls.Config{InsecureSkipVerify: true},\n\t}\n\tclient := &http.Client{Transport: tr}\n\tresponse, err := client.Get(link)\n\tif err != nil {\n\t\tfmt.Println(err)\n\t}\n\n\tdefer response.Body.Close()\n\n\tcontent, _ := ioutil.ReadAll(response.Body)\n\tfmt.Println(string(content))\n\n}", "func (client HttpSourceClient) getContent(endpoint string) (bytes []byte, err error) {\n\thttpClient := &http.Client{Timeout: client.timeoutPeriod}\n\tresp, err := httpClient.Get(apiBaseUrl + endpoint)\n\tif err != nil {\n\t\treturn\n\t}\n\tdefer func() {\n\t\tclosingErr := resp.Body.Close()\n\t\tif err == nil {\n\t\t\terr = closingErr\n\t\t}\n\t}()\n\treturn io.ReadAll(resp.Body)\n}", "func ReadURL(url string) ([]byte, error) {\n response, err := http.Get(url)\n if err != nil {\n return nil, err\n }\n context, err := ioutil.ReadAll(response.Body)\n response.Body.Close()\n return context, err\n}", "func getI2CreadByte(w http.ResponseWriter, r *http.Request) {\n\tsensor, _ := strconv.ParseInt(r.URL.Query().Get(\"sensor\"), 0, 8)\n\tvar reg int64\n\tsReg := r.URL.Query().Get(\"reg\")\n\tif sReg != \"\" {\n\t\treg, _ = strconv.ParseInt(sReg, 0, 8)\n\t} else {\n\t\treg = 0x1a\n\t}\n\ts, err := ltc.ReadI2CByte(int(sensor), LTC6813.LTC2944Address, uint8(reg))\n\tw.Header().Set(\"Access-Control-Allow-Origin\", \"*\")\n\tfmt.Fprint(w, \"Request = \", r.URL.Query().Get(\"reg\"), \"\\n\")\n\tif err != nil {\n\t\tfmt.Fprint(w, \"Error - 
\", err)\n\t} else {\n\t\tfmt.Fprintf(w, s)\n\t}\n}", "func ReadResponse(r *http.Response, target interface{}) (err error) {\n\tvar (\n\t\tbuf = bufio.NewReader(r.Body)\n\t\ttokens = []byte(\"{}[]\")\n\t)\n\n\tdefer r.Body.Close()\n\n\t// remove leading character unless right tocken will be found\n\tfor {\n\t\tif r, _ := buf.Peek(1); !bytes.Contains(tokens, r) {\n\t\t\tbuf.Read(r)\n\n\t\t\tcontinue\n\t\t}\n\n\t\tbreak\n\t}\n\n\treturn json.NewDecoder(buf).Decode(&target)\n}", "func (crlr ContainersRenewLeaseResponse) LastModified() time.Time {\n\ts := crlr.rawResponse.Header.Get(\"Last-Modified\")\n\tif s == \"\" {\n\t\treturn time.Time{}\n\t}\n\tt, err := time.Parse(time.RFC1123, s)\n\tif err != nil {\n\t\tpanic(err)\n\t}\n\treturn t\n}", "func (resp *PharosResponse) readResponse() {\n\tif !resp.hasBeenRead && resp.Response != nil && resp.Response.Body != nil {\n\t\tresp.data, resp.Error = ioutil.ReadAll(resp.Response.Body)\n\t\tresp.Response.Body.Close()\n\t\tresp.hasBeenRead = true\n\t}\n}", "func (avisess *AviSession) fetchBody(verb, uri string, resp *http.Response) (result []byte, err error) {\n\turl := avisess.prefix + uri\n\terrorResult := AviError{HttpStatusCode: resp.StatusCode, Verb: verb, Url: url}\n\n\tif resp.StatusCode == 204 {\n\t\t// no content in the response\n\t\treturn result, nil\n\t}\n\t// It cannot be assumed that the error will always be from server side in response.\n\t// Error could be from HTTP client side which will not have body in response.\n\t// Need to change our API resp handling design if we want to handle client side errors separately.\n\n\t// Below block will take care for errors without body.\n\tif resp.Body == nil {\n\t\tglog.Errorf(\"Encountered client side error: %+v\", resp)\n\t\terrorResult.Message = &resp.Status\n\t\treturn result, errorResult\n\t}\n\n\tdefer resp.Body.Close()\n\tresult, err = ioutil.ReadAll(resp.Body)\n\tif err == nil {\n\t\tif resp.StatusCode < 200 || resp.StatusCode > 299 || resp.StatusCode == 500 {\n\t\t\tmres, merr := convertAviResponseToMapInterface(result)\n\t\t\tglog.Infof(\"Error code %v parsed resp: %v err %v\",\n\t\t\t\tresp.StatusCode, mres, merr)\n\t\t\temsg := fmt.Sprintf(\"%v\", mres)\n\t\t\terrorResult.Message = &emsg\n\t\t} else {\n\t\t\treturn result, nil\n\t\t}\n\t} else {\n\t\terrmsg := fmt.Sprintf(\"Response body read failed: %v\", err)\n\t\terrorResult.Message = &errmsg\n\t\tglog.Errorf(\"Error in reading uri %v %v\", uri, err)\n\t}\n\treturn result, errorResult\n}", "func (api Tumblr) rawGet(url string) []byte {\n\trequest, err := http.NewRequest(\"GET\", url, nil)\n\n\tif err != nil {\n\t\tlog.Println(err)\n\t}\n\n\tapi.oauthService.Sign(request, &api.config)\n\tclient := new(http.Client)\n\tclientResponse, err := client.Do(request)\n\n\tif err != nil {\n\t\tlog.Println(err)\n\t\treturn []byte{0}\n\t}\n\tdefer clientResponse.Body.Close()\n\n\tbody, err := ioutil.ReadAll(clientResponse.Body)\n\tif err != nil {\n\t\tlog.Println(err)\n\t}\n\n\treturn body\n}", "func (bclr BlobsChangeLeaseResponse) LastModified() time.Time {\n\ts := bclr.rawResponse.Header.Get(\"Last-Modified\")\n\tif s == \"\" {\n\t\treturn time.Time{}\n\t}\n\tt, err := time.Parse(time.RFC1123, s)\n\tif err != nil {\n\t\tpanic(err)\n\t}\n\treturn t\n}", "func readBody(win *acme.Win) ([]byte, error) {\n\tvar body []byte\n\tbuf := make([]byte, 8000)\n\tfor {\n\t\tn, err := win.Read(\"body\", buf)\n\t\tif err == io.EOF {\n\t\t\tbreak\n\t\t}\n\t\tif err != nil {\n\t\t\treturn nil, err\n\t\t}\n\t\tbody = append(body, buf[0:n]...)\n\t}\n\treturn 
body, nil\n}", "func (crlr ContainersReleaseLeaseResponse) Response() *http.Response {\n\treturn crlr.rawResponse\n}", "func (brlr BlobsRenewLeaseResponse) LastModified() time.Time {\n\ts := brlr.rawResponse.Header.Get(\"Last-Modified\")\n\tif s == \"\" {\n\t\treturn time.Time{}\n\t}\n\tt, err := time.Parse(time.RFC1123, s)\n\tif err != nil {\n\t\tpanic(err)\n\t}\n\treturn t\n}", "func (r *httpResponse) Read(p []byte) (n int, err error) {\n\tif len(r.left) != 0 {\n\t\tn = copy(p, r.left)\n\t\tr.left = r.left[n:]\n\t\tif len(r.left) != 0 {\n\t\t\treturn\n\t\t}\n\t}\n\tvar m int\n\tif n < len(p) {\n\t\tm, err = r.resp.Body.Read(p[n:])\n\t\tn += m\n\t\tif err != nil {\n\t\t\tif !r.plain && n > 0 && p[n-1] == ']' {\n\t\t\t\tn--\n\t\t\t}\n\t\t\tif err != io.EOF {\n\t\t\t\terr = NewError(NetworkError, \"http.Response.Body.Read failed.\", map[string]interface{}{\n\t\t\t\t\t\"error\": err.Error(),\n\t\t\t\t})\n\t\t\t}\n\t\t\treturn\n\t\t}\n\t}\n\tif r.plain || n == 0 || p[n-1] != ']' {\n\t\treturn\n\t}\n\tm, err = r.resp.Body.Read(r.buf[:])\n\tif err == nil {\n\t\tr.left = r.buf[:m]\n\t\treturn\n\t}\n\tif m == 0 {\n\t\tn--\n\t}\n\tif err != io.EOF {\n\t\terr = NewError(NetworkError, \"http.Response.Body.Read failed.\", map[string]interface{}{\n\t\t\t\"error\": err.Error(),\n\t\t})\n\t}\n\treturn\n}", "func (c *apiClient) do(request *http.Request) ([]byte, error) {\n\tresponse, err := c.HTTPClient.Do(request)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\tdefer response.Body.Close()\n\t// Implementation note: always read and log the response body since\n\t// it's quite useful to see the response JSON on API error.\n\tr := io.LimitReader(response.Body, DefaultMaxBodySize)\n\tdata, err := netxlite.ReadAllContext(request.Context(), r)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\tc.Logger.Debugf(\"httpx: response body length: %d bytes\", len(data))\n\tif c.LogBody {\n\t\tc.Logger.Debugf(\"httpx: response body: %s\", string(data))\n\t}\n\tif response.StatusCode >= 400 {\n\t\treturn nil, fmt.Errorf(\"%w: %s\", ErrRequestFailed, response.Status)\n\t}\n\treturn data, nil\n}", "func (o *DownloadTLSPrivateKeyReader) ReadResponse(response runtime.ClientResponse, consumer runtime.Consumer) (interface{}, error) {\n\tswitch response.Code() {\n\tcase 200:\n\t\tresult := NewDownloadTLSPrivateKeyOK()\n\t\tif err := result.readResponse(response, consumer, o.formats); err != nil {\n\t\t\treturn nil, err\n\t\t}\n\t\treturn result, nil\n\tcase 401:\n\t\tresult := NewDownloadTLSPrivateKeyUnauthorized()\n\t\tif err := result.readResponse(response, consumer, o.formats); err != nil {\n\t\t\treturn nil, err\n\t\t}\n\t\treturn nil, result\n\tcase 403:\n\t\tresult := NewDownloadTLSPrivateKeyForbidden()\n\t\tif err := result.readResponse(response, consumer, o.formats); err != nil {\n\t\t\treturn nil, err\n\t\t}\n\t\treturn nil, result\n\n\tdefault:\n\t\treturn nil, runtime.NewAPIError(\"unknown error\", response, response.Code())\n\t}\n}", "func (crlr ContainersReleaseLeaseResponse) LastModified() time.Time {\n\ts := crlr.rawResponse.Header.Get(\"Last-Modified\")\n\tif s == \"\" {\n\t\treturn time.Time{}\n\t}\n\tt, err := time.Parse(time.RFC1123, s)\n\tif err != nil {\n\t\tpanic(err)\n\t}\n\treturn t\n}", "func (rpr ReadPathResponse) ContentRange() string {\n\treturn rpr.rawResponse.Header.Get(\"Content-Range\")\n}", "func (c *Client) constructResp(conn io.Reader, req *Request) (*Response, error) {\n\t// TODO\n\t// Receive and prase repsonse message\n\tresp := &Response{Header: make(map[string]string)}\n\treader := 
bufio.NewReaderSize(conn, ClientResponseBufSize)\n\tvar wholeLine []byte\n\tvar lastWait = false\n\tvar step = ResponseStepStatusLine\nLOOP:\n\tfor {\n\t\tif line, isWait, err := reader.ReadLine(); err == nil {\n\t\t\tif !isWait {\n\t\t\t\t// Complete line\n\t\t\t\tif !lastWait {\n\t\t\t\t\twholeLine = line\n\t\t\t\t} else {\n\t\t\t\t\twholeLine = append(wholeLine, line...)\n\t\t\t\t}\n\t\t\t\t// Process the line\n\t\t\t\tswitch step {\n\t\t\t\tcase ResponseStepStatusLine:\n\t\t\t\t\t{\n\t\t\t\t\t\tstatusLineWords := strings.SplitN(string(wholeLine), \" \", 3)\n\t\t\t\t\t\tresp.Proto = statusLineWords[0]\n\t\t\t\t\t\tresp.StatusCode, err = strconv.Atoi(statusLineWords[1])\n\t\t\t\t\t\tresp.Status = statusLineWords[2]\n\t\t\t\t\t\tstep = ResponseStepHeader\n\t\t\t\t\t}\n\t\t\t\tcase ResponseStepHeader:\n\t\t\t\t\t{\n\t\t\t\t\t\tif len(line) != 0 {\n\t\t\t\t\t\t\theaderWords := strings.SplitN(string(wholeLine), \": \", 2)\n\t\t\t\t\t\t\tresp.Header[headerWords[0]] = headerWords[1]\n\n\t\t\t\t\t\t} else {\n\t\t\t\t\t\t\t// fmt.Println(resp.Header)\n\t\t\t\t\t\t\tstep = ResponseStepBody\n\t\t\t\t\t\t\tcLenStr, ok := resp.Header[HeaderContentLength]\n\t\t\t\t\t\t\tif !ok {\n\t\t\t\t\t\t\t\treturn nil, errors.New(\"No Content-Length in Response header\")\n\t\t\t\t\t\t\t}\n\t\t\t\t\t\t\tcLen, _ := strconv.ParseInt(cLenStr, 10, 64)\n\t\t\t\t\t\t\tresp.ContentLength = cLen\n\n\t\t\t\t\t\t\t// Transfer the body to Response\n\t\t\t\t\t\t\tresp.Body = &ResponseReader{\n\t\t\t\t\t\t\t\tc: c,\n\t\t\t\t\t\t\t\tconn: conn,\n\t\t\t\t\t\t\t\thost: req.URL.Host,\n\t\t\t\t\t\t\t\tr: &io.LimitedReader{\n\t\t\t\t\t\t\t\t\tR: reader,\n\t\t\t\t\t\t\t\t\tN: resp.ContentLength,\n\t\t\t\t\t\t\t\t},\n\t\t\t\t\t\t\t}\n\t\t\t\t\t\t\tbreak LOOP\n\t\t\t\t\t\t}\n\t\t\t\t\t}\n\t\t\t\tcase ResponseStepBody:\n\t\t\t\t\t{\n\t\t\t\t\t\tpanic(\"Cannot be here\")\n\t\t\t\t\t}\n\t\t\t\t}\n\n\t\t\t} else {\n\t\t\t\t// Not complete\n\t\t\t\tif !lastWait {\n\t\t\t\t\twholeLine = line\n\t\t\t\t} else {\n\t\t\t\t\twholeLine = append(wholeLine, line...)\n\t\t\t\t}\n\t\t\t}\n\t\t\tlastWait = isWait\n\t\t} else if err == io.EOF {\n\t\t\tbreak\n\t\t} else {\n\t\t\treturn nil, err\n\t\t}\n\t}\n\treturn resp, nil\n}", "func (s *ResponseModifier) ReadString() (string, error) {\n\tbuf, err := ioutil.ReadAll(s.Response.Body)\n\tif err != nil {\n\t\treturn \"\", err\n\t}\n\ts.Bytes(buf)\n\treturn string(buf), nil\n}", "func (calr ContainersAcquireLeaseResponse) LastModified() time.Time {\n\ts := calr.rawResponse.Header.Get(\"Last-Modified\")\n\tif s == \"\" {\n\t\treturn time.Time{}\n\t}\n\tt, err := time.Parse(time.RFC1123, s)\n\tif err != nil {\n\t\tpanic(err)\n\t}\n\treturn t\n}", "func getResp(url string) []byte {\n\tresp, err := http.Get(url)\n\tif err != nil {\n\t\tfmt.Println(\"Cannot evaluate the requested url\")\n\t\tos.Exit(1)\n\t}\n\tdefer resp.Body.Close()\n\tbody, err := ioutil.ReadAll(resp.Body)\n\tif err != nil {\n\t\tfmt.Println(\"Cannot evaulate response body\")\n\t\tos.Exit(1)\n\t}\n\tjsonString := fmt.Sprintf(\"%s\", string(body))\n\treturn []byte(jsonString)\n}", "func (bblr BlobsBreakLeaseResponse) LastModified() time.Time {\n\ts := bblr.rawResponse.Header.Get(\"Last-Modified\")\n\tif s == \"\" {\n\t\treturn time.Time{}\n\t}\n\tt, err := time.Parse(time.RFC1123, s)\n\tif err != nil {\n\t\tpanic(err)\n\t}\n\treturn t\n}", "func callback(resp http.ResponseWriter, request *http.Request) {\n\tbody, err := ioutil.ReadAll(request.Body)\n\tif err != nil {\n\t\tlog.Println(\"Failed reading 
body\")\n\t\tresp.WriteHeader(401)\n\t\tresp.Write([]byte(fmt.Sprintf(`{\"success\": false}`)))\n\t\treturn\n\t}\n\n\tlog.Println(string(body))\n\n\tresp.WriteHeader(200)\n\tresp.Write([]byte(fmt.Sprintf(`{\"success\": true}`)))\n\treturn\n}", "func (cblr ContainersBreakLeaseResponse) LastModified() time.Time {\n\ts := cblr.rawResponse.Header.Get(\"Last-Modified\")\n\tif s == \"\" {\n\t\treturn time.Time{}\n\t}\n\tt, err := time.Parse(time.RFC1123, s)\n\tif err != nil {\n\t\tpanic(err)\n\t}\n\treturn t\n}", "func (c *pbClientCodec) ReadResponseHeader(r *rpc.Response) error {\n\tdata, err := ReadNetString(c.rwc)\n\tif err != nil {\n\t\treturn err\n\t}\n\trtmp := new(Response)\n\terr = proto.Unmarshal(data, rtmp)\n\tif err != nil {\n\t\treturn err\n\t}\n\tr.ServiceMethod = *rtmp.ServiceMethod\n\tr.Seq = *rtmp.Seq\n\tr.Error = *rtmp.Error\n\treturn nil\n}", "func (ch *clientSecureChannel) readResponse() (ua.ServiceResponse, error) {\n\tch.receivingSemaphore.Lock()\n\tdefer ch.receivingSemaphore.Unlock()\n\tvar res ua.ServiceResponse\n\tvar paddingHeaderSize int\n\tvar plainHeaderSize int\n\tvar bodySize int\n\tvar paddingSize int\n\tsignatureSize := ch.securityPolicy.SymSignatureSize()\n\n\tvar bodyStream = buffer.NewPartitionAt(bufferPool)\n\tdefer bodyStream.Reset()\n\n\tvar receiveBuffer = *(bytesPool.Get().(*[]byte))\n\tdefer bytesPool.Put(&receiveBuffer)\n\n\tvar bodyDecoder = ua.NewBinaryDecoder(bodyStream, ch)\n\n\t// read chunks\n\tvar chunkCount int32\n\tvar isFinal bool\n\n\tfor !isFinal {\n\t\tchunkCount++\n\t\tif i := int32(ch.maxChunkCount); i > 0 && chunkCount > i {\n\t\t\treturn nil, ua.BadEncodingLimitsExceeded\n\t\t}\n\n\t\tcount, err := ch.Read(receiveBuffer)\n\t\tif err != nil || count == 0 {\n\t\t\treturn nil, ua.BadSecureChannelClosed\n\t\t}\n\n\t\tvar stream = bytes.NewReader(receiveBuffer[0:count])\n\t\tvar decoder = ua.NewBinaryDecoder(stream, ch)\n\n\t\tvar messageType uint32\n\t\tif err := decoder.ReadUInt32(&messageType); err != nil {\n\t\t\treturn nil, ua.BadDecodingError\n\t\t}\n\t\tvar messageLength uint32\n\t\tif err := decoder.ReadUInt32(&messageLength); err != nil {\n\t\t\treturn nil, ua.BadDecodingError\n\t\t}\n\n\t\tif count != int(messageLength) {\n\t\t\treturn nil, ua.BadDecodingError\n\t\t}\n\n\t\tswitch messageType {\n\t\tcase ua.MessageTypeChunk, ua.MessageTypeFinal:\n\t\t\t// header\n\t\t\tvar channelID uint32\n\t\t\tif err := decoder.ReadUInt32(&channelID); err != nil {\n\t\t\t\treturn nil, ua.BadDecodingError\n\t\t\t}\n\t\t\tif channelID != ch.channelID {\n\t\t\t\treturn nil, ua.BadTCPSecureChannelUnknown\n\t\t\t}\n\n\t\t\t// symmetric security header\n\t\t\tvar tokenID uint32\n\t\t\tif err := decoder.ReadUInt32(&tokenID); err != nil {\n\t\t\t\treturn nil, ua.BadDecodingError\n\t\t\t}\n\n\t\t\t// detect new token\n\t\t\tch.tokenLock.RLock()\n\t\t\tif tokenID != ch.receivingTokenID {\n\t\t\t\tch.receivingTokenID = tokenID\n\n\t\t\t\tswitch ch.securityMode {\n\t\t\t\tcase ua.MessageSecurityModeSignAndEncrypt, ua.MessageSecurityModeSign:\n\t\t\t\t\t// (re)create remote security keys for verifying, decrypting\n\t\t\t\t\tremoteSecurityKey := calculatePSHA(ch.localNonce, ch.remoteNonce, len(ch.remoteSigningKey)+len(ch.remoteEncryptingKey)+len(ch.remoteInitializationVector), ch.securityPolicyURI)\n\t\t\t\t\tjj := copy(ch.remoteSigningKey, remoteSecurityKey)\n\t\t\t\t\tjj += copy(ch.remoteEncryptingKey, remoteSecurityKey[jj:])\n\t\t\t\t\tcopy(ch.remoteInitializationVector, remoteSecurityKey[jj:])\n\n\t\t\t\t\t// update verifier and decrypter with new 
symmetric keys\n\t\t\t\t\tch.symVerifyHMAC = ch.securityPolicy.SymHMACFactory(ch.remoteSigningKey)\n\t\t\t\t\tif ch.securityMode == ua.MessageSecurityModeSignAndEncrypt {\n\t\t\t\t\t\tch.symDecryptingBlockCipher, _ = aes.NewCipher(ch.remoteEncryptingKey)\n\t\t\t\t\t}\n\t\t\t\t}\n\t\t\t}\n\t\t\tch.tokenLock.RUnlock()\n\n\t\t\tplainHeaderSize = 16\n\t\t\t// decrypt\n\t\t\tif ch.securityMode == ua.MessageSecurityModeSignAndEncrypt {\n\t\t\t\tspan := receiveBuffer[plainHeaderSize:count]\n\t\t\t\tif len(span)%ch.symDecryptingBlockCipher.BlockSize() != 0 {\n\t\t\t\t\treturn nil, ua.BadDecodingError\n\t\t\t\t}\n\t\t\t\tcipher.NewCBCDecrypter(ch.symDecryptingBlockCipher, ch.remoteInitializationVector).CryptBlocks(span, span)\n\t\t\t}\n\n\t\t\t// verify\n\t\t\tswitch ch.securityMode {\n\t\t\tcase ua.MessageSecurityModeSignAndEncrypt, ua.MessageSecurityModeSign:\n\t\t\t\tsigStart := count - signatureSize\n\t\t\t\tch.symVerifyHMAC.Reset()\n\t\t\t\tch.symVerifyHMAC.Write(receiveBuffer[:sigStart])\n\t\t\t\tsig := ch.symVerifyHMAC.Sum(nil)\n\t\t\t\tif !hmac.Equal(sig, receiveBuffer[sigStart:count]) {\n\t\t\t\t\treturn nil, ua.BadSecurityChecksFailed\n\t\t\t\t}\n\t\t\t}\n\n\t\t\t// read sequence header\n\t\t\tvar unused uint32\n\t\t\tif err = decoder.ReadUInt32(&unused); err != nil {\n\t\t\t\treturn nil, ua.BadDecodingError\n\t\t\t}\n\n\t\t\tif err = decoder.ReadUInt32(&unused); err != nil {\n\t\t\t\treturn nil, ua.BadDecodingError\n\t\t\t}\n\n\t\t\t// body\n\t\t\tswitch ch.securityMode {\n\t\t\tcase ua.MessageSecurityModeSignAndEncrypt:\n\t\t\t\tif ch.securityPolicy.SymEncryptionBlockSize() > 256 {\n\t\t\t\t\tpaddingHeaderSize = 2\n\t\t\t\t\tstart := int(messageLength) - signatureSize - paddingHeaderSize\n\t\t\t\t\tpaddingSize = int(binary.LittleEndian.Uint16(receiveBuffer[start : start+2]))\n\t\t\t\t} else {\n\t\t\t\t\tpaddingHeaderSize = 1\n\t\t\t\t\tstart := int(messageLength) - signatureSize - paddingHeaderSize\n\t\t\t\t\tpaddingSize = int(receiveBuffer[start])\n\t\t\t\t}\n\t\t\t\tbodySize = int(messageLength) - plainHeaderSize - sequenceHeaderSize - paddingSize - paddingHeaderSize - signatureSize\n\n\t\t\tdefault:\n\t\t\t\tbodySize = int(messageLength) - plainHeaderSize - sequenceHeaderSize - signatureSize\n\t\t\t}\n\n\t\t\tm := plainHeaderSize + sequenceHeaderSize\n\t\t\tn := m + bodySize\n\t\t\t_, err = bodyStream.Write(receiveBuffer[m:n])\n\t\t\tif err != nil {\n\t\t\t\treturn nil, err\n\t\t\t}\n\n\t\t\tisFinal = messageType == ua.MessageTypeFinal\n\n\t\tcase ua.MessageTypeOpenFinal:\n\t\t\t// header\n\t\t\tvar unused1 uint32\n\t\t\tif err = decoder.ReadUInt32(&unused1); err != nil {\n\t\t\t\treturn nil, ua.BadDecodingError\n\t\t\t}\n\t\t\t// asymmetric header\n\t\t\tvar unused2 string\n\t\t\tif err = decoder.ReadString(&unused2); err != nil {\n\t\t\t\treturn nil, ua.BadDecodingError\n\t\t\t}\n\t\t\tvar unused3 ua.ByteString\n\t\t\tif err := decoder.ReadByteString(&unused3); err != nil {\n\t\t\t\treturn nil, ua.BadDecodingError\n\t\t\t}\n\t\t\tif err := decoder.ReadByteString(&unused3); err != nil {\n\t\t\t\treturn nil, ua.BadDecodingError\n\t\t\t}\n\t\t\tplainHeaderSize = count - stream.Len()\n\n\t\t\t// decrypt\n\t\t\tswitch ch.securityMode {\n\t\t\tcase ua.MessageSecurityModeSignAndEncrypt, ua.MessageSecurityModeSign:\n\t\t\t\tcipherTextBlockSize := ch.localPrivateKeySize\n\t\t\t\tcipherText := make([]byte, cipherTextBlockSize)\n\t\t\t\tjj := plainHeaderSize\n\t\t\t\tfor ii := plainHeaderSize; ii < int(messageLength); ii += cipherTextBlockSize {\n\t\t\t\t\tcopy(cipherText, 
receiveBuffer[ii:])\n\t\t\t\t\t// decrypt with local private key.\n\t\t\t\t\tplainText, err := ch.securityPolicy.RSADecrypt(ch.localPrivateKey, cipherText)\n\t\t\t\t\tif err != nil {\n\t\t\t\t\t\treturn nil, err\n\t\t\t\t\t}\n\t\t\t\t\tjj += copy(receiveBuffer[jj:], plainText)\n\t\t\t\t}\n\t\t\t\t// msg is shorter after decryption\n\t\t\t\tmessageLength = uint32(jj)\n\t\t\t}\n\n\t\t\t// verify\n\t\t\tswitch ch.securityMode {\n\t\t\tcase ua.MessageSecurityModeSignAndEncrypt, ua.MessageSecurityModeSign:\n\t\t\t\t// verify with remote public key.\n\t\t\t\tsigEnd := int(messageLength)\n\t\t\t\tsigStart := sigEnd - ch.remotePublicKeySize\n\t\t\t\terr := ch.securityPolicy.RSAVerify(ch.remotePublicKey, receiveBuffer[:sigStart], receiveBuffer[sigStart:sigEnd])\n\t\t\t\tif err != nil {\n\t\t\t\t\treturn nil, ua.BadDecodingError\n\t\t\t\t}\n\t\t\t}\n\n\t\t\t// sequence header\n\t\t\tvar unused uint32\n\t\t\tif err = decoder.ReadUInt32(&unused); err != nil {\n\t\t\t\treturn nil, ua.BadDecodingError\n\t\t\t}\n\t\t\tif err = decoder.ReadUInt32(&unused); err != nil {\n\t\t\t\treturn nil, ua.BadDecodingError\n\t\t\t}\n\n\t\t\t// body\n\t\t\tswitch ch.securityMode {\n\t\t\tcase ua.MessageSecurityModeSignAndEncrypt, ua.MessageSecurityModeSign:\n\t\t\t\tcipherTextBlockSize := ch.localPrivateKeySize\n\t\t\t\tsignatureSize := ch.remotePublicKeySize\n\t\t\t\tif cipherTextBlockSize > 256 {\n\t\t\t\t\tpaddingHeaderSize = 2\n\t\t\t\t\tstart := int(messageLength) - signatureSize - paddingHeaderSize\n\t\t\t\t\tpaddingSize = int(binary.LittleEndian.Uint16(receiveBuffer[start : start+2]))\n\t\t\t\t} else {\n\t\t\t\t\tpaddingHeaderSize = 1\n\t\t\t\t\tstart := int(messageLength) - signatureSize - paddingHeaderSize\n\t\t\t\t\tpaddingSize = int(receiveBuffer[start])\n\t\t\t\t}\n\t\t\t\tbodySize = int(messageLength) - plainHeaderSize - sequenceHeaderSize - paddingSize - paddingHeaderSize - signatureSize\n\n\t\t\tdefault:\n\t\t\t\tbodySize = int(messageLength) - plainHeaderSize - sequenceHeaderSize // - ch.asymRemoteSignatureSize\n\t\t\t}\n\n\t\t\tm := plainHeaderSize + sequenceHeaderSize\n\t\t\tn := m + bodySize\n\t\t\tif _, err := bodyStream.Write(receiveBuffer[m:n]); err != nil {\n\t\t\t\treturn nil, err\n\t\t\t}\n\n\t\t\tisFinal = messageType == ua.MessageTypeOpenFinal\n\n\t\tcase ua.MessageTypeError, ua.MessageTypeAbort:\n\t\t\tvar statusCode uint32\n\t\t\tif err := decoder.ReadUInt32(&statusCode); err != nil {\n\t\t\t\treturn nil, ua.BadDecodingError\n\t\t\t}\n\t\t\tvar unused string\n\t\t\tif err = decoder.ReadString(&unused); err != nil {\n\t\t\t\treturn nil, ua.BadDecodingError\n\t\t\t}\n\t\t\tch.errCode = ua.StatusCode(statusCode)\n\t\t\treturn nil, ua.StatusCode(statusCode)\n\n\t\tdefault:\n\t\t\treturn nil, ua.BadUnknownResponse\n\t\t}\n\n\t\tif i := int64(ch.maxMessageSize); i > 0 && bodyStream.Len() > i {\n\t\t\treturn nil, ua.BadEncodingLimitsExceeded\n\t\t}\n\t}\n\n\tvar nodeID ua.NodeID\n\tif err := bodyDecoder.ReadNodeID(&nodeID); err != nil {\n\t\treturn nil, ua.BadDecodingError\n\t}\n\tvar temp interface{}\n\tswitch nodeID {\n\n\t// frequent\n\tcase ua.ObjectIDPublishResponseEncodingDefaultBinary:\n\t\ttemp = new(ua.PublishResponse)\n\tcase ua.ObjectIDReadResponseEncodingDefaultBinary:\n\t\ttemp = new(ua.ReadResponse)\n\tcase ua.ObjectIDBrowseResponseEncodingDefaultBinary:\n\t\ttemp = new(ua.BrowseResponse)\n\tcase ua.ObjectIDBrowseNextResponseEncodingDefaultBinary:\n\t\ttemp = new(ua.BrowseNextResponse)\n\tcase ua.ObjectIDTranslateBrowsePathsToNodeIDsResponseEncodingDefaultBinary:\n\t\ttemp = 
new(ua.TranslateBrowsePathsToNodeIDsResponse)\n\tcase ua.ObjectIDWriteResponseEncodingDefaultBinary:\n\t\ttemp = new(ua.WriteResponse)\n\tcase ua.ObjectIDCallResponseEncodingDefaultBinary:\n\t\ttemp = new(ua.CallResponse)\n\tcase ua.ObjectIDHistoryReadResponseEncodingDefaultBinary:\n\t\ttemp = new(ua.HistoryReadResponse)\n\n\t// moderate\n\tcase ua.ObjectIDGetEndpointsResponseEncodingDefaultBinary:\n\t\ttemp = new(ua.GetEndpointsResponse)\n\tcase ua.ObjectIDOpenSecureChannelResponseEncodingDefaultBinary:\n\t\ttemp = new(ua.OpenSecureChannelResponse)\n\tcase ua.ObjectIDCloseSecureChannelResponseEncodingDefaultBinary:\n\t\ttemp = new(ua.CloseSecureChannelResponse)\n\tcase ua.ObjectIDCreateSessionResponseEncodingDefaultBinary:\n\t\ttemp = new(ua.CreateSessionResponse)\n\tcase ua.ObjectIDActivateSessionResponseEncodingDefaultBinary:\n\t\ttemp = new(ua.ActivateSessionResponse)\n\tcase ua.ObjectIDCloseSessionResponseEncodingDefaultBinary:\n\t\ttemp = new(ua.CloseSessionResponse)\n\tcase ua.ObjectIDCreateMonitoredItemsResponseEncodingDefaultBinary:\n\t\ttemp = new(ua.CreateMonitoredItemsResponse)\n\tcase ua.ObjectIDDeleteMonitoredItemsResponseEncodingDefaultBinary:\n\t\ttemp = new(ua.DeleteMonitoredItemsResponse)\n\tcase ua.ObjectIDCreateSubscriptionResponseEncodingDefaultBinary:\n\t\ttemp = new(ua.CreateSubscriptionResponse)\n\tcase ua.ObjectIDDeleteSubscriptionsResponseEncodingDefaultBinary:\n\t\ttemp = new(ua.DeleteSubscriptionsResponse)\n\tcase ua.ObjectIDSetPublishingModeResponseEncodingDefaultBinary:\n\t\ttemp = new(ua.SetPublishingModeResponse)\n\tcase ua.ObjectIDServiceFaultEncodingDefaultBinary:\n\t\ttemp = new(ua.ServiceFault)\n\n\t\t// rare\n\tcase ua.ObjectIDModifyMonitoredItemsResponseEncodingDefaultBinary:\n\t\ttemp = new(ua.ModifyMonitoredItemsResponse)\n\tcase ua.ObjectIDSetMonitoringModeResponseEncodingDefaultBinary:\n\t\ttemp = new(ua.SetMonitoringModeResponse)\n\tcase ua.ObjectIDSetTriggeringResponseEncodingDefaultBinary:\n\t\ttemp = new(ua.SetTriggeringResponse)\n\tcase ua.ObjectIDModifySubscriptionResponseEncodingDefaultBinary:\n\t\ttemp = new(ua.ModifySubscriptionResponse)\n\tcase ua.ObjectIDRepublishResponseEncodingDefaultBinary:\n\t\ttemp = new(ua.RepublishResponse)\n\tcase ua.ObjectIDTransferSubscriptionsResponseEncodingDefaultBinary:\n\t\ttemp = new(ua.TransferSubscriptionsResponse)\n\tcase ua.ObjectIDFindServersResponseEncodingDefaultBinary:\n\t\ttemp = new(ua.FindServersResponse)\n\tcase ua.ObjectIDFindServersOnNetworkResponseEncodingDefaultBinary:\n\t\ttemp = new(ua.FindServersOnNetworkResponse)\n\tcase ua.ObjectIDRegisterServerResponseEncodingDefaultBinary:\n\t\ttemp = new(ua.RegisterServerResponse)\n\tcase ua.ObjectIDRegisterServer2ResponseEncodingDefaultBinary:\n\t\ttemp = new(ua.RegisterServer2Response)\n\tcase ua.ObjectIDCancelResponseEncodingDefaultBinary:\n\t\ttemp = new(ua.CancelResponse)\n\tcase ua.ObjectIDAddNodesResponseEncodingDefaultBinary:\n\t\ttemp = new(ua.AddNodesResponse)\n\tcase ua.ObjectIDAddReferencesResponseEncodingDefaultBinary:\n\t\ttemp = new(ua.AddReferencesResponse)\n\tcase ua.ObjectIDDeleteNodesResponseEncodingDefaultBinary:\n\t\ttemp = new(ua.DeleteNodesResponse)\n\tcase ua.ObjectIDDeleteReferencesResponseEncodingDefaultBinary:\n\t\ttemp = new(ua.DeleteReferencesResponse)\n\tcase ua.ObjectIDRegisterNodesResponseEncodingDefaultBinary:\n\t\ttemp = new(ua.RegisterNodesResponse)\n\tcase ua.ObjectIDUnregisterNodesResponseEncodingDefaultBinary:\n\t\ttemp = new(ua.UnregisterNodesResponse)\n\tcase 
ua.ObjectIDQueryFirstResponseEncodingDefaultBinary:\n\t\ttemp = new(ua.QueryFirstResponse)\n\tcase ua.ObjectIDQueryNextResponseEncodingDefaultBinary:\n\t\ttemp = new(ua.QueryNextResponse)\n\tcase ua.ObjectIDHistoryUpdateResponseEncodingDefaultBinary:\n\t\ttemp = new(ua.HistoryUpdateResponse)\n\tdefault:\n\t\treturn nil, ua.BadDecodingError\n\t}\n\n\t// decode fields from message stream\n\tif err := bodyDecoder.Decode(temp); err != nil {\n\t\treturn nil, ua.BadDecodingError\n\t}\n\tres = temp.(ua.ServiceResponse)\n\n\tif ch.trace {\n\t\tb, _ := json.MarshalIndent(res, \"\", \" \")\n\t\tlog.Printf(\"%s%s\", reflect.TypeOf(res).Elem().Name(), b)\n\t}\n\n\treturn res, nil\n}", "func (r *Response) getInternalReader() io.Reader {\n\n\tif r.internalByteBuffer.Len() != 0 {\n\t\treturn r.internalByteBuffer\n\t}\n\treturn r\n}", "func (client *KeyVaultClient) getCertificateHandleResponse(resp *http.Response) (KeyVaultClientGetCertificateResponse, error) {\n\tresult := KeyVaultClientGetCertificateResponse{RawResponse: resp}\n\tif err := runtime.UnmarshalAsJSON(resp, &result.CertificateBundle); err != nil {\n\t\treturn KeyVaultClientGetCertificateResponse{}, err\n\t}\n\treturn result, nil\n}", "func (cr CompileResponse) GetBody() string {\n\tif cr.Body == nil {\n\t\treturn \"\"\n\t}\n\n\treturn *cr.Body\n}", "func (c Client) get(path string) ([]byte, error) {\n\turl := c.Host + path\n\n\tresp, err := c.HTTPClient.Get(url)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\n\tdefer resp.Body.Close()\n\n\tbody, err := ioutil.ReadAll(resp.Body)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\n\treturn body, nil\n}", "func fetchOCSPResponseFromService(clientCert, issuerCert *x509.Certificate, ocspServer string) ([]byte, error) {\n\n\topts := &ocsp.RequestOptions{Hash: crypto.SHA1}\n\n\tbuffer, err := ocsp.CreateRequest(clientCert, issuerCert, opts)\n\tif err != nil {\n\t\tmessage := fmt.Sprintf(\"error: error <%v> at ocsp.CreateRequest()\", err)\n\t\treturn nil, errors.New(message)\n\t}\n\n\thttpRequest, err := http.NewRequest(http.MethodPost, ocspServer, bytes.NewBuffer(buffer))\n\tif err != nil {\n\t\tmessage := fmt.Sprintf(\"error: error <%v> at http.NewRequest()\", err)\n\t\treturn nil, errors.New(message)\n\t}\n\n\tocspURL, err := url.Parse(ocspServer)\n\tif err != nil {\n\t\tmessage := fmt.Sprintf(\"error: error <%v> at url.Parse()\", err)\n\t\treturn nil, errors.New(message)\n\t}\n\n\thttpRequest.Header.Add(\"Content-Type\", \"application/ocsp-request\")\n\thttpRequest.Header.Add(\"Accept\", \"application/ocsp-response\")\n\thttpRequest.Header.Add(\"host\", ocspURL.Host)\n\n\thttpClient := &http.Client{\n\t\tTimeout: time.Duration(*timeout) * time.Second,\n\t}\n\n\thttpResponse, err := httpClient.Do(httpRequest)\n\tif err != nil {\n\t\tmessage := fmt.Sprintf(\"error: error <%v> at httpClient.Do()\", err)\n\t\treturn nil, errors.New(message)\n\t}\n\tdefer httpResponse.Body.Close()\n\n\toutput, err := ioutil.ReadAll(httpResponse.Body)\n\tif err != nil {\n\t\tmessage := fmt.Sprintf(\"error: error <%v> at ioutil.ReadAll()\", err)\n\t\treturn nil, errors.New(message)\n\t}\n\n\treturn output, nil\n}", "func getHTTP(uri string, sslVerify bool, timeout time.Duration) (io.ReadCloser, error) {\n\ttr := &http.Transport{TLSClientConfig: &tls.Config{InsecureSkipVerify: !sslVerify}}\n\tclient := http.Client{\n\t\tTimeout: timeout,\n\t\tTransport: tr,\n\t\tCheckRedirect: redirectPolicyFunc,\n\t}\n\n\treq, err := http.NewRequest(\"GET\", uri, nil)\n\treq.Header.Add(\"Authorization\", \"Basic 
\"+globalBasicAuthString)\n\tresp, err := client.Do(req)\n\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\tif !(resp.StatusCode >= 200 && resp.StatusCode < 300) {\n\t\tresp.Body.Close()\n\t\treturn nil, fmt.Errorf(\"HTTP status %d\", resp.StatusCode)\n\t}\n\treturn resp.Body, nil\n}", "func ReadURL(url string, client *http.Client, header *http.Header) (body []byte, err error) {\n\treq, err := http.NewRequest(\"GET\", url, nil)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\tif header != nil {\n\t\treq.Header = *header\n\t}\n\tresp, err := client.Do(req)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\tdefer resp.Body.Close()\n\n\tif resp.StatusCode != http.StatusOK {\n\t\tklog.V(2).InfoS(\"Failed to read URL\", \"statusCode\", resp.StatusCode, \"URL\", url)\n\t\treturn nil, &HTTPError{\n\t\t\tStatusCode: resp.StatusCode,\n\t\t\tURL: url,\n\t\t}\n\t}\n\n\tlimitedReader := &io.LimitedReader{R: resp.Body, N: maxReadLength}\n\tcontents, err := io.ReadAll(limitedReader)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\n\tif limitedReader.N <= 0 {\n\t\treturn nil, errors.New(\"the read limit is reached\")\n\t}\n\n\treturn contents, nil\n}", "func (balr BlobsAcquireLeaseResponse) LastModified() time.Time {\n\ts := balr.rawResponse.Header.Get(\"Last-Modified\")\n\tif s == \"\" {\n\t\treturn time.Time{}\n\t}\n\tt, err := time.Parse(time.RFC1123, s)\n\tif err != nil {\n\t\tpanic(err)\n\t}\n\treturn t\n}", "func (client *CertificateOrdersClient) getCertificateHandleResponse(resp *http.Response) (CertificateOrdersClientGetCertificateResponse, error) {\n\tresult := CertificateOrdersClientGetCertificateResponse{}\n\tif err := runtime.UnmarshalAsJSON(resp, &result.CertificateResource); err != nil {\n\t\treturn CertificateOrdersClientGetCertificateResponse{}, err\n\t}\n\treturn result, nil\n}", "func (cclr ContainersChangeLeaseResponse) LastModified() time.Time {\n\ts := cclr.rawResponse.Header.Get(\"Last-Modified\")\n\tif s == \"\" {\n\t\treturn time.Time{}\n\t}\n\tt, err := time.Parse(time.RFC1123, s)\n\tif err != nil {\n\t\tpanic(err)\n\t}\n\treturn t\n}" ]
[ "0.6419577", "0.6070379", "0.5902184", "0.58461803", "0.5801817", "0.569155", "0.55661666", "0.5530602", "0.55289286", "0.55128527", "0.55013573", "0.54246235", "0.54109025", "0.5397433", "0.53965473", "0.5387051", "0.537821", "0.53354466", "0.53154707", "0.5310212", "0.53008664", "0.5299097", "0.52954674", "0.52899915", "0.52714735", "0.5243703", "0.5226115", "0.5202097", "0.51934236", "0.51886475", "0.5180633", "0.5154551", "0.5150562", "0.51418394", "0.51361656", "0.5134083", "0.51331615", "0.51317966", "0.51273346", "0.51158214", "0.5112059", "0.51115084", "0.5082699", "0.50793535", "0.5061013", "0.50537795", "0.50532895", "0.5050447", "0.5046269", "0.5039361", "0.50374466", "0.5021596", "0.50118965", "0.50089926", "0.50068235", "0.50024295", "0.4978457", "0.49712655", "0.49667713", "0.49625686", "0.4951775", "0.49515542", "0.49441975", "0.49402228", "0.49248916", "0.49189144", "0.49158087", "0.4907527", "0.49057457", "0.49047494", "0.48949337", "0.4892172", "0.4887332", "0.48812073", "0.4880564", "0.4879531", "0.48722124", "0.48707506", "0.48695257", "0.4868507", "0.48636112", "0.48533377", "0.4849511", "0.48416167", "0.48405707", "0.48398703", "0.48380512", "0.48371986", "0.48341468", "0.48271284", "0.4820974", "0.4818113", "0.48116657", "0.48113814", "0.48037228", "0.48015618", "0.47962013", "0.4791093", "0.4790077", "0.47900328" ]
0.6279517
1
Log is a function required to meet the interface required by statsd
func (s *Server) Log(keyvals ...interface{}) error {
	log.Warning(keyvals...)
	return nil
}
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "func (c *T) Log(args ...interface{})", "func Log(fmt string, args ...interface{}) {}", "func (c *B) Log(args ...interface{})", "func (l eventlog) Log(context interface{}, name string, message string, data ...interface{}) {}", "func Log() {\n\n}", "func (s *SilentLogger) Log(v ...interface{}) {}", "func (ses *Ses) log(enabled bool, v ...interface{}) {\n\tLog := _drv.Cfg().Log\n\tif !Log.IsEnabled(enabled) {\n\t\treturn\n\t}\n\tif len(v) == 0 {\n\t\tLog.Logger.Infof(\"%v %v\", ses.sysName(), callInfo(2))\n\t} else {\n\t\tLog.Logger.Infof(\"%v %v %v\", ses.sysName(), callInfo(2), fmt.Sprint(v...))\n\t}\n}", "func (_m *T) Log(args ...interface{}) {\n\tvar _ca []interface{}\n\t_ca = append(_ca, args...)\n\t_m.Called(_ca...)\n}", "func Log(msg string, err error) {\n\n}", "func (c *T) Logf(format string, args ...interface{})", "func fnLog(ctx Context, doc *JDoc, params []string) interface{} {\n\tstats := ctx.Value(EelTotalStats).(*ServiceStats)\n\tif params == nil || len(params) != 1 {\n\t\tctx.Log().Error(\"error_type\", \"func_log\", \"op\", \"log\", \"cause\", \"wrong_number_of_parameters\", \"params\", params)\n\t\tstats.IncErrors()\n\t\tAddError(ctx, SyntaxError{fmt.Sprintf(\"wrong number of parameters in call to log function\"), \"log\", params})\n\t\treturn nil\n\t}\n\tval := extractStringParam(params[0])\n\tctx.Log().Info(\"action\", \"logfn\", \"value\", val)\n\treturn \"\"\n}", "func (backends Backends) log_stats() {\n\tfor _, backend := range backends {\n\t\tdowntime := backend.downtime\n\t\tif backend.failed {\n\t\t\tdowntime += time.Now().Sub(backend.failedTime)\n\t\t}\n\t\tlog.Printf(\"STATS: <%s> failed=%v (downtime=%v) requests=%d bytes=%d errors=%d last=%s\",\n\t\t\tbackend.address, backend.failed, downtime, backend.requests,\n\t\t\tbackend.transferred, backend.errors, backend.timestamp)\n\t}\n}", "func (env *Env) log(enabled bool, v ...interface{}) {\n\tLog := _drv.Cfg().Log\n\tif !Log.IsEnabled(enabled) {\n\t\treturn\n\t}\n\tif len(v) == 0 {\n\t\tLog.Logger.Infof(\"%v %v\", env.sysName(), callInfo(1))\n\t} else {\n\t\tLog.Logger.Infof(\"%v %v %v\", env.sysName(), callInfo(1), fmt.Sprint(v...))\n\t}\n}", "func Logging(ctx context.Context, request interface{}, info *grpc.UnaryServerInfo, handler grpc.UnaryHandler) (response interface{}, err error) {\r\n\tstart := time.Now()\r\n\r\n\tlog.CtxInfof(ctx, \"calling %s, request=%s\", info.FullMethod, marshal(request))\r\n\tresponse, err = handler(ctx, request)\r\n\tlog.CtxInfof(ctx, \"finished %s, cost=%v, response=%v, err=%v\", info.FullMethod, time.Since(start), response, err)\r\n\r\n\treturn response, err\r\n}", "func logStats() {\n\tif requestsReceived > 0 {\n\t\tavgRequestMs := strconv.FormatFloat(totalRequestTime/float64(requestsReceived), 'f', 3, 64)\n\t\tlogger.Printf(\"%d requests, %d bytes received (avg. 
%sms)\\n\", requestsReceived, bytesReceived, avgRequestMs)\n\t}\n\trequestsReceived = 0\n\tbytesReceived = 0\n\ttotalRequestTime = 0.0\n}", "func (f Freckle) log(msg string, data ...interface{}) {\n\tif f.debug {\n\t\tlog.Printf(\"DEBUG: %s\", fmt.Sprintf(msg, data...))\n\t}\n}", "func writeLog() {\n}", "func (self *State)Log(a any)any{\n self.IncOperations(self.coeff[\"log\"]+self.off[\"log\"])\n return wrap1(a,math.Log)\n}", "func (t t) Log(args ...interface{}) {\n\tfmt.Println(args...)\n}", "func (l *Logger) log(level int64, v string) { l.doMsg(level, v) }", "func (c *Counter) Log(msg string) {\n\tlog.Infof(\"%s : count %d avg %.8g +/- %.4g min %g max %g sum %.9g\",\n\t\tmsg, c.Count, c.Avg(), c.StdDev(), c.Min, c.Max, c.Sum)\n}", "func (c *B) Logf(format string, args ...interface{})", "func (s *severity) Log(details ...Jsonable) {\n\t_, _ = log(s, details...)\n}", "func logItAll(log lg.Log) {\n\tlog.Debug(\"Debug msg\")\n\tlog.Debugf(\"Debugf msg\")\n\tlog.Warn(\"Warn msg\")\n\tlog.Warnf(\"Warnf msg\")\n\tlog.Error(\"Error msg\")\n\tlog.Errorf(\"Errorf msg\")\n\n\tlog.WarnIfError(nil)\n\tlog.WarnIfError(errors.New(\"error: WarnIfError msg\"))\n\n\tlog.WarnIfFuncError(nil)\n\tlog.WarnIfFuncError(func() error { return nil })\n\tlog.WarnIfFuncError(func() error { return errors.New(\"error: WarnIfFuncError msg\") })\n\n\tlog.WarnIfCloseError(nil)\n\tlog.WarnIfCloseError(errCloser{})\n}", "func logItAll(log lg.Log) {\n\tlog.Debug(\"Debug msg\")\n\tlog.Debugf(\"Debugf msg\")\n\tlog.Warn(\"Warn msg\")\n\tlog.Warnf(\"Warnf msg\")\n\tlog.Error(\"Error msg\")\n\tlog.Errorf(\"Errorf msg\")\n\n\tlog.WarnIfError(nil)\n\tlog.WarnIfError(errors.New(\"error: WarnIfError msg\"))\n\n\tlog.WarnIfFuncError(nil)\n\tlog.WarnIfFuncError(func() error { return nil })\n\tlog.WarnIfFuncError(func() error { return errors.New(\"error: WarnIfFuncError msg\") })\n\n\tlog.WarnIfCloseError(nil)\n\tlog.WarnIfCloseError(errCloser{})\n}", "func PicUsefulLog() {\n\n}", "func (s *SilentLogger) Logf(f string, v ...interface{}) {}", "func (kv *KVServer) Log(level LogLevel, a ...interface{}) {\n\tif !kv.killed() && level >= SetLogLevel {\n\t\tpc, _, ln, _ := runtime.Caller(1)\n\t\trp := regexp.MustCompile(\".+\\\\.([a-zA-Z]+)\")\n\t\tfuncName := rp.FindStringSubmatch(runtime.FuncForPC(pc).Name())[1]\n\t\tdata := append([]interface{}{level, \"[ KV\", kv.me, \"]\", \"[\", funcName, ln, \"]\"}, a...)\n\t\tfmt.Println(data...)\n\t}\n}", "func logAccess(c *routing.Context, logFunc access.LogFunc, start time.Time) {\n\trw := c.Response.(*access.LogResponseWriter)\n\telapsed := float64(time.Now().Sub(start).Nanoseconds()) / 1e6\n\trequestLine := fmt.Sprintf(\"%s %s %s\", c.Request.Method, c.Request.URL.Path, c.Request.Proto)\n\tlogFunc(`[%.3fms] %s %d %d`, elapsed, requestLine, rw.Status, rw.BytesWritten)\n}", "func (l *littr) Log(v int, s interface{}) {\n\t// Check logging level\n\tif v > l.v {\n\t\treturn\n\t}\n\tt := time.Now()\n\tfmt.Printf(\"%v%v\\n\", t.Format(\"15:04:05 \"), s)\n}", "func (g *Gate) Log(kvs ...interface{}) {\n\tif !g.Disabled() {\n\t\tg.Logger.Log(kvs...)\n\t}\n}", "func (f *StatsReq) AddLog(pageLog *PageLog) {\n\n}", "func logTo(ctx context.Context, logLevel LogLevel, logKey LogKey, format string, args ...interface{}) {\n\t// Defensive bounds-check for log level. 
All callers of this function should be within this range.\n\tif logLevel < LevelNone || logLevel >= levelCount {\n\t\treturn\n\t}\n\n\tif logLevel == LevelError {\n\t\tSyncGatewayStats.GlobalStats.ResourceUtilizationStats().ErrorCount.Add(1)\n\t} else if logLevel == LevelWarn {\n\t\tSyncGatewayStats.GlobalStats.ResourceUtilizationStats().WarnCount.Add(1)\n\t}\n\n\tshouldLogConsole := consoleLogger.shouldLog(logLevel, logKey)\n\tshouldLogError := errorLogger.shouldLog(logLevel)\n\tshouldLogWarn := warnLogger.shouldLog(logLevel)\n\tshouldLogInfo := infoLogger.shouldLog(logLevel)\n\tshouldLogDebug := debugLogger.shouldLog(logLevel)\n\tshouldLogTrace := traceLogger.shouldLog(logLevel)\n\n\t// exit early if we aren't going to log anything anywhere.\n\tif !(shouldLogConsole || shouldLogError || shouldLogWarn || shouldLogInfo || shouldLogDebug || shouldLogTrace) {\n\t\treturn\n\t}\n\n\t// Prepend timestamp, level, log key.\n\tformat = addPrefixes(format, ctx, logLevel, logKey)\n\n\t// Warn and error logs also append caller name/line numbers.\n\tif logLevel <= LevelWarn && logLevel > LevelNone {\n\t\tformat += \" -- \" + GetCallersName(2, true)\n\t}\n\n\t// Perform log redaction, if necessary.\n\targs = redact(args)\n\n\tif shouldLogConsole {\n\t\tconsoleLogger.logf(color(format, logLevel), args...)\n\t}\n\tif shouldLogError {\n\t\terrorLogger.logf(format, args...)\n\t}\n\tif shouldLogWarn {\n\t\twarnLogger.logf(format, args...)\n\t}\n\tif shouldLogInfo {\n\t\tinfoLogger.logf(format, args...)\n\t}\n\tif shouldLogDebug {\n\t\tdebugLogger.logf(format, args...)\n\t}\n\tif shouldLogTrace {\n\t\ttraceLogger.logf(format, args...)\n\t}\n}", "func logMetric(level int, key string, value string, unit string) {\n\tif level < logLevel {\n\t\tfmt.Fprintf(&buf, \"METRIC|%d|%s|%s|%s\\n\", level, key, value, unit)\n\t}\n}", "func Info(msg string) {\n log.Info(msg)\n}", "func (p *HTTPPool) Log(format string,v ...interface{}) {\n\tlog.Printf(\"[Server %s]%s\",p.self,fmt.Sprintf(format,v...))\n}", "func (self *GameHeart) Logs(msg *HeartMessageType) {\n\n}", "func Log(r Registry, freq time.Duration, l Logger) {\n\tLogScaled(r, freq, time.Nanosecond, l)\n}", "func Log(logger log.Logger, begin time.Time, err error, additionalKVs ...interface{}) {\n\tpc, _, _, _ := runtime.Caller(1)\n\tcaller := strings.Split(runtime.FuncForPC(pc).Name(), \".\")\n\tdefaultKVs := []interface{}{\n\t\t\"method\", caller[len(caller)-2],\n\t\t\"took\", time.Since(begin),\n\t\t\"success\", fmt.Sprint(err == nil),\n\t}\n\n\tif err != nil {\n\t\tdefaultKVs = append(defaultKVs, \"err\")\n\t\tdefaultKVs = append(defaultKVs, err)\n\t\tlevel.Error(logger).Log(defaultKVs...)\n\t} else {\n\t\tlevel.Info(logger).Log(append(defaultKVs, additionalKVs...)...)\n\t}\n}", "func logging() {\n\tfmt.Println(\"Selesai memanggil function\")\n\tfmt.Println(\"\")\n}", "func (s *DBStore) LogStats() {\n\tvar requestCount, statisticCount int\n\ts.db.Table(\"requests\").Count(&requestCount)\n\ts.db.Table(\"statistics\").Count(&statisticCount)\n\tdbLogger.Infof(\"Cached requests: %d. 
Statistic entries count: %d\", requestCount, statisticCount)\n}", "func (lgr) Log(v ...interface{}) {\n\tlog.Println(v...)\n}", "func ExampleLog() {\n\tsetup()\n\tlog.Log().Msg(\"hello world\")\n\n\t// Output: {\"time\":1199811905,\"message\":\"hello world\"}\n}", "func logGeneral(v reflect.Value, prefix string) {\n\tlogGeneralWithLogger(v, prefix, glog.Infof)\n}", "func (lm *loggingMiddleware) logging(begin time.Time, method string, err error) {\n\t_ = lm.logger.Log(\"method\", method, \"took\", time.Since(begin), \"err\", err)\n}", "func (data *AccountData) LoggingAddDetails() {\n\n}", "func (l eventlog) Log(context interface{}, name string, message string, data ...interface{}) {\n\tlog.Dev(context, name, message, data...)\n}", "func (l eventlog) Log(context interface{}, name string, message string, data ...interface{}) {\n\tlog.Dev(context, name, message, data...)\n}", "func (d *dispatcher) log(fmt string, v ...interface{}) {\n\tif d.logger != nil {\n\t\td.logger.Printf(fmt, v...)\n\t}\n}", "func Trace(v ...interface{}){\n log.Trace(v)\n}", "func Statsf(category string, format string, params ...interface{}) {\n\tDefaultLogger.Statsf(category, format, params...)\n}", "func (rf *Raft) Log(level LogLevel, a ...interface{}) {\n\tif !rf.killed() && level >= SetLogLevel {\n\t\tpc, _, ln, _ := runtime.Caller(1)\n\t\trp := regexp.MustCompile(\".+\\\\.([a-zA-Z]+)\")\n\t\tfuncName := rp.FindStringSubmatch(runtime.FuncForPC(pc).Name())[1]\n\t\tst := \"F\"\n\t\tif rf.state == Leader {\n\t\t\tst = \"L\"\n\t\t} else if rf.state == Candidate {\n\t\t\tst = \"C\"\n\t\t}\n\t\tdata := append([]interface{}{level, \"[ Server\", rf.me, \"- term\", rf.currentTerm, st, \"]\", \"[\", funcName, ln, \"]\"}, a...)\n\t\tfmt.Println(data...)\n\t}\n}", "func tsAltLog(fun interface{}, e *error) {\n\tif *e != nil {\n\t\ttsAltLogger.Printf(\"%s: %s\\n\", function.Name(fun), *e)\n\t}\n}", "func Log(a ...interface{}) {\n\tlogrus.Info(strings.TrimSpace(spew.Sdump(a...)))\n}", "func (o *os) log(l Level, m string) error {\n\t// discard if we're not at the right level\n\tif l < o.opts.Level {\n\t\treturn nil\n\t}\n\n\te := &Event{\n\t\tTimestamp: time.Now().Format(\"2006-1-2 15:04:05\"),\n\t\tLevel: l,\n\t\tFields: o.opts.Fields,\n\t\tMessage: m,\n\t}\n\n\t// write directly.\n\tif err := Send(e); err != nil {\n\t\treturn err\n\t}\n\n\treturn nil\n}", "func logEmit(final bool) {\n\terr := emit(final)\n\tif err != nil {\n\t\tlog.Printf(\"[ERROR] Sending telemetry: %v\", err)\n\t}\n}", "func (l *Logger) Trace(message string, args ...interface{}) { l.Log(Trace, message, args...) 
}", "func (nl *NullLogger) LogInfo(m ...interface{}) {\n}", "func log(h http.HandlerFunc)http.HandlerFunc{\n\treturn func(w http.ResponseWriter, r *http.Request) {\n\t\tlogg.Println(\"http handler: \",runtime.FuncForPC(reflect.ValueOf(h).Pointer()).Name())\n\t\th(w,r)\n\t}\n}", "func (fugs *FileUploadGlobalState) log(a string, args ...interface{}) {\n\tLogMsg(\"file_upload\", a, args...)\n}", "func (l List) Log(args ...interface{}) {\n\tlogHelper(2, l, args...)\n}", "func Log(t testing.TestingT, args ...interface{}) {\n\tif tt, ok := t.(helper); ok {\n\t\ttt.Helper()\n\t}\n\n\tDoLog(t, 2, os.Stdout, args...)\n}", "func (b *BandwidthCollector) LogRecvMessage(int64) {}", "func (ses *Ses) logF(enabled bool, format string, v ...interface{}) {\n\tLog := _drv.Cfg().Log\n\tif !Log.IsEnabled(enabled) {\n\t\treturn\n\t}\n\tif len(v) == 0 {\n\t\tLog.Logger.Infof(\"%v %v\", ses.sysName(), callInfo(2))\n\t} else {\n\t\tLog.Logger.Infof(\"%v %v %v\", ses.sysName(), callInfo(2), fmt.Sprintf(format, v...))\n\t}\n}", "func (vm *VM) log(v ...Data) Tuple {\n\tif vm.Finalized {\n\t\tpanic(ErrFinalized)\n\t}\n\tt := Tuple(v)\n\tvm.chargeCreate(t)\n\tvm.Log = append(vm.Log, t)\n\tvm.runHooks(vm.onLog)\n\treturn t\n}", "func (*traceLogger) Log(msg string) { log.Errorf(msg) }", "func record(ctx context.Context, ms ...stats.Measurement) {\n\tstats.Record(ctx, ms...)\n}", "func logFn(l log.Level, msg string, a ...interface{}) {\n\tlog.Logf(logger(), l, msg, a...)\n}", "func TimeLog(nextHandler http.Handler) func(http.ResponseWriter, *http.Request) {\n\treturn func(w http.ResponseWriter, r *http.Request) {\n\t\t// log.Debugf(\"Request received : %v\", r)\n\t\tstart := time.Now()\n\n\t\t// make the call\n\t\tv := capturewriter.CaptureWriter{ResponseWriter: w, StatusCode: 0}\n\t\tctx := context.Background()\n\t\tnextHandler.ServeHTTP(&v, r.WithContext(ctx))\n\n\t\t// Stop timer\n\t\tend := time.Now()\n\n\t\tgo func() {\n\t\t\tlatency := end.Sub(start)\n\t\t\treq++\n\t\t\tavgLatency = avgLatency + ((int64(latency) - avgLatency) / req)\n\t\t\t// log.Debugf(\"Request handled successfully: %v\", v.GetStatusCode())\n\t\t\tvar statusCode = v.GetStatusCode()\n\n\t\t\tpath := r.URL.Path\n\t\t\thost := r.Host\n\t\t\treferer := r.Header.Get(\"Referer\")\n\t\t\tclientIP := r.RemoteAddr\n\t\t\tmethod := r.Method\n\n\t\t\tlog.Infow(fmt.Sprintf(\"|%d| %10v %s\", statusCode, time.Duration(latency), path),\n\t\t\t\t\"statusCode\", statusCode,\n\t\t\t\t\"request\", req,\n\t\t\t\t\"latency\", time.Duration(latency),\n\t\t\t\t\"avgLatency\", time.Duration(avgLatency),\n\t\t\t\t\"ipPort\", clientIP,\n\t\t\t\t\"method\", method,\n\t\t\t\t\"host\", host,\n\t\t\t\t\"path\", path,\n\t\t\t\t\"referer\", referer,\n\t\t\t)\n\t\t}()\n\n\t}\n}", "func (m *ContextMeta) Log() {\n\tfields := []zapcore.Field{\n\t\tzap.String(\"server_name\", m.ServerName.String()),\n\t\tzap.Bool(\"acme\", m.ACME.Load()),\n\t\tzap.Bool(\"fixed\", m.ACME.Load()),\n\t\tzap.String(\"protocol\", Protocol(m.Protocol.Load()).String()),\n\t\tzap.Duration(\"duration\", m.End.Sub(m.Start)),\n\t}\n\tif m.NoMatch.Load() {\n\t\tzlg.Info(\"FAILED\", fields...)\n\t\treturn\n\t}\n\tzlg.Info(\"PASS\", fields...)\n}", "func VoidLogger(format string, args ...interface{}) {\n\n}", "func Logger(c context.Context) loggers.Advanced", "func (uplink *SenseUplink) LogMetric(invocation *enigma.Invocation, metrics *enigma.InvocationMetrics, result *enigma.InvocationResponse) {\n\trequestID := -1\n\tif result != nil {\n\t\trequestID = result.RequestID\n\t}\n\n\tvar method string\n\tvar params 
string\n\tif invocation != nil {\n\t\tmethod = invocation.Method\n\t\tif invocation.RemoteObject != nil && strings.TrimSpace(invocation.RemoteObject.GenericId) != \"\" {\n\t\t\tbuf := helpers.NewBuffer()\n\t\t\tbuf.WriteString(method)\n\t\t\tbuf.WriteString(\" [\")\n\t\t\tbuf.WriteString(invocation.RemoteObject.GenericId)\n\t\t\tbuf.WriteString(\"]\")\n\t\t\tif buf.Error == nil {\n\t\t\t\tmethod = buf.String()\n\t\t\t}\n\t\t}\n\t\tif invocation.Params != nil {\n\t\t\tif jB, err := json.Marshal(invocation.Params); err == nil && jB != nil {\n\t\t\t\tparams = string(jB)\n\t\t\t}\n\t\t}\n\t}\n\n\tif err := uplink.trafficMetrics.Update(metrics.SocketWriteTimestamp, metrics.SocketReadTimestamp,\n\t\tint64(metrics.RequestMessageSize), int64(metrics.ResponseMessageSize)); err != nil {\n\t\tuplink.logEntry.LogError(err)\n\t}\n\n\tif uplink.Traffic != nil {\n\t\tuplink.logEntry.LogTrafficMetric(metrics.SocketReadTimestamp.Sub(metrics.SocketWriteTimestamp).Nanoseconds(),\n\t\t\tuint64(metrics.RequestMessageSize), uint64(metrics.ResponseMessageSize), requestID, method, params, \"WS\", \"\")\n\t}\n\n\treqStall := metrics.SocketWriteTimestamp.Sub(metrics.InvocationRequestTimestamp)\n\tif reqStall > constant.MaxStallTime {\n\t\tuplink.logEntry.LogDetail(logger.WarningLevel, \"WS request stall\", strconv.FormatInt(reqStall.Nanoseconds(), 10))\n\t}\n\n\trespStall := metrics.InvocationResponseTimestamp.Sub(metrics.SocketReadTimestamp)\n\tif !metrics.InvocationRequestTimestamp.IsZero() && !metrics.SocketReadTimestamp.IsZero() && respStall > constant.MaxStallTime {\n\t\tuplink.logEntry.LogDetail(logger.WarningLevel, \"WS response stall\", strconv.FormatInt(respStall.Nanoseconds(), 10))\n\t}\n\n}", "func Log(s string, v ...interface{}) {\n\tif debug {\n\t\tlog.Debug().Msgf(s, v...)\n\t}\n}", "func (_m *T) Logf(format string, args ...interface{}) {\n\tvar _ca []interface{}\n\t_ca = append(_ca, format)\n\t_ca = append(_ca, args...)\n\t_m.Called(_ca...)\n}", "func doLog(level string, format string, v ...interface{}) {\n\tmsg := formatLog(level, format, v...)\n\tdoRawLog(msg)\n}", "func doLog(ctx context.Context) {\n\t_, file, line, _ := runtime.Caller(0)\n\tlogPos.File, logPos.Line = file, line+2\n\tdlog.Infof(ctx, \"grep for this\")\n}", "func (p *plugin) cmdLog(w irc.ResponseWriter, r *irc.Request, params cmd.ParamList) {\n\tif params.Len() > 0 {\n\t\tp.profile.SetLogging(params.Bool(0))\n\t}\n\n\tif p.profile.Logging() {\n\t\tproto.PrivMsg(w, r.SenderName, TextLogEnabled)\n\t} else {\n\t\tproto.PrivMsg(w, r.SenderName, TextLogDisabled)\n\t}\n}", "func (ptr *KeyholeInfo) Log(s string) {\n\tptr.Logs = append(ptr.Logs, fmt.Sprintf(`%v %v`, time.Now().Format(time.RFC3339), s))\n}", "func Log(t *testing.T, args ...interface{}) {\n\tDoLog(t, 2, os.Stdout, args...)\n}", "func log(args ...Any) {\n\tfmt.Println(args...)\n}", "func (s *BasejossListener) EnterFuncLog(ctx *FuncLogContext) {}", "func (h funcHandler) Log(r *Record) error {\n\treturn h(r)\n}", "func (s *Service) OpLog(c context.Context, cid, operator int64, typ int32, dmids []int64, subject, originVal, currentVal, remark string, source oplog.Source, operatorType oplog.OperatorType) (err error) {\n\tinfoLog := new(oplog.Infoc)\n\tinfoLog.Oid = cid\n\tinfoLog.Type = typ\n\tinfoLog.DMIds = dmids\n\tinfoLog.Subject = subject\n\tinfoLog.OriginVal = originVal\n\tinfoLog.CurrentVal = currentVal\n\tinfoLog.OperationTime = strconv.FormatInt(time.Now().Unix(), 10)\n\tinfoLog.Source = source\n\tinfoLog.OperatorType = operatorType\n\tinfoLog.Operator = 
operator\n\tinfoLog.Remark = remark\n\tselect {\n\tcase s.opsLogCh <- infoLog:\n\tdefault:\n\t\terr = fmt.Errorf(\"opsLogCh full\")\n\t\tlog.Error(\"opsLogCh full (%v)\", infoLog)\n\t}\n\treturn\n}", "func logging(next routing.Handler, logger balabol.Logger) routing.Handler {\n\treturn func(ctx *routing.Context) error {\n\t\tt1 := time.Now()\n\t\terr := next(ctx)\n\t\telapsed := time.Since(t1)\n\t\tlogger.Printf(`[%15s] %s \"%s\" %d %s`, ctx.RemoteIP(), ctx.Method(), ctx.RequestURI(), ctx.Response.StatusCode(), elapsed)\n\n\t\treturn err\n\t}\n}", "func (m *Monitor) logf(format string, v ...interface{}) {\n\tm.cctx.ll.Printf(m.iface+\": \"+format, v...)\n}", "func (l *GrpcLog) Info(args ...interface{}) {\n\t// l.SugaredLogger.Info(args...)\n}", "func nflog(format string, args ...any) {\n\tif enableLogging && log.IsLogging(log.Debug) {\n\t\tlog.Debugf(\"netfilter: \"+format, args...)\n\t}\n}", "func Log(ctx context.Context, keyvals ...interface{}) error {\n\treturn ReqLogger(ctx).Log(keyvals...)\n}", "func logNormal(stat status, t time.Time, ctx ...interface{}) {\n\tout := format(stat, t, separateWithSpaces(ctx...))\n\tnormalLogger.Println(out)\n}", "func DoLog(t testing.TestingT, callDepth int, writer io.Writer, args ...interface{}) {\n\tdate := time.Now()\n\tprefix := fmt.Sprintf(\"%s %s %s:\", t.Name(), date.Format(time.RFC3339), CallerPrefix(callDepth+1))\n\tallArgs := append([]interface{}{prefix}, args...)\n\tfmt.Fprintln(writer, allArgs...)\n}", "func (lc mockNotifyLogger) Trace(msg string, args ...interface{}) {\n}", "func DoLog(t *testing.T, callDepth int, writer io.Writer, args ...interface{}) {\n\tdate := time.Now()\n\tprefix := fmt.Sprintf(\"%s %s %s:\", t.Name(), date.Format(time.RFC3339), CallerPrefix(callDepth+1))\n\tallArgs := append([]interface{}{prefix}, args...)\n\tfmt.Fprintln(writer, allArgs...)\n}", "func (b *BandwidthCollector) LogSentMessage(int64) {}", "func (p PollerJob)Log(args...interface{}){\n\tlog.Println(args...)\n}", "func (d *DummyLogger) Info(format string) {}", "func (a *Api) logAudit(req *http.Request, format string, args ...interface{}) {\n\tvar prefix string\n\n\t// Get token from request\n\ttd := a.auth.Authenticate(req)\n\tisServer := td != nil && td.IsServer\n\n\tif req.RemoteAddr != \"\" {\n\t\tprefix = fmt.Sprintf(\"remoteAddr{%s}, \", req.RemoteAddr)\n\t}\n\n\ttraceSession := req.Header.Get(TP_TRACE_SESSION)\n\tif traceSession != \"\" {\n\t\tprefix += fmt.Sprintf(\"trace{%s}, \", traceSession)\n\t}\n\n\tprefix += fmt.Sprintf(\"isServer{%t}, \", isServer)\n\n\ts := fmt.Sprintf(format, args...)\n\ta.logger.Printf(\"%s%s\", prefix, s)\n}", "func Notice(args ...interface{}) {\n LoggerOf(default_id).Notice(args...)\n}", "func UnaryLogging(ctx context.Context, req interface{}, info *grpc.UnaryServerInfo, handler grpc.UnaryHandler) (resp interface{}, err error) {\n\tstart := time.Now()\n\t//TODO get request id\n\n\t//What info can we and should we log here\n\tLogger.InfoC(\n\t\tctx,\n\t\t\"\",\n\t\ttag.String(\"FullMethod\", info.FullMethod),\n\t\ttag.String(\"t\", time.Now().String()))\n\n\tresp, err = handler(ctx, req)\n\n\tLogger.InfoC(\n\t\tctx,\n\t\t\"\",\n\t\ttag.String(\"t\", time.Now().String()),\n\t\ttag.String(\"duration\", time.Since(start).String()))\n\n\treturn resp, err\n}", "func (r *reporter) Log(args ...interface{}) {\n\tr.logs.log(fmt.Sprint(args...))\n}" ]
[ "0.7238076", "0.6867503", "0.6801787", "0.67710364", "0.6668824", "0.6576334", "0.6487447", "0.64463216", "0.64417726", "0.63560534", "0.63384145", "0.6316809", "0.631134", "0.6299842", "0.6289493", "0.6259608", "0.6240145", "0.62383914", "0.6229146", "0.62253636", "0.62004167", "0.61976236", "0.6195657", "0.6182333", "0.6182333", "0.6180631", "0.6176577", "0.6176173", "0.6160692", "0.61478233", "0.61304295", "0.61215085", "0.6106173", "0.6094008", "0.6087742", "0.6046314", "0.60456455", "0.60344136", "0.6028022", "0.6025757", "0.6002688", "0.5996581", "0.59948814", "0.59947515", "0.5979281", "0.5969011", "0.5965135", "0.5965135", "0.59630793", "0.5962422", "0.594845", "0.59438366", "0.59368557", "0.5918917", "0.59129846", "0.5908038", "0.59018034", "0.5895702", "0.58844376", "0.5882022", "0.58762187", "0.5857988", "0.58572656", "0.5855444", "0.58491564", "0.58463585", "0.58413583", "0.5840833", "0.58387214", "0.58260846", "0.58151144", "0.5812903", "0.58117515", "0.5808329", "0.5800312", "0.5799565", "0.57985383", "0.5794454", "0.5790152", "0.5787545", "0.57846934", "0.5784183", "0.5767532", "0.57616025", "0.57590884", "0.57487255", "0.5743617", "0.5743098", "0.57424146", "0.573812", "0.5730168", "0.5724085", "0.5721162", "0.57178", "0.57162887", "0.5716223", "0.57122225", "0.57061887", "0.5704967", "0.57047427" ]
0.58768594
60
Validate checks the field values on UpsertEventRequest with the rules defined in the proto definition for this message. If any rules are violated, an error is returned.
func (m *UpsertEventRequest) Validate() error {
	if m == nil {
		return nil
	}

	// no validation rules for Id

	if utf8.RuneCountInString(m.GetName()) < 1 {
		return UpsertEventRequestValidationError{
			field:  "Name",
			reason: "value length must be at least 1 runes",
		}
	}

	if utf8.RuneCountInString(m.GetParticipants()) < 4 {
		return UpsertEventRequestValidationError{
			field:  "Participants",
			reason: "value length must be at least 4 runes",
		}
	}

	for idx, item := range m.GetSections() {
		_, _ = idx, item

		if v, ok := interface{}(item).(interface{ Validate() error }); ok {
			if err := v.Validate(); err != nil {
				return UpsertEventRequestValidationError{
					field:  fmt.Sprintf("Sections[%v]", idx),
					reason: "embedded message failed validation",
					cause:  err,
				}
			}
		}
	}

	return nil
}
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "func (e UpsertEventRequestValidationError) Cause() error { return e.cause }", "func (m *UpsertEventResponse) Validate() error {\n\tif m == nil {\n\t\treturn nil\n\t}\n\n\t// no validation rules for Id\n\n\treturn nil\n}", "func (e UpsertEventResponseValidationError) Cause() error { return e.cause }", "func (m *UpsertEventRequest_Section) Validate() error {\n\tif m == nil {\n\t\treturn nil\n\t}\n\n\tif utf8.RuneCountInString(m.GetName()) < 1 {\n\t\treturn UpsertEventRequest_SectionValidationError{\n\t\t\tfield: \"Name\",\n\t\t\treason: \"value length must be at least 1 runes\",\n\t\t}\n\t}\n\n\tif utf8.RuneCountInString(m.GetDescription()) < 1 {\n\t\treturn UpsertEventRequest_SectionValidationError{\n\t\t\tfield: \"Description\",\n\t\t\treason: \"value length must be at least 1 runes\",\n\t\t}\n\t}\n\n\t// no validation rules for Position\n\n\tfor idx, item := range m.GetQuestions() {\n\t\t_, _ = idx, item\n\n\t\tif v, ok := interface{}(item).(interface{ Validate() error }); ok {\n\t\t\tif err := v.Validate(); err != nil {\n\t\t\t\treturn UpsertEventRequest_SectionValidationError{\n\t\t\t\t\tfield: fmt.Sprintf(\"Questions[%v]\", idx),\n\t\t\t\t\treason: \"embedded message failed validation\",\n\t\t\t\t\tcause: err,\n\t\t\t\t}\n\t\t\t}\n\t\t}\n\n\t}\n\n\treturn nil\n}", "func NewUpsertRequest(space string) *UpsertRequest {\n\treq := new(UpsertRequest)\n\treq.initImpl(\"crud.upsert\")\n\treq.setSpace(space)\n\treq.tuple = Tuple{}\n\treq.operations = []Operation{}\n\treq.opts = UpsertOpts{}\n\treturn req\n}", "func (s *TestBase) UpsertRequestCancelState(updatedInfo *p.WorkflowExecutionInfo, updatedStats *p.ExecutionStats, updatedVersionHistories *p.VersionHistories,\n\tcondition int64, upsertCancelInfos []*p.RequestCancelInfo) error {\n\treturn s.UpdateWorkflowExecutionWithRangeID(updatedInfo, updatedStats, updatedVersionHistories, nil, nil,\n\t\ts.ShardInfo.RangeID, condition, nil, nil, nil,\n\t\tnil, nil, nil, nil, upsertCancelInfos, nil,\n\t\tnil, nil, nil, \"\")\n}", "func (uom *UpdateOneModel) SetUpsert(upsert bool) *UpdateOneModel {\n\tuom.Upsert = &upsert\n\treturn uom\n}", "func (payload *putEventPayload) Validate() (err error) {\n\tif payload.Etype == nil {\n\t\terr = goa.MergeErrors(err, goa.MissingAttributeError(`raw`, \"etype\"))\n\t}\n\tif payload.Action == nil {\n\t\terr = goa.MergeErrors(err, goa.MissingAttributeError(`raw`, \"action\"))\n\t}\n\tif payload.From == nil {\n\t\terr = goa.MergeErrors(err, goa.MissingAttributeError(`raw`, \"from\"))\n\t}\n\treturn\n}", "func (o *UpdateEventParams) WriteToRequest(r runtime.ClientRequest, reg strfmt.Registry) error {\n\n\tif err := r.SetTimeout(o.timeout); err != nil {\n\t\treturn err\n\t}\n\tvar res []error\n\n\tif o.Body != nil {\n\t\tif err := r.SetBodyParam(o.Body); err != nil {\n\t\t\treturn err\n\t\t}\n\t}\n\n\t// path param eventId\n\tif err := r.SetPathParam(\"eventId\", o.EventID); err != nil {\n\t\treturn err\n\t}\n\n\t// path param koronaAccountId\n\tif err := r.SetPathParam(\"koronaAccountId\", o.KoronaAccountID); err != nil {\n\t\treturn err\n\t}\n\n\tif len(res) > 0 {\n\t\treturn errors.CompositeValidationError(res...)\n\t}\n\treturn nil\n}", "func (payload *PutEventPayload) Validate() (err error) {\n\tif payload.Etype == \"\" {\n\t\terr = goa.MergeErrors(err, goa.MissingAttributeError(`raw`, \"etype\"))\n\t}\n\tif payload.Action == \"\" {\n\t\terr = goa.MergeErrors(err, goa.MissingAttributeError(`raw`, \"action\"))\n\t}\n\tif payload.From == \"\" {\n\t\terr = goa.MergeErrors(err, goa.MissingAttributeError(`raw`, 
\"from\"))\n\t}\n\treturn\n}", "func ConvertUserEventRequestToUsageEvent(req CreateUserEventRequest) (*usageeventsv1.UsageEventOneOf, error) {\n\tswitch req.Event {\n\tcase bannerClickEvent:\n\t\treturn &usageeventsv1.UsageEventOneOf{Event: &usageeventsv1.UsageEventOneOf_UiBannerClick{\n\t\t\t\tUiBannerClick: &usageeventsv1.UIBannerClickEvent{\n\t\t\t\t\tAlert: req.Alert,\n\t\t\t\t},\n\t\t\t}},\n\t\t\tnil\n\n\tcase addFirstResourceClickEvent:\n\t\treturn &usageeventsv1.UsageEventOneOf{Event: &usageeventsv1.UsageEventOneOf_UiOnboardAddFirstResourceClick{\n\t\t\t\tUiOnboardAddFirstResourceClick: &usageeventsv1.UIOnboardAddFirstResourceClickEvent{},\n\t\t\t}},\n\t\t\tnil\n\n\tcase addFirstResourceLaterClickEvent:\n\t\treturn &usageeventsv1.UsageEventOneOf{Event: &usageeventsv1.UsageEventOneOf_UiOnboardAddFirstResourceLaterClick{\n\t\t\t\tUiOnboardAddFirstResourceLaterClick: &usageeventsv1.UIOnboardAddFirstResourceLaterClickEvent{},\n\t\t\t}},\n\t\t\tnil\n\n\tcase uiIntegrationEnrollStartEvent,\n\t\tuiIntegrationEnrollCompleteEvent:\n\n\t\tvar event IntegrationEnrollEventData\n\t\tif err := json.Unmarshal([]byte(*req.EventData), &event); err != nil {\n\t\t\treturn nil, trace.BadParameter(\"eventData is invalid: %v\", err)\n\t\t}\n\n\t\tkindEnum, ok := usageeventsv1.IntegrationEnrollKind_value[event.Kind]\n\t\tif !ok {\n\t\t\treturn nil, trace.BadParameter(\"invalid integration enroll kind %s\", event.Kind)\n\t\t}\n\n\t\tswitch req.Event {\n\t\tcase uiIntegrationEnrollStartEvent:\n\t\t\treturn &usageeventsv1.UsageEventOneOf{Event: &usageeventsv1.UsageEventOneOf_UiIntegrationEnrollStartEvent{\n\t\t\t\tUiIntegrationEnrollStartEvent: &usageeventsv1.UIIntegrationEnrollStartEvent{\n\t\t\t\t\tMetadata: &usageeventsv1.IntegrationEnrollMetadata{\n\t\t\t\t\t\tId: event.ID,\n\t\t\t\t\t\tKind: usageeventsv1.IntegrationEnrollKind(kindEnum),\n\t\t\t\t\t},\n\t\t\t\t},\n\t\t\t}}, nil\n\t\tcase uiIntegrationEnrollCompleteEvent:\n\t\t\treturn &usageeventsv1.UsageEventOneOf{Event: &usageeventsv1.UsageEventOneOf_UiIntegrationEnrollCompleteEvent{\n\t\t\t\tUiIntegrationEnrollCompleteEvent: &usageeventsv1.UIIntegrationEnrollCompleteEvent{\n\t\t\t\t\tMetadata: &usageeventsv1.IntegrationEnrollMetadata{\n\t\t\t\t\t\tId: event.ID,\n\t\t\t\t\t\tKind: usageeventsv1.IntegrationEnrollKind(kindEnum),\n\t\t\t\t\t},\n\t\t\t\t},\n\t\t\t}}, nil\n\t\t}\n\n\tcase uiDiscoverStartedEvent,\n\t\tuiDiscoverResourceSelectionEvent,\n\t\tuiDiscoverIntegrationAWSOIDCConnectEvent,\n\t\tuiDiscoverDatabaseRDSEnrollEvent,\n\t\tuiDiscoverDeployServiceEvent,\n\t\tuiDiscoverDatabaseRegisterEvent,\n\t\tuiDiscoverDatabaseConfigureMTLSEvent,\n\t\tuiDiscoverDatabaseConfigureIAMPolicyEvent,\n\t\tuiDiscoverDesktopActiveDirectoryToolsInstallEvent,\n\t\tuiDiscoverDesktopActiveDirectoryConfigureEvent,\n\t\tuiDiscoverAutoDiscoveredResourcesEvent,\n\t\tuiDiscoverPrincipalsConfigureEvent,\n\t\tuiDiscoverTestConnectionEvent,\n\t\tuiDiscoverCompletedEvent:\n\n\t\tvar discoverEvent DiscoverEventData\n\t\tif err := json.Unmarshal([]byte(*req.EventData), &discoverEvent); err != nil {\n\t\t\treturn nil, trace.BadParameter(\"eventData is invalid: %v\", err)\n\t\t}\n\n\t\tevent, err := discoverEvent.ToUsageEvent(req.Event)\n\t\tif err != nil {\n\t\t\treturn nil, trace.BadParameter(\"failed to convert eventData: %v\", err)\n\t\t}\n\t\treturn event, nil\n\n\tcase createNewRoleClickEvent:\n\t\treturn &usageeventsv1.UsageEventOneOf{Event: &usageeventsv1.UsageEventOneOf_UiCreateNewRoleClick{\n\t\t\t\tUiCreateNewRoleClick: 
&usageeventsv1.UICreateNewRoleClickEvent{},\n\t\t\t}},\n\t\t\tnil\n\n\tcase createNewRoleSaveClickEvent:\n\t\treturn &usageeventsv1.UsageEventOneOf{Event: &usageeventsv1.UsageEventOneOf_UiCreateNewRoleSaveClick{\n\t\t\t\tUiCreateNewRoleSaveClick: &usageeventsv1.UICreateNewRoleSaveClickEvent{},\n\t\t\t}},\n\t\t\tnil\n\n\tcase createNewRoleCancelClickEvent:\n\t\treturn &usageeventsv1.UsageEventOneOf{Event: &usageeventsv1.UsageEventOneOf_UiCreateNewRoleCancelClick{\n\t\t\t\tUiCreateNewRoleCancelClick: &usageeventsv1.UICreateNewRoleCancelClickEvent{},\n\t\t\t}},\n\t\t\tnil\n\n\tcase createNewRoleViewDocumentationClickEvent:\n\t\treturn &usageeventsv1.UsageEventOneOf{Event: &usageeventsv1.UsageEventOneOf_UiCreateNewRoleViewDocumentationClick{\n\t\t\t\tUiCreateNewRoleViewDocumentationClick: &usageeventsv1.UICreateNewRoleViewDocumentationClickEvent{},\n\t\t\t}},\n\t\t\tnil\n\n\tcase uiCallToActionClickEvent:\n\t\tvar cta int32\n\t\tif err := json.Unmarshal([]byte(*req.EventData), &cta); err != nil {\n\t\t\treturn nil, trace.BadParameter(\"eventData is invalid: %v\", err)\n\t\t}\n\n\t\treturn &usageeventsv1.UsageEventOneOf{Event: &usageeventsv1.UsageEventOneOf_UiCallToActionClickEvent{\n\t\t\t\tUiCallToActionClickEvent: &usageeventsv1.UICallToActionClickEvent{\n\t\t\t\t\tCta: usageeventsv1.CTA(cta),\n\t\t\t\t}}},\n\t\t\tnil\n\n\tcase questionnaireSubmitEvent:\n\t\treturn &usageeventsv1.UsageEventOneOf{Event: &usageeventsv1.UsageEventOneOf_UiOnboardQuestionnaireSubmit{\n\t\t\t\tUiOnboardQuestionnaireSubmit: &usageeventsv1.UIOnboardQuestionnaireSubmitEvent{},\n\t\t\t}},\n\t\t\tnil\n\n\tcase featureRecommendationEvent:\n\t\tevent := struct {\n\t\t\tFeature int32 `json:\"feature\"`\n\t\t\tFeatureRecommendationStatus int32 `json:\"featureRecommendationStatus\"`\n\t\t}{}\n\n\t\tif err := json.Unmarshal([]byte(*req.EventData), &event); err != nil {\n\t\t\treturn nil, trace.BadParameter(\"eventData is invalid: %v\", err)\n\t\t}\n\n\t\treturn &usageeventsv1.UsageEventOneOf{Event: &usageeventsv1.UsageEventOneOf_FeatureRecommendationEvent{\n\t\t\t\tFeatureRecommendationEvent: &usageeventsv1.FeatureRecommendationEvent{\n\t\t\t\t\tFeature: usageeventsv1.Feature(event.Feature),\n\t\t\t\t\tFeatureRecommendationStatus: usageeventsv1.FeatureRecommendationStatus(event.FeatureRecommendationStatus),\n\t\t\t\t},\n\t\t\t}},\n\t\t\tnil\n\t}\n\n\treturn nil, trace.BadParameter(\"invalid event %s\", req.Event)\n}", "func (m *UpsertEventRequest_Option) Validate() error {\n\tif m == nil {\n\t\treturn nil\n\t}\n\n\tif utf8.RuneCountInString(m.GetContent()) < 1 {\n\t\treturn UpsertEventRequest_OptionValidationError{\n\t\t\tfield: \"Content\",\n\t\t\treason: \"value length must be at least 1 runes\",\n\t\t}\n\t}\n\n\treturn nil\n}", "func (e UpsertEventRequestValidationError) Reason() string { return e.reason }", "func (db *DB) Upsert(form url.Values, dataFields interface{}) *ResponseMessage {\n\n\t//bucketName := \"master_erp\"\n\t//docID := \"12121\"\n\tbytes := db.ProcessData(form, dataFields)\n\t//db.ProcessData(form, intrfc)\n\t//json.Unmarshal(bytes, intrfc) //***s\n\n\t//fmt.Println(\"DATA>>>>\", intrfc)\n\n\t//bytes, _ := json.Marshal(intrfc)\n\t//fmt.Println(\"intrfcBytes:\", string(bytes))\n\n\tdocID := form.Get(\"aid\") //docid=aid\n\tbucketName := form.Get(\"bucket\")\n\n\tinsertQuery := upsertQueryBuilder(bucketName, docID, string(bytes))\n\t//insertQuery := insertQueryBuilder(bucketName, docID, intrfc)\n\n\t//fmt.Println(insertQuery)\n\tnqlInsertStatement := sqlStatementJSON(insertQuery)\n\tresponseMessage := 
db.queryRequest(nqlInsertStatement)\n\n\treturn responseMessage\n}", "func (ut *eventPayload) Validate() (err error) {\n\tif ut.SportID == nil {\n\t\terr = goa.MergeErrors(err, goa.MissingAttributeError(`request`, \"sportId\"))\n\t}\n\tif ut.EventID == nil {\n\t\terr = goa.MergeErrors(err, goa.MissingAttributeError(`request`, \"eventId\"))\n\t}\n\tif ut.Name == nil {\n\t\terr = goa.MergeErrors(err, goa.MissingAttributeError(`request`, \"name\"))\n\t}\n\tif ut.SubTitle == nil {\n\t\terr = goa.MergeErrors(err, goa.MissingAttributeError(`request`, \"subTitle\"))\n\t}\n\tif ut.StartDtTm == nil {\n\t\terr = goa.MergeErrors(err, goa.MissingAttributeError(`request`, \"startDtTm\"))\n\t}\n\tif ut.EndDtTm == nil {\n\t\terr = goa.MergeErrors(err, goa.MissingAttributeError(`request`, \"endDtTm\"))\n\t}\n\tif ut.LocationID == nil {\n\t\terr = goa.MergeErrors(err, goa.MissingAttributeError(`request`, \"locationId\"))\n\t}\n\tif ut.TeamAdvanceMethod == nil {\n\t\terr = goa.MergeErrors(err, goa.MissingAttributeError(`request`, \"teamAdvanceMethod\"))\n\t}\n\treturn\n}", "func (uo *UpdateOptions) SetUpsert(b bool) *UpdateOptions {\n\tuo.Upsert = &b\n\treturn uo\n}", "func (mr *MockVirtualMeshCertificateSigningRequestWriterMockRecorder) UpsertVirtualMeshCertificateSigningRequest(ctx, obj interface{}, transitionFuncs ...interface{}) *gomock.Call {\n\tmr.mock.ctrl.T.Helper()\n\tvarargs := append([]interface{}{ctx, obj}, transitionFuncs...)\n\treturn mr.mock.ctrl.RecordCallWithMethodType(mr.mock, \"UpsertVirtualMeshCertificateSigningRequest\", reflect.TypeOf((*MockVirtualMeshCertificateSigningRequestWriter)(nil).UpsertVirtualMeshCertificateSigningRequest), varargs...)\n}", "func (e UpsertEventRequest_OptionValidationError) Cause() error { return e.cause }", "func (mr *MockVirtualMeshCertificateSigningRequestClientMockRecorder) UpsertVirtualMeshCertificateSigningRequest(ctx, obj interface{}, transitionFuncs ...interface{}) *gomock.Call {\n\tmr.mock.ctrl.T.Helper()\n\tvarargs := append([]interface{}{ctx, obj}, transitionFuncs...)\n\treturn mr.mock.ctrl.RecordCallWithMethodType(mr.mock, \"UpsertVirtualMeshCertificateSigningRequest\", reflect.TypeOf((*MockVirtualMeshCertificateSigningRequestClient)(nil).UpsertVirtualMeshCertificateSigningRequest), varargs...)\n}", "func (m *RequestEmailUpdateRequest) Validate(formats strfmt.Registry) error {\n\tvar res []error\n\n\tif err := m.validateEmail(formats); err != nil {\n\t\tres = append(res, err)\n\t}\n\n\tif err := m.validateID(formats); err != nil {\n\t\tres = append(res, err)\n\t}\n\n\tif len(res) > 0 {\n\t\treturn errors.CompositeValidationError(res...)\n\t}\n\treturn nil\n}", "func UpdateEvent(w http.ResponseWriter, r *http.Request) {\n\tvars := mux.Vars(r)\n\n\tif key, ok := vars[\"eventId\"]; ok {\n\t\tvar event Event\n\n\t\tdecoder := json.NewDecoder(r.Body)\n\t\terr := decoder.Decode(&event)\n\n\t\t// Get EID from request URL\n\t\tif string(event.EID) != key {\n\t\t\tw.WriteHeader(http.StatusBadRequest)\n\t\t\tfmt.Fprintf(w, `{\"error\": \"Bad Request\"}`)\n\t\t\treturn\n\t\t}\n\n\t\tif err != nil {\n\t\t\tLogError(w, err)\n\t\t\treturn\n\t\t}\n\n\t\t// Validate input\n\t\tif err := event.Validate(); err != nil {\n\t\t\tw.WriteHeader(http.StatusBadRequest)\n\t\t\tfmt.Fprintf(w, `{\"error\": \"Bad Request\"}`)\n\t\t\treturn\n\t\t}\n\n\t\tif err := event.Save(); err != nil {\n\t\t\tLogError(w, err)\n\t\t\treturn\n\t\t}\n\n\t\tw.WriteHeader(http.StatusAccepted)\n\t\tfmt.Fprintf(w, `{\"eid\": %d}`, event.EID)\n\t} else 
{\n\t\tw.WriteHeader(http.StatusNotFound)\n\t\tfmt.Fprintf(w, `{\"error\": \"Not Found\"}`)\n\t}\n}", "func (umm *UpdateManyModel) SetUpsert(upsert bool) *UpdateManyModel {\n\tumm.Upsert = &upsert\n\treturn umm\n}", "func (obj *SObject) Upsert() *SObject {\n\tlog.Println(logPrefix, \"ExternalID:\", obj.ExternalID())\n\tlog.Println(logPrefix, \"ExternalIDField:\", obj.ExternalIDFieldName())\n\tif obj.Type() == \"\" || obj.client() == nil || obj.ExternalIDFieldName() == \"\" ||\n\t\tobj.ExternalID() == \"\" {\n\t\t// Sanity check.\n\t\tlog.Println(logPrefix, \"required fields are missing\")\n\t\treturn nil\n\t}\n\n\t// Make a copy of the incoming SObject, but skip certain metadata fields as they're not understood by salesforce.\n\treqObj := obj.makeCopy()\n\treqData, err := json.Marshal(reqObj)\n\tif err != nil {\n\t\tlog.Println(logPrefix, \"failed to convert sobject to json,\", err)\n\t\treturn nil\n\t}\n\n\tqueryBase := \"sobjects/\"\n\tif obj.client().useToolingAPI {\n\t\tqueryBase = \"tooling/sobjects/\"\n\t}\n\turl := obj.client().\n\t\tmakeURL(queryBase + obj.Type() + \"/\" + obj.ExternalIDFieldName() + \"/\" + obj.ExternalID())\n\trespData, err := obj.client().httpRequest(http.MethodPatch, url, bytes.NewReader(reqData))\n\tif err != nil {\n\t\tlog.Println(logPrefix, \"failed to process http request,\", err)\n\t\treturn nil\n\t}\n\n\t// Upsert returns with 201 and id in response if a new record is created. If a record is updated, it returns\n\t// a 204 with an empty response\n\tif len(respData) > 0 {\n\t\terr = obj.setIDFromResponseData(respData)\n\t\tif err != nil {\n\t\t\tlog.Println(logPrefix, \"failed to parse response,\", err)\n\t\t\treturn nil\n\t\t}\n\t}\n\n\treturn obj\n}", "func (oee *OtxEpubEpub) Upsert(db XODB) error {\n\tvar err error\n\n\t// if already exist, bail\n\tif oee._exists {\n\t\treturn errors.New(\"insert failed: already exists\")\n\t}\n\n\t// sql query\n\tconst sqlstr = `INSERT INTO public.otx_epub_epub (` +\n\t\t`document_ptr_id, publisher, source, oebps_folder, manifest, contents` +\n\t\t`) VALUES (` +\n\t\t`$1, $2, $3, $4, $5, $6` +\n\t\t`) ON CONFLICT (document_ptr_id) DO UPDATE SET (` +\n\t\t`document_ptr_id, publisher, source, oebps_folder, manifest, contents` +\n\t\t`) = (` +\n\t\t`EXCLUDED.document_ptr_id, EXCLUDED.publisher, EXCLUDED.source, EXCLUDED.oebps_folder, EXCLUDED.manifest, EXCLUDED.contents` +\n\t\t`)`\n\n\t// run query\n\tXOLog(sqlstr, oee.DocumentPtrID, oee.Publisher, oee.Source, oee.OebpsFolder, oee.Manifest, oee.Contents)\n\t_, err = db.Exec(sqlstr, oee.DocumentPtrID, oee.Publisher, oee.Source, oee.OebpsFolder, oee.Manifest, oee.Contents)\n\tif err != nil {\n\t\treturn err\n\t}\n\n\t// set existence\n\toee._exists = true\n\n\treturn nil\n}", "func NewUpsertObjectRequest(space string) *UpsertObjectRequest {\n\treq := new(UpsertObjectRequest)\n\treq.initImpl(\"crud.upsert_object\")\n\treq.setSpace(space)\n\treq.object = MapObject{}\n\treq.operations = []Operation{}\n\treq.opts = UpsertObjectOpts{}\n\treturn req\n}", "func (p *AutoCommitter) Upsert(pair []interface{}) (e error) {\n\tif p.started {\n\t\tp.docsUpsert <- pair\n\t} else {\n\t\te = errors.New(fmt.Sprintf(\"AutoCommitter-%s(%s)_is_closed\", p.name, p.coll))\n\t}\n\treturn\n}", "func InputEvent(event cloudevents.Event) EventRecordOption {\n\tencodedEvent, err := json.Marshal(event)\n\tif err != nil {\n\t\treturn func(pod *corev1.Pod, client *testlib.Client) error {\n\t\t\treturn err\n\t\t}\n\t}\n\treturn envOption(\"INPUT_EVENT\", string(encodedEvent))\n}", "func (o 
*PostDeviceUpsertParams) WriteToRequest(r runtime.ClientRequest, reg strfmt.Registry) error {\n\n\tif err := r.SetTimeout(o.timeout); err != nil {\n\t\treturn err\n\t}\n\tvar res []error\n\tif o.Payload != nil {\n\t\tif err := r.SetBodyParam(o.Payload); err != nil {\n\t\t\treturn err\n\t\t}\n\t}\n\n\tif len(res) > 0 {\n\t\treturn errors.CompositeValidationError(res...)\n\t}\n\treturn nil\n}", "func (o *CMFFamilyUserPoliciesTake) Upsert(ctx context.Context, exec boil.ContextExecutor, updateColumns, insertColumns boil.Columns) error {\n\tif o == nil {\n\t\treturn errors.New(\"models: no cmf_family_user_policies_take provided for upsert\")\n\t}\n\tif !boil.TimestampsAreSkipped(ctx) {\n\t\tcurrTime := time.Now().In(boil.GetLocation())\n\n\t\tif o.CreatedAt.IsZero() {\n\t\t\to.CreatedAt = currTime\n\t\t}\n\t\to.UpdatedAt = currTime\n\t}\n\n\tif err := o.doBeforeUpsertHooks(ctx, exec); err != nil {\n\t\treturn err\n\t}\n\n\tnzDefaults := queries.NonZeroDefaultSet(cmfFamilyUserPoliciesTakeColumnsWithDefault, o)\n\tnzUniques := queries.NonZeroDefaultSet(mySQLCMFFamilyUserPoliciesTakeUniqueColumns, o)\n\n\tif len(nzUniques) == 0 {\n\t\treturn errors.New(\"cannot upsert with a table that cannot conflict on a unique column\")\n\t}\n\n\t// Build cache key in-line uglily - mysql vs psql problems\n\tbuf := strmangle.GetBuffer()\n\tbuf.WriteString(strconv.Itoa(updateColumns.Kind))\n\tfor _, c := range updateColumns.Cols {\n\t\tbuf.WriteString(c)\n\t}\n\tbuf.WriteByte('.')\n\tbuf.WriteString(strconv.Itoa(insertColumns.Kind))\n\tfor _, c := range insertColumns.Cols {\n\t\tbuf.WriteString(c)\n\t}\n\tbuf.WriteByte('.')\n\tfor _, c := range nzDefaults {\n\t\tbuf.WriteString(c)\n\t}\n\tbuf.WriteByte('.')\n\tfor _, c := range nzUniques {\n\t\tbuf.WriteString(c)\n\t}\n\tkey := buf.String()\n\tstrmangle.PutBuffer(buf)\n\n\tcmfFamilyUserPoliciesTakeUpsertCacheMut.RLock()\n\tcache, cached := cmfFamilyUserPoliciesTakeUpsertCache[key]\n\tcmfFamilyUserPoliciesTakeUpsertCacheMut.RUnlock()\n\n\tvar err error\n\n\tif !cached {\n\t\tinsert, ret := insertColumns.InsertColumnSet(\n\t\t\tcmfFamilyUserPoliciesTakeAllColumns,\n\t\t\tcmfFamilyUserPoliciesTakeColumnsWithDefault,\n\t\t\tcmfFamilyUserPoliciesTakeColumnsWithoutDefault,\n\t\t\tnzDefaults,\n\t\t)\n\t\tupdate := updateColumns.UpdateColumnSet(\n\t\t\tcmfFamilyUserPoliciesTakeAllColumns,\n\t\t\tcmfFamilyUserPoliciesTakePrimaryKeyColumns,\n\t\t)\n\n\t\tif !updateColumns.IsNone() && len(update) == 0 {\n\t\t\treturn errors.New(\"models: unable to upsert cmf_family_user_policies_take, could not build update column list\")\n\t\t}\n\n\t\tret = strmangle.SetComplement(ret, nzUniques)\n\t\tcache.query = buildUpsertQueryMySQL(dialect, \"`cmf_family_user_policies_take`\", update, insert)\n\t\tcache.retQuery = fmt.Sprintf(\n\t\t\t\"SELECT %s FROM `cmf_family_user_policies_take` WHERE %s\",\n\t\t\tstrings.Join(strmangle.IdentQuoteSlice(dialect.LQ, dialect.RQ, ret), \",\"),\n\t\t\tstrmangle.WhereClause(\"`\", \"`\", 0, nzUniques),\n\t\t)\n\n\t\tcache.valueMapping, err = queries.BindMapping(cmfFamilyUserPoliciesTakeType, cmfFamilyUserPoliciesTakeMapping, insert)\n\t\tif err != nil {\n\t\t\treturn err\n\t\t}\n\t\tif len(ret) != 0 {\n\t\t\tcache.retMapping, err = queries.BindMapping(cmfFamilyUserPoliciesTakeType, cmfFamilyUserPoliciesTakeMapping, ret)\n\t\t\tif err != nil {\n\t\t\t\treturn err\n\t\t\t}\n\t\t}\n\t}\n\n\tvalue := reflect.Indirect(reflect.ValueOf(o))\n\tvals := queries.ValuesFromMapping(value, cache.valueMapping)\n\tvar returns []interface{}\n\tif len(cache.retMapping) != 
0 {\n\t\treturns = queries.PtrsFromMapping(value, cache.retMapping)\n\t}\n\n\tif boil.IsDebug(ctx) {\n\t\twriter := boil.DebugWriterFrom(ctx)\n\t\tfmt.Fprintln(writer, cache.query)\n\t\tfmt.Fprintln(writer, vals)\n\t}\n\tresult, err := exec.ExecContext(ctx, cache.query, vals...)\n\n\tif err != nil {\n\t\treturn errors.Wrap(err, \"models: unable to upsert for cmf_family_user_policies_take\")\n\t}\n\n\tvar lastID int64\n\tvar uniqueMap []uint64\n\tvar nzUniqueCols []interface{}\n\n\tif len(cache.retMapping) == 0 {\n\t\tgoto CacheNoHooks\n\t}\n\n\tlastID, err = result.LastInsertId()\n\tif err != nil {\n\t\treturn ErrSyncFail\n\t}\n\n\to.ID = int(lastID)\n\tif lastID != 0 && len(cache.retMapping) == 1 && cache.retMapping[0] == cmfFamilyUserPoliciesTakeMapping[\"id\"] {\n\t\tgoto CacheNoHooks\n\t}\n\n\tuniqueMap, err = queries.BindMapping(cmfFamilyUserPoliciesTakeType, cmfFamilyUserPoliciesTakeMapping, nzUniques)\n\tif err != nil {\n\t\treturn errors.Wrap(err, \"models: unable to retrieve unique values for cmf_family_user_policies_take\")\n\t}\n\tnzUniqueCols = queries.ValuesFromMapping(reflect.Indirect(reflect.ValueOf(o)), uniqueMap)\n\n\tif boil.IsDebug(ctx) {\n\t\twriter := boil.DebugWriterFrom(ctx)\n\t\tfmt.Fprintln(writer, cache.retQuery)\n\t\tfmt.Fprintln(writer, nzUniqueCols...)\n\t}\n\terr = exec.QueryRowContext(ctx, cache.retQuery, nzUniqueCols...).Scan(returns...)\n\tif err != nil {\n\t\treturn errors.Wrap(err, \"models: unable to populate default values for cmf_family_user_policies_take\")\n\t}\n\nCacheNoHooks:\n\tif !cached {\n\t\tcmfFamilyUserPoliciesTakeUpsertCacheMut.Lock()\n\t\tcmfFamilyUserPoliciesTakeUpsertCache[key] = cache\n\t\tcmfFamilyUserPoliciesTakeUpsertCacheMut.Unlock()\n\t}\n\n\treturn o.doAfterUpsertHooks(ctx, exec)\n}", "func (service *EntriesService) Upsert(spaceID, contentTypeID string, e *Entry) error {\n\tbytesArray, err := json.Marshal(e)\n\tif err != nil {\n\t\treturn err\n\t}\n\n\tvar path string\n\tvar method string\n\n\tif e.Sys != nil && e.Sys.ID != \"\" {\n\t\tpath = fmt.Sprintf(\"/spaces/%s/entries/%s\", spaceID, e.Sys.ID)\n\t\tmethod = \"PUT\"\n\t} else {\n\t\tpath = fmt.Sprintf(\"/spaces/%s/entries\", spaceID)\n\t\tmethod = \"POST\"\n\t}\n\n\treq, err := service.c.newRequest(method, path, nil, bytes.NewReader(bytesArray))\n\tif err != nil {\n\t\treturn err\n\t}\n\n\treq.Header.Set(\"X-Contentful-Content-Type\", contentTypeID)\n\treq.Header.Set(\"X-Contentful-Version\", strconv.Itoa(e.GetVersion()))\n\n\treturn service.c.do(req, e)\n}", "func (ut *updateUserPayload) Validate() (err error) {\n\tif ut.Name == nil {\n\t\terr = goa.MergeErrors(err, goa.MissingAttributeError(`request`, \"name\"))\n\t}\n\tif ut.Email == nil {\n\t\terr = goa.MergeErrors(err, goa.MissingAttributeError(`request`, \"email\"))\n\t}\n\tif ut.Bio == nil {\n\t\terr = goa.MergeErrors(err, goa.MissingAttributeError(`request`, \"bio\"))\n\t}\n\tif ut.Email != nil {\n\t\tif err2 := goa.ValidateFormat(goa.FormatEmail, *ut.Email); err2 != nil {\n\t\t\terr = goa.MergeErrors(err, goa.InvalidFormatError(`request.email`, *ut.Email, goa.FormatEmail, err2))\n\t\t}\n\t}\n\tif ut.Name != nil {\n\t\tif ok := goa.ValidatePattern(`\\S`, *ut.Name); !ok {\n\t\t\terr = goa.MergeErrors(err, goa.InvalidPatternError(`request.name`, *ut.Name, `\\S`))\n\t\t}\n\t}\n\tif ut.Name != nil {\n\t\tif utf8.RuneCountInString(*ut.Name) > 256 {\n\t\t\terr = goa.MergeErrors(err, goa.InvalidLengthError(`request.name`, *ut.Name, utf8.RuneCountInString(*ut.Name), 256, false))\n\t\t}\n\t}\n\treturn\n}", "func (e 
UpsertEventRequest_QuestionValidationError) Cause() error { return e.cause }", "func (m *PayoutLocationUpdateRequest) Validate(formats strfmt.Registry) error {\n\tvar res []error\n\n\tif err := m.validateID(formats); err != nil {\n\t\tres = append(res, err)\n\t}\n\n\tif err := m.validateUpdatedPayload(formats); err != nil {\n\t\tres = append(res, err)\n\t}\n\n\tif len(res) > 0 {\n\t\treturn errors.CompositeValidationError(res...)\n\t}\n\treturn nil\n}", "func (req *CreateItemRequest) ToUpsertItemSpec() *spec.UpsertItemSpec {\n\tvar upsertItemSpec spec.UpsertItemSpec\n\tupsertItemSpec.Name = req.Name\n\tupsertItemSpec.Description = req.Description\n\tupsertItemSpec.Tags = req.Tags\n\n\treturn &upsertItemSpec\n}", "func (ut *EventPayload) Validate() (err error) {\n\tif ut.SportID == \"\" {\n\t\terr = goa.MergeErrors(err, goa.MissingAttributeError(`type`, \"sportId\"))\n\t}\n\tif ut.EventID == \"\" {\n\t\terr = goa.MergeErrors(err, goa.MissingAttributeError(`type`, \"eventId\"))\n\t}\n\tif ut.Name == \"\" {\n\t\terr = goa.MergeErrors(err, goa.MissingAttributeError(`type`, \"name\"))\n\t}\n\tif ut.SubTitle == \"\" {\n\t\terr = goa.MergeErrors(err, goa.MissingAttributeError(`type`, \"subTitle\"))\n\t}\n\tif ut.StartDtTm == \"\" {\n\t\terr = goa.MergeErrors(err, goa.MissingAttributeError(`type`, \"startDtTm\"))\n\t}\n\tif ut.EndDtTm == \"\" {\n\t\terr = goa.MergeErrors(err, goa.MissingAttributeError(`type`, \"endDtTm\"))\n\t}\n\tif ut.LocationID == \"\" {\n\t\terr = goa.MergeErrors(err, goa.MissingAttributeError(`type`, \"locationId\"))\n\t}\n\tif ut.TeamAdvanceMethod == \"\" {\n\t\terr = goa.MergeErrors(err, goa.MissingAttributeError(`type`, \"teamAdvanceMethod\"))\n\t}\n\treturn\n}", "func (m *FreeIpaUpscaleV1Request) Validate(formats strfmt.Registry) error {\n\tvar res []error\n\n\tif err := m.validateEnvironmentCrn(formats); err != nil {\n\t\tres = append(res, err)\n\t}\n\n\tif err := m.validateTargetAvailabilityType(formats); err != nil {\n\t\tres = append(res, err)\n\t}\n\n\tif len(res) > 0 {\n\t\treturn errors.CompositeValidationError(res...)\n\t}\n\treturn nil\n}", "func (m *LolPlayerLevelUpPlayerLevelUpEvent) Validate(formats strfmt.Registry) error {\n\treturn nil\n}", "func (v UpdateTransactionalEndpointRequest) Validate() error {\n\treturn validation.Errors{\n\t\t\"ConfigID\": validation.Validate(v.ConfigID, validation.Required),\n\t\t\"Version\": validation.Validate(v.Version, validation.Required),\n\t\t\"SecurityPolicyID\": validation.Validate(v.SecurityPolicyID, validation.Required),\n\t\t\"OperationID\": validation.Validate(v.OperationID, validation.Required),\n\t\t\"JsonPayload\": validation.Validate(v.JsonPayload, validation.Required),\n\t}.Filter()\n}", "func (o *Utxo) Upsert(ctx context.Context, exec boil.ContextExecutor, updateOnConflict bool, conflictColumns []string, updateColumns, insertColumns boil.Columns) error {\n\tif o == nil {\n\t\treturn errors.New(\"models: no utxo provided for upsert\")\n\t}\n\n\tif err := o.doBeforeUpsertHooks(ctx, exec); err != nil {\n\t\treturn err\n\t}\n\n\tnzDefaults := queries.NonZeroDefaultSet(utxoColumnsWithDefault, o)\n\n\t// Build cache key in-line uglily - mysql vs psql problems\n\tbuf := strmangle.GetBuffer()\n\tif updateOnConflict {\n\t\tbuf.WriteByte('t')\n\t} else {\n\t\tbuf.WriteByte('f')\n\t}\n\tbuf.WriteByte('.')\n\tfor _, c := range conflictColumns {\n\t\tbuf.WriteString(c)\n\t}\n\tbuf.WriteByte('.')\n\tbuf.WriteString(strconv.Itoa(updateColumns.Kind))\n\tfor _, c := range updateColumns.Cols 
{\n\t\tbuf.WriteString(c)\n\t}\n\tbuf.WriteByte('.')\n\tbuf.WriteString(strconv.Itoa(insertColumns.Kind))\n\tfor _, c := range insertColumns.Cols {\n\t\tbuf.WriteString(c)\n\t}\n\tbuf.WriteByte('.')\n\tfor _, c := range nzDefaults {\n\t\tbuf.WriteString(c)\n\t}\n\tkey := buf.String()\n\tstrmangle.PutBuffer(buf)\n\n\tutxoUpsertCacheMut.RLock()\n\tcache, cached := utxoUpsertCache[key]\n\tutxoUpsertCacheMut.RUnlock()\n\n\tvar err error\n\n\tif !cached {\n\t\tinsert, ret := insertColumns.InsertColumnSet(\n\t\t\tutxoAllColumns,\n\t\t\tutxoColumnsWithDefault,\n\t\t\tutxoColumnsWithoutDefault,\n\t\t\tnzDefaults,\n\t\t)\n\t\tupdate := updateColumns.UpdateColumnSet(\n\t\t\tutxoAllColumns,\n\t\t\tutxoPrimaryKeyColumns,\n\t\t)\n\n\t\tif updateOnConflict && len(update) == 0 {\n\t\t\treturn errors.New(\"models: unable to upsert utxo, could not build update column list\")\n\t\t}\n\n\t\tconflict := conflictColumns\n\t\tif len(conflict) == 0 {\n\t\t\tconflict = make([]string, len(utxoPrimaryKeyColumns))\n\t\t\tcopy(conflict, utxoPrimaryKeyColumns)\n\t\t}\n\t\tcache.query = buildUpsertQueryPostgres(dialect, \"\\\"utxo\\\"\", updateOnConflict, ret, update, conflict, insert)\n\n\t\tcache.valueMapping, err = queries.BindMapping(utxoType, utxoMapping, insert)\n\t\tif err != nil {\n\t\t\treturn err\n\t\t}\n\t\tif len(ret) != 0 {\n\t\t\tcache.retMapping, err = queries.BindMapping(utxoType, utxoMapping, ret)\n\t\t\tif err != nil {\n\t\t\t\treturn err\n\t\t\t}\n\t\t}\n\t}\n\n\tvalue := reflect.Indirect(reflect.ValueOf(o))\n\tvals := queries.ValuesFromMapping(value, cache.valueMapping)\n\tvar returns []interface{}\n\tif len(cache.retMapping) != 0 {\n\t\treturns = queries.PtrsFromMapping(value, cache.retMapping)\n\t}\n\n\tif boil.DebugMode {\n\t\tfmt.Fprintln(boil.DebugWriter, cache.query)\n\t\tfmt.Fprintln(boil.DebugWriter, vals)\n\t}\n\n\tif len(cache.retMapping) != 0 {\n\t\terr = exec.QueryRowContext(ctx, cache.query, vals...).Scan(returns...)\n\t\tif err == sql.ErrNoRows {\n\t\t\terr = nil // Postgres doesn't return anything when there's no update\n\t\t}\n\t} else {\n\t\t_, err = exec.ExecContext(ctx, cache.query, vals...)\n\t}\n\tif err != nil {\n\t\treturn errors.Wrap(err, \"models: unable to upsert utxo\")\n\t}\n\n\tif !cached {\n\t\tutxoUpsertCacheMut.Lock()\n\t\tutxoUpsertCache[key] = cache\n\t\tutxoUpsertCacheMut.Unlock()\n\t}\n\n\treturn o.doAfterUpsertHooks(ctx, exec)\n}", "func (m *UpsertEventRequest_Question) Validate() error {\n\tif m == nil {\n\t\treturn nil\n\t}\n\n\tif utf8.RuneCountInString(m.GetContent()) < 1 {\n\t\treturn UpsertEventRequest_QuestionValidationError{\n\t\t\tfield: \"Content\",\n\t\t\treason: \"value length must be at least 1 runes\",\n\t\t}\n\t}\n\n\t// no validation rules for Position\n\n\t// no validation rules for Type\n\n\t// no validation rules for IsRequired\n\n\t// no validation rules for LimitedChoice\n\n\tfor idx, item := range m.GetOptions() {\n\t\t_, _ = idx, item\n\n\t\tif v, ok := interface{}(item).(interface{ Validate() error }); ok {\n\t\t\tif err := v.Validate(); err != nil {\n\t\t\t\treturn UpsertEventRequest_QuestionValidationError{\n\t\t\t\t\tfield: fmt.Sprintf(\"Options[%v]\", idx),\n\t\t\t\t\treason: \"embedded message failed validation\",\n\t\t\t\t\tcause: err,\n\t\t\t\t}\n\t\t\t}\n\t\t}\n\n\t}\n\n\treturn nil\n}", "func (m *UpdateMeetingV1Request) Validate() error {\n\tif m == nil {\n\t\treturn nil\n\t}\n\n\tif v, ok := interface{}(m.GetMeeting()).(interface{ Validate() error }); ok {\n\t\tif err := v.Validate(); err != nil {\n\t\t\treturn 
UpdateMeetingV1RequestValidationError{\n\t\t\t\tfield: \"Meeting\",\n\t\t\t\treason: \"embedded message failed validation\",\n\t\t\t\tcause: err,\n\t\t\t}\n\t\t}\n\t}\n\n\treturn nil\n}", "func ValidateUpdateRequest(message *taskspb.UpdateRequest) (err error) {\n\tif message.Task == nil {\n\t\terr = goa.MergeErrors(err, goa.MissingFieldError(\"task\", \"message\"))\n\t}\n\tif message.Task != nil {\n\t\tif err2 := ValidateStoredTask(message.Task); err2 != nil {\n\t\t\terr = goa.MergeErrors(err, err2)\n\t\t}\n\t}\n\treturn\n}", "func (payload *PostEventPayload) Validate() (err error) {\n\tif payload.Etype == \"\" {\n\t\terr = goa.MergeErrors(err, goa.MissingAttributeError(`raw`, \"etype\"))\n\t}\n\tif payload.Action == \"\" {\n\t\terr = goa.MergeErrors(err, goa.MissingAttributeError(`raw`, \"action\"))\n\t}\n\tif payload.From == \"\" {\n\t\terr = goa.MergeErrors(err, goa.MissingAttributeError(`raw`, \"from\"))\n\t}\n\treturn\n}", "func (o *Email) Upsert(ctx context.Context, exec boil.ContextExecutor, updateColumns, insertColumns boil.Columns) error {\n\tif o == nil {\n\t\treturn errors.New(\"mysql: no email provided for upsert\")\n\t}\n\n\tif err := o.doBeforeUpsertHooks(ctx, exec); err != nil {\n\t\treturn err\n\t}\n\n\tnzDefaults := queries.NonZeroDefaultSet(emailColumnsWithDefault, o)\n\tnzUniques := queries.NonZeroDefaultSet(mySQLEmailUniqueColumns, o)\n\n\tif len(nzUniques) == 0 {\n\t\treturn errors.New(\"cannot upsert with a table that cannot conflict on a unique column\")\n\t}\n\n\t// Build cache key in-line uglily - mysql vs psql problems\n\tbuf := strmangle.GetBuffer()\n\tbuf.WriteString(strconv.Itoa(updateColumns.Kind))\n\tfor _, c := range updateColumns.Cols {\n\t\tbuf.WriteString(c)\n\t}\n\tbuf.WriteByte('.')\n\tbuf.WriteString(strconv.Itoa(insertColumns.Kind))\n\tfor _, c := range insertColumns.Cols {\n\t\tbuf.WriteString(c)\n\t}\n\tbuf.WriteByte('.')\n\tfor _, c := range nzDefaults {\n\t\tbuf.WriteString(c)\n\t}\n\tbuf.WriteByte('.')\n\tfor _, c := range nzUniques {\n\t\tbuf.WriteString(c)\n\t}\n\tkey := buf.String()\n\tstrmangle.PutBuffer(buf)\n\n\temailUpsertCacheMut.RLock()\n\tcache, cached := emailUpsertCache[key]\n\temailUpsertCacheMut.RUnlock()\n\n\tvar err error\n\n\tif !cached {\n\t\tinsert, ret := insertColumns.InsertColumnSet(\n\t\t\temailColumns,\n\t\t\temailColumnsWithDefault,\n\t\t\temailColumnsWithoutDefault,\n\t\t\tnzDefaults,\n\t\t)\n\t\tupdate := updateColumns.UpdateColumnSet(\n\t\t\temailColumns,\n\t\t\temailPrimaryKeyColumns,\n\t\t)\n\n\t\tif len(update) == 0 {\n\t\t\treturn errors.New(\"mysql: unable to upsert email, could not build update column list\")\n\t\t}\n\n\t\tret = strmangle.SetComplement(ret, nzUniques)\n\t\tcache.query = buildUpsertQueryMySQL(dialect, \"email\", update, insert)\n\t\tcache.retQuery = fmt.Sprintf(\n\t\t\t\"SELECT %s FROM `email` WHERE %s\",\n\t\t\tstrings.Join(strmangle.IdentQuoteSlice(dialect.LQ, dialect.RQ, ret), \",\"),\n\t\t\tstrmangle.WhereClause(\"`\", \"`\", 0, nzUniques),\n\t\t)\n\n\t\tcache.valueMapping, err = queries.BindMapping(emailType, emailMapping, insert)\n\t\tif err != nil {\n\t\t\treturn err\n\t\t}\n\t\tif len(ret) != 0 {\n\t\t\tcache.retMapping, err = queries.BindMapping(emailType, emailMapping, ret)\n\t\t\tif err != nil {\n\t\t\t\treturn err\n\t\t\t}\n\t\t}\n\t}\n\n\tvalue := reflect.Indirect(reflect.ValueOf(o))\n\tvals := queries.ValuesFromMapping(value, cache.valueMapping)\n\tvar returns []interface{}\n\tif len(cache.retMapping) != 0 {\n\t\treturns = queries.PtrsFromMapping(value, cache.retMapping)\n\t}\n\n\tif 
boil.DebugMode {\n\t\tfmt.Fprintln(boil.DebugWriter, cache.query)\n\t\tfmt.Fprintln(boil.DebugWriter, vals)\n\t}\n\n\tresult, err := exec.ExecContext(ctx, cache.query, vals...)\n\n\tif err != nil {\n\t\treturn errors.Wrap(err, \"mysql: unable to upsert for email\")\n\t}\n\n\tvar lastID int64\n\tvar uniqueMap []uint64\n\tvar nzUniqueCols []interface{}\n\n\tif len(cache.retMapping) == 0 {\n\t\tgoto CacheNoHooks\n\t}\n\n\tlastID, err = result.LastInsertId()\n\tif err != nil {\n\t\treturn ErrSyncFail\n\t}\n\n\to.ID = int64(lastID)\n\tif lastID != 0 && len(cache.retMapping) == 1 && cache.retMapping[0] == emailMapping[\"id\"] {\n\t\tgoto CacheNoHooks\n\t}\n\n\tuniqueMap, err = queries.BindMapping(emailType, emailMapping, nzUniques)\n\tif err != nil {\n\t\treturn errors.Wrap(err, \"mysql: unable to retrieve unique values for email\")\n\t}\n\tnzUniqueCols = queries.ValuesFromMapping(reflect.Indirect(reflect.ValueOf(o)), uniqueMap)\n\n\tif boil.DebugMode {\n\t\tfmt.Fprintln(boil.DebugWriter, cache.retQuery)\n\t\tfmt.Fprintln(boil.DebugWriter, nzUniqueCols...)\n\t}\n\n\terr = exec.QueryRowContext(ctx, cache.retQuery, nzUniqueCols...).Scan(returns...)\n\tif err != nil {\n\t\treturn errors.Wrap(err, \"mysql: unable to populate default values for email\")\n\t}\n\nCacheNoHooks:\n\tif !cached {\n\t\temailUpsertCacheMut.Lock()\n\t\temailUpsertCache[key] = cache\n\t\temailUpsertCacheMut.Unlock()\n\t}\n\n\treturn o.doAfterUpsertHooks(ctx, exec)\n}", "func (h *handler) Upsert(ctx context.Context, params db.Params) error {\n\tbsonFilter := bson.M{}\n\tfor key, val := range params.Filter {\n\t\tbsonFilter[key] = val\n\t}\n\tparams.UpsertData = map[string]interface{}{\n\t\t\"$set\": params.UpsertData,\n\t}\n\t_, err := h.getDatabase(params.Database).C(params.Collection).Upsert(bsonFilter, params.UpsertData)\n\treturn err\n}", "func (payload *postEventPayload) Validate() (err error) {\n\tif payload.Etype == nil {\n\t\terr = goa.MergeErrors(err, goa.MissingAttributeError(`raw`, \"etype\"))\n\t}\n\tif payload.Action == nil {\n\t\terr = goa.MergeErrors(err, goa.MissingAttributeError(`raw`, \"action\"))\n\t}\n\tif payload.From == nil {\n\t\terr = goa.MergeErrors(err, goa.MissingAttributeError(`raw`, \"from\"))\n\t}\n\treturn\n}", "func (o *DMessageEmbed) Upsert(exec boil.Executor, updateOnConflict bool, conflictColumns []string, updateColumns []string, whitelist ...string) error {\n\tif o == nil {\n\t\treturn errors.New(\"models: no d_message_embeds provided for upsert\")\n\t}\n\n\tnzDefaults := queries.NonZeroDefaultSet(dMessageEmbedColumnsWithDefault, o)\n\n\t// Build cache key in-line uglily - mysql vs postgres problems\n\tbuf := strmangle.GetBuffer()\n\tif updateOnConflict {\n\t\tbuf.WriteByte('t')\n\t} else {\n\t\tbuf.WriteByte('f')\n\t}\n\tbuf.WriteByte('.')\n\tfor _, c := range conflictColumns {\n\t\tbuf.WriteString(c)\n\t}\n\tbuf.WriteByte('.')\n\tfor _, c := range updateColumns {\n\t\tbuf.WriteString(c)\n\t}\n\tbuf.WriteByte('.')\n\tfor _, c := range whitelist {\n\t\tbuf.WriteString(c)\n\t}\n\tbuf.WriteByte('.')\n\tfor _, c := range nzDefaults {\n\t\tbuf.WriteString(c)\n\t}\n\tkey := buf.String()\n\tstrmangle.PutBuffer(buf)\n\n\tdMessageEmbedUpsertCacheMut.RLock()\n\tcache, cached := dMessageEmbedUpsertCache[key]\n\tdMessageEmbedUpsertCacheMut.RUnlock()\n\n\tvar err error\n\n\tif !cached {\n\t\tvar ret []string\n\t\twhitelist, ret = 
strmangle.InsertColumnSet(\n\t\t\tdMessageEmbedColumns,\n\t\t\tdMessageEmbedColumnsWithDefault,\n\t\t\tdMessageEmbedColumnsWithoutDefault,\n\t\t\tnzDefaults,\n\t\t\twhitelist,\n\t\t)\n\t\tupdate := strmangle.UpdateColumnSet(\n\t\t\tdMessageEmbedColumns,\n\t\t\tdMessageEmbedPrimaryKeyColumns,\n\t\t\tupdateColumns,\n\t\t)\n\t\tif len(update) == 0 {\n\t\t\treturn errors.New(\"models: unable to upsert d_message_embeds, could not build update column list\")\n\t\t}\n\n\t\tconflict := conflictColumns\n\t\tif len(conflict) == 0 {\n\t\t\tconflict = make([]string, len(dMessageEmbedPrimaryKeyColumns))\n\t\t\tcopy(conflict, dMessageEmbedPrimaryKeyColumns)\n\t\t}\n\t\tcache.query = queries.BuildUpsertQueryPostgres(dialect, \"\\\"d_message_embeds\\\"\", updateOnConflict, ret, update, conflict, whitelist)\n\n\t\tcache.valueMapping, err = queries.BindMapping(dMessageEmbedType, dMessageEmbedMapping, whitelist)\n\t\tif err != nil {\n\t\t\treturn err\n\t\t}\n\t\tif len(ret) != 0 {\n\t\t\tcache.retMapping, err = queries.BindMapping(dMessageEmbedType, dMessageEmbedMapping, ret)\n\t\t\tif err != nil {\n\t\t\t\treturn err\n\t\t\t}\n\t\t}\n\t}\n\n\tvalue := reflect.Indirect(reflect.ValueOf(o))\n\tvals := queries.ValuesFromMapping(value, cache.valueMapping)\n\tvar returns []interface{}\n\tif len(cache.retMapping) != 0 {\n\t\treturns = queries.PtrsFromMapping(value, cache.retMapping)\n\t}\n\n\tif boil.DebugMode {\n\t\tfmt.Fprintln(boil.DebugWriter, cache.query)\n\t\tfmt.Fprintln(boil.DebugWriter, vals)\n\t}\n\n\tif len(cache.retMapping) != 0 {\n\t\terr = exec.QueryRow(cache.query, vals...).Scan(returns...)\n\t\tif err == sql.ErrNoRows {\n\t\t\terr = nil // Postgres doesn't return anything when there's no update\n\t\t}\n\t} else {\n\t\t_, err = exec.Exec(cache.query, vals...)\n\t}\n\tif err != nil {\n\t\treturn errors.Wrap(err, \"models: unable to upsert d_message_embeds\")\n\t}\n\n\tif !cached {\n\t\tdMessageEmbedUpsertCacheMut.Lock()\n\t\tdMessageEmbedUpsertCache[key] = cache\n\t\tdMessageEmbedUpsertCacheMut.Unlock()\n\t}\n\n\treturn nil\n}", "func (o *Vendor) Upsert(ctx context.Context, exec boil.ContextExecutor, updateColumns, insertColumns boil.Columns) error {\n\tif o == nil {\n\t\treturn errors.New(\"models: no vendors provided for upsert\")\n\t}\n\tif !boil.TimestampsAreSkipped(ctx) {\n\t\tcurrTime := time.Now().In(boil.GetLocation())\n\n\t\tif queries.MustTime(o.CreatedAt).IsZero() {\n\t\t\tqueries.SetScanner(&o.CreatedAt, currTime)\n\t\t}\n\t\tqueries.SetScanner(&o.UpdatedAt, currTime)\n\t}\n\n\tif err := o.doBeforeUpsertHooks(ctx, exec); err != nil {\n\t\treturn err\n\t}\n\n\tnzDefaults := queries.NonZeroDefaultSet(vendorColumnsWithDefault, o)\n\tnzUniques := queries.NonZeroDefaultSet(mySQLVendorUniqueColumns, o)\n\n\tif len(nzUniques) == 0 {\n\t\treturn errors.New(\"cannot upsert with a table that cannot conflict on a unique column\")\n\t}\n\n\t// Build cache key in-line uglily - mysql vs psql problems\n\tbuf := strmangle.GetBuffer()\n\tbuf.WriteString(strconv.Itoa(updateColumns.Kind))\n\tfor _, c := range updateColumns.Cols {\n\t\tbuf.WriteString(c)\n\t}\n\tbuf.WriteByte('.')\n\tbuf.WriteString(strconv.Itoa(insertColumns.Kind))\n\tfor _, c := range insertColumns.Cols {\n\t\tbuf.WriteString(c)\n\t}\n\tbuf.WriteByte('.')\n\tfor _, c := range nzDefaults {\n\t\tbuf.WriteString(c)\n\t}\n\tbuf.WriteByte('.')\n\tfor _, c := range nzUniques {\n\t\tbuf.WriteString(c)\n\t}\n\tkey := buf.String()\n\tstrmangle.PutBuffer(buf)\n\n\tvendorUpsertCacheMut.RLock()\n\tcache, cached := 
vendorUpsertCache[key]\n\tvendorUpsertCacheMut.RUnlock()\n\n\tvar err error\n\n\tif !cached {\n\t\tinsert, ret := insertColumns.InsertColumnSet(\n\t\t\tvendorColumns,\n\t\t\tvendorColumnsWithDefault,\n\t\t\tvendorColumnsWithoutDefault,\n\t\t\tnzDefaults,\n\t\t)\n\t\tupdate := updateColumns.UpdateColumnSet(\n\t\t\tvendorColumns,\n\t\t\tvendorPrimaryKeyColumns,\n\t\t)\n\n\t\tif len(update) == 0 {\n\t\t\treturn errors.New(\"models: unable to upsert vendors, could not build update column list\")\n\t\t}\n\n\t\tret = strmangle.SetComplement(ret, nzUniques)\n\t\tcache.query = buildUpsertQueryMySQL(dialect, \"vendors\", update, insert)\n\t\tcache.retQuery = fmt.Sprintf(\n\t\t\t\"SELECT %s FROM `vendors` WHERE %s\",\n\t\t\tstrings.Join(strmangle.IdentQuoteSlice(dialect.LQ, dialect.RQ, ret), \",\"),\n\t\t\tstrmangle.WhereClause(\"`\", \"`\", 0, nzUniques),\n\t\t)\n\n\t\tcache.valueMapping, err = queries.BindMapping(vendorType, vendorMapping, insert)\n\t\tif err != nil {\n\t\t\treturn err\n\t\t}\n\t\tif len(ret) != 0 {\n\t\t\tcache.retMapping, err = queries.BindMapping(vendorType, vendorMapping, ret)\n\t\t\tif err != nil {\n\t\t\t\treturn err\n\t\t\t}\n\t\t}\n\t}\n\n\tvalue := reflect.Indirect(reflect.ValueOf(o))\n\tvals := queries.ValuesFromMapping(value, cache.valueMapping)\n\tvar returns []interface{}\n\tif len(cache.retMapping) != 0 {\n\t\treturns = queries.PtrsFromMapping(value, cache.retMapping)\n\t}\n\n\tif boil.DebugMode {\n\t\tfmt.Fprintln(boil.DebugWriter, cache.query)\n\t\tfmt.Fprintln(boil.DebugWriter, vals)\n\t}\n\n\tresult, err := exec.ExecContext(ctx, cache.query, vals...)\n\n\tif err != nil {\n\t\treturn errors.Wrap(err, \"models: unable to upsert for vendors\")\n\t}\n\n\tvar lastID int64\n\tvar uniqueMap []uint64\n\tvar nzUniqueCols []interface{}\n\n\tif len(cache.retMapping) == 0 {\n\t\tgoto CacheNoHooks\n\t}\n\n\tlastID, err = result.LastInsertId()\n\tif err != nil {\n\t\treturn ErrSyncFail\n\t}\n\n\to.ID = int(lastID)\n\tif lastID != 0 && len(cache.retMapping) == 1 && cache.retMapping[0] == vendorMapping[\"id\"] {\n\t\tgoto CacheNoHooks\n\t}\n\n\tuniqueMap, err = queries.BindMapping(vendorType, vendorMapping, nzUniques)\n\tif err != nil {\n\t\treturn errors.Wrap(err, \"models: unable to retrieve unique values for vendors\")\n\t}\n\tnzUniqueCols = queries.ValuesFromMapping(reflect.Indirect(reflect.ValueOf(o)), uniqueMap)\n\n\tif boil.DebugMode {\n\t\tfmt.Fprintln(boil.DebugWriter, cache.retQuery)\n\t\tfmt.Fprintln(boil.DebugWriter, nzUniqueCols...)\n\t}\n\n\terr = exec.QueryRowContext(ctx, cache.retQuery, nzUniqueCols...).Scan(returns...)\n\tif err != nil {\n\t\treturn errors.Wrap(err, \"models: unable to populate default values for vendors\")\n\t}\n\nCacheNoHooks:\n\tif !cached {\n\t\tvendorUpsertCacheMut.Lock()\n\t\tvendorUpsertCache[key] = cache\n\t\tvendorUpsertCacheMut.Unlock()\n\t}\n\n\treturn o.doAfterUpsertHooks(ctx, exec)\n}", "func (c *CloudWatchEvents) PutEventsRequest(input *PutEventsInput) (req *request.Request, output *PutEventsOutput) {\n\top := &request.Operation{\n\t\tName: opPutEvents,\n\t\tHTTPMethod: \"POST\",\n\t\tHTTPPath: \"/\",\n\t}\n\n\tif input == nil {\n\t\tinput = &PutEventsInput{}\n\t}\n\n\treq = c.newRequest(op, input, output)\n\toutput = &PutEventsOutput{}\n\treq.Data = output\n\treturn\n}", "func (o *Employee) Upsert(ctx context.Context, exec boil.ContextExecutor, updateOnConflict bool, conflictColumns []string, updateColumns, insertColumns boil.Columns) error {\n\tif o == nil {\n\t\treturn errors.New(\"models: no employee provided for 
upsert\")\n\t}\n\n\tif err := o.doBeforeUpsertHooks(ctx, exec); err != nil {\n\t\treturn err\n\t}\n\n\tnzDefaults := queries.NonZeroDefaultSet(employeeColumnsWithDefault, o)\n\n\t// Build cache key in-line uglily - mysql vs psql problems\n\tbuf := strmangle.GetBuffer()\n\tif updateOnConflict {\n\t\tbuf.WriteByte('t')\n\t} else {\n\t\tbuf.WriteByte('f')\n\t}\n\tbuf.WriteByte('.')\n\tfor _, c := range conflictColumns {\n\t\tbuf.WriteString(c)\n\t}\n\tbuf.WriteByte('.')\n\tbuf.WriteString(strconv.Itoa(updateColumns.Kind))\n\tfor _, c := range updateColumns.Cols {\n\t\tbuf.WriteString(c)\n\t}\n\tbuf.WriteByte('.')\n\tbuf.WriteString(strconv.Itoa(insertColumns.Kind))\n\tfor _, c := range insertColumns.Cols {\n\t\tbuf.WriteString(c)\n\t}\n\tbuf.WriteByte('.')\n\tfor _, c := range nzDefaults {\n\t\tbuf.WriteString(c)\n\t}\n\tkey := buf.String()\n\tstrmangle.PutBuffer(buf)\n\n\temployeeUpsertCacheMut.RLock()\n\tcache, cached := employeeUpsertCache[key]\n\temployeeUpsertCacheMut.RUnlock()\n\n\tvar err error\n\n\tif !cached {\n\t\tinsert, ret := insertColumns.InsertColumnSet(\n\t\t\temployeeAllColumns,\n\t\t\temployeeColumnsWithDefault,\n\t\t\temployeeColumnsWithoutDefault,\n\t\t\tnzDefaults,\n\t\t)\n\t\tupdate := updateColumns.UpdateColumnSet(\n\t\t\temployeeAllColumns,\n\t\t\temployeePrimaryKeyColumns,\n\t\t)\n\n\t\tif updateOnConflict && len(update) == 0 {\n\t\t\treturn errors.New(\"models: unable to upsert employee, could not build update column list\")\n\t\t}\n\n\t\tconflict := conflictColumns\n\t\tif len(conflict) == 0 {\n\t\t\tconflict = make([]string, len(employeePrimaryKeyColumns))\n\t\t\tcopy(conflict, employeePrimaryKeyColumns)\n\t\t}\n\t\tcache.query = buildUpsertQueryPostgres(dialect, \"\\\"employee\\\"\", updateOnConflict, ret, update, conflict, insert)\n\n\t\tcache.valueMapping, err = queries.BindMapping(employeeType, employeeMapping, insert)\n\t\tif err != nil {\n\t\t\treturn err\n\t\t}\n\t\tif len(ret) != 0 {\n\t\t\tcache.retMapping, err = queries.BindMapping(employeeType, employeeMapping, ret)\n\t\t\tif err != nil {\n\t\t\t\treturn err\n\t\t\t}\n\t\t}\n\t}\n\n\tvalue := reflect.Indirect(reflect.ValueOf(o))\n\tvals := queries.ValuesFromMapping(value, cache.valueMapping)\n\tvar returns []interface{}\n\tif len(cache.retMapping) != 0 {\n\t\treturns = queries.PtrsFromMapping(value, cache.retMapping)\n\t}\n\n\tif boil.IsDebug(ctx) {\n\t\twriter := boil.DebugWriterFrom(ctx)\n\t\tfmt.Fprintln(writer, cache.query)\n\t\tfmt.Fprintln(writer, vals)\n\t}\n\tif len(cache.retMapping) != 0 {\n\t\terr = exec.QueryRowContext(ctx, cache.query, vals...).Scan(returns...)\n\t\tif err == sql.ErrNoRows {\n\t\t\terr = nil // Postgres doesn't return anything when there's no update\n\t\t}\n\t} else {\n\t\t_, err = exec.ExecContext(ctx, cache.query, vals...)\n\t}\n\tif err != nil {\n\t\treturn errors.Wrap(err, \"models: unable to upsert employee\")\n\t}\n\n\tif !cached {\n\t\temployeeUpsertCacheMut.Lock()\n\t\temployeeUpsertCache[key] = cache\n\t\temployeeUpsertCacheMut.Unlock()\n\t}\n\n\treturn o.doAfterUpsertHooks(ctx, exec)\n}", "func (mr *MockProductMockRecorder) UpsertProductEquipments(arg0, arg1 interface{}) *gomock.Call {\n\tmr.mock.ctrl.T.Helper()\n\treturn mr.mock.ctrl.RecordCallWithMethodType(mr.mock, \"UpsertProductEquipments\", reflect.TypeOf((*MockProduct)(nil).UpsertProductEquipments), arg0, arg1)\n}", "func (r *DeviceManagementTroubleshootingEventRequest) Update(ctx context.Context, reqObj *DeviceManagementTroubleshootingEvent) error {\n\treturn r.JSONRequest(ctx, \"PATCH\", \"\", reqObj, 
nil)\n}", "func (e *ExternalInterfaces) SubmitTestEvent(ctx context.Context, req *eventsproto.EventSubRequest) response.RPC {\n\tvar resp response.RPC\n\tauthResp, err := e.Auth(ctx, req.SessionToken, []string{common.PrivilegeConfigureComponents}, []string{})\n\tif authResp.StatusCode != http.StatusOK {\n\t\terrMsg := fmt.Sprintf(\"error while trying to authenticate session: status code: %v, status message: %v\", authResp.StatusCode, authResp.StatusMessage)\n\t\tif err != nil {\n\t\t\terrMsg = errMsg + \": \" + err.Error()\n\t\t}\n\t\tl.LogWithFields(ctx).Error(errMsg)\n\t\treturn authResp\n\t}\n\t// First get the UserName from SessionToken\n\tsessionUserName, err := e.GetSessionUserName(ctx, req.SessionToken)\n\tif err != nil {\n\t\t// handle the error case with appropriate response body\n\t\terrMsg := \"error while trying to authenticate session: \" + err.Error()\n\t\tl.LogWithFields(ctx).Error(errMsg)\n\t\treturn common.GeneralError(http.StatusUnauthorized, response.NoValidSession, errMsg, nil, nil)\n\t}\n\n\ttestEvent, statusMessage, errMsg, msgArgs := validAndGenSubTestReq(req.PostBody)\n\tif statusMessage != response.Success {\n\t\tl.LogWithFields(ctx).Error(errMsg)\n\t\treturn common.GeneralError(http.StatusBadRequest, statusMessage, errMsg, msgArgs, nil)\n\t}\n\n\t// parsing the event\n\tvar eventObj interface{}\n\terr = JSONUnmarshal(req.PostBody, &eventObj)\n\tif err != nil {\n\t\terrMsg := \"unable to parse the event request\" + err.Error()\n\t\tl.LogWithFields(ctx).Error(errMsg)\n\t\treturn common.GeneralError(http.StatusInternalServerError, response.InternalError, errMsg, nil, nil)\n\t}\n\t// Validating the request JSON properties for case sensitive\n\tinvalidProperties, err := RequestParamsCaseValidatorFunc(req.PostBody, eventObj)\n\tif err != nil {\n\t\terrMsg := \"error while validating request parameters: \" + err.Error()\n\t\tl.LogWithFields(ctx).Error(errMsg)\n\t\treturn common.GeneralError(http.StatusInternalServerError, response.InternalError, errMsg, nil, nil)\n\t} else if invalidProperties != \"\" {\n\t\terrorMessage := \"error: one or more properties given in the request body are not valid, ensure properties are listed in upper camel case \"\n\t\tl.LogWithFields(ctx).Error(errorMessage)\n\t\tresp := common.GeneralError(http.StatusBadRequest, response.PropertyUnknown, errorMessage, []interface{}{invalidProperties}, nil)\n\t\treturn resp\n\t}\n\t// Find out all the subscription destinations of the requesting user\n\tsubscriptions, err := e.GetEvtSubscriptions(sessionUserName)\n\tif err != nil {\n\t\t// Internal error\n\t\terrMsg := \"error while trying to find the event destination\"\n\t\tl.LogWithFields(ctx).Error(errMsg)\n\t\treturn common.GeneralError(http.StatusInternalServerError, response.InternalError, errMsg, nil, nil)\n\t}\n\t// we need common.MessageData to find the correct destination to send test event\n\tvar message common.MessageData\n\tmessage.Events = append(message.Events, *testEvent)\n\tmessageBytes, _ := json.Marshal(message)\n\teventUniqueID := uuid.NewV4().String()\n\tfor _, sub := range subscriptions {\n\t\tfor _, origin := range sub.EventDestination.OriginResources {\n\t\t\tif sub.EventDestination.Destination != \"\" {\n\t\t\t\tsubscription := *sub.EventDestination\n\t\t\t\tsubscription.ID = sub.SubscriptionID\n\n\t\t\t\tif filterEventsToBeForwarded(ctx, subscription, message.Events[0], []model.Link{{Oid: origin.Oid}}) {\n\t\t\t\t\tl.LogWithFields(ctx).Info(\"Destination: \" + sub.EventDestination.Destination)\n\t\t\t\t\tgo 
e.postEvent(evmodel.EventPost{Destination: sub.EventDestination.Destination, EventID: eventUniqueID,\n\t\t\t\t\t\tMessage: messageBytes})\n\t\t\t\t}\n\t\t\t}\n\t\t}\n\t}\n\n\tresp.StatusCode = http.StatusOK\n\tresp.StatusMessage = response.Success\n\tresp.Body = response.ErrorClass{\n\t\tCode: resp.StatusMessage,\n\t\tMessage: \"Request completed successfully.\",\n\t}\n\treturn resp\n\n}", "func (o *Ticket) Upsert(ctx context.Context, exec boil.ContextExecutor, updateOnConflict bool, conflictColumns []string, updateColumns, insertColumns boil.Columns) error {\n\tif o == nil {\n\t\treturn errors.New(\"models: no tickets provided for upsert\")\n\t}\n\tif !boil.TimestampsAreSkipped(ctx) {\n\t\tcurrTime := time.Now().In(boil.GetLocation())\n\n\t\tif o.CreatedAt.IsZero() {\n\t\t\to.CreatedAt = currTime\n\t\t}\n\t}\n\n\tnzDefaults := queries.NonZeroDefaultSet(ticketColumnsWithDefault, o)\n\n\t// Build cache key in-line uglily - mysql vs psql problems\n\tbuf := strmangle.GetBuffer()\n\tif updateOnConflict {\n\t\tbuf.WriteByte('t')\n\t} else {\n\t\tbuf.WriteByte('f')\n\t}\n\tbuf.WriteByte('.')\n\tfor _, c := range conflictColumns {\n\t\tbuf.WriteString(c)\n\t}\n\tbuf.WriteByte('.')\n\tbuf.WriteString(strconv.Itoa(updateColumns.Kind))\n\tfor _, c := range updateColumns.Cols {\n\t\tbuf.WriteString(c)\n\t}\n\tbuf.WriteByte('.')\n\tbuf.WriteString(strconv.Itoa(insertColumns.Kind))\n\tfor _, c := range insertColumns.Cols {\n\t\tbuf.WriteString(c)\n\t}\n\tbuf.WriteByte('.')\n\tfor _, c := range nzDefaults {\n\t\tbuf.WriteString(c)\n\t}\n\tkey := buf.String()\n\tstrmangle.PutBuffer(buf)\n\n\tticketUpsertCacheMut.RLock()\n\tcache, cached := ticketUpsertCache[key]\n\tticketUpsertCacheMut.RUnlock()\n\n\tvar err error\n\n\tif !cached {\n\t\tinsert, ret := insertColumns.InsertColumnSet(\n\t\t\tticketAllColumns,\n\t\t\tticketColumnsWithDefault,\n\t\t\tticketColumnsWithoutDefault,\n\t\t\tnzDefaults,\n\t\t)\n\n\t\tupdate := updateColumns.UpdateColumnSet(\n\t\t\tticketAllColumns,\n\t\t\tticketPrimaryKeyColumns,\n\t\t)\n\n\t\tif updateOnConflict && len(update) == 0 {\n\t\t\treturn errors.New(\"models: unable to upsert tickets, could not build update column list\")\n\t\t}\n\n\t\tconflict := conflictColumns\n\t\tif len(conflict) == 0 {\n\t\t\tconflict = make([]string, len(ticketPrimaryKeyColumns))\n\t\t\tcopy(conflict, ticketPrimaryKeyColumns)\n\t\t}\n\t\tcache.query = buildUpsertQueryPostgres(dialect, \"\\\"tickets\\\"\", updateOnConflict, ret, update, conflict, insert)\n\n\t\tcache.valueMapping, err = queries.BindMapping(ticketType, ticketMapping, insert)\n\t\tif err != nil {\n\t\t\treturn err\n\t\t}\n\t\tif len(ret) != 0 {\n\t\t\tcache.retMapping, err = queries.BindMapping(ticketType, ticketMapping, ret)\n\t\t\tif err != nil {\n\t\t\t\treturn err\n\t\t\t}\n\t\t}\n\t}\n\n\tvalue := reflect.Indirect(reflect.ValueOf(o))\n\tvals := queries.ValuesFromMapping(value, cache.valueMapping)\n\tvar returns []interface{}\n\tif len(cache.retMapping) != 0 {\n\t\treturns = queries.PtrsFromMapping(value, cache.retMapping)\n\t}\n\n\tif boil.IsDebug(ctx) {\n\t\twriter := boil.DebugWriterFrom(ctx)\n\t\tfmt.Fprintln(writer, cache.query)\n\t\tfmt.Fprintln(writer, vals)\n\t}\n\tif len(cache.retMapping) != 0 {\n\t\terr = exec.QueryRowContext(ctx, cache.query, vals...).Scan(returns...)\n\t\tif errors.Is(err, sql.ErrNoRows) {\n\t\t\terr = nil // Postgres doesn't return anything when there's no update\n\t\t}\n\t} else {\n\t\t_, err = exec.ExecContext(ctx, cache.query, vals...)\n\t}\n\tif err != nil {\n\t\treturn errors.Wrap(err, \"models: 
unable to upsert tickets\")\n\t}\n\n\tif !cached {\n\t\tticketUpsertCacheMut.Lock()\n\t\tticketUpsertCache[key] = cache\n\t\tticketUpsertCacheMut.Unlock()\n\t}\n\n\treturn nil\n}", "func (o *Notification) Upsert(exec boil.Executor, updateOnConflict bool, conflictColumns []string, updateColumns, insertColumns boil.Columns) error {\n\tif o == nil {\n\t\treturn errors.New(\"models: no notification provided for upsert\")\n\t}\n\tcurrTime := time.Now().In(boil.GetLocation())\n\n\tif o.CreatedAt.IsZero() {\n\t\to.CreatedAt = currTime\n\t}\n\to.UpdatedAt = currTime\n\n\tif err := o.doBeforeUpsertHooks(exec); err != nil {\n\t\treturn err\n\t}\n\n\tnzDefaults := queries.NonZeroDefaultSet(notificationColumnsWithDefault, o)\n\n\t// Build cache key in-line uglily - mysql vs psql problems\n\tbuf := strmangle.GetBuffer()\n\tif updateOnConflict {\n\t\tbuf.WriteByte('t')\n\t} else {\n\t\tbuf.WriteByte('f')\n\t}\n\tbuf.WriteByte('.')\n\tfor _, c := range conflictColumns {\n\t\tbuf.WriteString(c)\n\t}\n\tbuf.WriteByte('.')\n\tbuf.WriteString(strconv.Itoa(updateColumns.Kind))\n\tfor _, c := range updateColumns.Cols {\n\t\tbuf.WriteString(c)\n\t}\n\tbuf.WriteByte('.')\n\tbuf.WriteString(strconv.Itoa(insertColumns.Kind))\n\tfor _, c := range insertColumns.Cols {\n\t\tbuf.WriteString(c)\n\t}\n\tbuf.WriteByte('.')\n\tfor _, c := range nzDefaults {\n\t\tbuf.WriteString(c)\n\t}\n\tkey := buf.String()\n\tstrmangle.PutBuffer(buf)\n\n\tnotificationUpsertCacheMut.RLock()\n\tcache, cached := notificationUpsertCache[key]\n\tnotificationUpsertCacheMut.RUnlock()\n\n\tvar err error\n\n\tif !cached {\n\t\tinsert, ret := insertColumns.InsertColumnSet(\n\t\t\tnotificationColumns,\n\t\t\tnotificationColumnsWithDefault,\n\t\t\tnotificationColumnsWithoutDefault,\n\t\t\tnzDefaults,\n\t\t)\n\t\tupdate := updateColumns.UpdateColumnSet(\n\t\t\tnotificationColumns,\n\t\t\tnotificationPrimaryKeyColumns,\n\t\t)\n\n\t\tif len(update) == 0 {\n\t\t\treturn errors.New(\"models: unable to upsert notification, could not build update column list\")\n\t\t}\n\n\t\tconflict := conflictColumns\n\t\tif len(conflict) == 0 {\n\t\t\tconflict = make([]string, len(notificationPrimaryKeyColumns))\n\t\t\tcopy(conflict, notificationPrimaryKeyColumns)\n\t\t}\n\t\tcache.query = buildUpsertQueryPostgres(dialect, \"\\\"notification\\\"\", updateOnConflict, ret, update, conflict, insert)\n\n\t\tcache.valueMapping, err = queries.BindMapping(notificationType, notificationMapping, insert)\n\t\tif err != nil {\n\t\t\treturn err\n\t\t}\n\t\tif len(ret) != 0 {\n\t\t\tcache.retMapping, err = queries.BindMapping(notificationType, notificationMapping, ret)\n\t\t\tif err != nil {\n\t\t\t\treturn err\n\t\t\t}\n\t\t}\n\t}\n\n\tvalue := reflect.Indirect(reflect.ValueOf(o))\n\tvals := queries.ValuesFromMapping(value, cache.valueMapping)\n\tvar returns []interface{}\n\tif len(cache.retMapping) != 0 {\n\t\treturns = queries.PtrsFromMapping(value, cache.retMapping)\n\t}\n\n\tif boil.DebugMode {\n\t\tfmt.Fprintln(boil.DebugWriter, cache.query)\n\t\tfmt.Fprintln(boil.DebugWriter, vals)\n\t}\n\n\tif len(cache.retMapping) != 0 {\n\t\terr = exec.QueryRow(cache.query, vals...).Scan(returns...)\n\t\tif err == sql.ErrNoRows {\n\t\t\terr = nil // Postgres doesn't return anything when there's no update\n\t\t}\n\t} else {\n\t\t_, err = exec.Exec(cache.query, vals...)\n\t}\n\tif err != nil {\n\t\treturn errors.Wrap(err, \"models: unable to upsert notification\")\n\t}\n\n\tif !cached {\n\t\tnotificationUpsertCacheMut.Lock()\n\t\tnotificationUpsertCache[key] = 
cache\n\t\tnotificationUpsertCacheMut.Unlock()\n\t}\n\n\treturn o.doAfterUpsertHooks(exec)\n}", "func (m *UserUpdatePayload) Validate(formats strfmt.Registry) error {\n\treturn nil\n}", "func (req *UpsertRequest) Opts(opts UpsertOpts) *UpsertRequest {\n\treq.opts = opts\n\treturn req\n}", "func (ocHandler *OrderConstraintsOverridesHandler) Upsert(pair *model.TradingPair, override *model.OrderConstraintsOverride) {\n\texistingOverride, exists := ocHandler.overrides[pair.String()]\n\tif !exists {\n\t\tocHandler.overrides[pair.String()] = override\n\t\treturn\n\t}\n\n\texistingOverride.Augment(override)\n\tocHandler.overrides[pair.String()] = existingOverride\n}", "func (m *AddonUpdateRequest) Validate(formats strfmt.Registry) error {\n\tvar res []error\n\n\tif err := m.validateChargeType(formats); err != nil {\n\t\t// prop\n\t\tres = append(res, err)\n\t}\n\n\tif err := m.validatePeriodUnit(formats); err != nil {\n\t\t// prop\n\t\tres = append(res, err)\n\t}\n\n\tif err := m.validateType(formats); err != nil {\n\t\t// prop\n\t\tres = append(res, err)\n\t}\n\n\tif len(res) > 0 {\n\t\treturn errors.CompositeValidationError(res...)\n\t}\n\treturn nil\n}", "func (e VulnEvent) Validate() error {\n\tif e.SourceName == \"\" {\n\t\treturn fmt.Errorf(\"must set SourceName in event\")\n\t}\n\tif e.Asset.IPAddress == \"\" && e.Asset.Hostname == \"\" {\n\t\treturn fmt.Errorf(\"must set IPAddress or Hostname in event\")\n\t}\n\treturn nil\n}", "func (e UpsertEventRequest_SectionValidationError) Cause() error { return e.cause }", "func (o *Peer) Upsert(ctx context.Context, exec boil.ContextExecutor, updateColumns, insertColumns boil.Columns) error {\n\tif o == nil {\n\t\treturn errors.New(\"model: no peers provided for upsert\")\n\t}\n\tif !boil.TimestampsAreSkipped(ctx) {\n\t\tcurrTime := time.Now().In(boil.GetLocation())\n\n\t\tif queries.MustTime(o.CreatedAt).IsZero() {\n\t\t\tqueries.SetScanner(&o.CreatedAt, currTime)\n\t\t}\n\t\tqueries.SetScanner(&o.UpdatedAt, currTime)\n\t}\n\n\tif err := o.doBeforeUpsertHooks(ctx, exec); err != nil {\n\t\treturn err\n\t}\n\n\tnzDefaults := queries.NonZeroDefaultSet(peerColumnsWithDefault, o)\n\tnzUniques := queries.NonZeroDefaultSet(mySQLPeerUniqueColumns, o)\n\n\tif len(nzUniques) == 0 {\n\t\treturn errors.New(\"cannot upsert with a table that cannot conflict on a unique column\")\n\t}\n\n\t// Build cache key in-line uglily - mysql vs psql problems\n\tbuf := strmangle.GetBuffer()\n\tbuf.WriteString(strconv.Itoa(updateColumns.Kind))\n\tfor _, c := range updateColumns.Cols {\n\t\tbuf.WriteString(c)\n\t}\n\tbuf.WriteByte('.')\n\tbuf.WriteString(strconv.Itoa(insertColumns.Kind))\n\tfor _, c := range insertColumns.Cols {\n\t\tbuf.WriteString(c)\n\t}\n\tbuf.WriteByte('.')\n\tfor _, c := range nzDefaults {\n\t\tbuf.WriteString(c)\n\t}\n\tbuf.WriteByte('.')\n\tfor _, c := range nzUniques {\n\t\tbuf.WriteString(c)\n\t}\n\tkey := buf.String()\n\tstrmangle.PutBuffer(buf)\n\n\tpeerUpsertCacheMut.RLock()\n\tcache, cached := peerUpsertCache[key]\n\tpeerUpsertCacheMut.RUnlock()\n\n\tvar err error\n\n\tif !cached {\n\t\tinsert, ret := insertColumns.InsertColumnSet(\n\t\t\tpeerAllColumns,\n\t\t\tpeerColumnsWithDefault,\n\t\t\tpeerColumnsWithoutDefault,\n\t\t\tnzDefaults,\n\t\t)\n\t\tupdate := updateColumns.UpdateColumnSet(\n\t\t\tpeerAllColumns,\n\t\t\tpeerPrimaryKeyColumns,\n\t\t)\n\n\t\tif len(update) == 0 {\n\t\t\treturn errors.New(\"model: unable to upsert peers, could not build update column list\")\n\t\t}\n\n\t\tret = strmangle.SetComplement(ret, nzUniques)\n\t\tcache.query 
= buildUpsertQueryMySQL(dialect, \"peers\", update, insert)\n\t\tcache.retQuery = fmt.Sprintf(\n\t\t\t\"SELECT %s FROM `peers` WHERE %s\",\n\t\t\tstrings.Join(strmangle.IdentQuoteSlice(dialect.LQ, dialect.RQ, ret), \",\"),\n\t\t\tstrmangle.WhereClause(\"`\", \"`\", 0, nzUniques),\n\t\t)\n\n\t\tcache.valueMapping, err = queries.BindMapping(peerType, peerMapping, insert)\n\t\tif err != nil {\n\t\t\treturn err\n\t\t}\n\t\tif len(ret) != 0 {\n\t\t\tcache.retMapping, err = queries.BindMapping(peerType, peerMapping, ret)\n\t\t\tif err != nil {\n\t\t\t\treturn err\n\t\t\t}\n\t\t}\n\t}\n\n\tvalue := reflect.Indirect(reflect.ValueOf(o))\n\tvals := queries.ValuesFromMapping(value, cache.valueMapping)\n\tvar returns []interface{}\n\tif len(cache.retMapping) != 0 {\n\t\treturns = queries.PtrsFromMapping(value, cache.retMapping)\n\t}\n\n\tif boil.IsDebug(ctx) {\n\t\twriter := boil.DebugWriterFrom(ctx)\n\t\tfmt.Fprintln(writer, cache.query)\n\t\tfmt.Fprintln(writer, vals)\n\t}\n\tresult, err := exec.ExecContext(ctx, cache.query, vals...)\n\n\tif err != nil {\n\t\treturn errors.Wrap(err, \"model: unable to upsert for peers\")\n\t}\n\n\tvar lastID int64\n\tvar uniqueMap []uint64\n\tvar nzUniqueCols []interface{}\n\n\tif len(cache.retMapping) == 0 {\n\t\tgoto CacheNoHooks\n\t}\n\n\tlastID, err = result.LastInsertId()\n\tif err != nil {\n\t\treturn ErrSyncFail\n\t}\n\n\to.ID = uint(lastID)\n\tif lastID != 0 && len(cache.retMapping) == 1 && cache.retMapping[0] == peerMapping[\"id\"] {\n\t\tgoto CacheNoHooks\n\t}\n\n\tuniqueMap, err = queries.BindMapping(peerType, peerMapping, nzUniques)\n\tif err != nil {\n\t\treturn errors.Wrap(err, \"model: unable to retrieve unique values for peers\")\n\t}\n\tnzUniqueCols = queries.ValuesFromMapping(reflect.Indirect(reflect.ValueOf(o)), uniqueMap)\n\n\tif boil.IsDebug(ctx) {\n\t\twriter := boil.DebugWriterFrom(ctx)\n\t\tfmt.Fprintln(writer, cache.retQuery)\n\t\tfmt.Fprintln(writer, nzUniqueCols...)\n\t}\n\terr = exec.QueryRowContext(ctx, cache.retQuery, nzUniqueCols...).Scan(returns...)\n\tif err != nil {\n\t\treturn errors.Wrap(err, \"model: unable to populate default values for peers\")\n\t}\n\nCacheNoHooks:\n\tif !cached {\n\t\tpeerUpsertCacheMut.Lock()\n\t\tpeerUpsertCache[key] = cache\n\t\tpeerUpsertCacheMut.Unlock()\n\t}\n\n\treturn o.doAfterUpsertHooks(ctx, exec)\n}", "func (a AddEventRequest) Validate() error {\n\tif err := v2.Validate(a); err != nil {\n\t\treturn err\n\t}\n\n\t// BaseReading has the skip(\"-\") validation annotation for BinaryReading and SimpleReading\n\t// Otherwise error will occur as only one of them exists\n\t// Therefore, need to validate the nested BinaryReading and SimpleReading struct here\n\tfor _, r := range a.Event.Readings {\n\t\tif err := r.Validate(); err != nil {\n\t\t\treturn err\n\t\t}\n\t}\n\n\treturn nil\n}", "func (o *Offer) Upsert(exec boil.Executor, updateOnConflict bool, conflictColumns []string, updateColumns, insertColumns boil.Columns) error {\n\tif o == nil {\n\t\treturn errors.New(\"stellarcore: no offers provided for upsert\")\n\t}\n\n\tif err := o.doBeforeUpsertHooks(exec); err != nil {\n\t\treturn err\n\t}\n\n\tnzDefaults := queries.NonZeroDefaultSet(offerColumnsWithDefault, o)\n\n\t// Build cache key in-line uglily - mysql vs psql problems\n\tbuf := strmangle.GetBuffer()\n\tif updateOnConflict {\n\t\tbuf.WriteByte('t')\n\t} else {\n\t\tbuf.WriteByte('f')\n\t}\n\tbuf.WriteByte('.')\n\tfor _, c := range conflictColumns 
{\n\t\tbuf.WriteString(c)\n\t}\n\tbuf.WriteByte('.')\n\tbuf.WriteString(strconv.Itoa(updateColumns.Kind))\n\tfor _, c := range updateColumns.Cols {\n\t\tbuf.WriteString(c)\n\t}\n\tbuf.WriteByte('.')\n\tbuf.WriteString(strconv.Itoa(insertColumns.Kind))\n\tfor _, c := range insertColumns.Cols {\n\t\tbuf.WriteString(c)\n\t}\n\tbuf.WriteByte('.')\n\tfor _, c := range nzDefaults {\n\t\tbuf.WriteString(c)\n\t}\n\tkey := buf.String()\n\tstrmangle.PutBuffer(buf)\n\n\tofferUpsertCacheMut.RLock()\n\tcache, cached := offerUpsertCache[key]\n\tofferUpsertCacheMut.RUnlock()\n\n\tvar err error\n\n\tif !cached {\n\t\tinsert, ret := insertColumns.InsertColumnSet(\n\t\t\tofferColumns,\n\t\t\tofferColumnsWithDefault,\n\t\t\tofferColumnsWithoutDefault,\n\t\t\tnzDefaults,\n\t\t)\n\t\tupdate := updateColumns.UpdateColumnSet(\n\t\t\tofferColumns,\n\t\t\tofferPrimaryKeyColumns,\n\t\t)\n\n\t\tif len(update) == 0 {\n\t\t\treturn errors.New(\"stellarcore: unable to upsert offers, could not build update column list\")\n\t\t}\n\n\t\tconflict := conflictColumns\n\t\tif len(conflict) == 0 {\n\t\t\tconflict = make([]string, len(offerPrimaryKeyColumns))\n\t\t\tcopy(conflict, offerPrimaryKeyColumns)\n\t\t}\n\t\tcache.query = buildUpsertQueryPostgres(dialect, \"\\\"offers\\\"\", updateOnConflict, ret, update, conflict, insert)\n\n\t\tcache.valueMapping, err = queries.BindMapping(offerType, offerMapping, insert)\n\t\tif err != nil {\n\t\t\treturn err\n\t\t}\n\t\tif len(ret) != 0 {\n\t\t\tcache.retMapping, err = queries.BindMapping(offerType, offerMapping, ret)\n\t\t\tif err != nil {\n\t\t\t\treturn err\n\t\t\t}\n\t\t}\n\t}\n\n\tvalue := reflect.Indirect(reflect.ValueOf(o))\n\tvals := queries.ValuesFromMapping(value, cache.valueMapping)\n\tvar returns []interface{}\n\tif len(cache.retMapping) != 0 {\n\t\treturns = queries.PtrsFromMapping(value, cache.retMapping)\n\t}\n\n\tif boil.DebugMode {\n\t\tfmt.Fprintln(boil.DebugWriter, cache.query)\n\t\tfmt.Fprintln(boil.DebugWriter, vals)\n\t}\n\n\tif len(cache.retMapping) != 0 {\n\t\terr = exec.QueryRow(cache.query, vals...).Scan(returns...)\n\t\tif err == sql.ErrNoRows {\n\t\t\terr = nil // Postgres doesn't return anything when there's no update\n\t\t}\n\t} else {\n\t\t_, err = exec.Exec(cache.query, vals...)\n\t}\n\tif err != nil {\n\t\treturn errors.Wrap(err, \"stellarcore: unable to upsert offers\")\n\t}\n\n\tif !cached {\n\t\tofferUpsertCacheMut.Lock()\n\t\tofferUpsertCache[key] = cache\n\t\tofferUpsertCacheMut.Unlock()\n\t}\n\n\treturn o.doAfterUpsertHooks(exec)\n}", "func (fc *FakeCollection) Upsert(col string, value interface{}, opts *gocb.UpsertOptions) (*gocb.MutationResult, error) {\n\tif fc.Force == \"error\" {\n\t\treturn &gocb.MutationResult{}, errors.New(\"Forced collection upsert error\")\n\t}\n\treturn &gocb.MutationResult{}, nil\n}", "func (m *RuleConfigManager) Upsert(key string, r *RuleConfig, opts ...RequestOption) (err error) {\n\treturn m.Request(\"PUT\", m.URI(\"rules-configs\", key), r, opts...)\n}", "func EncodeUnsealRequest(_ context.Context, request interface{}) (interface{}, error) {\n\treq := request.(endpoints.UnsealRequest)\n\treturn &pb.UnsealRequest{\n\t\tKey: req.Key,\n\t\tReset_: req.Reset,\n\t}, nil\n}", "func (ut *OwnerInputPayload) Validate() (err error) {\n\tif ut.Email != nil {\n\t\tif err2 := goa.ValidateFormat(goa.FormatEmail, *ut.Email); err2 != nil {\n\t\t\terr = goa.MergeErrors(err, goa.InvalidFormatError(`response.email`, *ut.Email, goa.FormatEmail, err2))\n\t\t}\n\t}\n\treturn\n}", "func (ut *ownerInputPayload) Validate() (err error) 
{\n\tif ut.Email != nil {\n\t\tif err2 := goa.ValidateFormat(goa.FormatEmail, *ut.Email); err2 != nil {\n\t\t\terr = goa.MergeErrors(err, goa.InvalidFormatError(`response.email`, *ut.Email, goa.FormatEmail, err2))\n\t\t}\n\t}\n\treturn\n}", "func (itemHandle) Upsert(c *app.Context) error {\n\tvar it item.Item\n\tif err := json.NewDecoder(c.Request.Body).Decode(&it); err != nil {\n\t\treturn err\n\t}\n\n\tif err := item.Upsert(c.SessionID, c.Ctx[\"DB\"].(*db.DB), &it); err != nil {\n\t\treturn err\n\t}\n\n\tc.Respond(nil, http.StatusNoContent)\n\treturn nil\n}", "func (o *PatchRetryEventUsingPATCHParams) WriteToRequest(r runtime.ClientRequest, reg strfmt.Registry) error {\n\n\tif err := r.SetTimeout(o.timeout); err != nil {\n\t\treturn err\n\t}\n\tvar res []error\n\n\tif o.ErrorCause != nil {\n\t\tif err := r.SetBodyParam(o.ErrorCause); err != nil {\n\t\t\treturn err\n\t\t}\n\t}\n\n\t// path param eventId\n\tif err := r.SetPathParam(\"eventId\", o.EventID); err != nil {\n\t\treturn err\n\t}\n\n\tif len(res) > 0 {\n\t\treturn errors.CompositeValidationError(res...)\n\t}\n\treturn nil\n}", "func (o *Subscriber) Upsert(ctx context.Context, exec boil.ContextExecutor, updateColumns, insertColumns boil.Columns) error {\n\tif o == nil {\n\t\treturn errors.New(\"models: no subscribers provided for upsert\")\n\t}\n\tcurrTime := time.Now().In(boil.GetLocation())\n\n\tif o.CreatedAt.IsZero() {\n\t\to.CreatedAt = currTime\n\t}\n\to.UpdatedAt = currTime\n\n\tif err := o.doBeforeUpsertHooks(ctx, exec); err != nil {\n\t\treturn err\n\t}\n\n\tnzDefaults := queries.NonZeroDefaultSet(subscriberColumnsWithDefault, o)\n\tnzUniques := queries.NonZeroDefaultSet(mySQLSubscriberUniqueColumns, o)\n\n\tif len(nzUniques) == 0 {\n\t\treturn errors.New(\"cannot upsert with a table that cannot conflict on a unique column\")\n\t}\n\n\t// Build cache key in-line uglily - mysql vs psql problems\n\tbuf := strmangle.GetBuffer()\n\tbuf.WriteString(strconv.Itoa(updateColumns.Kind))\n\tfor _, c := range updateColumns.Cols {\n\t\tbuf.WriteString(c)\n\t}\n\tbuf.WriteByte('.')\n\tbuf.WriteString(strconv.Itoa(insertColumns.Kind))\n\tfor _, c := range insertColumns.Cols {\n\t\tbuf.WriteString(c)\n\t}\n\tbuf.WriteByte('.')\n\tfor _, c := range nzDefaults {\n\t\tbuf.WriteString(c)\n\t}\n\tbuf.WriteByte('.')\n\tfor _, c := range nzUniques {\n\t\tbuf.WriteString(c)\n\t}\n\tkey := buf.String()\n\tstrmangle.PutBuffer(buf)\n\n\tsubscriberUpsertCacheMut.RLock()\n\tcache, cached := subscriberUpsertCache[key]\n\tsubscriberUpsertCacheMut.RUnlock()\n\n\tvar err error\n\n\tif !cached {\n\t\tinsert, ret := insertColumns.InsertColumnSet(\n\t\t\tsubscriberColumns,\n\t\t\tsubscriberColumnsWithDefault,\n\t\t\tsubscriberColumnsWithoutDefault,\n\t\t\tnzDefaults,\n\t\t)\n\t\tupdate := updateColumns.UpdateColumnSet(\n\t\t\tsubscriberColumns,\n\t\t\tsubscriberPrimaryKeyColumns,\n\t\t)\n\n\t\tif len(update) == 0 {\n\t\t\treturn errors.New(\"models: unable to upsert subscribers, could not build update column list\")\n\t\t}\n\n\t\tret = strmangle.SetComplement(ret, nzUniques)\n\t\tcache.query = buildUpsertQueryMySQL(dialect, \"subscribers\", update, insert)\n\t\tcache.retQuery = fmt.Sprintf(\n\t\t\t\"SELECT %s FROM `subscribers` WHERE %s\",\n\t\t\tstrings.Join(strmangle.IdentQuoteSlice(dialect.LQ, dialect.RQ, ret), \",\"),\n\t\t\tstrmangle.WhereClause(\"`\", \"`\", 0, nzUniques),\n\t\t)\n\n\t\tcache.valueMapping, err = queries.BindMapping(subscriberType, subscriberMapping, insert)\n\t\tif err != nil {\n\t\t\treturn err\n\t\t}\n\t\tif len(ret) != 0 
{\n\t\t\tcache.retMapping, err = queries.BindMapping(subscriberType, subscriberMapping, ret)\n\t\t\tif err != nil {\n\t\t\t\treturn err\n\t\t\t}\n\t\t}\n\t}\n\n\tvalue := reflect.Indirect(reflect.ValueOf(o))\n\tvals := queries.ValuesFromMapping(value, cache.valueMapping)\n\tvar returns []interface{}\n\tif len(cache.retMapping) != 0 {\n\t\treturns = queries.PtrsFromMapping(value, cache.retMapping)\n\t}\n\n\tif boil.DebugMode {\n\t\tfmt.Fprintln(boil.DebugWriter, cache.query)\n\t\tfmt.Fprintln(boil.DebugWriter, vals)\n\t}\n\n\tresult, err := exec.ExecContext(ctx, cache.query, vals...)\n\n\tif err != nil {\n\t\treturn errors.Wrap(err, \"models: unable to upsert for subscribers\")\n\t}\n\n\tvar lastID int64\n\tvar uniqueMap []uint64\n\tvar nzUniqueCols []interface{}\n\n\tif len(cache.retMapping) == 0 {\n\t\tgoto CacheNoHooks\n\t}\n\n\tlastID, err = result.LastInsertId()\n\tif err != nil {\n\t\treturn ErrSyncFail\n\t}\n\n\to.ID = uint(lastID)\n\tif lastID != 0 && len(cache.retMapping) == 1 && cache.retMapping[0] == subscriberMapping[\"id\"] {\n\t\tgoto CacheNoHooks\n\t}\n\n\tuniqueMap, err = queries.BindMapping(subscriberType, subscriberMapping, nzUniques)\n\tif err != nil {\n\t\treturn errors.Wrap(err, \"models: unable to retrieve unique values for subscribers\")\n\t}\n\tnzUniqueCols = queries.ValuesFromMapping(reflect.Indirect(reflect.ValueOf(o)), uniqueMap)\n\n\tif boil.DebugMode {\n\t\tfmt.Fprintln(boil.DebugWriter, cache.retQuery)\n\t\tfmt.Fprintln(boil.DebugWriter, nzUniqueCols...)\n\t}\n\n\terr = exec.QueryRowContext(ctx, cache.retQuery, nzUniqueCols...).Scan(returns...)\n\tif err != nil {\n\t\treturn errors.Wrap(err, \"models: unable to populate default values for subscribers\")\n\t}\n\nCacheNoHooks:\n\tif !cached {\n\t\tsubscriberUpsertCacheMut.Lock()\n\t\tsubscriberUpsertCache[key] = cache\n\t\tsubscriberUpsertCacheMut.Unlock()\n\t}\n\n\treturn o.doAfterUpsertHooks(ctx, exec)\n}", "func (e *Event) Validate() error {\n\tif e.Message == \"\" || e.MessageOffset == \"\" || e.Time == nil || e.Type == \"\" {\n\t\treturn errs.ErrMissingParameters\n\t}\n\treturn nil\n}", "func EventSignupCreate(ctx *gin.Context) {\n\tvar user *model.User\n\tif userInterface, exists := ctx.Get(\"User\"); !exists {\n\t\tmisc.ReturnStandardError(ctx, http.StatusForbidden, \"you have to be a registered user to create signup record\")\n\t\treturn\n\t} else {\n\t\tuser = userInterface.(*model.User)\n\t}\n\teventSignup := &model.EventSignup{}\n\tif err := jsonapi.UnmarshalPayload(ctx.Request.Body, eventSignup); err != nil {\n\t\tmisc.ReturnStandardError(ctx, http.StatusBadRequest, \"cannot unmarshal JSON of request\")\n\t\treturn\n\t} else if eventSignup.Event == nil || eventSignup.Event.ID <= 0 {\n\t\tmisc.ReturnStandardError(ctx, http.StatusBadRequest, \"invalid event ID\")\n\t\treturn\n\t}\n\tdb := ctx.MustGet(\"DB\").(*gorm.DB)\n\tevent := model.Event{}\n\tif err := db.Where(eventSignup.Event).First(&event).Error; errors.Is(err, gorm.ErrRecordNotFound) {\n\t\tmisc.ReturnStandardError(ctx, http.StatusNotFound, \"specified event cannot be found\")\n\t\treturn\n\t} else if err != nil {\n\t\tmisc.ReturnStandardError(ctx, http.StatusInternalServerError, err.Error())\n\t\treturn\n\t}\n\tif *event.OrganizerID == user.ID {\n\t\tmisc.ReturnStandardError(ctx, http.StatusBadRequest, \"you cannot signup events organized by yourself\")\n\t\treturn\n\t}\n\teventSignup.EventID = &event.ID\n\teventSignup.Event = &event\n\teventSignup.UserID = &user.ID\n\teventSignup.User = user\n\tif err := db.Save(eventSignup).Error; err != 
nil {\n\t\tmisc.ReturnStandardError(ctx, http.StatusInternalServerError, err.Error())\n\t} else {\n\t\tctx.Status(http.StatusCreated)\n\t\tif err := jsonapi.MarshalPayload(ctx.Writer, eventSignup); err != nil {\n\t\t\tmisc.ReturnStandardError(ctx, http.StatusInternalServerError, err.Error())\n\t\t}\n\t}\n}", "func (e UpsertEventResponseValidationError) Reason() string { return e.reason }", "func (a AddEventRequest) Validate() error {\n\tif err := contracts.Validate(a); err != nil {\n\t\treturn err\n\t}\n\n\t// BaseReading has the skip(\"-\") validation annotation for BinaryReading and SimpleReading\n\t// Otherwise error will occur as only one of them exists\n\t// Therefore, need to validate the nested BinaryReading and SimpleReading struct here\n\tfor _, r := range a.Event.Readings {\n\t\tif err := r.Validate(); err != nil {\n\t\t\treturn err\n\t\t}\n\t}\n\n\treturn nil\n}", "func (e SetApplicationPubSubRequestValidationError) Cause() error { return e.cause }", "func marshalUpdateAttestorUpdateAttestorRequest(c *Client, m map[string]interface{}) ([]byte, error) {\n\n\treturn json.Marshal(m)\n}", "func marshalUpdateAttestorUpdateAttestorRequest(c *Client, m map[string]interface{}) ([]byte, error) {\n\n\treturn json.Marshal(m)\n}", "func (m *StreamEventsRequest) Validate() error {\n\treturn m.validate(false)\n}", "func (o *PeerProperty) Upsert(ctx context.Context, exec boil.ContextExecutor, updateOnConflict bool, conflictColumns []string, updateColumns, insertColumns boil.Columns) error {\n\tif o == nil {\n\t\treturn errors.New(\"models: no peer_properties provided for upsert\")\n\t}\n\tif !boil.TimestampsAreSkipped(ctx) {\n\t\tcurrTime := time.Now().In(boil.GetLocation())\n\n\t\tif o.CreatedAt.IsZero() {\n\t\t\to.CreatedAt = currTime\n\t\t}\n\t\to.UpdatedAt = currTime\n\t}\n\n\tif err := o.doBeforeUpsertHooks(ctx, exec); err != nil {\n\t\treturn err\n\t}\n\n\tnzDefaults := queries.NonZeroDefaultSet(peerPropertyColumnsWithDefault, o)\n\n\t// Build cache key in-line uglily - mysql vs psql problems\n\tbuf := strmangle.GetBuffer()\n\tif updateOnConflict {\n\t\tbuf.WriteByte('t')\n\t} else {\n\t\tbuf.WriteByte('f')\n\t}\n\tbuf.WriteByte('.')\n\tfor _, c := range conflictColumns {\n\t\tbuf.WriteString(c)\n\t}\n\tbuf.WriteByte('.')\n\tbuf.WriteString(strconv.Itoa(updateColumns.Kind))\n\tfor _, c := range updateColumns.Cols {\n\t\tbuf.WriteString(c)\n\t}\n\tbuf.WriteByte('.')\n\tbuf.WriteString(strconv.Itoa(insertColumns.Kind))\n\tfor _, c := range insertColumns.Cols {\n\t\tbuf.WriteString(c)\n\t}\n\tbuf.WriteByte('.')\n\tfor _, c := range nzDefaults {\n\t\tbuf.WriteString(c)\n\t}\n\tkey := buf.String()\n\tstrmangle.PutBuffer(buf)\n\n\tpeerPropertyUpsertCacheMut.RLock()\n\tcache, cached := peerPropertyUpsertCache[key]\n\tpeerPropertyUpsertCacheMut.RUnlock()\n\n\tvar err error\n\n\tif !cached {\n\t\tinsert, ret := insertColumns.InsertColumnSet(\n\t\t\tpeerPropertyAllColumns,\n\t\t\tpeerPropertyColumnsWithDefault,\n\t\t\tpeerPropertyColumnsWithoutDefault,\n\t\t\tnzDefaults,\n\t\t)\n\t\tupdate := updateColumns.UpdateColumnSet(\n\t\t\tpeerPropertyAllColumns,\n\t\t\tpeerPropertyPrimaryKeyColumns,\n\t\t)\n\n\t\tif updateOnConflict && len(update) == 0 {\n\t\t\treturn errors.New(\"models: unable to upsert peer_properties, could not build update column list\")\n\t\t}\n\n\t\tconflict := conflictColumns\n\t\tif len(conflict) == 0 {\n\t\t\tconflict = make([]string, len(peerPropertyPrimaryKeyColumns))\n\t\t\tcopy(conflict, peerPropertyPrimaryKeyColumns)\n\t\t}\n\t\tcache.query = 
buildUpsertQueryPostgres(dialect, \"\\\"peer_properties\\\"\", updateOnConflict, ret, update, conflict, insert)\n\n\t\tcache.valueMapping, err = queries.BindMapping(peerPropertyType, peerPropertyMapping, insert)\n\t\tif err != nil {\n\t\t\treturn err\n\t\t}\n\t\tif len(ret) != 0 {\n\t\t\tcache.retMapping, err = queries.BindMapping(peerPropertyType, peerPropertyMapping, ret)\n\t\t\tif err != nil {\n\t\t\t\treturn err\n\t\t\t}\n\t\t}\n\t}\n\n\tvalue := reflect.Indirect(reflect.ValueOf(o))\n\tvals := queries.ValuesFromMapping(value, cache.valueMapping)\n\tvar returns []interface{}\n\tif len(cache.retMapping) != 0 {\n\t\treturns = queries.PtrsFromMapping(value, cache.retMapping)\n\t}\n\n\tif boil.IsDebug(ctx) {\n\t\twriter := boil.DebugWriterFrom(ctx)\n\t\tfmt.Fprintln(writer, cache.query)\n\t\tfmt.Fprintln(writer, vals)\n\t}\n\tif len(cache.retMapping) != 0 {\n\t\terr = exec.QueryRowContext(ctx, cache.query, vals...).Scan(returns...)\n\t\tif err == sql.ErrNoRows {\n\t\t\terr = nil // Postgres doesn't return anything when there's no update\n\t\t}\n\t} else {\n\t\t_, err = exec.ExecContext(ctx, cache.query, vals...)\n\t}\n\tif err != nil {\n\t\treturn errors.Wrap(err, \"models: unable to upsert peer_properties\")\n\t}\n\n\tif !cached {\n\t\tpeerPropertyUpsertCacheMut.Lock()\n\t\tpeerPropertyUpsertCache[key] = cache\n\t\tpeerPropertyUpsertCacheMut.Unlock()\n\t}\n\n\treturn o.doAfterUpsertHooks(ctx, exec)\n}", "func (o *PostV1IncidentsIncidentIDRelatedChangeEventsParams) WriteToRequest(r runtime.ClientRequest, reg strfmt.Registry) error {\n\n\tif err := r.SetTimeout(o.timeout); err != nil {\n\t\treturn err\n\t}\n\tvar res []error\n\n\tif o.V1IncidentsIncidentIDRelatedChangeEvents != nil {\n\t\tif err := r.SetBodyParam(o.V1IncidentsIncidentIDRelatedChangeEvents); err != nil {\n\t\t\treturn err\n\t\t}\n\t}\n\n\t// path param incident_id\n\tif err := r.SetPathParam(\"incident_id\", o.IncidentID); err != nil {\n\t\treturn err\n\t}\n\n\tif len(res) > 0 {\n\t\treturn errors.CompositeValidationError(res...)\n\t}\n\treturn nil\n}", "func (r *DeviceManagementAutopilotEventRequest) Update(ctx context.Context, reqObj *DeviceManagementAutopilotEvent) error {\n\treturn r.JSONRequest(ctx, \"PATCH\", \"\", reqObj, nil)\n}", "func (service *EntriesService) Upsert(spaceID string, entry *Entry) error {\n\tfieldsOnly := map[string]interface{}{\n\t\t\"fields\": entry.Fields,\n\t}\n\n\tbytesArray, err := json.Marshal(fieldsOnly)\n\tif err != nil {\n\t\treturn err\n\t}\n\n\t// Creating/updating an entry requires a content type to be provided\n\tif entry.Sys.ContentType == nil {\n\t\treturn fmt.Errorf(\"creating/updating an entry requires a content type\")\n\t}\n\n\tvar path string\n\tvar method string\n\n\tif entry.Sys != nil && entry.Sys.ID != \"\" {\n\t\tpath = fmt.Sprintf(\"/spaces/%s%s/entries/%s\", spaceID, getEnvPath(service.c), entry.Sys.ID)\n\t\tmethod = \"PUT\"\n\t} else {\n\t\tpath = fmt.Sprintf(\"/spaces/%s%s/entries\", spaceID, getEnvPath(service.c))\n\t\tmethod = \"POST\"\n\t}\n\n\treq, err := service.c.newRequest(method, path, nil, bytes.NewReader(bytesArray))\n\tif err != nil {\n\t\treturn err\n\t}\n\n\treq.Header.Set(\"X-Contentful-Version\", strconv.Itoa(entry.GetVersion()))\n\treq.Header.Set(\"X-Contentful-Content-Type\", entry.Sys.ContentType.Sys.ID)\n\n\treturn service.c.do(req, entry)\n}", "func (o *Latency) Upsert(ctx context.Context, exec boil.ContextExecutor, updateOnConflict bool, conflictColumns []string, updateColumns, insertColumns boil.Columns) error {\n\tif o == nil {\n\t\treturn 
errors.New(\"models: no latencies provided for upsert\")\n\t}\n\tif !boil.TimestampsAreSkipped(ctx) {\n\t\tcurrTime := time.Now().In(boil.GetLocation())\n\n\t\to.UpdatedAt = currTime\n\t\tif o.CreatedAt.IsZero() {\n\t\t\to.CreatedAt = currTime\n\t\t}\n\t}\n\n\tif err := o.doBeforeUpsertHooks(ctx, exec); err != nil {\n\t\treturn err\n\t}\n\n\tnzDefaults := queries.NonZeroDefaultSet(latencyColumnsWithDefault, o)\n\n\t// Build cache key in-line uglily - mysql vs psql problems\n\tbuf := strmangle.GetBuffer()\n\tif updateOnConflict {\n\t\tbuf.WriteByte('t')\n\t} else {\n\t\tbuf.WriteByte('f')\n\t}\n\tbuf.WriteByte('.')\n\tfor _, c := range conflictColumns {\n\t\tbuf.WriteString(c)\n\t}\n\tbuf.WriteByte('.')\n\tbuf.WriteString(strconv.Itoa(updateColumns.Kind))\n\tfor _, c := range updateColumns.Cols {\n\t\tbuf.WriteString(c)\n\t}\n\tbuf.WriteByte('.')\n\tbuf.WriteString(strconv.Itoa(insertColumns.Kind))\n\tfor _, c := range insertColumns.Cols {\n\t\tbuf.WriteString(c)\n\t}\n\tbuf.WriteByte('.')\n\tfor _, c := range nzDefaults {\n\t\tbuf.WriteString(c)\n\t}\n\tkey := buf.String()\n\tstrmangle.PutBuffer(buf)\n\n\tlatencyUpsertCacheMut.RLock()\n\tcache, cached := latencyUpsertCache[key]\n\tlatencyUpsertCacheMut.RUnlock()\n\n\tvar err error\n\n\tif !cached {\n\t\tinsert, ret := insertColumns.InsertColumnSet(\n\t\t\tlatencyAllColumns,\n\t\t\tlatencyColumnsWithDefault,\n\t\t\tlatencyColumnsWithoutDefault,\n\t\t\tnzDefaults,\n\t\t)\n\t\tupdate := updateColumns.UpdateColumnSet(\n\t\t\tlatencyAllColumns,\n\t\t\tlatencyPrimaryKeyColumns,\n\t\t)\n\n\t\tif updateOnConflict && len(update) == 0 {\n\t\t\treturn errors.New(\"models: unable to upsert latencies, could not build update column list\")\n\t\t}\n\n\t\tconflict := conflictColumns\n\t\tif len(conflict) == 0 {\n\t\t\tconflict = make([]string, len(latencyPrimaryKeyColumns))\n\t\t\tcopy(conflict, latencyPrimaryKeyColumns)\n\t\t}\n\t\tcache.query = buildUpsertQueryPostgres(dialect, \"\\\"latencies\\\"\", updateOnConflict, ret, update, conflict, insert)\n\n\t\tcache.valueMapping, err = queries.BindMapping(latencyType, latencyMapping, insert)\n\t\tif err != nil {\n\t\t\treturn err\n\t\t}\n\t\tif len(ret) != 0 {\n\t\t\tcache.retMapping, err = queries.BindMapping(latencyType, latencyMapping, ret)\n\t\t\tif err != nil {\n\t\t\t\treturn err\n\t\t\t}\n\t\t}\n\t}\n\n\tvalue := reflect.Indirect(reflect.ValueOf(o))\n\tvals := queries.ValuesFromMapping(value, cache.valueMapping)\n\tvar returns []interface{}\n\tif len(cache.retMapping) != 0 {\n\t\treturns = queries.PtrsFromMapping(value, cache.retMapping)\n\t}\n\n\tif boil.IsDebug(ctx) {\n\t\twriter := boil.DebugWriterFrom(ctx)\n\t\tfmt.Fprintln(writer, cache.query)\n\t\tfmt.Fprintln(writer, vals)\n\t}\n\tif len(cache.retMapping) != 0 {\n\t\terr = exec.QueryRowContext(ctx, cache.query, vals...).Scan(returns...)\n\t\tif err == sql.ErrNoRows {\n\t\t\terr = nil // Postgres doesn't return anything when there's no update\n\t\t}\n\t} else {\n\t\t_, err = exec.ExecContext(ctx, cache.query, vals...)\n\t}\n\tif err != nil {\n\t\treturn errors.Wrap(err, \"models: unable to upsert latencies\")\n\t}\n\n\tif !cached {\n\t\tlatencyUpsertCacheMut.Lock()\n\t\tlatencyUpsertCache[key] = cache\n\t\tlatencyUpsertCacheMut.Unlock()\n\t}\n\n\treturn o.doAfterUpsertHooks(ctx, exec)\n}", "func (e UpsertEventRequestValidationError) Key() bool { return e.key }", "func (e UpdateMeetingV1RequestValidationError) Cause() error { return e.cause }", "func (v UpdateReputationProfileActionRequest) Validate() error {\n\treturn 
validation.Errors{\n\t\t\"ConfigID\": validation.Validate(v.ConfigID, validation.Required),\n\t\t\"Version\": validation.Validate(v.Version, validation.Required),\n\t\t\"PolicyID\": validation.Validate(v.PolicyID, validation.Required),\n\t\t\"ReputationProfileID\": validation.Validate(v.ReputationProfileID, validation.Required),\n\t}.Filter()\n}", "func (o *Transaction) Upsert(exec boil.Executor, updateColumns []string, whitelist ...string) error {\n\tif o == nil {\n\t\treturn errors.New(\"models: no transactions provided for upsert\")\n\t}\n\tcurrTime := time.Now().In(boil.GetLocation())\n\n\to.UpdatedAt.Time = currTime\n\to.UpdatedAt.Valid = true\n\n\tnzDefaults := queries.NonZeroDefaultSet(transactionColumnsWithDefault, o)\n\n\t// Build cache key in-line uglily - mysql vs postgres problems\n\tbuf := strmangle.GetBuffer()\n\tfor _, c := range updateColumns {\n\t\tbuf.WriteString(c)\n\t}\n\tbuf.WriteByte('.')\n\tfor _, c := range whitelist {\n\t\tbuf.WriteString(c)\n\t}\n\tbuf.WriteByte('.')\n\tfor _, c := range nzDefaults {\n\t\tbuf.WriteString(c)\n\t}\n\tkey := buf.String()\n\tstrmangle.PutBuffer(buf)\n\n\ttransactionUpsertCacheMut.RLock()\n\tcache, cached := transactionUpsertCache[key]\n\ttransactionUpsertCacheMut.RUnlock()\n\n\tvar err error\n\n\tif !cached {\n\t\tinsert, ret := strmangle.InsertColumnSet(\n\t\t\ttransactionColumns,\n\t\t\ttransactionColumnsWithDefault,\n\t\t\ttransactionColumnsWithoutDefault,\n\t\t\tnzDefaults,\n\t\t\twhitelist,\n\t\t)\n\n\t\tupdate := strmangle.UpdateColumnSet(\n\t\t\ttransactionColumns,\n\t\t\ttransactionPrimaryKeyColumns,\n\t\t\tupdateColumns,\n\t\t)\n\t\tif len(update) == 0 {\n\t\t\treturn errors.New(\"models: unable to upsert transactions, could not build update column list\")\n\t\t}\n\n\t\tcache.query = queries.BuildUpsertQueryMySQL(dialect, \"transactions\", update, insert)\n\t\tcache.retQuery = fmt.Sprintf(\n\t\t\t\"SELECT %s FROM `transactions` WHERE `transaction_id`=?\",\n\t\t\tstrings.Join(strmangle.IdentQuoteSlice(dialect.LQ, dialect.RQ, ret), \",\"),\n\t\t)\n\n\t\tcache.valueMapping, err = queries.BindMapping(transactionType, transactionMapping, insert)\n\t\tif err != nil {\n\t\t\treturn err\n\t\t}\n\t\tif len(ret) != 0 {\n\t\t\tcache.retMapping, err = queries.BindMapping(transactionType, transactionMapping, ret)\n\t\t\tif err != nil {\n\t\t\t\treturn err\n\t\t\t}\n\t\t}\n\t}\n\n\tvalue := reflect.Indirect(reflect.ValueOf(o))\n\tvals := queries.ValuesFromMapping(value, cache.valueMapping)\n\tvar returns []interface{}\n\tif len(cache.retMapping) != 0 {\n\t\treturns = queries.PtrsFromMapping(value, cache.retMapping)\n\t}\n\n\tif boil.DebugMode {\n\t\tfmt.Fprintln(boil.DebugWriter, cache.query)\n\t\tfmt.Fprintln(boil.DebugWriter, vals)\n\t}\n\n\tresult, err := exec.Exec(cache.query, vals...)\n\n\tif err != nil {\n\t\treturn errors.Wrap(err, \"models: unable to upsert for transactions\")\n\t}\n\n\tvar lastID int64\n\tvar identifierCols []interface{}\n\n\tif len(cache.retMapping) == 0 {\n\t\tgoto CacheNoHooks\n\t}\n\n\tlastID, err = result.LastInsertId()\n\tif err != nil {\n\t\treturn ErrSyncFail\n\t}\n\n\to.TransactionID = int(lastID)\n\tif lastID != 0 && len(cache.retMapping) == 1 && cache.retMapping[0] == transactionMapping[\"TransactionID\"] {\n\t\tgoto CacheNoHooks\n\t}\n\n\tidentifierCols = []interface{}{\n\t\to.TransactionID,\n\t}\n\n\tif boil.DebugMode {\n\t\tfmt.Fprintln(boil.DebugWriter, cache.retQuery)\n\t\tfmt.Fprintln(boil.DebugWriter, identifierCols...)\n\t}\n\n\terr = exec.QueryRow(cache.retQuery, 
identifierCols...).Scan(returns...)\n\tif err != nil {\n\t\treturn errors.Wrap(err, \"models: unable to populate default values for transactions\")\n\t}\n\nCacheNoHooks:\n\tif !cached {\n\t\ttransactionUpsertCacheMut.Lock()\n\t\ttransactionUpsertCache[key] = cache\n\t\ttransactionUpsertCacheMut.Unlock()\n\t}\n\n\treturn nil\n}", "func (m *SecurityPolicyUpdateParams) Validate(formats strfmt.Registry) error {\n\tvar res []error\n\n\tif err := m.validateApplyTo(formats); err != nil {\n\t\tres = append(res, err)\n\t}\n\n\tif err := m.validateEgress(formats); err != nil {\n\t\tres = append(res, err)\n\t}\n\n\tif err := m.validateIngress(formats); err != nil {\n\t\tres = append(res, err)\n\t}\n\n\tif err := m.validatePolicyMode(formats); err != nil {\n\t\tres = append(res, err)\n\t}\n\n\tif len(res) > 0 {\n\t\treturn errors.CompositeValidationError(res...)\n\t}\n\treturn nil\n}", "func (repository *GormRepository) Upsert(uow *UnitOfWork, entity interface{}, queryProcessors []QueryProcessor) microappError.DatabaseError {\n\tdb := uow.DB\n\tif queryProcessors != nil {\n\t\tvar err error\n\t\tfor _, queryProcessor := range queryProcessors {\n\t\t\tdb, err = queryProcessor(db, entity)\n\t\t\tif err != nil {\n\t\t\t\treturn microappError.NewDatabaseError(err)\n\t\t\t}\n\t\t}\n\t}\n\tresult := db.Model(entity).Updates(entity)\n\tif result.Error != nil {\n\t\treturn microappError.NewDatabaseError(result.Error)\n\t}\n\n\tif result.RowsAffected == 0 {\n\t\tif err := uow.DB.Create(entity).Error; err != nil {\n\t\t\treturn microappError.NewDatabaseError(err)\n\t\t}\n\t}\n\n\treturn nil\n}", "func (o *Inventory) Upsert(exec boil.Executor, updateColumns []string, whitelist ...string) error {\n\tif o == nil {\n\t\treturn errors.New(\"models: no inventory provided for upsert\")\n\t}\n\tcurrTime := time.Now().In(boil.GetLocation())\n\n\tif o.CreatedAt.IsZero() {\n\t\to.CreatedAt = currTime\n\t}\n\to.UpdatedAt = currTime\n\n\tif err := o.doBeforeUpsertHooks(exec); err != nil {\n\t\treturn err\n\t}\n\n\tnzDefaults := queries.NonZeroDefaultSet(inventoryColumnsWithDefault, o)\n\n\t// Build cache key in-line uglily - mysql vs postgres problems\n\tbuf := strmangle.GetBuffer()\n\tfor _, c := range updateColumns {\n\t\tbuf.WriteString(c)\n\t}\n\tbuf.WriteByte('.')\n\tfor _, c := range whitelist {\n\t\tbuf.WriteString(c)\n\t}\n\tbuf.WriteByte('.')\n\tfor _, c := range nzDefaults {\n\t\tbuf.WriteString(c)\n\t}\n\tkey := buf.String()\n\tstrmangle.PutBuffer(buf)\n\n\tinventoryUpsertCacheMut.RLock()\n\tcache, cached := inventoryUpsertCache[key]\n\tinventoryUpsertCacheMut.RUnlock()\n\n\tvar err error\n\n\tif !cached {\n\t\tinsert, ret := strmangle.InsertColumnSet(\n\t\t\tinventoryColumns,\n\t\t\tinventoryColumnsWithDefault,\n\t\t\tinventoryColumnsWithoutDefault,\n\t\t\tnzDefaults,\n\t\t\twhitelist,\n\t\t)\n\n\t\tupdate := strmangle.UpdateColumnSet(\n\t\t\tinventoryColumns,\n\t\t\tinventoryPrimaryKeyColumns,\n\t\t\tupdateColumns,\n\t\t)\n\t\tif len(update) == 0 {\n\t\t\treturn errors.New(\"models: unable to upsert inventory, could not build update column list\")\n\t\t}\n\n\t\tcache.query = queries.BuildUpsertQueryMySQL(dialect, \"inventory\", update, insert)\n\t\tcache.retQuery = fmt.Sprintf(\n\t\t\t\"SELECT %s FROM `inventory` WHERE `id`=?\",\n\t\t\tstrings.Join(strmangle.IdentQuoteSlice(dialect.LQ, dialect.RQ, ret), \",\"),\n\t\t)\n\n\t\tcache.valueMapping, err = queries.BindMapping(inventoryType, inventoryMapping, insert)\n\t\tif err != nil {\n\t\t\treturn err\n\t\t}\n\t\tif len(ret) != 0 {\n\t\t\tcache.retMapping, err = 
queries.BindMapping(inventoryType, inventoryMapping, ret)\n\t\t\tif err != nil {\n\t\t\t\treturn err\n\t\t\t}\n\t\t}\n\t}\n\n\tvalue := reflect.Indirect(reflect.ValueOf(o))\n\tvals := queries.ValuesFromMapping(value, cache.valueMapping)\n\tvar returns []interface{}\n\tif len(cache.retMapping) != 0 {\n\t\treturns = queries.PtrsFromMapping(value, cache.retMapping)\n\t}\n\n\tif boil.DebugMode {\n\t\tfmt.Fprintln(boil.DebugWriter, cache.query)\n\t\tfmt.Fprintln(boil.DebugWriter, vals)\n\t}\n\n\tresult, err := exec.Exec(cache.query, vals...)\n\n\tif err != nil {\n\t\treturn errors.Wrap(err, \"models: unable to upsert for inventory\")\n\t}\n\n\tvar lastID int64\n\tvar identifierCols []interface{}\n\n\tif len(cache.retMapping) == 0 {\n\t\tgoto CacheNoHooks\n\t}\n\n\tlastID, err = result.LastInsertId()\n\tif err != nil {\n\t\treturn ErrSyncFail\n\t}\n\n\to.ID = int64(lastID)\n\tif lastID != 0 && len(cache.retMapping) == 1 && cache.retMapping[0] == inventoryMapping[\"ID\"] {\n\t\tgoto CacheNoHooks\n\t}\n\n\tidentifierCols = []interface{}{\n\t\to.ID,\n\t}\n\n\tif boil.DebugMode {\n\t\tfmt.Fprintln(boil.DebugWriter, cache.retQuery)\n\t\tfmt.Fprintln(boil.DebugWriter, identifierCols...)\n\t}\n\n\terr = exec.QueryRow(cache.retQuery, identifierCols...).Scan(returns...)\n\tif err != nil {\n\t\treturn errors.Wrap(err, \"models: unable to populate default values for inventory\")\n\t}\n\nCacheNoHooks:\n\tif !cached {\n\t\tinventoryUpsertCacheMut.Lock()\n\t\tinventoryUpsertCache[key] = cache\n\t\tinventoryUpsertCacheMut.Unlock()\n\t}\n\n\treturn o.doAfterUpsertHooks(exec)\n}", "func (e *Event) Update(c echo.Context, req *Update) (*takrib.Event, error) {\n\t// if err := e.rbac.EnforceEvent(c, req.ID); err != nil {\n\t// \treturn nil, err\n\t// }\n\n\tevent, err := e.udb.View(e.db, req.ID)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\n\tstructs.Merge(event, req)\n\tif err := e.udb.Update(e.db, event); err != nil {\n\t\treturn nil, err\n\t}\n\n\treturn event, nil\n}", "func (o *ScheduleSubject) Upsert(ctx context.Context, exec boil.ContextExecutor, updateOnConflict bool, conflictColumns []string, updateColumns, insertColumns boil.Columns) error {\n\tif o == nil {\n\t\treturn errors.New(\"models: no schedule_subject provided for upsert\")\n\t}\n\tif !boil.TimestampsAreSkipped(ctx) {\n\t\tcurrTime := time.Now().In(boil.GetLocation())\n\n\t\tif o.CreatedAt.IsZero() {\n\t\t\to.CreatedAt = currTime\n\t\t}\n\t}\n\n\tif err := o.doBeforeUpsertHooks(ctx, exec); err != nil {\n\t\treturn err\n\t}\n\n\tnzDefaults := queries.NonZeroDefaultSet(scheduleSubjectColumnsWithDefault, o)\n\n\t// Build cache key in-line uglily - mysql vs psql problems\n\tbuf := strmangle.GetBuffer()\n\tif updateOnConflict {\n\t\tbuf.WriteByte('t')\n\t} else {\n\t\tbuf.WriteByte('f')\n\t}\n\tbuf.WriteByte('.')\n\tfor _, c := range conflictColumns {\n\t\tbuf.WriteString(c)\n\t}\n\tbuf.WriteByte('.')\n\tbuf.WriteString(strconv.Itoa(updateColumns.Kind))\n\tfor _, c := range updateColumns.Cols {\n\t\tbuf.WriteString(c)\n\t}\n\tbuf.WriteByte('.')\n\tbuf.WriteString(strconv.Itoa(insertColumns.Kind))\n\tfor _, c := range insertColumns.Cols {\n\t\tbuf.WriteString(c)\n\t}\n\tbuf.WriteByte('.')\n\tfor _, c := range nzDefaults {\n\t\tbuf.WriteString(c)\n\t}\n\tkey := buf.String()\n\tstrmangle.PutBuffer(buf)\n\n\tscheduleSubjectUpsertCacheMut.RLock()\n\tcache, cached := scheduleSubjectUpsertCache[key]\n\tscheduleSubjectUpsertCacheMut.RUnlock()\n\n\tvar err error\n\n\tif !cached {\n\t\tinsert, ret := 
insertColumns.InsertColumnSet(\n\t\t\tscheduleSubjectAllColumns,\n\t\t\tscheduleSubjectColumnsWithDefault,\n\t\t\tscheduleSubjectColumnsWithoutDefault,\n\t\t\tnzDefaults,\n\t\t)\n\t\tupdate := updateColumns.UpdateColumnSet(\n\t\t\tscheduleSubjectAllColumns,\n\t\t\tscheduleSubjectPrimaryKeyColumns,\n\t\t)\n\n\t\tif updateOnConflict && len(update) == 0 {\n\t\t\treturn errors.New(\"models: unable to upsert schedule_subject, could not build update column list\")\n\t\t}\n\n\t\tconflict := conflictColumns\n\t\tif len(conflict) == 0 {\n\t\t\tconflict = make([]string, len(scheduleSubjectPrimaryKeyColumns))\n\t\t\tcopy(conflict, scheduleSubjectPrimaryKeyColumns)\n\t\t}\n\t\tcache.query = buildUpsertQueryPostgres(dialect, \"\\\"schedule_subject\\\"\", updateOnConflict, ret, update, conflict, insert)\n\n\t\tcache.valueMapping, err = queries.BindMapping(scheduleSubjectType, scheduleSubjectMapping, insert)\n\t\tif err != nil {\n\t\t\treturn err\n\t\t}\n\t\tif len(ret) != 0 {\n\t\t\tcache.retMapping, err = queries.BindMapping(scheduleSubjectType, scheduleSubjectMapping, ret)\n\t\t\tif err != nil {\n\t\t\t\treturn err\n\t\t\t}\n\t\t}\n\t}\n\n\tvalue := reflect.Indirect(reflect.ValueOf(o))\n\tvals := queries.ValuesFromMapping(value, cache.valueMapping)\n\tvar returns []interface{}\n\tif len(cache.retMapping) != 0 {\n\t\treturns = queries.PtrsFromMapping(value, cache.retMapping)\n\t}\n\n\tif boil.IsDebug(ctx) {\n\t\twriter := boil.DebugWriterFrom(ctx)\n\t\tfmt.Fprintln(writer, cache.query)\n\t\tfmt.Fprintln(writer, vals)\n\t}\n\tif len(cache.retMapping) != 0 {\n\t\terr = exec.QueryRowContext(ctx, cache.query, vals...).Scan(returns...)\n\t\tif err == sql.ErrNoRows {\n\t\t\terr = nil // Postgres doesn't return anything when there's no update\n\t\t}\n\t} else {\n\t\t_, err = exec.ExecContext(ctx, cache.query, vals...)\n\t}\n\tif err != nil {\n\t\treturn errors.Wrap(err, \"models: unable to upsert schedule_subject\")\n\t}\n\n\tif !cached {\n\t\tscheduleSubjectUpsertCacheMut.Lock()\n\t\tscheduleSubjectUpsertCache[key] = cache\n\t\tscheduleSubjectUpsertCacheMut.Unlock()\n\t}\n\n\treturn o.doAfterUpsertHooks(ctx, exec)\n}", "func (_m *AlertmanagerService) Upsert(credential domain.AlertCredential) error {\n\tret := _m.Called(credential)\n\n\tvar r0 error\n\tif rf, ok := ret.Get(0).(func(domain.AlertCredential) error); ok {\n\t\tr0 = rf(credential)\n\t} else {\n\t\tr0 = ret.Error(0)\n\t}\n\n\treturn r0\n}", "func (o *DeleteSubscribedEventParams) WriteToRequest(r runtime.ClientRequest, reg strfmt.Registry) error {\n\n\tif err := r.SetTimeout(o.timeout); err != nil {\n\t\treturn err\n\t}\n\tvar res []error\n\n\t// path param eventName\n\tif err := r.SetPathParam(\"eventName\", o.EventName); err != nil {\n\t\treturn err\n\t}\n\n\tif len(res) > 0 {\n\t\treturn errors.CompositeValidationError(res...)\n\t}\n\treturn nil\n}", "func (mr *MockProductMockRecorder) UpsertProductApplications(arg0, arg1 interface{}) *gomock.Call {\n\tmr.mock.ctrl.T.Helper()\n\treturn mr.mock.ctrl.RecordCallWithMethodType(mr.mock, \"UpsertProductApplications\", reflect.TypeOf((*MockProduct)(nil).UpsertProductApplications), arg0, arg1)\n}", "func (o *Transaction) Upsert(exec boil.Executor, updateColumns []string, whitelist ...string) error {\n\tif o == nil {\n\t\treturn errors.New(\"model: no transaction provided for upsert\")\n\t}\n\n\tnzDefaults := queries.NonZeroDefaultSet(transactionColumnsWithDefault, o)\n\n\t// Build cache key in-line uglily - mysql vs postgres problems\n\tbuf := strmangle.GetBuffer()\n\tfor _, c := range updateColumns 
{\n\t\tbuf.WriteString(c)\n\t}\n\tbuf.WriteByte('.')\n\tfor _, c := range whitelist {\n\t\tbuf.WriteString(c)\n\t}\n\tbuf.WriteByte('.')\n\tfor _, c := range nzDefaults {\n\t\tbuf.WriteString(c)\n\t}\n\tkey := buf.String()\n\tstrmangle.PutBuffer(buf)\n\n\ttransactionUpsertCacheMut.RLock()\n\tcache, cached := transactionUpsertCache[key]\n\ttransactionUpsertCacheMut.RUnlock()\n\n\tvar err error\n\n\tif !cached {\n\t\tinsert, ret := strmangle.InsertColumnSet(\n\t\t\ttransactionColumns,\n\t\t\ttransactionColumnsWithDefault,\n\t\t\ttransactionColumnsWithoutDefault,\n\t\t\tnzDefaults,\n\t\t\twhitelist,\n\t\t)\n\n\t\tupdate := strmangle.UpdateColumnSet(\n\t\t\ttransactionColumns,\n\t\t\ttransactionPrimaryKeyColumns,\n\t\t\tupdateColumns,\n\t\t)\n\t\tif len(update) == 0 {\n\t\t\treturn errors.New(\"model: unable to upsert transaction, could not build update column list\")\n\t\t}\n\n\t\tcache.query = queries.BuildUpsertQueryMySQL(dialect, \"transaction\", update, insert)\n\t\tcache.retQuery = fmt.Sprintf(\n\t\t\t\"SELECT %s FROM `transaction` WHERE `id`=?\",\n\t\t\tstrings.Join(strmangle.IdentQuoteSlice(dialect.LQ, dialect.RQ, ret), \",\"),\n\t\t)\n\n\t\tcache.valueMapping, err = queries.BindMapping(transactionType, transactionMapping, insert)\n\t\tif err != nil {\n\t\t\treturn err\n\t\t}\n\t\tif len(ret) != 0 {\n\t\t\tcache.retMapping, err = queries.BindMapping(transactionType, transactionMapping, ret)\n\t\t\tif err != nil {\n\t\t\t\treturn err\n\t\t\t}\n\t\t}\n\t}\n\n\tvalue := reflect.Indirect(reflect.ValueOf(o))\n\tvals := queries.ValuesFromMapping(value, cache.valueMapping)\n\tvar returns []interface{}\n\tif len(cache.retMapping) != 0 {\n\t\treturns = queries.PtrsFromMapping(value, cache.retMapping)\n\t}\n\n\tif boil.DebugMode {\n\t\tfmt.Fprintln(boil.DebugWriter, cache.query)\n\t\tfmt.Fprintln(boil.DebugWriter, vals)\n\t}\n\n\tresult, err := exec.Exec(cache.query, vals...)\n\n\tif err != nil {\n\t\treturn errors.Wrap(err, \"model: unable to upsert for transaction\")\n\t}\n\n\tvar lastID int64\n\tvar identifierCols []interface{}\n\n\tif len(cache.retMapping) == 0 {\n\t\tgoto CacheNoHooks\n\t}\n\n\tlastID, err = result.LastInsertId()\n\tif err != nil {\n\t\treturn ErrSyncFail\n\t}\n\n\to.ID = uint64(lastID)\n\tif lastID != 0 && len(cache.retMapping) == 1 && cache.retMapping[0] == transactionMapping[\"ID\"] {\n\t\tgoto CacheNoHooks\n\t}\n\n\tidentifierCols = []interface{}{\n\t\to.ID,\n\t}\n\n\tif boil.DebugMode {\n\t\tfmt.Fprintln(boil.DebugWriter, cache.retQuery)\n\t\tfmt.Fprintln(boil.DebugWriter, identifierCols...)\n\t}\n\n\terr = exec.QueryRow(cache.retQuery, identifierCols...).Scan(returns...)\n\tif err != nil {\n\t\treturn errors.Wrap(err, \"model: unable to populate default values for transaction\")\n\t}\n\nCacheNoHooks:\n\tif !cached {\n\t\ttransactionUpsertCacheMut.Lock()\n\t\ttransactionUpsertCache[key] = cache\n\t\ttransactionUpsertCacheMut.Unlock()\n\t}\n\n\treturn nil\n}", "func (e StreamEventsRequestValidationError) Cause() error { return e.cause }", "func (o *CreateEventAlertConditionParams) WriteToRequest(r runtime.ClientRequest, reg strfmt.Registry) error {\n\n\tif err := r.SetTimeout(o.timeout); err != nil {\n\t\treturn err\n\t}\n\tvar res []error\n\n\tif o.EventAlertCondition != nil {\n\t\tif err := r.SetBodyParam(o.EventAlertCondition); err != nil {\n\t\t\treturn err\n\t\t}\n\t}\n\n\tif len(res) > 0 {\n\t\treturn errors.CompositeValidationError(res...)\n\t}\n\treturn nil\n}" ]
[ "0.5974808", "0.5636589", "0.54915786", "0.5482973", "0.53708184", "0.53068554", "0.5265901", "0.5127778", "0.5114349", "0.5099771", "0.5088743", "0.50795025", "0.50615066", "0.5030816", "0.5026924", "0.49976876", "0.49662232", "0.49546254", "0.49494067", "0.49102342", "0.48462546", "0.4815599", "0.47849116", "0.47667482", "0.47582453", "0.47193092", "0.47062165", "0.4693007", "0.4686909", "0.46864963", "0.468399", "0.46753043", "0.4672908", "0.46631488", "0.46552786", "0.46219876", "0.4617276", "0.461532", "0.46141925", "0.45973957", "0.45873657", "0.4569692", "0.45613214", "0.45572704", "0.4556806", "0.45551437", "0.4554826", "0.45499584", "0.454955", "0.4549155", "0.45371717", "0.4533655", "0.4522557", "0.45222223", "0.45148522", "0.45038038", "0.45028412", "0.45015335", "0.4485049", "0.44717482", "0.44594932", "0.44571596", "0.44524708", "0.4449403", "0.44444284", "0.4442294", "0.44418737", "0.442511", "0.44219816", "0.4419608", "0.44141346", "0.4400851", "0.43958724", "0.4392422", "0.4382861", "0.43713766", "0.43565974", "0.4353932", "0.4353932", "0.4350422", "0.43471715", "0.43360126", "0.43173638", "0.43135235", "0.43089086", "0.43075162", "0.42994106", "0.42973402", "0.4294174", "0.4283004", "0.42828807", "0.42794117", "0.427579", "0.42653945", "0.42629346", "0.4262405", "0.4260041", "0.4257318", "0.42550436", "0.42544022" ]
0.7299382
0
Field function returns field value.
func (e UpsertEventRequestValidationError) Field() string { return e.field }
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "func GetFieldValue(v interface{}, field string) (r string) {\n\tvar immutable reflect.Value\n\timmutable = GetReflectValue(v)\n\tval := immutable.FieldByName(field)\n\tswitch val.Kind() {\n\tcase reflect.Int64, reflect.Int32, reflect.Int:\n\t\tr = fmt.Sprintf(\"%d\", val.Int())\n\tcase reflect.Float64, reflect.Float32:\n\t\tr = fmt.Sprintf(\"%.2f\", val.Float())\n\tdefault:\n\t\t// process time\n\t\tvi := val.Interface()\n\t\tif vc, ok := vi.(time.Time); ok {\n\t\t\tr = FormatTime(vc)\n\t\t\tbreak\n\t\t}\n\t\tr = fmt.Sprintf(\"%v\", val)\n\t}\n\treturn\n}", "func (f *field) Val() interface{} {\n\treturn f.v\n}", "func (f Fields) ValueForField(fieldName string) string {\n\treturn f.ValueForFieldOfType(fieldName, \"\")\n}", "func (v *ClassValue) field(s *scope, name string) Value {\n\tfield, ok := v.Fields[name]\n\tif !ok {\n\t\tpanic(fmt.Errorf(\"ClassValue %v did not contain field %v\", v.Type().Name(), name))\n\t}\n\treturn field\n}", "func (f *Field) Value() interface{} {\n\treturn f.value.Interface()\n}", "func (f *Fieldx) Value() interface{} {\n\treturn f.value.Interface()\n}", "func (i Item) GetField(name string) interface{} {\n\treturn getField(name, i.Payload)\n}", "func FieldValue(field *InputField) string {\n\treturn field.value\n}", "func (e RanparameterValueValidationError) Field() string { return e.field }", "func (i I)Field(r,c int, value string)string{\n return value\n}", "func (s *StructField) Field(name string) (*StructField, error) {\n\treturn Field(s.Value(), name)\n}", "func (entry *Entry) Field(name string) (value string, err error) {\n\tvalue, ok := entry.fields[name]\n\tif !ok {\n\t\terr = fmt.Errorf(\"field '%v' does not found in record %+v\", name, *entry)\n\t}\n\treturn\n}", "func (m *NodeMutation) Field(name string) (ent.Value, bool) {\n\tswitch name {\n\tcase node.FieldValue:\n\t\treturn m.Value()\n\t}\n\treturn nil, false\n}", "func (u *User) GetFieldValue(field *mapping.StructField) (interface{}, error) {\n\tswitch field.Index[0] {\n\tcase 0: // ID\n\t\treturn u.ID, nil\n\tcase 1: // Name\n\t\treturn u.Name, nil\n\tcase 2: // CreatedAt\n\t\treturn u.CreatedAt, nil\n\tcase 3: // CreatedAtIso\n\t\treturn u.CreatedAtIso, nil\n\tcase 5: // MotherID\n\t\treturn u.MotherID, nil\n\tcase 7: // FatherID\n\t\treturn u.FatherID, nil\n\t}\n\treturn nil, errors.Wrapf(mapping.ErrInvalidModelField, \"provided invalid field: '%s' for given model: User'\", field.Name())\n}", "func (m *NumberTokenMutation) Field(name string) (ent.Value, bool) {\n\tswitch name {\n\tcase numbertoken.FieldValue:\n\t\treturn m.Value()\n\t}\n\treturn nil, false\n}", "func (f *Field) Field(name string) *Field {\n\tfield, ok := f.FieldOk(name)\n\tif !ok {\n\t\tpanic(\"field not found\")\n\t}\n\n\treturn field\n}", "func (e GetInstanceRequestValidationError) Field() string { return e.field }", "func (e RanparameterItemValidationError) Field() string { return e.field }", "func (e ApplicationPubSubValidationError) Field() string { return e.field }", "func (res Result) GetField(fields ...string) interface{} {\n\tif len(fields) == 0 {\n\t\treturn res\n\t}\n\n\treturn res.get(fields)\n}", "func (t *Type) Field(i int) *Field", "func (m *CarRepairrecordMutation) Field(name string) (ent.Value, bool) {\n\tswitch name {\n\tcase carrepairrecord.FieldDatetime:\n\t\treturn m.Datetime()\n\tcase carrepairrecord.FieldRepairdetail:\n\t\treturn m.Repairdetail()\n\tcase carrepairrecord.FieldRepaircost:\n\t\treturn m.Repaircost()\n\tcase carrepairrecord.FieldCarmaintenance:\n\t\treturn m.Carmaintenance()\n\t}\n\treturn 
nil, false\n}", "func (b *box) getFieldValue(x, y int) int {\n\treturn b.values[x+y*3]\n}", "func (e GetEventByIDRequestValidationError) Field() string { return e.field }", "func (msg *Message) Field(fieldName string) *Field {\n\treturn msg.fieldByName[fieldName]\n}", "func (e GetRequestValidationError) Field() string { return e.field }", "func (e GetRequestValidationError) Field() string { return e.field }", "func (e GetInstanceResponseValidationError) Field() string { return e.field }", "func (e BitStringValidationError) Field() string { return e.field }", "func (e GetResponseValidationError) Field() string { return e.field }", "func (e GetApplicationPubSubRequestValidationError) Field() string { return e.field }", "func (e ResultValidationError) Field() string { return e.field }", "func (e GetEventByIDResponseValidationError) Field() string { return e.field }", "func (e RanparameterDefItemValidationError) Field() string { return e.field }", "func (e ArfcnValidationError) Field() string { return e.field }", "func (p *Pet) GetFieldValue(field *mapping.StructField) (interface{}, error) {\n\tswitch field.Index[0] {\n\tcase 0: // ID\n\t\treturn p.ID, nil\n\tcase 1: // Name\n\t\treturn p.Name, nil\n\tcase 3: // OwnerID\n\t\treturn p.OwnerID, nil\n\t}\n\treturn nil, errors.Wrapf(mapping.ErrInvalidModelField, \"provided invalid field: '%s' for given model: Pet'\", field.Name())\n}", "func (e RanparameterIdValidationError) Field() string { return e.field }", "func (e RetrieveResponseValidationError) Field() string { return e.field }", "func (m *RepairinvoiceMutation) Field(name string) (ent.Value, bool) {\n\tswitch name {\n\tcase repairinvoice.FieldBequipment:\n\t\treturn m.Bequipment()\n\tcase repairinvoice.FieldEmtell:\n\t\treturn m.Emtell()\n\tcase repairinvoice.FieldNum:\n\t\treturn m.Num()\n\t}\n\treturn nil, false\n}", "func (m *CleaningroomMutation) Field(name string) (ent.Value, bool) {\n\tswitch name {\n\tcase cleaningroom.FieldNote:\n\t\treturn m.Note()\n\tcase cleaningroom.FieldDateandstarttime:\n\t\treturn m.Dateandstarttime()\n\tcase cleaningroom.FieldPhonenumber:\n\t\treturn m.Phonenumber()\n\tcase cleaningroom.FieldNumofem:\n\t\treturn m.Numofem()\n\t}\n\treturn nil, false\n}", "func Field(name, from, reference string) (string, error) {\n\treturn makeRequest(\"field\", name, from, reference)\n}", "func (e GetMovableObjectRequestValidationError) Field() string { return e.field }", "func (e ResolveResponseValidationError) Field() string { return e.field }", "func (e PublishResponseValidationError) Field() string { return e.field }", "func (e GetMessageRequestValidationError) Field() string { return e.field }", "func (e GetMessageResponseValidationError) Field() string { return e.field }", "func (m *RepairinvoiceMutation) Field(name string) (ent.Value, bool) {\n\tswitch name {\n\tcase repairinvoice.FieldSymptomid:\n\t\treturn m.Symptomid()\n\tcase repairinvoice.FieldDeviceid:\n\t\treturn m.Deviceid()\n\tcase repairinvoice.FieldUserid:\n\t\treturn m.Userid()\n\tcase repairinvoice.FieldStatusrepairid:\n\t\treturn m.Statusrepairid()\n\t}\n\treturn nil, false\n}", "func (e SimpleRequestValidationError) Field() string { return e.field }", "func (e CacheValidationError) Field() string { return e.field }", "func (e PciValidationError) Field() string { return e.field }", "func (e ChannelPayRequestValidationError) Field() string { return e.field }", "func (e GetMovableObjectResponseValidationError) Field() string { return e.field }", "func (e RetrieveRequestValidationError) Field() string { 
return e.field }", "func (m *ExchangeMutation) Field(name string) (ent.Value, bool) {\n\tswitch name {\n\tcase exchange.FieldCode:\n\t\treturn m.Code()\n\tcase exchange.FieldName:\n\t\treturn m.Name()\n\t}\n\treturn nil, false\n}", "func (e PublishRequestValidationError) Field() string { return e.field }", "func (m *PetruleMutation) Field(name string) (ent.Value, bool) {\n\tswitch name {\n\tcase petrule.FieldPetrule:\n\t\treturn m.Petrule()\n\t}\n\treturn nil, false\n}", "func (e GitopsCFValidationError) Field() string { return e.field }", "func (e SimpleResponseValidationError) Field() string { return e.field }", "func (e ChannelPayResponseValidationError) Field() string { return e.field }", "func (f *Field) Get(l *Location) (string, error) {\n\tif l.Comp == -1 {\n\t\treturn string(f.Value), nil\n\t}\n\tcomp, err := f.Component(l.Comp)\n\tif err != nil {\n\t\treturn \"\", err\n\t}\n\treturn comp.Get(l)\n}", "func (m *RepairingMutation) Field(name string) (ent.Value, bool) {\n\tswitch name {\n\tcase repairing.FieldRepairpart:\n\t\treturn m.Repairpart()\n\t}\n\treturn nil, false\n}", "func (e RanfunctionNameValidationError) Field() string { return e.field }", "func (m *BillMutation) Field(name string) (ent.Value, bool) {\n\tswitch name {\n\tcase bill.FieldPrice:\n\t\treturn m.Price()\n\tcase bill.FieldTime:\n\t\treturn m.Time()\n\t}\n\treturn nil, false\n}", "func (m *EventRSVPMutation) Field(name string) (ent.Value, bool) {\n\treturn nil, false\n}", "func Field(v interface{}, name string) (*Fieldx, bool) {\n\treturn New(v).Field(name)\n}", "func (e GetStreamRequestValidationError) Field() string { return e.field }", "func (e RdsValidationError) Field() string { return e.field }", "func (f *TagField) Value() string {\n\treturn f.value\n}", "func (m *LeaseMutation) Field(name string) (ent.Value, bool) {\n\tswitch name {\n\tcase lease.FieldAddedtime:\n\t\treturn m.Addedtime()\n\tcase lease.FieldTenant:\n\t\treturn m.Tenant()\n\tcase lease.FieldNumbtenant:\n\t\treturn m.Numbtenant()\n\tcase lease.FieldIdtenant:\n\t\treturn m.Idtenant()\n\tcase lease.FieldAgetenant:\n\t\treturn m.Agetenant()\n\t}\n\treturn nil, false\n}", "func (e RetrieveCurrentRequestValidationError) Field() string { return e.field }", "func (fn AdapterFunc) Field(fieldpath []string) (string, bool) {\n\treturn fn(fieldpath)\n}", "func (e EarfcnValidationError) Field() string { return e.field }", "func (e Response_DataValidationError) Field() string { return e.field }", "func (e ScopedRdsValidationError) Field() string { return e.field }", "func (e ResolveRequestValidationError) Field() string { return e.field }", "func (e PaymentInputValidationError) Field() string { return e.field }", "func (m *PatientrecordMutation) Field(name string) (ent.Value, bool) {\n\tswitch name {\n\tcase patientrecord.FieldName:\n\t\treturn m.Name()\n\t}\n\treturn nil, false\n}", "func (e BatchGetResponseValidationError) Field() string { return e.field }", "func (i *Item) GetValue(field string) string {\n\tif i == nil || len(i.Fields) == 0 {\n\t\treturn \"\"\n\t}\n\n\tsectionFilter := false\n\tsectionLabel := \"\"\n\tfieldLabel := field\n\tif strings.Contains(field, \".\") {\n\t\tparts := strings.Split(field, \".\")\n\n\t\t// Test to make sure the . 
isn't the last character\n\t\tif len(parts) == 2 {\n\t\t\tsectionFilter = true\n\t\t\tsectionLabel = parts[0]\n\t\t\tfieldLabel = parts[1]\n\t\t}\n\t}\n\n\tfor _, f := range i.Fields {\n\t\tif sectionFilter {\n\t\t\tif f.Section != nil {\n\t\t\t\tif sectionLabel != i.SectionLabelForID(f.Section.ID) {\n\t\t\t\t\tcontinue\n\t\t\t\t}\n\t\t\t}\n\t\t}\n\n\t\tif fieldLabel == f.Label {\n\t\t\treturn f.Value\n\t\t}\n\t}\n\n\treturn \"\"\n}", "func (m *RoomInfoMutation) Field(name string) (ent.Value, bool) {\n\tswitch name {\n\tcase roominfo.FieldInfo:\n\t\treturn m.Info()\n\t}\n\treturn nil, false\n}", "func (m *TokenMutation) Field(name string) (ent.Value, bool) {\n\tswitch name {\n\tcase token.FieldCreatedAt:\n\t\treturn m.CreatedAt()\n\tcase token.FieldUpdatedAt:\n\t\treturn m.UpdatedAt()\n\tcase token.FieldValue:\n\t\treturn m.Value()\n\t}\n\treturn nil, false\n}", "func (m *ResourceMutation) Field(name string) (ent.Value, bool) {\n\tswitch name {\n\tcase resource.FieldName:\n\t\treturn m.Name()\n\tcase resource.FieldType:\n\t\treturn m.GetType()\n\t}\n\treturn nil, false\n}", "func (e MovableObjectValidationError) Field() string { return e.field }", "func (m *CardMutation) Field(name string) (ent.Value, bool) {\n\tswitch name {\n\tcase card.FieldCreateTime:\n\t\treturn m.CreateTime()\n\tcase card.FieldUpdateTime:\n\t\treturn m.UpdateTime()\n\tcase card.FieldSuit:\n\t\treturn m.Suit()\n\tcase card.FieldValue:\n\t\treturn m.Value()\n\t}\n\treturn nil, false\n}", "func (m *EventMutation) Field(name string) (ent.Value, bool) {\n\tswitch name {\n\tcase event.FieldName:\n\t\treturn m.Name()\n\t}\n\treturn nil, false\n}", "func (m *BillMutation) Field(name string) (ent.Value, bool) {\n\tswitch name {\n\tcase bill.FieldQuantity:\n\t\treturn m.Quantity()\n\tcase bill.FieldAddedTime:\n\t\treturn m.AddedTime()\n\t}\n\treturn nil, false\n}", "func (m *StreetMutation) Field(name string) (ent.Value, bool) {\n\tswitch name {\n\tcase street.FieldName:\n\t\treturn m.Name()\n\t}\n\treturn nil, false\n}", "func (m *LengthtimeMutation) Field(name string) (ent.Value, bool) {\n\tswitch name {\n\tcase lengthtime.FieldLengthtime:\n\t\treturn m.Lengthtime()\n\t}\n\treturn nil, false\n}", "func (e AssessmentResultValidationError) Field() string { return e.field }", "func (s UserSet) FieldGet(field models.FieldName) *models.FieldInfo {\n\tres := s.Collection().Call(\"FieldGet\", field)\n\tresTyped, _ := res.(*models.FieldInfo)\n\treturn resTyped\n}", "func (e GetUserResponseValidationError) Field() string { return e.field }", "func (m *PatientrightsMutation) Field(name string) (ent.Value, bool) {\n\tswitch name {\n\tcase patientrights.FieldPermissionDate:\n\t\treturn m.PermissionDate()\n\t}\n\treturn nil, false\n}", "func (e GetStreamResponseValidationError) Field() string { return e.field }", "func (m *EquipmentrentalMutation) Field(name string) (ent.Value, bool) {\n\tswitch name {\n\tcase equipmentrental.FieldRENTALAMOUNT:\n\t\treturn m.RENTALAMOUNT()\n\tcase equipmentrental.FieldRENTALDATE:\n\t\treturn m.RENTALDATE()\n\tcase equipmentrental.FieldRETURNDATE:\n\t\treturn m.RETURNDATE()\n\t}\n\treturn nil, false\n}", "func (f *FieldHandler) Value(initZero bool) reflect.Value {\n\treturn f.field.reflectValueGetter(f.expr.ptr, initZero)\n}", "func (m *PurposeMutation) Field(name string) (ent.Value, bool) {\n\tswitch name {\n\tcase purpose.FieldObjective:\n\t\treturn m.Objective()\n\t}\n\treturn nil, false\n}", "func (e ApplicationPubSubsValidationError) Field() string { return e.field }", "func (f Unstructured) 
Field(field string) Fragment {\n\tif f.fields != nil {\n\t\treturn f.fields[field]\n\t}\n\treturn nil\n}", "func (e BodyResponseValidationError) Field() string { return e.field }", "func (m *CarMutation) Field(name string) (ent.Value, bool) {\n\tswitch name {\n\tcase car.FieldCreateTime:\n\t\treturn m.CreateTime()\n\tcase car.FieldUpdateTime:\n\t\treturn m.UpdateTime()\n\tcase car.FieldModel:\n\t\treturn m.Model()\n\tcase car.FieldRegisteredAt:\n\t\treturn m.RegisteredAt()\n\t}\n\treturn nil, false\n}", "func (m *CardMutation) Field(name string) (ent.Value, bool) {\n\tswitch name {\n\tcase card.FieldNumber:\n\t\treturn m.Number()\n\tcase card.FieldName:\n\t\treturn m.Name()\n\tcase card.FieldOwnerID:\n\t\treturn m.OwnerID()\n\t}\n\treturn nil, false\n}" ]
[ "0.71079886", "0.705458", "0.70306563", "0.70252305", "0.6945119", "0.69039124", "0.689789", "0.68854237", "0.68611896", "0.68137765", "0.6811531", "0.67632294", "0.6716657", "0.67018616", "0.66822076", "0.6671346", "0.66659707", "0.6661343", "0.66608155", "0.6660421", "0.665608", "0.6647752", "0.66360617", "0.6625801", "0.6617159", "0.66153616", "0.66153616", "0.661111", "0.6608895", "0.66083837", "0.6604208", "0.66008335", "0.65927887", "0.6587402", "0.65803015", "0.65671533", "0.6567071", "0.6564914", "0.65632343", "0.65630984", "0.654184", "0.6536053", "0.6530546", "0.6530526", "0.6528864", "0.65260595", "0.65179527", "0.6516745", "0.6516154", "0.6510159", "0.6510078", "0.65042776", "0.6501439", "0.6499975", "0.64988506", "0.649665", "0.6496221", "0.64947623", "0.649354", "0.6489089", "0.6488793", "0.64882225", "0.64859617", "0.6483642", "0.6479889", "0.64790434", "0.6472379", "0.6465228", "0.6459204", "0.6457627", "0.6452723", "0.64507645", "0.64495903", "0.64487314", "0.6448028", "0.64479464", "0.64474", "0.64456683", "0.64455897", "0.6444573", "0.64437336", "0.6443306", "0.6441888", "0.6441613", "0.6441039", "0.6439085", "0.6438874", "0.6434375", "0.64315784", "0.6430702", "0.6429934", "0.64209116", "0.6417538", "0.64174324", "0.6417134", "0.6411201", "0.64086837", "0.6406251", "0.6405251", "0.6404929", "0.64009386" ]
0.0
-1
Reason function returns reason value.
func (e UpsertEventRequestValidationError) Reason() string { return e.reason }
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "func GetReason(from Getter, t string) string {\n\tif c := Get(from, t); c != nil {\n\t\treturn c.Reason\n\t}\n\treturn \"\"\n}", "func GetReason(message report.IMessage) int32 {\n\tswitch message.MessageType() {\n\tcase \"+RSP\", \"+BSP\":\n\t\treturn getLocationReason(message)\n\tcase \"+EVT\", \"+BVT\":\n\t\treturn getEventCode(message)\n\n\t}\n\treturn int32(6)\n}", "func GetReason(message report.IMessage) int32 {\n\tswitch message.MessageType() {\n\tcase \"+RSP\", \"+BSP\":\n\t\treturn getLocationReason(message)\n\tcase \"+EVT\", \"+BVT\":\n\t\treturn getEventCode(message)\n\n\t}\n\treturn int32(6)\n}", "func (b *Base) GetReason() string {\n\treturn b.Reason\n}", "func (o ValidationOutput) Reason() pulumi.StringPtrOutput {\n\treturn o.ApplyT(func(v Validation) *string { return v.Reason }).(pulumi.StringPtrOutput)\n}", "func (s *Subscription) GetReason() string {\n\tif s == nil || s.Reason == nil {\n\t\treturn \"\"\n\t}\n\treturn *s.Reason\n}", "func GetReason(message report.IMessage) int32 {\n\tidt, found := message.GetValue(fields.DeviceType)\n\tif !found {\n\t\treturn 6 //periodical\n\t}\n\n\tdeviceType, valid := idt.(byte)\n\tif !valid {\n\t\treturn 6 //periodical\n\t}\n\n\tswitch deviceType {\n\tcase devicetypes.GV320:\n\t\treturn gv300.GetReason(message)\n\n\tcase devicetypes.GV55, devicetypes.GV55N:\n\t\treturn gv55.GetReason(message)\n\n\tcase devicetypes.GV55Lite, devicetypes.GV55NLite:\n\t\treturn gv55.GetReasonLite(message)\n\n\tcase devicetypes.GV75, devicetypes.GV75W:\n\t\treturn gv75.GetReason(message)\n\n\tcase devicetypes.GV55W:\n\t\treturn gv55w.GetReason(message)\n\n\tcase devicetypes.GV600W:\n\t\treturn gv600.GetReason(message)\n\tcase devicetypes.GV300W:\n\t\treturn gv300w.GetReason(message)\n\tdefault:\n\t\treturn gv55.GetReason(message)\n\t}\n}", "func (e MessageDValidationError) Reason() string { return e.reason }", "func (o LienOutput) Reason() pulumi.StringOutput {\n\treturn o.ApplyT(func(v *Lien) pulumi.StringOutput { return v.Reason }).(pulumi.StringOutput)\n}", "func (e BitStringValidationError) Reason() string { return e.reason }", "func (o JobConditionOutput) Reason() pulumi.StringPtrOutput {\n\treturn o.ApplyT(func(v JobCondition) *string { return v.Reason }).(pulumi.StringPtrOutput)\n}", "func Reason(v string) predicate.ProfileUKM {\n\treturn predicate.ProfileUKM(func(s *sql.Selector) {\n\t\ts.Where(sql.EQ(s.C(FieldReason), v))\n\t})\n}", "func (e MessageFValidationError) Reason() string { return e.reason }", "func (o ValidationPatchOutput) Reason() pulumi.StringPtrOutput {\n\treturn o.ApplyT(func(v ValidationPatch) *string { return v.Reason }).(pulumi.StringPtrOutput)\n}", "func (e ActiveHealthCheckValidationError) Reason() string { return e.reason }", "func (o *SecurityProblemEvent) GetReason() string {\n\tif o == nil || o.Reason == nil {\n\t\tvar ret string\n\t\treturn ret\n\t}\n\treturn *o.Reason\n}", "func (e EutracgiValidationError) Reason() string { return e.reason }", "func (resp *Response) Reason() string {\n\treturn resp.Status\n}", "func (n *Notification) GetReason() string {\n\tif n == nil || n.Reason == nil {\n\t\treturn \"\"\n\t}\n\treturn *n.Reason\n}", "func (s *SessionTrackerV1) GetReason() string {\n\treturn s.Spec.Reason\n}", "func (e MessageEValidationError) Reason() string { return e.reason }", "func (e RequirementRuleValidationError) Reason() string { return e.reason }", "func Reason(err error) string {\n\tif err == nil {\n\t\treturn \"\"\n\t}\n\tif reasoner, ok := err.(Reasoner); ok {\n\t\treturn reasoner.Reason()\n\t}\n\treturn 
\"\"\n}", "func (o MachineInstanceStatusConditionsOutput) Reason() pulumi.StringOutput {\n\treturn o.ApplyT(func(v MachineInstanceStatusConditions) string { return v.Reason }).(pulumi.StringOutput)\n}", "func (e NrtValidationError) Reason() string { return e.reason }", "func (o BuildStatusOutput) Reason() pulumi.StringPtrOutput {\n\treturn o.ApplyT(func(v BuildStatus) *string { return v.Reason }).(pulumi.StringPtrOutput)\n}", "func (e GetMessageResponseValidationError) Reason() string { return e.reason }", "func (o ApplicationStatusConditionsOutput) Reason() pulumi.StringOutput {\n\treturn o.ApplyT(func(v ApplicationStatusConditions) string { return v.Reason }).(pulumi.StringOutput)\n}", "func (e PassiveHealthCheckValidationError) Reason() string { return e.reason }", "func (e CardValidationError) Reason() string { return e.reason }", "func (e StatsdValidationError) Reason() string { return e.reason }", "func (e PciValidationError) Reason() string { return e.reason }", "func (o ApplicationStatusWorkflowStepsOutput) Reason() pulumi.StringPtrOutput {\n\treturn o.ApplyT(func(v ApplicationStatusWorkflowSteps) *string { return v.Reason }).(pulumi.StringPtrOutput)\n}", "func (o *AccessRequestData) GetReason() string {\n\tif o == nil || o.Reason == nil {\n\t\tvar ret string\n\t\treturn ret\n\t}\n\treturn *o.Reason\n}", "func (e LanguageValidationError) Reason() string { return e.reason }", "func (e CreditValidationError) Reason() string { return e.reason }", "func (e PaymentValidationError) Reason() string { return e.reason }", "func (e ResponseValidationError) Reason() string { return e.reason }", "func (e RdsValidationError) Reason() string { return e.reason }", "func (e CardHolderValidationError) Reason() string { return e.reason }", "func (e ActionValidationError) Reason() string { return e.reason }", "func (e SimpleResponseValidationError) Reason() string { return e.reason }", "func (e StatusResponseValidationError) Reason() string { return e.reason }", "func (o *V0037Node) GetReason() string {\n\tif o == nil || o.Reason == nil {\n\t\tvar ret string\n\t\treturn ret\n\t}\n\treturn *o.Reason\n}", "func (e ChannelPayRequestValidationError) Reason() string { return e.reason }", "func (e ChannelPayResponseValidationError) Reason() string { return e.reason }", "func (e RicControlMessagePriorityValidationError) Reason() string { return e.reason }", "func (e MaxPciValidationError) Reason() string { return e.reason }", "func (e LivenessResponseValidationError) Reason() string { return e.reason }", "func (e MaxPlmnValidationError) Reason() string { return e.reason }", "func (e SimpleRequestValidationError) Reason() string { return e.reason }", "func (e MessageCValidationError) Reason() string { return e.reason }", "func (se *StatusError) Reason() string {\n\treturn se.message\n}", "func (o *DeploymentsCondition) GetReason() string {\n\tif o == nil || o.Reason == nil {\n\t\tvar ret string\n\t\treturn ret\n\t}\n\treturn *o.Reason\n}", "func (e SkillValidationError) Reason() string { return e.reason }", "func (e GetDisscusRespValidationError) Reason() string { return e.reason }", "func (o BuildStatusPtrOutput) Reason() pulumi.StringPtrOutput {\n\treturn o.ApplyT(func(v *BuildStatus) *string {\n\t\tif v == nil {\n\t\t\treturn nil\n\t\t}\n\t\treturn v.Reason\n\t}).(pulumi.StringPtrOutput)\n}", "func (c *ContainerStatusResolver) Reason() *string {\n\treturn c.reason\n}", "func (e EarfcnValidationError) Reason() string { return e.reason }", "func (e CalculateComplianceRequestValidationError) Reason() string 
{ return e.reason }", "func (_this *CrashReportBody) Reason() *string {\n\tvar ret *string\n\tvalue := _this.Value_JS.Get(\"reason\")\n\tif value.Type() != js.TypeNull && value.Type() != js.TypeUndefined {\n\t\t__tmp := (value).String()\n\t\tret = &__tmp\n\t}\n\treturn ret\n}", "func (e HealthCheck_PayloadValidationError) Reason() string { return e.reason }", "func (e RetrieveMyCardsResponseValidationError) Reason() string { return e.reason }", "func (e CommonResponseValidationError) Reason() string { return e.reason }", "func (e GetMessageRequestValidationError) Reason() string { return e.reason }", "func (o StorageClusterStatusConditionsOutput) Reason() pulumi.StringPtrOutput {\n\treturn o.ApplyT(func(v StorageClusterStatusConditions) *string { return v.Reason }).(pulumi.StringPtrOutput)\n}", "func (e StateMachineResponseValidationError) Reason() string { return e.reason }", "func (e ArfcnValidationError) Reason() string { return e.reason }", "func (e NetworkPolicyValidationError) Reason() string { return e.reason }", "func (o *DataPlaneClusterUpdateStatusRequestConditions) GetReason() string {\n\tif o == nil || o.Reason == nil {\n\t\tvar ret string\n\t\treturn ret\n\t}\n\treturn *o.Reason\n}", "func (e MetricValidationError) Reason() string { return e.reason }", "func (o BuildRunStatusOutput) Reason() pulumi.StringPtrOutput {\n\treturn o.ApplyT(func(v BuildRunStatus) *string { return v.Reason }).(pulumi.StringPtrOutput)\n}", "func (e RecoverableError) Reason() string {\n\treturn e.reason\n}", "func (e MaxofMessageProtocolTestsValidationError) Reason() string { return e.reason }", "func (e ChannelNotifyResponseValidationError) Reason() string { return e.reason }", "func (e ResultValidationError) Reason() string { return e.reason }", "func (e TestSpecificationValidationError) Reason() string { return e.reason }", "func (e NonRecoverableError) Reason() string {\n\treturn e.reason\n}", "func (o JobStatusErrorOutput) Reason() pulumi.StringPtrOutput {\n\treturn o.ApplyT(func(v JobStatusError) *string { return v.Reason }).(pulumi.StringPtrOutput)\n}", "func (a Acknowledgement) Reason() error {\n\tswitch {\n\tcase a.State == ACK:\n\t\treturn nil\n\tcase a.State == NACK:\n\t\treturn errors.New(string(a.Message))\n\tdefault:\n\t\treturn errors.New(\"unknown acknowledgement status\")\n\t}\n}", "func (e UpdateMessageResponseValidationError) Reason() string { return e.reason }", "func (e WordValidationError) Reason() string { return e.reason }", "func (e GetDisscusReqValidationError) Reason() string { return e.reason }", "func (e CreatMessageResponseValidationError) Reason() string { return e.reason }", "func (o HorizontalPodAutoscalerConditionPatchOutput) Reason() pulumi.StringPtrOutput {\n\treturn o.ApplyT(func(v HorizontalPodAutoscalerConditionPatch) *string { return v.Reason }).(pulumi.StringPtrOutput)\n}", "func (o HorizontalPodAutoscalerConditionPatchOutput) Reason() pulumi.StringPtrOutput {\n\treturn o.ApplyT(func(v HorizontalPodAutoscalerConditionPatch) *string { return v.Reason }).(pulumi.StringPtrOutput)\n}", "func (e MetricImplementationValidationError) Reason() string { return e.reason }", "func (e CiliumCFValidationError) Reason() string { return e.reason }", "func (e FilterStateRuleValidationError) Reason() string { return e.reason }", "func (e CreateDisscusRespValidationError) Reason() string { return e.reason }", "func (e HealthCheckValidationError) Reason() string { return e.reason }", "func (e HealthCheckValidationError) Reason() string { return e.reason }", "func (e 
TwoOneofsValidationError) Reason() string { return e.reason }", "func (e AdminValidationError) Reason() string { return e.reason }", "func (o HorizontalPodAutoscalerConditionOutput) Reason() pulumi.StringPtrOutput {\n\treturn o.ApplyT(func(v HorizontalPodAutoscalerCondition) *string { return v.Reason }).(pulumi.StringPtrOutput)\n}", "func (o HorizontalPodAutoscalerConditionOutput) Reason() pulumi.StringPtrOutput {\n\treturn o.ApplyT(func(v HorizontalPodAutoscalerCondition) *string { return v.Reason }).(pulumi.StringPtrOutput)\n}", "func (e LivenessRequestValidationError) Reason() string { return e.reason }", "func (r *ReportStoryRequest) GetReason() (value ReportReasonClass) {\n\tif r == nil {\n\t\treturn\n\t}\n\treturn r.Reason\n}", "func (e AssessmentResultValidationError) Reason() string { return e.reason }", "func (e L7NetworkPolicyRuleValidationError) Reason() string { return e.reason }", "func (e NrarfcnValidationError) Reason() string { return e.reason }" ]
[ "0.78512263", "0.7759013", "0.7759013", "0.758723", "0.74332446", "0.74091107", "0.740494", "0.73673135", "0.73432285", "0.7330937", "0.7329657", "0.73138005", "0.72980094", "0.7293151", "0.72837216", "0.7275913", "0.7252345", "0.7230593", "0.72234565", "0.7222608", "0.7196587", "0.7186926", "0.7177811", "0.71720684", "0.71702856", "0.7168882", "0.7168033", "0.71623784", "0.7160162", "0.7157901", "0.7156796", "0.71499187", "0.71483266", "0.71435404", "0.7138927", "0.7134093", "0.7131485", "0.71212435", "0.7113703", "0.71134007", "0.7110416", "0.71102226", "0.71073544", "0.71044487", "0.7097571", "0.709562", "0.70931906", "0.7092116", "0.7085098", "0.70789874", "0.7077606", "0.707535", "0.7071573", "0.706842", "0.7067343", "0.70658314", "0.7065663", "0.70604813", "0.70554", "0.70413375", "0.7038985", "0.7036392", "0.70291436", "0.70268923", "0.7026706", "0.70261866", "0.7018986", "0.7011388", "0.70111495", "0.7009085", "0.7005406", "0.70025146", "0.7000965", "0.69991565", "0.6995616", "0.6992607", "0.6992276", "0.69910586", "0.6989737", "0.69873315", "0.6984515", "0.6983248", "0.6979003", "0.6976954", "0.69759", "0.69759", "0.6974406", "0.69741553", "0.6972589", "0.69723344", "0.69695055", "0.69695055", "0.69690573", "0.69686645", "0.69659555", "0.69659555", "0.69656986", "0.69630307", "0.69612694", "0.69515", "0.69511986" ]
0.0
-1
Cause function returns cause value.
func (e UpsertEventRequestValidationError) Cause() error { return e.cause }
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "func Cause(err error) error {\n\tswitch err.(type) {\n\tcase Causable:\n\t\treturn err.(Causable).Cause()\n\t}\n\treturn nil\n}", "func (e errWithCause) Cause() error {\n\treturn e.cause\n}", "func Cause(e error) error {\n\tswitch e := e.(type) {\n\tcase *wrap:\n\t\treturn e.Cause()\n\tcase UserError:\n\t\treturn e.Cause()\n\tdefault:\n\t\treturn e\n\t}\n}", "func (e *Error) Cause() error {\n\treturn e.Unwrap()\n}", "func (e *wrap) Cause() error {\n\treturn e.cause\n}", "func Cause(err error) error {\n\treturn errors.Cause(err)\n}", "func Cause(err error) error {\n\treturn errors.Cause(err)\n}", "func Cause(err error) error {\n\treturn errors.Cause(err)\n}", "func Cause(err error) error {\n\treturn errors.Cause(err)\n}", "func Cause(err error) error {\n\treturn errors.Cause(err)\n}", "func Cause(err error) error {\n\treturn errors.Cause(err)\n}", "func Cause(err error) error {\n\tif err == nil {\n\t\treturn nil\n\t}\n\tif e, ok := err.(iCause); ok {\n\t\treturn e.Cause()\n\t}\n\tif e, ok := err.(iNext); ok {\n\t\treturn Cause(e.Next())\n\t}\n\tif e, ok := err.(iUnwrap); ok {\n\t\treturn Cause(e.Unwrap())\n\t}\n\treturn err\n}", "func (e *Error) Cause() error {\n\treturn e.err\n}", "func (e *errorT) Cause() error {\n\treturn e.err\n}", "func (s *Error) Cause() error {\n\treturn s.underlying\n}", "func (e *Error) Cause() error {\n\treturn e.Err\n}", "func (ec Error) Cause() error {\n\treturn ec.error\n}", "func Cause(err error) error {\n\tif err, ok := err.(*wrappedError); ok {\n\t\treturn err.Cause()\n\t}\n\treturn err\n}", "func (e *Err) Cause() error {\n\treturn e.cause\n}", "func Cause(err error) error {\n\ttype causer interface {\n\t\tCause() error\n\t}\n\n\tfor err != nil {\n\t\te, ok := err.(causer)\n\t\tif !ok {\n\t\t\treturn err\n\t\t}\n\t\terr = e.Cause()\n\t}\n\treturn nil\n}", "func (e Error) Cause() error {\n\treturn e.cause\n}", "func (e *RunError) Cause() error {\n\tif e.Inner != nil {\n\t\treturn e.Inner\n\t}\n\treturn e\n}", "func (e *wrappedError) Cause() error {\n\tif e.previous == nil {\n\t\treturn e\n\t}\n\tswitch err := e.previous.(type) {\n\tcase *wrappedError:\n\t\treturn err.Cause()\n\tdefault:\n\t\treturn err\n\t}\n}", "func Cause(err error) error {\n\tvar (\n\t\tcauser Causer\n\t\tok bool\n\t)\n\tfor err != nil {\n\t\tcauser, ok = err.(Causer)\n\t\tif !ok {\n\t\t\tbreak\n\t\t}\n\t\terr = causer.Cause()\n\t}\n\treturn err\n}", "func (e *OpError) Cause() error {\n\treturn e.Err\n}", "func (err *gooseError) Cause() error {\n\treturn err.cause\n}", "func (e *detailedError) Cause() error {\n\treturn e.cause\n}", "func (err *ExitError) Cause() error {\n\treturn err.Err\n}", "func (ce *ClientError) Cause() error {\n\treturn ce.err\n}", "func Cause(err error) error {\n\tif w, ok := err.(*Wrapped); ok {\n\t\t// if root level error\n\t\tif len(w.Errors) > 0 {\n\t\t\treturn w.Errors[0]\n\t\t}\n\t\t// already extracted error\n\t\treturn w\n\t}\n\treturn err\n}", "func Cause(err error) (error, bool) { // nolint: golint, staticcheck, stylecheck\n\terrWithContext, ok := err.(ContextError)\n\tif !ok {\n\t\treturn nil, false\n\t}\n\treturn errWithContext.Cause(), true\n}", "func Cause(err error) error {\n\tfor err != nil {\n\t\te, ok := err.(causer)\n\t\tif !ok {\n\t\t\tbreak\n\t\t}\n\t\terr = e.Cause()\n\t}\n\treturn err\n}", "func Cause(err error) error {\n\tfor err != nil {\n\t\tcause, ok := err.(causer)\n\t\tif !ok {\n\t\t\tbreak\n\t\t}\n\t\terr = cause.Cause()\n\t}\n\treturn err\n}", "func (e UnencodableValue) Cause() error {\n\treturn e.Err\n}", "func Cause(err error) 
error {\n\ttype wrapper interface {\n\t\tUnwrap() error\n\t}\n\tfor err != nil {\n\t\tcause, ok := err.(wrapper)\n\t\tif !ok {\n\t\t\tbreak\n\t\t}\n\t\terr = cause.Unwrap()\n\t}\n\treturn err\n}", "func (w *pipeError) Cause() error { return errors.Cause(w.error) }", "func (e *opaqueWrapper) Cause() error { return e.cause }", "func (e *opaqueWrapper) Cause() error { return e.cause }", "func (e ResolveRequestValidationError) Cause() error { return e.cause }", "func Cause(err error) error {\n\tfor {\n\t\tuerr := Unwrap(err)\n\t\tif uerr == nil {\n\t\t\treturn err\n\t\t}\n\t\terr = uerr\n\t}\n}", "func Cause(err error) error {\n\tfor {\n\t\tif e, ok := err.(errorCause); ok {\n\t\t\tif cause := e.Cause(); cause != nil {\n\t\t\t\terr = cause\n\t\t\t\tcontinue\n\t\t\t}\n\t\t}\n\t\treturn err\n\t}\n}", "func (e InternalUpstreamTransportValidationError) Cause() error { return e.cause }", "func (e EutracgiValidationError) Cause() error { return e.cause }", "func (w *withCode) Cause() error { return w.cause }", "func (e PciValidationError) Cause() error { return e.cause }", "func (e NoOneofsValidationError) Cause() error { return e.cause }", "func (e SimpleRequestValidationError) Cause() error { return e.cause }", "func Cause(err error) error {\n\tfor err != nil {\n\t\tmrpErr, ok := err.(Error)\n\t\tif !ok {\n\t\t\tbreak\n\t\t}\n\t\terr = gErrors.Cause(mrpErr.originalError)\n\t}\n\treturn err\n}", "func (e *withDomain) Cause() error { return e.cause }", "func (e LoggingValidationError) Cause() error { return e.cause }", "func (e CiliumCFValidationError) Cause() error { return e.cause }", "func (e AssessmentResultValidationError) Cause() error { return e.cause }", "func (e LoggingCFValidationError) Cause() error { return e.cause }", "func Cause(err error) error {\n\tfor err != nil {\n\t\tunwraped := errors.Unwrap(err)\n\t\tif unwraped == nil {\n\t\t\treturn err\n\t\t}\n\t\terr = unwraped\n\t}\n\treturn err\n}", "func (e NrtValidationError) Cause() error { return e.cause }", "func (e ResolveResponseValidationError) Cause() error { return e.cause }", "func (e StateChangeValidationError) Cause() error { return e.cause }", "func (e SXGValidationError) Cause() error { return e.cause }", "func (e EutracellIdentityValidationError) Cause() error { return e.cause }", "func (e WorkflowComponentValidationError) Cause() error { return e.cause }", "func (e MessageFValidationError) Cause() error { return e.cause }", "func (e EarfcnValidationError) Cause() error { return e.cause }", "func (e ActiveHealthCheckValidationError) Cause() error { return e.cause }", "func Cause(e interface{}) ECode {\n\tif e == nil {\n\t\treturn &ecode{code: 0}\n\t}\n\tif str, ok := e.(string); ok {\n\t\treturn &ecode{code: 500, message: str}\n\t}\n\terr, ok := e.(error)\n\tif !ok {\n\t\treturn &ecode{code: 500, message: reflect.TypeOf(e).Name()}\n\t}\n\tec, ok := errors.Cause(err).(ECode)\n\tif ok {\n\t\treturn ec\n\t}\n\treturn &ecode{code: 500, message: err.Error()}\n}", "func (e UpstreamValidationError) Cause() error { return e.cause }", "func (e UpstreamValidationError) Cause() error { return e.cause }", "func (e TransactionValidationError) Cause() error { return e.cause }", "func (e MessageCValidationError) Cause() error { return e.cause }", "func WithCause(err, cause error) error {\n\treturn errWithCause{\n\t\terror: err,\n\t\tcause: cause,\n\t}\n}", "func (e ActionValidationError) Cause() error { return e.cause }", "func (e AssessEvidenceRequestValidationError) Cause() error { return e.cause }", "func (e 
Upstream_TimeoutValidationError) Cause() error { return e.cause }", "func (e BootstrapValidationError) Cause() error { return e.cause }", "func (e TwoValidOneofsValidationError) Cause() error { return e.cause }", "func (e RdsValidationError) Cause() error { return e.cause }", "func (e MaxPciValidationError) Cause() error { return e.cause }", "func (e AdminValidationError) Cause() error { return e.cause }", "func (e RequirementRuleValidationError) Cause() error { return e.cause }", "func (e ResultValidationError) Cause() error { return e.cause }", "func (e InternalUpstreamTransport_MetadataValueSourceValidationError) Cause() error { return e.cause }", "func (e MaintemplateComponentValidationError) Cause() error { return e.cause }", "func (e RedactedValidationError) Cause() error { return e.cause }", "func (e CreatMessageRequestValidationError) Cause() error { return e.cause }", "func (e NrcgiValidationError) Cause() error { return e.cause }", "func (e UpsertEventResponseValidationError) Cause() error { return e.cause }", "func (e NrarfcnValidationError) Cause() error { return e.cause }", "func (e TwoOneofsValidationError) Cause() error { return e.cause }", "func (e PassiveHealthCheckValidationError) Cause() error { return e.cause }", "func (e MessageEValidationError) Cause() error { return e.cause }", "func (e GetEventByIDRequestValidationError) Cause() error { return e.cause }", "func (e ArfcnValidationError) Cause() error { return e.cause }", "func (e TenantValidationError) Cause() error { return e.cause }", "func (e GetRequestValidationError) Cause() error { return e.cause }", "func (e GetRequestValidationError) Cause() error { return e.cause }", "func (e StateValidationError) Cause() error { return e.cause }", "func (e MinioComponentValidationError) Cause() error { return e.cause }", "func (e LatencyFaultValidationError) Cause() error { return e.cause }", "func (e GetDisscusReqValidationError) Cause() error { return e.cause }", "func (e UpdateTodoRequestValidationError) Cause() error { return e.cause }", "func (e ManifestProjectCFValidationError) Cause() error { return e.cause }" ]
[ "0.8261931", "0.79593104", "0.7896341", "0.7866004", "0.77969515", "0.7710267", "0.7710267", "0.7710267", "0.7710267", "0.7710267", "0.7710267", "0.7710245", "0.76848143", "0.7658625", "0.76571184", "0.7650075", "0.76476574", "0.7625474", "0.7623792", "0.7621357", "0.7582015", "0.74775916", "0.74656785", "0.7424877", "0.7423645", "0.7384076", "0.73215586", "0.7306271", "0.7286286", "0.72688353", "0.7258698", "0.7210708", "0.7192562", "0.7107885", "0.7104621", "0.7038758", "0.701369", "0.701369", "0.69629866", "0.6927608", "0.692207", "0.69208515", "0.68938124", "0.6858123", "0.6846449", "0.6830235", "0.6825922", "0.68016034", "0.6800864", "0.6791525", "0.6778742", "0.67324674", "0.673176", "0.67316306", "0.6729585", "0.67155087", "0.6714904", "0.67148", "0.66955864", "0.668878", "0.66879916", "0.66822165", "0.66821957", "0.66791916", "0.6673011", "0.6673011", "0.6668595", "0.66512465", "0.66507614", "0.66484874", "0.6636346", "0.6633876", "0.66313785", "0.66304046", "0.6622965", "0.66204447", "0.6618046", "0.6617173", "0.66125673", "0.66055393", "0.6603956", "0.66004616", "0.6600119", "0.6587435", "0.6580937", "0.6578089", "0.6569218", "0.656675", "0.65664583", "0.6565433", "0.6560722", "0.65606016", "0.6553194", "0.6553194", "0.65503496", "0.6549731", "0.6546909", "0.6544467", "0.65359867", "0.6531173" ]
0.684976
44
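Editor's illustration (not part of the dataset record above): the "Cause function returns cause value." record and many of its negatives implement the same causer-style accessor, an error type exposing Cause() error so callers can unwrap it. The Go sketch below shows how such accessors are typically consumed by walking the chain until no further cause is exposed; the names causer, rootCause and wrapped are invented for this example and do not come from the record.

package main

import (
	"errors"
	"fmt"
)

// causer is the minimal interface satisfied by error types that expose a
// Cause() accessor, like the validation errors in the record above.
type causer interface {
	Cause() error
}

// rootCause repeatedly unwraps causer errors until it reaches an error that
// carries no further cause, mirroring the loop several negatives implement.
func rootCause(err error) error {
	for err != nil {
		c, ok := err.(causer)
		if !ok || c.Cause() == nil {
			break
		}
		err = c.Cause()
	}
	return err
}

// wrapped is a toy error type defined only for this example.
type wrapped struct {
	msg   string
	cause error
}

func (w *wrapped) Error() string { return w.msg }
func (w *wrapped) Cause() error  { return w.cause }

func main() {
	base := errors.New("field value out of range")
	err := &wrapped{msg: "validation failed", cause: base}
	fmt.Println(rootCause(err)) // prints: field value out of range
}

Usage note: rootCause(err) returns err unchanged when the type does not implement causer, so it is safe to call on any error.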
Key function returns key value.
func (e UpsertEventRequestValidationError) Key() bool { return e.key }
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "func (m *KeyValue) GetKey()(*string) {\n return m.key\n}", "func (f binaryEqualsFunc) key() Key {\n\treturn f.k\n}", "func (m *KeyUint) Key() driver.Value { return driver.Value(m.ID) }", "func (m *OMap) Key(n int) string {\n\treturn m.keys[n]\n}", "func (t *Type) Key() *Type", "func (f nullFunc) key() Key {\n\treturn f.k\n}", "func (v Variable) Key() string {\n\treturn (string)(v)\n}", "func (i GinJwtSignAlgorithm) Key() string {\n\tif val, ok := _GinJwtSignAlgorithmValueToKeyMap[i]; ok {\n\t\t// found\n\t\treturn val\n\t} else {\n\t\t// not found\n\t\treturn \"\"\n\t}\n}", "func (g *Generator) GetKey(K string) interface{} {\n\treturn g.data[K]\n}", "func (m *SearchBucket) GetKey()(*string) {\n return m.key\n}", "func (f *Filter) getKey(key string) string {\n\tif f.HashKeys {\n\t\th := sha1.New()\n\t\ts := h.Sum([]byte(key))\n\t\treturn fmt.Sprintf(\"%x\", s)\n\t}\n\treturn key\n}", "func getKey(ing *extensions.Ingress, t *testing.T) string {\n\tkey, err := keyFunc(ing)\n\tif err != nil {\n\t\tt.Fatalf(\"Unexpected error getting key for Ingress %v: %v\", ing.Name, err)\n\t}\n\treturn key\n}", "func (f *field) Key() string {\n\treturn f.k\n}", "func (i GinBindType) Key() string {\n\tif val, ok := _GinBindTypeValueToKeyMap[i]; ok {\n\t\t// found\n\t\treturn val\n\t} else {\n\t\t// not found\n\t\treturn \"\"\n\t}\n}", "func (c Node) GetKey() string {\n\treturn c.key\n}", "func (m *RegistryKeyState) GetKey()(*string) {\n return m.key\n}", "func (akv StringKeyValue) Key() string {\n\treturn akv.orig.Key\n}", "func (a AddItem) Key() string { return string(a) }", "func (area *MineArea) GetKey() string {\n\treturn GetKey(area.X, area.Y)\n}", "func (d *Disk) getKey(p *DiskParams) []byte {\n\treturn []byte(time_util.TimeToName(time.Unix(p.ExicutionTime, 0), fmt.Sprintf(\"%x\", d.hasher.Sum(nil))))\n}", "func (e *OrderedMapElement[K, V]) Key() K {\n\treturn e.element.key\n}", "func getKey(cluster *clusteroperator.Cluster, t *testing.T) string {\n\tif key, err := controller.KeyFunc(cluster); err != nil {\n\t\tt.Errorf(\"Unexpected error getting key for Cluster %v: %v\", cluster.Name, err)\n\t\treturn \"\"\n\t} else {\n\t\treturn key\n\t}\n}", "func cacheKeyFunc(obj interface{}) (string, error) {\n\tkey := obj.(*cacheEntry).key\n\treturn key, nil\n}", "func (node *Node) Key() interface{} {\n\treturn fmt.Sprintf(\"%v\", node.contents)\n}", "func (s *Mem) Key(key interface{}) string {\n\treturn fmt.Sprintf(\"%v-%v\", s.prefix, key)\n}", "func (vrfs *VRFShare) GetKey() datastore.Key {\n\treturn datastore.ToKey(fmt.Sprintf(\"%v\", vrfs.Round))\n}", "func stringKeyFunc(obj interface{}) (string, error) {\n\tkey := obj.(*nodeidentity.Info).InstanceID\n\treturn key, nil\n}", "func (e Enum) GetKey(value any) string {\n\tfor k, v := range e {\n\t\tif reflect.DeepEqual(v, value) {\n\t\t\treturn k\n\t\t}\n\t}\n\treturn \"\"\n}", "func (m *Map) Key() Type { return m.key }", "func getKey(w http.ResponseWriter, ps httprouter.Params) (string, bool){\n\treturn ps.ByName(\"id\"), true\n}", "func (v *Value) GetKey() *string {\n\tret := C.zj_GetKey(v.V)\n\tif ret == nil {\n\t\treturn nil\n\t}\n\tretStr := C.GoString(ret)\n\treturn &retStr\n}", "func (f *Factor) Key() string { return f.ID }", "func (c *KeyValueChanger) Key() (string, error) {\n\tif c.err != nil {\n\t\treturn \"\", c.err\n\t}\n\treturn c.node.content.key().(string), nil\n}", "func (a DataNodeKV) Key() string {\n\treturn a.K\n}", "func GetKey(allkeys [][]byte, loc Where) []byte {\n\tif loc == Left {\n\t\treturn allkeys[0]\n\t}\n\tif loc == Right 
{\n\t\treturn allkeys[len(allkeys)-1]\n\t}\n\t// select a random index between 1 and allkeys-2\n\t// nolint:gosec\n\tidx := rand.Int()%(len(allkeys)-2) + 1\n\treturn allkeys[idx]\n}", "func KeyFunc(name, namespace string) string {\n\tif len(namespace) == 0 {\n\t\treturn name\n\t}\n\treturn namespace + \"/\" + name\n}", "func (it *Iterator) Key() string { return it.n.k }", "func (s *session) getKey() string {\n\treturn s.uuid\n}", "func (o SchedulingNodeAffinityOutput) Key() pulumi.StringPtrOutput {\n\treturn o.ApplyT(func(v SchedulingNodeAffinity) *string { return v.Key }).(pulumi.StringPtrOutput)\n}", "func (i SNSProtocol) Key() string {\n\tif val, ok := _SNSProtocolValueToKeyMap[i]; ok {\n\t\t// found\n\t\treturn val\n\t} else {\n\t\t// not found\n\t\treturn \"\"\n\t}\n}", "func (it *Iterator) Key() interface{} { return it.n.k }", "func getkey(key ...interface{}) interface{} {\n\tif len(key) > 0 {\n\t\treturn key[0]\n\t}\n\n\treturn nil\n}", "func (i SNSSubscribeAttribute) Key() string {\n\tif val, ok := _SNSSubscribeAttributeValueToKeyMap[i]; ok {\n\t\t// found\n\t\treturn val\n\t} else {\n\t\t// not found\n\t\treturn \"\"\n\t}\n}", "func (it *iterator) Key() []byte {\n\tif len(it.keys) > 0 {\n\t\treturn []byte(it.keys[0])\n\t}\n\treturn nil\n}", "func (this *DefaultHandler) GetKey(xesRedis redo.XesRedisBase) (ret string) {\n\tdefer func() {\n\t\tif xesRedis.GetCtx() == nil {\n\t\t\treturn\n\t\t}\n\t\tbench := xesRedis.GetCtx().Value(\"IS_BENCHMARK\")\n\t\tif cast.ToString(bench) == \"1\" {\n\t\t\tret = \"benchmark_\" + ret\n\t\t}\n\t}()\n\n\tkeyInfo := this.getKeyInfo(xesRedis)\n\tkey := cast.ToString(keyInfo[\"key\"])\n\tif key == \"\" {\n\t\tret = xesRedis.GetKeyName()\n\t\treturn\n\t}\n\tret = fmt.Sprintf(key, (xesRedis.GetKeyParams())...)\n\treturn\n}", "func (st *MemStorage) GetKey(gun, role string) (algorithm string, public []byte, err error) {\n\t// no need for lock. 
It's ok to return nil if an update\n\t// wasn't observed\n\tg, ok := st.keys[gun]\n\tif !ok {\n\t\treturn \"\", nil, &ErrNoKey{gun: gun}\n\t}\n\tk, ok := g[role]\n\tif !ok {\n\t\treturn \"\", nil, &ErrNoKey{gun: gun}\n\t}\n\n\treturn k.algorithm, k.public, nil\n}", "func (e *EntrySet) Get(key string) string {\n return e.keys[key]\n}", "func (v *V) Key() string {\n\treturn v.key\n}", "func (it *Iter) Key() byte { return it.top().key }", "func (s Stash) Key() string {\n\tvals := utils.MapValues(s.payload)\n\tif len(vals) < 1 {\n\t\treturn \"\"\n\t}\n\n\treturn fmt.Sprintf(\"$%s\", vals[0])\n}", "func (i SNSPlatformApplicationAttribute) Key() string {\n\tif val, ok := _SNSPlatformApplicationAttributeValueToKeyMap[i]; ok {\n\t\t// found\n\t\treturn val\n\t} else {\n\t\t// not found\n\t\treturn \"\"\n\t}\n}", "func (o Operator) Key() string {\n\treturn fmt.Sprintf(\"operator.%s\", o.Aid)\n}", "func (i *StringIterator) Key() Object {\n\treturn &Int{Value: int64(i.i - 1)}\n}", "func (mci *XMCacheIterator) Key() []byte {\n\tif mci.err != nil || mci.dir == dirReleased {\n\t\treturn nil\n\t}\n\tswitch mci.index {\n\tcase 0, 1:\n\t\treturn mci.iters[mci.index].Key()\n\tcase 2:\n\t\tif mci.mc.isPenetrate {\n\t\t\treturn mci.mIter.Key()\n\t\t}\n\t\treturn nil\n\t}\n\treturn nil\n}", "func (s *Arena) getKey(offset uint32, size uint16) []byte {\n\treturn s.data[offset : offset+uint32(size)]\n}", "func (o ReservationAffinityOutput) Key() pulumi.StringPtrOutput {\n\treturn o.ApplyT(func(v ReservationAffinity) *string { return v.Key }).(pulumi.StringPtrOutput)\n}", "func (f DefaultField) Key() string {\n\treturn f.K\n}", "func Key(v string) predicate.Blob {\n\treturn predicate.Blob(\n\t\tfunc(s *sql.Selector) {\n\t\t\ts.Where(sql.EQ(s.C(FieldKey), v))\n\t\t},\n\t)\n}", "func (m Match) Key() string {\n\treturn fmt.Sprintf(\"match:%s\", m.ID())\n}", "func (d *Activity) KeyVal() string {\n\treturn d.ExteralID\n}", "func (key twofishKey) Key() []byte {\n\treturn key[:]\n}", "func getKey(data string) string {\n\tsign := md5.Sum([]byte(data))\n\tsignStr := fmt.Sprintf(\"%x\", sign)\n\treturn signStr[:7]\n}", "func (l *LangPackStringPluralized) GetKey() (value string) {\n\tif l == nil {\n\t\treturn\n\t}\n\treturn l.Key\n}", "func (t Task) Key() string {\n\treturn fmt.Sprintf(\"%s:%s\", t.Name, t.ID)\n}", "func (k Keys) RangeKey() interface{} { return k[1] }", "func (d *DStarLite) keyFor(s *dStarLiteNode) key {\n\t/*\n\t procedure CalculateKey(s)\n\t {01”} return [min(g(s), rhs(s)) + h(s_start, s) + k_m; min(g(s), rhs(s))];\n\t*/\n\tk := key{1: math.Min(s.g, s.rhs)}\n\tk[0] = k[1] + d.heuristic(d.s.Node, s.Node) + d.keyModifier\n\treturn k\n}", "func (stateID StateID) Key() string {\n\treturn string(stateID.LastAppHash)\n}", "func (m *Metric) GetKey() string {\n\tif m == nil || m.Key == nil {\n\t\treturn \"\"\n\t}\n\treturn *m.Key\n}", "func (u User) Key() interface{} {\n\treturn u.ID\n}", "func (b *BitSet) Key() string {\n\tif b == nil {\n\t\treturn \"\"\n\t} else {\n\t\treturn string(b.Bits.Bytes())\n\t}\n}", "func (e EnumByte) Key() EnumByteKey {\n return EnumByteKey(e)\n}", "func (n *lnode) key() []byte {\n\tbuf := (*[maxAllocSize]byte)(unsafe.Pointer(n))\n\treturn buf[n.pos : n.pos+n.ksize]\n}", "func (p *pv) key() pvKey {\n\treturn newPVKey(p.Cluster, p.Name)\n}", "func (i *MapIterator) Key() Object {\n\tk := i.k[i.i-1]\n\treturn &String{Value: k}\n}", "func (k *KVItem) Key() (interface{}, error) {\n\tvar cKey unsafe.Pointer\n\tvar keySize C.uint64_t\n\tvar keyType C.tiledb_datatype_t\n\tret := 
C.tiledb_kv_item_get_key(k.context.tiledbContext, k.tiledbKVItem, &cKey, &keyType, &keySize)\n\n\tif ret != C.TILEDB_OK {\n\t\treturn nil, fmt.Errorf(\"Error getting key for KVItem: %s\", k.context.LastError())\n\t}\n\n\tswitch Datatype(keyType) {\n\tcase TILEDB_INT8:\n\t\t// If the key size is greater than the size of a single value in bytes it is an array\n\t\telements := int(keySize) / C.sizeof_int8_t\n\t\tif elements > 1 {\n\t\t\ttmpslice := (*[1 << 30]C.int8_t)(unsafe.Pointer(cKey))[:elements:elements]\n\t\t\tretSlice := make([]int8, elements)\n\t\t\tfor i, s := range tmpslice {\n\t\t\t\tretSlice[i] = int8(s)\n\t\t\t}\n\t\t\treturn retSlice, nil\n\t\t} else {\n\t\t\treturn int8(*(*C.int8_t)(cKey)), nil\n\t\t}\n\tcase TILEDB_INT16:\n\t\t// If the key size is greater than the size of a single value in bytes it is an array\n\t\telements := int(keySize) / C.sizeof_int16_t\n\t\tif elements > 1 {\n\t\t\ttmpslice := (*[1 << 30]C.int16_t)(unsafe.Pointer(cKey))[:elements:elements]\n\t\t\tretSlice := make([]int16, elements)\n\t\t\tfor i, s := range tmpslice {\n\t\t\t\tretSlice[i] = int16(s)\n\t\t\t}\n\t\t\treturn retSlice, nil\n\t\t} else {\n\t\t\treturn int16(*(*C.int16_t)(cKey)), nil\n\t\t}\n\tcase TILEDB_INT32:\n\t\t// If the key size is greater than the size of a single value in bytes it is an array\n\t\telements := int(keySize) / C.sizeof_int32_t\n\t\tif elements > 1 {\n\t\t\ttmpslice := (*[1 << 30]C.int32_t)(unsafe.Pointer(cKey))[:elements:elements]\n\t\t\tretSlice := make([]int32, elements)\n\t\t\tfor i, s := range tmpslice {\n\t\t\t\tretSlice[i] = int32(s)\n\t\t\t}\n\t\t\treturn retSlice, nil\n\t\t} else {\n\t\t\treturn int32(*(*C.int32_t)(cKey)), nil\n\t\t}\n\tcase TILEDB_INT64:\n\t\t// If the key size is greater than the size of a single value in bytes it is an array\n\t\telements := int(keySize) / C.sizeof_int64_t\n\t\tif elements > 1 {\n\t\t\ttmpslice := (*[1 << 30]C.int64_t)(unsafe.Pointer(cKey))[:elements:elements]\n\t\t\tretSlice := make([]int64, elements)\n\t\t\tfor i, s := range tmpslice {\n\t\t\t\tretSlice[i] = int64(s)\n\t\t\t}\n\t\t\treturn retSlice, nil\n\t\t} else {\n\t\t\treturn int64(*(*C.int64_t)(cKey)), nil\n\t\t}\n\tcase TILEDB_UINT8:\n\t\t// If the key size is greater than the size of a single value in bytes it is an array\n\t\telements := int(keySize) / C.sizeof_uint8_t\n\t\tif elements > 1 {\n\t\t\ttmpslice := (*[1 << 30]C.uint8_t)(unsafe.Pointer(cKey))[:elements:elements]\n\t\t\tretSlice := make([]uint8, elements)\n\t\t\tfor i, s := range tmpslice {\n\t\t\t\tretSlice[i] = uint8(s)\n\t\t\t}\n\t\t\treturn retSlice, nil\n\t\t} else {\n\t\t\treturn int8(*(*C.uint8_t)(cKey)), nil\n\t\t}\n\tcase TILEDB_UINT16:\n\t\t// If the key size is greater than the size of a single value in bytes it is an array\n\t\telements := int(keySize) / C.sizeof_uint16_t\n\t\tif elements > 1 {\n\t\t\ttmpslice := (*[1 << 30]C.uint16_t)(unsafe.Pointer(cKey))[:elements:elements]\n\t\t\tretSlice := make([]uint16, elements)\n\t\t\tfor i, s := range tmpslice {\n\t\t\t\tretSlice[i] = uint16(s)\n\t\t\t}\n\t\t\treturn retSlice, nil\n\t\t} else {\n\t\t\treturn int16(*(*C.uint16_t)(cKey)), nil\n\t\t}\n\tcase TILEDB_UINT32:\n\t\t// If the key size is greater than the size of a single value in bytes it is an array\n\t\telements := int(keySize) / C.sizeof_uint32_t\n\t\tif elements > 1 {\n\t\t\ttmpslice := (*[1 << 30]C.uint32_t)(unsafe.Pointer(cKey))[:elements:elements]\n\t\t\tretSlice := make([]uint32, elements)\n\t\t\tfor i, s := range tmpslice {\n\t\t\t\tretSlice[i] = uint32(s)\n\t\t\t}\n\t\t\treturn 
retSlice, nil\n\t\t} else {\n\t\t\treturn int32(*(*C.uint32_t)(cKey)), nil\n\t\t}\n\tcase TILEDB_UINT64:\n\t\t// If the key size is greater than the size of a single value in bytes it is an array\n\t\telements := int(keySize) / C.sizeof_uint64_t\n\t\tif elements > 1 {\n\t\t\ttmpslice := (*[1 << 30]C.uint64_t)(unsafe.Pointer(cKey))[:elements:elements]\n\t\t\tretSlice := make([]uint64, elements)\n\t\t\tfor i, s := range tmpslice {\n\t\t\t\tretSlice[i] = uint64(s)\n\t\t\t}\n\t\t\treturn retSlice, nil\n\t\t} else {\n\t\t\treturn int64(*(*C.uint64_t)(cKey)), nil\n\t\t}\n\tcase TILEDB_FLOAT32:\n\t\t// If the key size is greater than the size of a single value in bytes it is an array\n\t\telements := int(keySize) / C.sizeof_float\n\t\tif elements > 1 {\n\t\t\ttmpslice := (*[1 << 30]C.float)(unsafe.Pointer(cKey))[:elements:elements]\n\t\t\tretSlice := make([]float32, elements)\n\t\t\tfor i, s := range tmpslice {\n\t\t\t\tretSlice[i] = float32(s)\n\t\t\t}\n\t\t\treturn retSlice, nil\n\t\t} else {\n\t\t\treturn float32(*(*C.float)(cKey)), nil\n\t\t}\n\tcase TILEDB_FLOAT64:\n\t\t// If the key size is greater than the size of a single value in bytes it is an array\n\t\telements := int(keySize) / C.sizeof_double\n\t\tif elements > 1 {\n\t\t\ttmpslice := (*[1 << 30]C.double)(unsafe.Pointer(cKey))[:elements:elements]\n\t\t\tretSlice := make([]float64, elements)\n\t\t\tfor i, s := range tmpslice {\n\t\t\t\tretSlice[i] = float64(s)\n\t\t\t}\n\t\t\treturn retSlice, nil\n\t\t} else {\n\t\t\treturn float64(*(*C.double)(cKey)), nil\n\t\t}\n\tcase TILEDB_CHAR:\n\t\telements := int(keySize) / C.sizeof_char\n\t\treturn C.GoStringN((*C.char)(cKey), C.int(elements)), nil\n\n\tdefault:\n\t\treturn nil, fmt.Errorf(\"Unsupported tiledb key type: %v\", keyType)\n\t}\n\n\treturn nil, fmt.Errorf(\"Error getting key for KVItem\")\n}", "func (u Users) Key(luid *windows.LUID) (int64, error) {\r\n\tif luid == nil {\r\n\t\treturn 0, errors.New(\"got empty LUID pointer\")\r\n\t}\r\n\tkey := int64(int64(luid.HighPart<<32) + int64(luid.LowPart))\r\n\treturn key, nil\r\n}", "func (a *Anime) Key() string {\n\treturn fmt.Sprintf(\"anime:%d\", a.ID)\n}", "func (m MapEntry) Key() interface{} {\n\treturn m.key\n}", "func (f KeyMakerFunc) KeyFor(r *http.Request) string {\n\treturn f(r)\n}", "func (t *TimeSeries) GetKey() string {\n\treturn t.key\n}", "func (m *Map) Get(key string) string {\n\tif m.IsEmpty() {\n\t\treturn \"\"\n\t}\n\thash := m.hash([]byte(key))\n\tn := node{hash: hash, key: key}\n\titer := floor(&m.nodes.Tree, &n)\n\tif iter == m.nodes.End() {\n\t\titer = m.nodes.Begin()\n\t}\n\treturn iter.Node().Key.(*node).key\n}", "func (t *ScheduledTask) Key() string {\n\treturn fmt.Sprintf(taskKeyFormat, keyPrefixScheduled, t.ID, t.score)\n}", "func (it *iterator) Key() []byte {\n\treturn it.current.key\n}", "func (eln *EmptyLeafNode) GetKey() []byte {\n\treturn nil\n}", "func (h dataUsageHash) Key() string {\n\treturn string(h)\n}", "func (c *Container) Key() string {\n\tc.Lock()\n\tdefer c.Unlock()\n\treturn c.ID\n}", "func (c Repository) GetKey(key string) string {\n\tval, err := c.Client.Get(key).Result()\n\n\tif err != nil {\n\t\tlog.Println(err)\n\t}\n\n\treturn val\n}", "func (f Base) Key() string {\n\treturn f.key\n}", "func (o StudioComponentScriptParameterKeyValueOutput) Key() pulumi.StringPtrOutput {\n\treturn o.ApplyT(func(v StudioComponentScriptParameterKeyValue) *string { return v.Key }).(pulumi.StringPtrOutput)\n}", "func (o *ResourceDefinitionFilter) GetKey() string {\n\tif o == nil {\n\t\tvar ret 
string\n\t\treturn ret\n\t}\n\n\treturn o.Key\n}", "func (it *KeyAccess_Iterator) Key() interface{} {\n\treturn it.node.key\n}", "func (b Bucket) Key() interface{} {\n\treturn b[\"key\"]\n}", "func (m *Map) Get(key string) string {\n\tif m.IsEmpty() {\n\t\treturn \"\"\n\t}\n\n\thash := int(m.hash([]byte(key)))\n\n\t// Binary search for appropriate replica.\n\tidx := sort.Search(len(m.keys), func(i int) bool { return m.keys[i] >= hash })\n\n\t// Means we have cycled back to the first replica.\n\tif idx == len(m.keys) {\n\t\tidx = 0\n\t}\n\n\treturn m.hashMap[m.keys[idx]]\n}", "func (c *Counter) GetKey() string {\n\treturn c.key\n}", "func Key(id string, fallback string) Reference {\n\treturn key{id, fallback}\n}", "func (a *PositionalAttribute) Key() string {\n\treturn AttrPositionalIndex + strconv.Itoa(a.Index)\n}", "func (n *Node) Key() interface{} {\n\treturn n.key\n}", "func (e Timing) Key() string {\n\treturn e.Name\n}", "func Key(key string) query.Extractor {\n\treturn &keyExtractor{key}\n}", "func (i *Iterator) Key() []byte {\n\treturn i.iterator.Item().KeyCopy(nil)\n}", "func (m *Metric) Key() string {\n\treturn fmt.Sprintf(\"<%s%d%s>\", m.Name, m.Timestamp, m.Tags)\n}" ]
[ "0.7397974", "0.703695", "0.7026126", "0.69730234", "0.69701165", "0.69472975", "0.682121", "0.67752403", "0.6702173", "0.6691155", "0.66223186", "0.6602185", "0.66009104", "0.65937275", "0.65673846", "0.6555592", "0.65304273", "0.6521155", "0.6511681", "0.65062934", "0.64982766", "0.64867014", "0.6477575", "0.6462233", "0.6456774", "0.6456152", "0.6448241", "0.6435275", "0.6423325", "0.6412427", "0.64096636", "0.6403262", "0.6395327", "0.63929945", "0.6382585", "0.6378694", "0.63715774", "0.63671046", "0.635377", "0.63430053", "0.63418114", "0.6339266", "0.63258415", "0.6319039", "0.630293", "0.6300368", "0.6298253", "0.6296133", "0.6295445", "0.6281786", "0.6279424", "0.6277453", "0.6277033", "0.62735796", "0.6269087", "0.6262938", "0.62600297", "0.6259835", "0.6242855", "0.62427336", "0.6239893", "0.6226979", "0.62228185", "0.6216291", "0.62118614", "0.6209014", "0.62075627", "0.619765", "0.6197426", "0.61971486", "0.6196739", "0.6192416", "0.6191223", "0.6183839", "0.6179522", "0.6177141", "0.6172575", "0.61719537", "0.6170614", "0.6162783", "0.61570954", "0.6154456", "0.6152929", "0.615149", "0.61509156", "0.61395836", "0.6138672", "0.61365676", "0.613636", "0.61338246", "0.6133771", "0.6129422", "0.61284614", "0.612092", "0.6119081", "0.61121005", "0.611087", "0.6106958", "0.6106701", "0.61020154", "0.6100722" ]
0.0
-1
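Editor's illustration (not part of the dataset record above): the "Key function returns key value." record shows the boolean Key accessor that generated validation errors expose alongside Field, Reason and Cause. The Go sketch below is a hand-written stand-in, not the generated type itself; the validationError struct and its field values are assumptions made for illustration, showing how such accessors typically combine into an Error() message.

package main

import "fmt"

// validationError mirrors the accessor shape seen in this record and its
// negatives; it is a stand-in defined only for this example.
type validationError struct {
	field  string
	reason string
	key    bool
	cause  error
}

func (e validationError) Field() string  { return e.field }
func (e validationError) Reason() string { return e.reason }
func (e validationError) Key() bool      { return e.key }
func (e validationError) Cause() error   { return e.cause }

// Error assembles the accessors into a human-readable message, marking map
// keys when Key() reports true and appending the cause when one is set.
func (e validationError) Error() string {
	key := ""
	if e.key {
		key = "key for "
	}
	msg := fmt.Sprintf("invalid %sfield %s: %s", key, e.field, e.reason)
	if e.cause != nil {
		msg += " | cause: " + e.cause.Error()
	}
	return msg
}

func main() {
	err := validationError{
		field:  "Participants",
		reason: "value length must be at least 4 runes",
	}
	fmt.Println(err) // prints: invalid field Participants: value length must be at least 4 runes
}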
Validate checks the field values on UpsertEventResponse with the rules defined in the proto definition for this message. If any rules are violated, an error is returned.
func (m *UpsertEventResponse) Validate() error {
	if m == nil {
		return nil
	}

	// no validation rules for Id

	return nil
}
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "func (m *UpsertEventRequest) Validate() error {\n\tif m == nil {\n\t\treturn nil\n\t}\n\n\t// no validation rules for Id\n\n\tif utf8.RuneCountInString(m.GetName()) < 1 {\n\t\treturn UpsertEventRequestValidationError{\n\t\t\tfield: \"Name\",\n\t\t\treason: \"value length must be at least 1 runes\",\n\t\t}\n\t}\n\n\tif utf8.RuneCountInString(m.GetParticipants()) < 4 {\n\t\treturn UpsertEventRequestValidationError{\n\t\t\tfield: \"Participants\",\n\t\t\treason: \"value length must be at least 4 runes\",\n\t\t}\n\t}\n\n\tfor idx, item := range m.GetSections() {\n\t\t_, _ = idx, item\n\n\t\tif v, ok := interface{}(item).(interface{ Validate() error }); ok {\n\t\t\tif err := v.Validate(); err != nil {\n\t\t\t\treturn UpsertEventRequestValidationError{\n\t\t\t\t\tfield: fmt.Sprintf(\"Sections[%v]\", idx),\n\t\t\t\t\treason: \"embedded message failed validation\",\n\t\t\t\t\tcause: err,\n\t\t\t\t}\n\t\t\t}\n\t\t}\n\n\t}\n\n\treturn nil\n}", "func (e UpsertEventResponseValidationError) Cause() error { return e.cause }", "func (e UpsertEventResponseValidationError) Reason() string { return e.reason }", "func (m *UpdateEmployeeResponse) Validate() error {\n\tif m == nil {\n\t\treturn nil\n\t}\n\n\t// no validation rules for EmpNo\n\n\treturn nil\n}", "func (e UpsertEventRequestValidationError) Reason() string { return e.reason }", "func (m *UpdateMeetingV1Response) Validate() error {\n\tif m == nil {\n\t\treturn nil\n\t}\n\n\treturn nil\n}", "func (ut *eventPayload) Validate() (err error) {\n\tif ut.SportID == nil {\n\t\terr = goa.MergeErrors(err, goa.MissingAttributeError(`request`, \"sportId\"))\n\t}\n\tif ut.EventID == nil {\n\t\terr = goa.MergeErrors(err, goa.MissingAttributeError(`request`, \"eventId\"))\n\t}\n\tif ut.Name == nil {\n\t\terr = goa.MergeErrors(err, goa.MissingAttributeError(`request`, \"name\"))\n\t}\n\tif ut.SubTitle == nil {\n\t\terr = goa.MergeErrors(err, goa.MissingAttributeError(`request`, \"subTitle\"))\n\t}\n\tif ut.StartDtTm == nil {\n\t\terr = goa.MergeErrors(err, goa.MissingAttributeError(`request`, \"startDtTm\"))\n\t}\n\tif ut.EndDtTm == nil {\n\t\terr = goa.MergeErrors(err, goa.MissingAttributeError(`request`, \"endDtTm\"))\n\t}\n\tif ut.LocationID == nil {\n\t\terr = goa.MergeErrors(err, goa.MissingAttributeError(`request`, \"locationId\"))\n\t}\n\tif ut.TeamAdvanceMethod == nil {\n\t\terr = goa.MergeErrors(err, goa.MissingAttributeError(`request`, \"teamAdvanceMethod\"))\n\t}\n\treturn\n}", "func (e UpsertEventRequestValidationError) Cause() error { return e.cause }", "func (o *PostPunchInOKBodyResponse) Validate(formats strfmt.Registry) error {\n\tvar res []error\n\n\tif err := o.validateAttendance(formats); err != nil {\n\t\tres = append(res, err)\n\t}\n\n\tif len(res) > 0 {\n\t\treturn errors.CompositeValidationError(res...)\n\t}\n\treturn nil\n}", "func (m *UpsertEventRequest_Section) Validate() error {\n\tif m == nil {\n\t\treturn nil\n\t}\n\n\tif utf8.RuneCountInString(m.GetName()) < 1 {\n\t\treturn UpsertEventRequest_SectionValidationError{\n\t\t\tfield: \"Name\",\n\t\t\treason: \"value length must be at least 1 runes\",\n\t\t}\n\t}\n\n\tif utf8.RuneCountInString(m.GetDescription()) < 1 {\n\t\treturn UpsertEventRequest_SectionValidationError{\n\t\t\tfield: \"Description\",\n\t\t\treason: \"value length must be at least 1 runes\",\n\t\t}\n\t}\n\n\t// no validation rules for Position\n\n\tfor idx, item := range m.GetQuestions() {\n\t\t_, _ = idx, item\n\n\t\tif v, ok := interface{}(item).(interface{ Validate() error }); ok {\n\t\t\tif err := v.Validate(); 
err != nil {\n\t\t\t\treturn UpsertEventRequest_SectionValidationError{\n\t\t\t\t\tfield: fmt.Sprintf(\"Questions[%v]\", idx),\n\t\t\t\t\treason: \"embedded message failed validation\",\n\t\t\t\t\tcause: err,\n\t\t\t\t}\n\t\t\t}\n\t\t}\n\n\t}\n\n\treturn nil\n}", "func (ut *EventPayload) Validate() (err error) {\n\tif ut.SportID == \"\" {\n\t\terr = goa.MergeErrors(err, goa.MissingAttributeError(`type`, \"sportId\"))\n\t}\n\tif ut.EventID == \"\" {\n\t\terr = goa.MergeErrors(err, goa.MissingAttributeError(`type`, \"eventId\"))\n\t}\n\tif ut.Name == \"\" {\n\t\terr = goa.MergeErrors(err, goa.MissingAttributeError(`type`, \"name\"))\n\t}\n\tif ut.SubTitle == \"\" {\n\t\terr = goa.MergeErrors(err, goa.MissingAttributeError(`type`, \"subTitle\"))\n\t}\n\tif ut.StartDtTm == \"\" {\n\t\terr = goa.MergeErrors(err, goa.MissingAttributeError(`type`, \"startDtTm\"))\n\t}\n\tif ut.EndDtTm == \"\" {\n\t\terr = goa.MergeErrors(err, goa.MissingAttributeError(`type`, \"endDtTm\"))\n\t}\n\tif ut.LocationID == \"\" {\n\t\terr = goa.MergeErrors(err, goa.MissingAttributeError(`type`, \"locationId\"))\n\t}\n\tif ut.TeamAdvanceMethod == \"\" {\n\t\terr = goa.MergeErrors(err, goa.MissingAttributeError(`type`, \"teamAdvanceMethod\"))\n\t}\n\treturn\n}", "func (m *UpsertEventRequest_Option) Validate() error {\n\tif m == nil {\n\t\treturn nil\n\t}\n\n\tif utf8.RuneCountInString(m.GetContent()) < 1 {\n\t\treturn UpsertEventRequest_OptionValidationError{\n\t\t\tfield: \"Content\",\n\t\t\treason: \"value length must be at least 1 runes\",\n\t\t}\n\t}\n\n\treturn nil\n}", "func (m *UpsertCatalogObjectResponse) Validate(formats strfmt.Registry) error {\n\tvar res []error\n\n\tif err := m.validateCatalogObject(formats); err != nil {\n\t\tres = append(res, err)\n\t}\n\n\tif err := m.validateErrors(formats); err != nil {\n\t\tres = append(res, err)\n\t}\n\n\tif err := m.validateIDMappings(formats); err != nil {\n\t\tres = append(res, err)\n\t}\n\n\tif len(res) > 0 {\n\t\treturn errors.CompositeValidationError(res...)\n\t}\n\treturn nil\n}", "func (m *PostUsersImportsResponse) Validate(formats strfmt.Registry) error {\n\tvar res []error\n\n\tif err := m.validateID(formats); err != nil {\n\t\tres = append(res, err)\n\t}\n\n\tif err := m.validateStatus(formats); err != nil {\n\t\tres = append(res, err)\n\t}\n\n\tif err := m.validateType(formats); err != nil {\n\t\tres = append(res, err)\n\t}\n\n\tif len(res) > 0 {\n\t\treturn errors.CompositeValidationError(res...)\n\t}\n\treturn nil\n}", "func UpdateEvent(w http.ResponseWriter, r *http.Request) {\n\tvars := mux.Vars(r)\n\n\tif key, ok := vars[\"eventId\"]; ok {\n\t\tvar event Event\n\n\t\tdecoder := json.NewDecoder(r.Body)\n\t\terr := decoder.Decode(&event)\n\n\t\t// Get EID from request URL\n\t\tif string(event.EID) != key {\n\t\t\tw.WriteHeader(http.StatusBadRequest)\n\t\t\tfmt.Fprintf(w, `{\"error\": \"Bad Request\"}`)\n\t\t\treturn\n\t\t}\n\n\t\tif err != nil {\n\t\t\tLogError(w, err)\n\t\t\treturn\n\t\t}\n\n\t\t// Validate input\n\t\tif err := event.Validate(); err != nil {\n\t\t\tw.WriteHeader(http.StatusBadRequest)\n\t\t\tfmt.Fprintf(w, `{\"error\": \"Bad Request\"}`)\n\t\t\treturn\n\t\t}\n\n\t\tif err := event.Save(); err != nil {\n\t\t\tLogError(w, err)\n\t\t\treturn\n\t\t}\n\n\t\tw.WriteHeader(http.StatusAccepted)\n\t\tfmt.Fprintf(w, `{\"eid\": %d}`, event.EID)\n\t} else {\n\t\tw.WriteHeader(http.StatusNotFound)\n\t\tfmt.Fprintf(w, `{\"error\": \"Not Found\"}`)\n\t}\n}", "func NewSummaryEventResponse() *SummaryEventResponse {\n\tthis := 
SummaryEventResponse{}\n\treturn &this\n}", "func (m *StreamEventsResponse) Validate() error {\n\treturn m.validate(false)\n}", "func (payload *putEventPayload) Validate() (err error) {\n\tif payload.Etype == nil {\n\t\terr = goa.MergeErrors(err, goa.MissingAttributeError(`raw`, \"etype\"))\n\t}\n\tif payload.Action == nil {\n\t\terr = goa.MergeErrors(err, goa.MissingAttributeError(`raw`, \"action\"))\n\t}\n\tif payload.From == nil {\n\t\terr = goa.MergeErrors(err, goa.MissingAttributeError(`raw`, \"from\"))\n\t}\n\treturn\n}", "func (m *UpdateMessageResponse) Validate() error {\n\tif m == nil {\n\t\treturn nil\n\t}\n\n\t// no validation rules for Updated\n\n\treturn nil\n}", "func (ut *ownerInputPayload) Validate() (err error) {\n\tif ut.Email != nil {\n\t\tif err2 := goa.ValidateFormat(goa.FormatEmail, *ut.Email); err2 != nil {\n\t\t\terr = goa.MergeErrors(err, goa.InvalidFormatError(`response.email`, *ut.Email, goa.FormatEmail, err2))\n\t\t}\n\t}\n\treturn\n}", "func (ut *OwnerInputPayload) Validate() (err error) {\n\tif ut.Email != nil {\n\t\tif err2 := goa.ValidateFormat(goa.FormatEmail, *ut.Email); err2 != nil {\n\t\t\terr = goa.MergeErrors(err, goa.InvalidFormatError(`response.email`, *ut.Email, goa.FormatEmail, err2))\n\t\t}\n\t}\n\treturn\n}", "func (uom *UpdateOneModel) SetUpsert(upsert bool) *UpdateOneModel {\n\tuom.Upsert = &upsert\n\treturn uom\n}", "func (m *UpdateResponse) Validate() error {\n\tif m == nil {\n\t\treturn nil\n\t}\n\n\t// no validation rules for Id\n\n\t// no validation rules for Username\n\n\t// no validation rules for Email\n\n\t// no validation rules for Role\n\n\treturn nil\n}", "func (payload *PutEventPayload) Validate() (err error) {\n\tif payload.Etype == \"\" {\n\t\terr = goa.MergeErrors(err, goa.MissingAttributeError(`raw`, \"etype\"))\n\t}\n\tif payload.Action == \"\" {\n\t\terr = goa.MergeErrors(err, goa.MissingAttributeError(`raw`, \"action\"))\n\t}\n\tif payload.From == \"\" {\n\t\terr = goa.MergeErrors(err, goa.MissingAttributeError(`raw`, \"from\"))\n\t}\n\treturn\n}", "func (e *ExternalInterfaces) SubmitTestEvent(ctx context.Context, req *eventsproto.EventSubRequest) response.RPC {\n\tvar resp response.RPC\n\tauthResp, err := e.Auth(ctx, req.SessionToken, []string{common.PrivilegeConfigureComponents}, []string{})\n\tif authResp.StatusCode != http.StatusOK {\n\t\terrMsg := fmt.Sprintf(\"error while trying to authenticate session: status code: %v, status message: %v\", authResp.StatusCode, authResp.StatusMessage)\n\t\tif err != nil {\n\t\t\terrMsg = errMsg + \": \" + err.Error()\n\t\t}\n\t\tl.LogWithFields(ctx).Error(errMsg)\n\t\treturn authResp\n\t}\n\t// First get the UserName from SessionToken\n\tsessionUserName, err := e.GetSessionUserName(ctx, req.SessionToken)\n\tif err != nil {\n\t\t// handle the error case with appropriate response body\n\t\terrMsg := \"error while trying to authenticate session: \" + err.Error()\n\t\tl.LogWithFields(ctx).Error(errMsg)\n\t\treturn common.GeneralError(http.StatusUnauthorized, response.NoValidSession, errMsg, nil, nil)\n\t}\n\n\ttestEvent, statusMessage, errMsg, msgArgs := validAndGenSubTestReq(req.PostBody)\n\tif statusMessage != response.Success {\n\t\tl.LogWithFields(ctx).Error(errMsg)\n\t\treturn common.GeneralError(http.StatusBadRequest, statusMessage, errMsg, msgArgs, nil)\n\t}\n\n\t// parsing the event\n\tvar eventObj interface{}\n\terr = JSONUnmarshal(req.PostBody, &eventObj)\n\tif err != nil {\n\t\terrMsg := \"unable to parse the event request\" + 
err.Error()\n\t\tl.LogWithFields(ctx).Error(errMsg)\n\t\treturn common.GeneralError(http.StatusInternalServerError, response.InternalError, errMsg, nil, nil)\n\t}\n\t// Validating the request JSON properties for case sensitive\n\tinvalidProperties, err := RequestParamsCaseValidatorFunc(req.PostBody, eventObj)\n\tif err != nil {\n\t\terrMsg := \"error while validating request parameters: \" + err.Error()\n\t\tl.LogWithFields(ctx).Error(errMsg)\n\t\treturn common.GeneralError(http.StatusInternalServerError, response.InternalError, errMsg, nil, nil)\n\t} else if invalidProperties != \"\" {\n\t\terrorMessage := \"error: one or more properties given in the request body are not valid, ensure properties are listed in upper camel case \"\n\t\tl.LogWithFields(ctx).Error(errorMessage)\n\t\tresp := common.GeneralError(http.StatusBadRequest, response.PropertyUnknown, errorMessage, []interface{}{invalidProperties}, nil)\n\t\treturn resp\n\t}\n\t// Find out all the subscription destinations of the requesting user\n\tsubscriptions, err := e.GetEvtSubscriptions(sessionUserName)\n\tif err != nil {\n\t\t// Internal error\n\t\terrMsg := \"error while trying to find the event destination\"\n\t\tl.LogWithFields(ctx).Error(errMsg)\n\t\treturn common.GeneralError(http.StatusInternalServerError, response.InternalError, errMsg, nil, nil)\n\t}\n\t// we need common.MessageData to find the correct destination to send test event\n\tvar message common.MessageData\n\tmessage.Events = append(message.Events, *testEvent)\n\tmessageBytes, _ := json.Marshal(message)\n\teventUniqueID := uuid.NewV4().String()\n\tfor _, sub := range subscriptions {\n\t\tfor _, origin := range sub.EventDestination.OriginResources {\n\t\t\tif sub.EventDestination.Destination != \"\" {\n\t\t\t\tsubscription := *sub.EventDestination\n\t\t\t\tsubscription.ID = sub.SubscriptionID\n\n\t\t\t\tif filterEventsToBeForwarded(ctx, subscription, message.Events[0], []model.Link{{Oid: origin.Oid}}) {\n\t\t\t\t\tl.LogWithFields(ctx).Info(\"Destination: \" + sub.EventDestination.Destination)\n\t\t\t\t\tgo e.postEvent(evmodel.EventPost{Destination: sub.EventDestination.Destination, EventID: eventUniqueID,\n\t\t\t\t\t\tMessage: messageBytes})\n\t\t\t\t}\n\t\t\t}\n\t\t}\n\t}\n\n\tresp.StatusCode = http.StatusOK\n\tresp.StatusMessage = response.Success\n\tresp.Body = response.ErrorClass{\n\t\tCode: resp.StatusMessage,\n\t\tMessage: \"Request completed successfully.\",\n\t}\n\treturn resp\n\n}", "func (db *DB) Upsert(form url.Values, dataFields interface{}) *ResponseMessage {\n\n\t//bucketName := \"master_erp\"\n\t//docID := \"12121\"\n\tbytes := db.ProcessData(form, dataFields)\n\t//db.ProcessData(form, intrfc)\n\t//json.Unmarshal(bytes, intrfc) //***s\n\n\t//fmt.Println(\"DATA>>>>\", intrfc)\n\n\t//bytes, _ := json.Marshal(intrfc)\n\t//fmt.Println(\"intrfcBytes:\", string(bytes))\n\n\tdocID := form.Get(\"aid\") //docid=aid\n\tbucketName := form.Get(\"bucket\")\n\n\tinsertQuery := upsertQueryBuilder(bucketName, docID, string(bytes))\n\t//insertQuery := insertQueryBuilder(bucketName, docID, intrfc)\n\n\t//fmt.Println(insertQuery)\n\tnqlInsertStatement := sqlStatementJSON(insertQuery)\n\tresponseMessage := db.queryRequest(nqlInsertStatement)\n\n\treturn responseMessage\n}", "func (obj *SObject) Upsert() *SObject {\n\tlog.Println(logPrefix, \"ExternalID:\", obj.ExternalID())\n\tlog.Println(logPrefix, \"ExternalIDField:\", obj.ExternalIDFieldName())\n\tif obj.Type() == \"\" || obj.client() == nil || obj.ExternalIDFieldName() == \"\" ||\n\t\tobj.ExternalID() == \"\" 
{\n\t\t// Sanity check.\n\t\tlog.Println(logPrefix, \"required fields are missing\")\n\t\treturn nil\n\t}\n\n\t// Make a copy of the incoming SObject, but skip certain metadata fields as they're not understood by salesforce.\n\treqObj := obj.makeCopy()\n\treqData, err := json.Marshal(reqObj)\n\tif err != nil {\n\t\tlog.Println(logPrefix, \"failed to convert sobject to json,\", err)\n\t\treturn nil\n\t}\n\n\tqueryBase := \"sobjects/\"\n\tif obj.client().useToolingAPI {\n\t\tqueryBase = \"tooling/sobjects/\"\n\t}\n\turl := obj.client().\n\t\tmakeURL(queryBase + obj.Type() + \"/\" + obj.ExternalIDFieldName() + \"/\" + obj.ExternalID())\n\trespData, err := obj.client().httpRequest(http.MethodPatch, url, bytes.NewReader(reqData))\n\tif err != nil {\n\t\tlog.Println(logPrefix, \"failed to process http request,\", err)\n\t\treturn nil\n\t}\n\n\t// Upsert returns with 201 and id in response if a new record is created. If a record is updated, it returns\n\t// a 204 with an empty response\n\tif len(respData) > 0 {\n\t\terr = obj.setIDFromResponseData(respData)\n\t\tif err != nil {\n\t\t\tlog.Println(logPrefix, \"failed to parse response,\", err)\n\t\t\treturn nil\n\t\t}\n\t}\n\n\treturn obj\n}", "func (m *UserUpdatePayload) Validate(formats strfmt.Registry) error {\n\treturn nil\n}", "func (oee *OtxEpubEpub) Upsert(db XODB) error {\n\tvar err error\n\n\t// if already exist, bail\n\tif oee._exists {\n\t\treturn errors.New(\"insert failed: already exists\")\n\t}\n\n\t// sql query\n\tconst sqlstr = `INSERT INTO public.otx_epub_epub (` +\n\t\t`document_ptr_id, publisher, source, oebps_folder, manifest, contents` +\n\t\t`) VALUES (` +\n\t\t`$1, $2, $3, $4, $5, $6` +\n\t\t`) ON CONFLICT (document_ptr_id) DO UPDATE SET (` +\n\t\t`document_ptr_id, publisher, source, oebps_folder, manifest, contents` +\n\t\t`) = (` +\n\t\t`EXCLUDED.document_ptr_id, EXCLUDED.publisher, EXCLUDED.source, EXCLUDED.oebps_folder, EXCLUDED.manifest, EXCLUDED.contents` +\n\t\t`)`\n\n\t// run query\n\tXOLog(sqlstr, oee.DocumentPtrID, oee.Publisher, oee.Source, oee.OebpsFolder, oee.Manifest, oee.Contents)\n\t_, err = db.Exec(sqlstr, oee.DocumentPtrID, oee.Publisher, oee.Source, oee.OebpsFolder, oee.Manifest, oee.Contents)\n\tif err != nil {\n\t\treturn err\n\t}\n\n\t// set existence\n\toee._exists = true\n\n\treturn nil\n}", "func (o *PostAPIV2EventsBody) Validate(formats strfmt.Registry) error {\n\tvar res []error\n\n\tif err := o.validateAttributes(formats); err != nil {\n\t\tres = append(res, err)\n\t}\n\n\tif err := o.validateEntityID(formats); err != nil {\n\t\tres = append(res, err)\n\t}\n\n\tif err := o.validateEvent(formats); err != nil {\n\t\tres = append(res, err)\n\t}\n\n\tif len(res) > 0 {\n\t\treturn errors.CompositeValidationError(res...)\n\t}\n\treturn nil\n}", "func (m *CatalogItemResourceUpfrontPriceResponse) Validate(formats strfmt.Registry) error {\n\tvar res []error\n\n\tif err := m.validateStatus(formats); err != nil {\n\t\tres = append(res, err)\n\t}\n\n\tif len(res) > 0 {\n\t\treturn errors.CompositeValidationError(res...)\n\t}\n\treturn nil\n}", "func (payload *PostEventPayload) Validate() (err error) {\n\tif payload.Etype == \"\" {\n\t\terr = goa.MergeErrors(err, goa.MissingAttributeError(`raw`, \"etype\"))\n\t}\n\tif payload.Action == \"\" {\n\t\terr = goa.MergeErrors(err, goa.MissingAttributeError(`raw`, \"action\"))\n\t}\n\tif payload.From == \"\" {\n\t\terr = goa.MergeErrors(err, goa.MissingAttributeError(`raw`, \"from\"))\n\t}\n\treturn\n}", "func (payload *postEventPayload) Validate() (err error) {\n\tif 
payload.Etype == nil {\n\t\terr = goa.MergeErrors(err, goa.MissingAttributeError(`raw`, \"etype\"))\n\t}\n\tif payload.Action == nil {\n\t\terr = goa.MergeErrors(err, goa.MissingAttributeError(`raw`, \"action\"))\n\t}\n\tif payload.From == nil {\n\t\terr = goa.MergeErrors(err, goa.MissingAttributeError(`raw`, \"from\"))\n\t}\n\treturn\n}", "func (p *AutoCommitter) Upsert(pair []interface{}) (e error) {\n\tif p.started {\n\t\tp.docsUpsert <- pair\n\t} else {\n\t\te = errors.New(fmt.Sprintf(\"AutoCommitter-%s(%s)_is_closed\", p.name, p.coll))\n\t}\n\treturn\n}", "func (b EmployeeUpdatedEvent) ValidateEmployeeUpdatedEvent() error {\n\tvar validate *validator.Validate\n\tvalidate = validator.New()\n\terr := validate.Struct(b)\n\n\tif err != nil {\n\t\treturn err\n\t}\n\treturn err\n}", "func (r *USPSResponse) UnmarshalXML(d *xml.Decoder, start xml.StartElement) error {\n\tparseEvent := func(e *uspsEvent) Update {\n\t\td := fmt.Sprintf(\"%v %v\", e.EventDate.Text, e.EventTime.Text)\n\t\tdt, err := time.Parse(\"January 2, 2006 15:04 pm\", d)\n\t\tif err != nil {\n\t\t\tlog.Debug.Println(err) // this isn't serious enough to warrant a return\n\t\t}\n\n\t\tup := Update{\n\t\t\tDateTime: dt,\n\t\t\tStatus: e.Event.Text,\n\t\t}\n\n\t\tup.Location.City = e.EventCity.Text\n\t\tup.Location.State = e.EventState.Text\n\t\tup.Location.Country = e.EventCountry.Text\n\t\treturn up\n\t}\n\n\ttype alias USPSResponse\n\traw := &alias{}\n\n\terr := d.DecodeElement(&raw, &start)\n\tif err != nil {\n\t\treturn err\n\t}\n\n\tr.Response.TrackingNumber = raw.uspsTrackResponse.TrackInfo.AttrID\n\n\tup := parseEvent(raw.uspsTrackResponse.TrackInfo.TrackSummary)\n\tr.Response.Updates = append(r.Response.Updates, up)\n\n\tfor _, a := range raw.uspsTrackResponse.TrackInfo.TrackDetail {\n\t\tup := parseEvent(a)\n\t\tr.Response.Updates = append(r.Response.Updates, up)\n\t}\n\n\tr.Response.URL = fmt.Sprintf(\"https://tools.usps.com/go/TrackConfirmAction?origTrackNum=%v\", r.Response.TrackingNumber)\n\treturn err\n}", "func (m *UpdateTenantV1Response) Validate() error {\n\tif m == nil {\n\t\treturn nil\n\t}\n\n\t// no validation rules for Updated\n\n\treturn nil\n}", "func (m *RemoveMeetingV1Response) Validate() error {\n\tif m == nil {\n\t\treturn nil\n\t}\n\n\treturn nil\n}", "func (m *LolPlayerLevelUpPlayerLevelUpEvent) Validate(formats strfmt.Registry) error {\n\treturn nil\n}", "func (uo *UpdateOptions) SetUpsert(b bool) *UpdateOptions {\n\tuo.Upsert = &b\n\treturn uo\n}", "func (service *EntriesService) Upsert(spaceID, contentTypeID string, e *Entry) error {\n\tbytesArray, err := json.Marshal(e)\n\tif err != nil {\n\t\treturn err\n\t}\n\n\tvar path string\n\tvar method string\n\n\tif e.Sys != nil && e.Sys.ID != \"\" {\n\t\tpath = fmt.Sprintf(\"/spaces/%s/entries/%s\", spaceID, e.Sys.ID)\n\t\tmethod = \"PUT\"\n\t} else {\n\t\tpath = fmt.Sprintf(\"/spaces/%s/entries\", spaceID)\n\t\tmethod = \"POST\"\n\t}\n\n\treq, err := service.c.newRequest(method, path, nil, bytes.NewReader(bytesArray))\n\tif err != nil {\n\t\treturn err\n\t}\n\n\treq.Header.Set(\"X-Contentful-Content-Type\", contentTypeID)\n\treq.Header.Set(\"X-Contentful-Version\", strconv.Itoa(e.GetVersion()))\n\n\treturn service.c.do(req, e)\n}", "func ConvertUserEventRequestToUsageEvent(req CreateUserEventRequest) (*usageeventsv1.UsageEventOneOf, error) {\n\tswitch req.Event {\n\tcase bannerClickEvent:\n\t\treturn &usageeventsv1.UsageEventOneOf{Event: &usageeventsv1.UsageEventOneOf_UiBannerClick{\n\t\t\t\tUiBannerClick: 
&usageeventsv1.UIBannerClickEvent{\n\t\t\t\t\tAlert: req.Alert,\n\t\t\t\t},\n\t\t\t}},\n\t\t\tnil\n\n\tcase addFirstResourceClickEvent:\n\t\treturn &usageeventsv1.UsageEventOneOf{Event: &usageeventsv1.UsageEventOneOf_UiOnboardAddFirstResourceClick{\n\t\t\t\tUiOnboardAddFirstResourceClick: &usageeventsv1.UIOnboardAddFirstResourceClickEvent{},\n\t\t\t}},\n\t\t\tnil\n\n\tcase addFirstResourceLaterClickEvent:\n\t\treturn &usageeventsv1.UsageEventOneOf{Event: &usageeventsv1.UsageEventOneOf_UiOnboardAddFirstResourceLaterClick{\n\t\t\t\tUiOnboardAddFirstResourceLaterClick: &usageeventsv1.UIOnboardAddFirstResourceLaterClickEvent{},\n\t\t\t}},\n\t\t\tnil\n\n\tcase uiIntegrationEnrollStartEvent,\n\t\tuiIntegrationEnrollCompleteEvent:\n\n\t\tvar event IntegrationEnrollEventData\n\t\tif err := json.Unmarshal([]byte(*req.EventData), &event); err != nil {\n\t\t\treturn nil, trace.BadParameter(\"eventData is invalid: %v\", err)\n\t\t}\n\n\t\tkindEnum, ok := usageeventsv1.IntegrationEnrollKind_value[event.Kind]\n\t\tif !ok {\n\t\t\treturn nil, trace.BadParameter(\"invalid integration enroll kind %s\", event.Kind)\n\t\t}\n\n\t\tswitch req.Event {\n\t\tcase uiIntegrationEnrollStartEvent:\n\t\t\treturn &usageeventsv1.UsageEventOneOf{Event: &usageeventsv1.UsageEventOneOf_UiIntegrationEnrollStartEvent{\n\t\t\t\tUiIntegrationEnrollStartEvent: &usageeventsv1.UIIntegrationEnrollStartEvent{\n\t\t\t\t\tMetadata: &usageeventsv1.IntegrationEnrollMetadata{\n\t\t\t\t\t\tId: event.ID,\n\t\t\t\t\t\tKind: usageeventsv1.IntegrationEnrollKind(kindEnum),\n\t\t\t\t\t},\n\t\t\t\t},\n\t\t\t}}, nil\n\t\tcase uiIntegrationEnrollCompleteEvent:\n\t\t\treturn &usageeventsv1.UsageEventOneOf{Event: &usageeventsv1.UsageEventOneOf_UiIntegrationEnrollCompleteEvent{\n\t\t\t\tUiIntegrationEnrollCompleteEvent: &usageeventsv1.UIIntegrationEnrollCompleteEvent{\n\t\t\t\t\tMetadata: &usageeventsv1.IntegrationEnrollMetadata{\n\t\t\t\t\t\tId: event.ID,\n\t\t\t\t\t\tKind: usageeventsv1.IntegrationEnrollKind(kindEnum),\n\t\t\t\t\t},\n\t\t\t\t},\n\t\t\t}}, nil\n\t\t}\n\n\tcase uiDiscoverStartedEvent,\n\t\tuiDiscoverResourceSelectionEvent,\n\t\tuiDiscoverIntegrationAWSOIDCConnectEvent,\n\t\tuiDiscoverDatabaseRDSEnrollEvent,\n\t\tuiDiscoverDeployServiceEvent,\n\t\tuiDiscoverDatabaseRegisterEvent,\n\t\tuiDiscoverDatabaseConfigureMTLSEvent,\n\t\tuiDiscoverDatabaseConfigureIAMPolicyEvent,\n\t\tuiDiscoverDesktopActiveDirectoryToolsInstallEvent,\n\t\tuiDiscoverDesktopActiveDirectoryConfigureEvent,\n\t\tuiDiscoverAutoDiscoveredResourcesEvent,\n\t\tuiDiscoverPrincipalsConfigureEvent,\n\t\tuiDiscoverTestConnectionEvent,\n\t\tuiDiscoverCompletedEvent:\n\n\t\tvar discoverEvent DiscoverEventData\n\t\tif err := json.Unmarshal([]byte(*req.EventData), &discoverEvent); err != nil {\n\t\t\treturn nil, trace.BadParameter(\"eventData is invalid: %v\", err)\n\t\t}\n\n\t\tevent, err := discoverEvent.ToUsageEvent(req.Event)\n\t\tif err != nil {\n\t\t\treturn nil, trace.BadParameter(\"failed to convert eventData: %v\", err)\n\t\t}\n\t\treturn event, nil\n\n\tcase createNewRoleClickEvent:\n\t\treturn &usageeventsv1.UsageEventOneOf{Event: &usageeventsv1.UsageEventOneOf_UiCreateNewRoleClick{\n\t\t\t\tUiCreateNewRoleClick: &usageeventsv1.UICreateNewRoleClickEvent{},\n\t\t\t}},\n\t\t\tnil\n\n\tcase createNewRoleSaveClickEvent:\n\t\treturn &usageeventsv1.UsageEventOneOf{Event: &usageeventsv1.UsageEventOneOf_UiCreateNewRoleSaveClick{\n\t\t\t\tUiCreateNewRoleSaveClick: &usageeventsv1.UICreateNewRoleSaveClickEvent{},\n\t\t\t}},\n\t\t\tnil\n\n\tcase 
createNewRoleCancelClickEvent:\n\t\treturn &usageeventsv1.UsageEventOneOf{Event: &usageeventsv1.UsageEventOneOf_UiCreateNewRoleCancelClick{\n\t\t\t\tUiCreateNewRoleCancelClick: &usageeventsv1.UICreateNewRoleCancelClickEvent{},\n\t\t\t}},\n\t\t\tnil\n\n\tcase createNewRoleViewDocumentationClickEvent:\n\t\treturn &usageeventsv1.UsageEventOneOf{Event: &usageeventsv1.UsageEventOneOf_UiCreateNewRoleViewDocumentationClick{\n\t\t\t\tUiCreateNewRoleViewDocumentationClick: &usageeventsv1.UICreateNewRoleViewDocumentationClickEvent{},\n\t\t\t}},\n\t\t\tnil\n\n\tcase uiCallToActionClickEvent:\n\t\tvar cta int32\n\t\tif err := json.Unmarshal([]byte(*req.EventData), &cta); err != nil {\n\t\t\treturn nil, trace.BadParameter(\"eventData is invalid: %v\", err)\n\t\t}\n\n\t\treturn &usageeventsv1.UsageEventOneOf{Event: &usageeventsv1.UsageEventOneOf_UiCallToActionClickEvent{\n\t\t\t\tUiCallToActionClickEvent: &usageeventsv1.UICallToActionClickEvent{\n\t\t\t\t\tCta: usageeventsv1.CTA(cta),\n\t\t\t\t}}},\n\t\t\tnil\n\n\tcase questionnaireSubmitEvent:\n\t\treturn &usageeventsv1.UsageEventOneOf{Event: &usageeventsv1.UsageEventOneOf_UiOnboardQuestionnaireSubmit{\n\t\t\t\tUiOnboardQuestionnaireSubmit: &usageeventsv1.UIOnboardQuestionnaireSubmitEvent{},\n\t\t\t}},\n\t\t\tnil\n\n\tcase featureRecommendationEvent:\n\t\tevent := struct {\n\t\t\tFeature int32 `json:\"feature\"`\n\t\t\tFeatureRecommendationStatus int32 `json:\"featureRecommendationStatus\"`\n\t\t}{}\n\n\t\tif err := json.Unmarshal([]byte(*req.EventData), &event); err != nil {\n\t\t\treturn nil, trace.BadParameter(\"eventData is invalid: %v\", err)\n\t\t}\n\n\t\treturn &usageeventsv1.UsageEventOneOf{Event: &usageeventsv1.UsageEventOneOf_FeatureRecommendationEvent{\n\t\t\t\tFeatureRecommendationEvent: &usageeventsv1.FeatureRecommendationEvent{\n\t\t\t\t\tFeature: usageeventsv1.Feature(event.Feature),\n\t\t\t\t\tFeatureRecommendationStatus: usageeventsv1.FeatureRecommendationStatus(event.FeatureRecommendationStatus),\n\t\t\t\t},\n\t\t\t}},\n\t\t\tnil\n\t}\n\n\treturn nil, trace.BadParameter(\"invalid event %s\", req.Event)\n}", "func (o *CMFFamilyUserPoliciesTake) Upsert(ctx context.Context, exec boil.ContextExecutor, updateColumns, insertColumns boil.Columns) error {\n\tif o == nil {\n\t\treturn errors.New(\"models: no cmf_family_user_policies_take provided for upsert\")\n\t}\n\tif !boil.TimestampsAreSkipped(ctx) {\n\t\tcurrTime := time.Now().In(boil.GetLocation())\n\n\t\tif o.CreatedAt.IsZero() {\n\t\t\to.CreatedAt = currTime\n\t\t}\n\t\to.UpdatedAt = currTime\n\t}\n\n\tif err := o.doBeforeUpsertHooks(ctx, exec); err != nil {\n\t\treturn err\n\t}\n\n\tnzDefaults := queries.NonZeroDefaultSet(cmfFamilyUserPoliciesTakeColumnsWithDefault, o)\n\tnzUniques := queries.NonZeroDefaultSet(mySQLCMFFamilyUserPoliciesTakeUniqueColumns, o)\n\n\tif len(nzUniques) == 0 {\n\t\treturn errors.New(\"cannot upsert with a table that cannot conflict on a unique column\")\n\t}\n\n\t// Build cache key in-line uglily - mysql vs psql problems\n\tbuf := strmangle.GetBuffer()\n\tbuf.WriteString(strconv.Itoa(updateColumns.Kind))\n\tfor _, c := range updateColumns.Cols {\n\t\tbuf.WriteString(c)\n\t}\n\tbuf.WriteByte('.')\n\tbuf.WriteString(strconv.Itoa(insertColumns.Kind))\n\tfor _, c := range insertColumns.Cols {\n\t\tbuf.WriteString(c)\n\t}\n\tbuf.WriteByte('.')\n\tfor _, c := range nzDefaults {\n\t\tbuf.WriteString(c)\n\t}\n\tbuf.WriteByte('.')\n\tfor _, c := range nzUniques {\n\t\tbuf.WriteString(c)\n\t}\n\tkey := 
buf.String()\n\tstrmangle.PutBuffer(buf)\n\n\tcmfFamilyUserPoliciesTakeUpsertCacheMut.RLock()\n\tcache, cached := cmfFamilyUserPoliciesTakeUpsertCache[key]\n\tcmfFamilyUserPoliciesTakeUpsertCacheMut.RUnlock()\n\n\tvar err error\n\n\tif !cached {\n\t\tinsert, ret := insertColumns.InsertColumnSet(\n\t\t\tcmfFamilyUserPoliciesTakeAllColumns,\n\t\t\tcmfFamilyUserPoliciesTakeColumnsWithDefault,\n\t\t\tcmfFamilyUserPoliciesTakeColumnsWithoutDefault,\n\t\t\tnzDefaults,\n\t\t)\n\t\tupdate := updateColumns.UpdateColumnSet(\n\t\t\tcmfFamilyUserPoliciesTakeAllColumns,\n\t\t\tcmfFamilyUserPoliciesTakePrimaryKeyColumns,\n\t\t)\n\n\t\tif !updateColumns.IsNone() && len(update) == 0 {\n\t\t\treturn errors.New(\"models: unable to upsert cmf_family_user_policies_take, could not build update column list\")\n\t\t}\n\n\t\tret = strmangle.SetComplement(ret, nzUniques)\n\t\tcache.query = buildUpsertQueryMySQL(dialect, \"`cmf_family_user_policies_take`\", update, insert)\n\t\tcache.retQuery = fmt.Sprintf(\n\t\t\t\"SELECT %s FROM `cmf_family_user_policies_take` WHERE %s\",\n\t\t\tstrings.Join(strmangle.IdentQuoteSlice(dialect.LQ, dialect.RQ, ret), \",\"),\n\t\t\tstrmangle.WhereClause(\"`\", \"`\", 0, nzUniques),\n\t\t)\n\n\t\tcache.valueMapping, err = queries.BindMapping(cmfFamilyUserPoliciesTakeType, cmfFamilyUserPoliciesTakeMapping, insert)\n\t\tif err != nil {\n\t\t\treturn err\n\t\t}\n\t\tif len(ret) != 0 {\n\t\t\tcache.retMapping, err = queries.BindMapping(cmfFamilyUserPoliciesTakeType, cmfFamilyUserPoliciesTakeMapping, ret)\n\t\t\tif err != nil {\n\t\t\t\treturn err\n\t\t\t}\n\t\t}\n\t}\n\n\tvalue := reflect.Indirect(reflect.ValueOf(o))\n\tvals := queries.ValuesFromMapping(value, cache.valueMapping)\n\tvar returns []interface{}\n\tif len(cache.retMapping) != 0 {\n\t\treturns = queries.PtrsFromMapping(value, cache.retMapping)\n\t}\n\n\tif boil.IsDebug(ctx) {\n\t\twriter := boil.DebugWriterFrom(ctx)\n\t\tfmt.Fprintln(writer, cache.query)\n\t\tfmt.Fprintln(writer, vals)\n\t}\n\tresult, err := exec.ExecContext(ctx, cache.query, vals...)\n\n\tif err != nil {\n\t\treturn errors.Wrap(err, \"models: unable to upsert for cmf_family_user_policies_take\")\n\t}\n\n\tvar lastID int64\n\tvar uniqueMap []uint64\n\tvar nzUniqueCols []interface{}\n\n\tif len(cache.retMapping) == 0 {\n\t\tgoto CacheNoHooks\n\t}\n\n\tlastID, err = result.LastInsertId()\n\tif err != nil {\n\t\treturn ErrSyncFail\n\t}\n\n\to.ID = int(lastID)\n\tif lastID != 0 && len(cache.retMapping) == 1 && cache.retMapping[0] == cmfFamilyUserPoliciesTakeMapping[\"id\"] {\n\t\tgoto CacheNoHooks\n\t}\n\n\tuniqueMap, err = queries.BindMapping(cmfFamilyUserPoliciesTakeType, cmfFamilyUserPoliciesTakeMapping, nzUniques)\n\tif err != nil {\n\t\treturn errors.Wrap(err, \"models: unable to retrieve unique values for cmf_family_user_policies_take\")\n\t}\n\tnzUniqueCols = queries.ValuesFromMapping(reflect.Indirect(reflect.ValueOf(o)), uniqueMap)\n\n\tif boil.IsDebug(ctx) {\n\t\twriter := boil.DebugWriterFrom(ctx)\n\t\tfmt.Fprintln(writer, cache.retQuery)\n\t\tfmt.Fprintln(writer, nzUniqueCols...)\n\t}\n\terr = exec.QueryRowContext(ctx, cache.retQuery, nzUniqueCols...).Scan(returns...)\n\tif err != nil {\n\t\treturn errors.Wrap(err, \"models: unable to populate default values for cmf_family_user_policies_take\")\n\t}\n\nCacheNoHooks:\n\tif !cached {\n\t\tcmfFamilyUserPoliciesTakeUpsertCacheMut.Lock()\n\t\tcmfFamilyUserPoliciesTakeUpsertCache[key] = cache\n\t\tcmfFamilyUserPoliciesTakeUpsertCacheMut.Unlock()\n\t}\n\n\treturn o.doAfterUpsertHooks(ctx, exec)\n}", "func (e 
EventHandler) PostEvent(ctx context.Context, params api.PostEventParams) middleware.Responder {\n\t// fmt.Println(params.HTTPRequest.Header.Get(\"X-API-Key\"))\n\t// validateErr := u.userService.Validate(params.HTTPRequest.Header.Get(\"X-API-Key\"), \"\")\n\t// if validateErr != nil {\n\t// \terr := models.Error{\n\t// \t\tMessage: validateErr.Error(),\n\t// \t}\n\t// \treturn api.NewRegisterUserUnauthorized().WithPayload(&err)\n\t// }\n\teventRes := &models.EventResponse{}\n\tpostStatus, err := e.eventService.CreateEvent(params.EventBody)\n\n\tfmt.Println(\"postStatus:\", *postStatus)\n\tif err != nil {\n\t\tpostErr := models.Response{\n\t\t\tCode: 400,\n\t\t\tStatus: \"Failed\",\n\t\t\tMessage: err.Error(),\n\t\t}\n\t\tlogrus.Warnf(postErr.Message)\n\t\treturn api.NewPostEventBadRequest().WithPayload(&postErr)\n\t}\n\tlogrus.Infoln(\"The event has been created in the DB:\", *postStatus)\n\n\teventRes.Response = &models.Response{\n\t\tCode: 200,\n\t\tStatus: \"Success\",\n\t\tMessage: \"Event Posted Successfully\",\n\t}\n\tparams.EventBody.ID = *postStatus\n\teventRes.Event = params.EventBody\n\treturn api.NewPostEventOK().WithPayload(eventRes)\n}", "func (umm *UpdateManyModel) SetUpsert(upsert bool) *UpdateManyModel {\n\tumm.Upsert = &upsert\n\treturn umm\n}", "func (m *PublishResponse) Validate() error {\n\tif m == nil {\n\t\treturn nil\n\t}\n\n\t// no validation rules for Status\n\n\treturn nil\n}", "func (o *UpdateUserUnprocessableEntity) WriteResponse(rw http.ResponseWriter, producer runtime.Producer) {\n\n\trw.WriteHeader(422)\n\tif o.Payload != nil {\n\t\tpayload := o.Payload\n\t\tif err := producer.Produce(rw, payload); err != nil {\n\t\t\tpanic(err) // let the recovery middleware deal with this\n\t\t}\n\t}\n}", "func (m *GetEventByIDResponse) Validate() error {\n\tif m == nil {\n\t\treturn nil\n\t}\n\n\t// no validation rules for Id\n\n\t// no validation rules for Name\n\n\t// no validation rules for Participants\n\n\tfor idx, item := range m.GetSections() {\n\t\t_, _ = idx, item\n\n\t\tif v, ok := interface{}(item).(interface{ Validate() error }); ok {\n\t\t\tif err := v.Validate(); err != nil {\n\t\t\t\treturn GetEventByIDResponseValidationError{\n\t\t\t\t\tfield: fmt.Sprintf(\"Sections[%v]\", idx),\n\t\t\t\t\treason: \"embedded message failed validation\",\n\t\t\t\t\tcause: err,\n\t\t\t\t}\n\t\t\t}\n\t\t}\n\n\t}\n\n\tif v, ok := interface{}(m.GetUpdatedAt()).(interface{ Validate() error }); ok {\n\t\tif err := v.Validate(); err != nil {\n\t\t\treturn GetEventByIDResponseValidationError{\n\t\t\t\tfield: \"UpdatedAt\",\n\t\t\t\treason: \"embedded message failed validation\",\n\t\t\t\tcause: err,\n\t\t\t}\n\t\t}\n\t}\n\n\t// no validation rules for IsOpened\n\n\t// no validation rules for IsApproved\n\n\treturn nil\n}", "func (e UpsertEventResponseValidationError) Key() bool { return e.key }", "func (m *UpsertEventRequest_Question) Validate() error {\n\tif m == nil {\n\t\treturn nil\n\t}\n\n\tif utf8.RuneCountInString(m.GetContent()) < 1 {\n\t\treturn UpsertEventRequest_QuestionValidationError{\n\t\t\tfield: \"Content\",\n\t\t\treason: \"value length must be at least 1 runes\",\n\t\t}\n\t}\n\n\t// no validation rules for Position\n\n\t// no validation rules for Type\n\n\t// no validation rules for IsRequired\n\n\t// no validation rules for LimitedChoice\n\n\tfor idx, item := range m.GetOptions() {\n\t\t_, _ = idx, item\n\n\t\tif v, ok := interface{}(item).(interface{ Validate() error }); ok {\n\t\t\tif err := v.Validate(); err != nil {\n\t\t\t\treturn 
UpsertEventRequest_QuestionValidationError{\n\t\t\t\t\tfield: fmt.Sprintf(\"Options[%v]\", idx),\n\t\t\t\t\treason: \"embedded message failed validation\",\n\t\t\t\t\tcause: err,\n\t\t\t\t}\n\t\t\t}\n\t\t}\n\n\t}\n\n\treturn nil\n}", "func (m *CreateEmployeeResponse) Validate() error {\n\tif m == nil {\n\t\treturn nil\n\t}\n\n\t// no validation rules for EmpNo\n\n\treturn nil\n}", "func EncodeUnsealResponse(_ context.Context, response interface{}) (interface{}, error) {\n\tresp := response.(endpoints.UnsealResponse)\n\n\tstatus := &pb.SealStatus{\n\t\tSealed: resp.Sealed,\n\t\tT: uint32(resp.T),\n\t\tN: uint32(resp.N),\n\t\tProgress: uint32(resp.Progress),\n\t\tVersion: resp.Version,\n\t\tClusterName: resp.ClusterName,\n\t\tClusterId: resp.ClusterID,\n\t}\n\treturn &pb.UnsealResponse{\n\t\tSealStatus: status,\n\t\tErr: service.Error2String(resp.Err),\n\t}, nil\n}", "func (e UpsertEventRequest_OptionValidationError) Cause() error { return e.cause }", "func CreateUpdateEndpointGroupResponse() (response *UpdateEndpointGroupResponse) {\n\tresponse = &UpdateEndpointGroupResponse{\n\t\tBaseResponse: &responses.BaseResponse{},\n\t}\n\treturn\n}", "func (o *Failure) Upsert(exec boil.Executor, updateColumns []string, whitelist ...string) error {\n\tif o == nil {\n\t\treturn errors.New(\"models: no failure provided for upsert\")\n\t}\n\tcurrTime := time.Now().In(boil.GetLocation())\n\n\tif o.CreatedAt.IsZero() {\n\t\to.CreatedAt = currTime\n\t}\n\to.UpdatedAt = currTime\n\n\tif err := o.doBeforeUpsertHooks(exec); err != nil {\n\t\treturn err\n\t}\n\n\tnzDefaults := queries.NonZeroDefaultSet(failureColumnsWithDefault, o)\n\n\t// Build cache key in-line uglily - mysql vs postgres problems\n\tbuf := strmangle.GetBuffer()\n\tfor _, c := range updateColumns {\n\t\tbuf.WriteString(c)\n\t}\n\tbuf.WriteByte('.')\n\tfor _, c := range whitelist {\n\t\tbuf.WriteString(c)\n\t}\n\tbuf.WriteByte('.')\n\tfor _, c := range nzDefaults {\n\t\tbuf.WriteString(c)\n\t}\n\tkey := buf.String()\n\tstrmangle.PutBuffer(buf)\n\n\tfailureUpsertCacheMut.RLock()\n\tcache, cached := failureUpsertCache[key]\n\tfailureUpsertCacheMut.RUnlock()\n\n\tvar err error\n\n\tif !cached {\n\t\tinsert, ret := strmangle.InsertColumnSet(\n\t\t\tfailureColumns,\n\t\t\tfailureColumnsWithDefault,\n\t\t\tfailureColumnsWithoutDefault,\n\t\t\tnzDefaults,\n\t\t\twhitelist,\n\t\t)\n\n\t\tupdate := strmangle.UpdateColumnSet(\n\t\t\tfailureColumns,\n\t\t\tfailurePrimaryKeyColumns,\n\t\t\tupdateColumns,\n\t\t)\n\t\tif len(update) == 0 {\n\t\t\treturn errors.New(\"models: unable to upsert failure, could not build update column list\")\n\t\t}\n\n\t\tcache.query = queries.BuildUpsertQueryMySQL(dialect, \"failure\", update, insert)\n\t\tcache.retQuery = fmt.Sprintf(\n\t\t\t\"SELECT %s FROM `failure` WHERE `id`=?\",\n\t\t\tstrings.Join(strmangle.IdentQuoteSlice(dialect.LQ, dialect.RQ, ret), \",\"),\n\t\t)\n\n\t\tcache.valueMapping, err = queries.BindMapping(failureType, failureMapping, insert)\n\t\tif err != nil {\n\t\t\treturn err\n\t\t}\n\t\tif len(ret) != 0 {\n\t\t\tcache.retMapping, err = queries.BindMapping(failureType, failureMapping, ret)\n\t\t\tif err != nil {\n\t\t\t\treturn err\n\t\t\t}\n\t\t}\n\t}\n\n\tvalue := reflect.Indirect(reflect.ValueOf(o))\n\tvals := queries.ValuesFromMapping(value, cache.valueMapping)\n\tvar returns []interface{}\n\tif len(cache.retMapping) != 0 {\n\t\treturns = queries.PtrsFromMapping(value, cache.retMapping)\n\t}\n\n\tif boil.DebugMode {\n\t\tfmt.Fprintln(boil.DebugWriter, cache.query)\n\t\tfmt.Fprintln(boil.DebugWriter, 
vals)\n\t}\n\n\tresult, err := exec.Exec(cache.query, vals...)\n\n\tif err != nil {\n\t\treturn errors.Wrap(err, \"models: unable to upsert for failure\")\n\t}\n\n\tvar lastID int64\n\tvar identifierCols []interface{}\n\n\tif len(cache.retMapping) == 0 {\n\t\tgoto CacheNoHooks\n\t}\n\n\tlastID, err = result.LastInsertId()\n\tif err != nil {\n\t\treturn ErrSyncFail\n\t}\n\n\to.ID = int64(lastID)\n\tif lastID != 0 && len(cache.retMapping) == 1 && cache.retMapping[0] == failureMapping[\"ID\"] {\n\t\tgoto CacheNoHooks\n\t}\n\n\tidentifierCols = []interface{}{\n\t\to.ID,\n\t}\n\n\tif boil.DebugMode {\n\t\tfmt.Fprintln(boil.DebugWriter, cache.retQuery)\n\t\tfmt.Fprintln(boil.DebugWriter, identifierCols...)\n\t}\n\n\terr = exec.QueryRow(cache.retQuery, identifierCols...).Scan(returns...)\n\tif err != nil {\n\t\treturn errors.Wrap(err, \"models: unable to populate default values for failure\")\n\t}\n\nCacheNoHooks:\n\tif !cached {\n\t\tfailureUpsertCacheMut.Lock()\n\t\tfailureUpsertCache[key] = cache\n\t\tfailureUpsertCacheMut.Unlock()\n\t}\n\n\treturn o.doAfterUpsertHooks(exec)\n}", "func (m *PostIdentitiesResponseProfileData) Validate(formats strfmt.Registry) error {\n\treturn nil\n}", "func (o *Failure) UpsertP(exec boil.Executor, updateColumns []string, whitelist ...string) {\n\tif err := o.Upsert(exec, updateColumns, whitelist...); err != nil {\n\t\tpanic(boil.WrapErr(err))\n\t}\n}", "func (m *ModelsUserAchievementResponse) Validate(formats strfmt.Registry) error {\n\tvar res []error\n\n\tif err := m.validateAchievedAt(formats); err != nil {\n\t\tres = append(res, err)\n\t}\n\tif err := m.validateAchievementCode(formats); err != nil {\n\t\tres = append(res, err)\n\t}\n\tif err := m.validateID(formats); err != nil {\n\t\tres = append(res, err)\n\t}\n\tif err := m.validateLatestValue(formats); err != nil {\n\t\tres = append(res, err)\n\t}\n\tif err := m.validateStatus(formats); err != nil {\n\t\tres = append(res, err)\n\t}\n\n\tif len(res) > 0 {\n\t\treturn errors.CompositeValidationError(res...)\n\t}\n\treturn nil\n}", "func (e UpsertEventResponseValidationError) Field() string { return e.field }", "func (o *AuthMessage) UpsertP(exec boil.Executor, updateColumns []string, whitelist ...string) {\n\tif err := o.Upsert(exec, updateColumns, whitelist...); err != nil {\n\t\tpanic(boil.WrapErr(err))\n\t}\n}", "func EventSignupCreate(ctx *gin.Context) {\n\tvar user *model.User\n\tif userInterface, exists := ctx.Get(\"User\"); !exists {\n\t\tmisc.ReturnStandardError(ctx, http.StatusForbidden, \"you have to be a registered user to create signup record\")\n\t\treturn\n\t} else {\n\t\tuser = userInterface.(*model.User)\n\t}\n\teventSignup := &model.EventSignup{}\n\tif err := jsonapi.UnmarshalPayload(ctx.Request.Body, eventSignup); err != nil {\n\t\tmisc.ReturnStandardError(ctx, http.StatusBadRequest, \"cannot unmarshal JSON of request\")\n\t\treturn\n\t} else if eventSignup.Event == nil || eventSignup.Event.ID <= 0 {\n\t\tmisc.ReturnStandardError(ctx, http.StatusBadRequest, \"invalid event ID\")\n\t\treturn\n\t}\n\tdb := ctx.MustGet(\"DB\").(*gorm.DB)\n\tevent := model.Event{}\n\tif err := db.Where(eventSignup.Event).First(&event).Error; errors.Is(err, gorm.ErrRecordNotFound) {\n\t\tmisc.ReturnStandardError(ctx, http.StatusNotFound, \"specified event cannot be found\")\n\t\treturn\n\t} else if err != nil {\n\t\tmisc.ReturnStandardError(ctx, http.StatusInternalServerError, err.Error())\n\t\treturn\n\t}\n\tif *event.OrganizerID == user.ID {\n\t\tmisc.ReturnStandardError(ctx, http.StatusBadRequest, \"you 
cannot signup events organized by yourself\")\n\t\treturn\n\t}\n\teventSignup.EventID = &event.ID\n\teventSignup.Event = &event\n\teventSignup.UserID = &user.ID\n\teventSignup.User = user\n\tif err := db.Save(eventSignup).Error; err != nil {\n\t\tmisc.ReturnStandardError(ctx, http.StatusInternalServerError, err.Error())\n\t} else {\n\t\tctx.Status(http.StatusCreated)\n\t\tif err := jsonapi.MarshalPayload(ctx.Writer, eventSignup); err != nil {\n\t\t\tmisc.ReturnStandardError(ctx, http.StatusInternalServerError, err.Error())\n\t\t}\n\t}\n}", "func (o *PutTeamsTeamIDMembersUsernameUnprocessableEntity) WriteResponse(rw http.ResponseWriter, producer runtime.Producer) {\n\n\trw.WriteHeader(422)\n\tif o.Payload != nil {\n\t\tpayload := o.Payload\n\t\tif err := producer.Produce(rw, payload); err != nil {\n\t\t\tpanic(err) // let the recovery middleware deal with this\n\t\t}\n\t}\n}", "func (o *Employee) Upsert(ctx context.Context, exec boil.ContextExecutor, updateOnConflict bool, conflictColumns []string, updateColumns, insertColumns boil.Columns) error {\n\tif o == nil {\n\t\treturn errors.New(\"models: no employee provided for upsert\")\n\t}\n\n\tif err := o.doBeforeUpsertHooks(ctx, exec); err != nil {\n\t\treturn err\n\t}\n\n\tnzDefaults := queries.NonZeroDefaultSet(employeeColumnsWithDefault, o)\n\n\t// Build cache key in-line uglily - mysql vs psql problems\n\tbuf := strmangle.GetBuffer()\n\tif updateOnConflict {\n\t\tbuf.WriteByte('t')\n\t} else {\n\t\tbuf.WriteByte('f')\n\t}\n\tbuf.WriteByte('.')\n\tfor _, c := range conflictColumns {\n\t\tbuf.WriteString(c)\n\t}\n\tbuf.WriteByte('.')\n\tbuf.WriteString(strconv.Itoa(updateColumns.Kind))\n\tfor _, c := range updateColumns.Cols {\n\t\tbuf.WriteString(c)\n\t}\n\tbuf.WriteByte('.')\n\tbuf.WriteString(strconv.Itoa(insertColumns.Kind))\n\tfor _, c := range insertColumns.Cols {\n\t\tbuf.WriteString(c)\n\t}\n\tbuf.WriteByte('.')\n\tfor _, c := range nzDefaults {\n\t\tbuf.WriteString(c)\n\t}\n\tkey := buf.String()\n\tstrmangle.PutBuffer(buf)\n\n\temployeeUpsertCacheMut.RLock()\n\tcache, cached := employeeUpsertCache[key]\n\temployeeUpsertCacheMut.RUnlock()\n\n\tvar err error\n\n\tif !cached {\n\t\tinsert, ret := insertColumns.InsertColumnSet(\n\t\t\temployeeAllColumns,\n\t\t\temployeeColumnsWithDefault,\n\t\t\temployeeColumnsWithoutDefault,\n\t\t\tnzDefaults,\n\t\t)\n\t\tupdate := updateColumns.UpdateColumnSet(\n\t\t\temployeeAllColumns,\n\t\t\temployeePrimaryKeyColumns,\n\t\t)\n\n\t\tif updateOnConflict && len(update) == 0 {\n\t\t\treturn errors.New(\"models: unable to upsert employee, could not build update column list\")\n\t\t}\n\n\t\tconflict := conflictColumns\n\t\tif len(conflict) == 0 {\n\t\t\tconflict = make([]string, len(employeePrimaryKeyColumns))\n\t\t\tcopy(conflict, employeePrimaryKeyColumns)\n\t\t}\n\t\tcache.query = buildUpsertQueryPostgres(dialect, \"\\\"employee\\\"\", updateOnConflict, ret, update, conflict, insert)\n\n\t\tcache.valueMapping, err = queries.BindMapping(employeeType, employeeMapping, insert)\n\t\tif err != nil {\n\t\t\treturn err\n\t\t}\n\t\tif len(ret) != 0 {\n\t\t\tcache.retMapping, err = queries.BindMapping(employeeType, employeeMapping, ret)\n\t\t\tif err != nil {\n\t\t\t\treturn err\n\t\t\t}\n\t\t}\n\t}\n\n\tvalue := reflect.Indirect(reflect.ValueOf(o))\n\tvals := queries.ValuesFromMapping(value, cache.valueMapping)\n\tvar returns []interface{}\n\tif len(cache.retMapping) != 0 {\n\t\treturns = queries.PtrsFromMapping(value, cache.retMapping)\n\t}\n\n\tif boil.IsDebug(ctx) {\n\t\twriter := 
boil.DebugWriterFrom(ctx)\n\t\tfmt.Fprintln(writer, cache.query)\n\t\tfmt.Fprintln(writer, vals)\n\t}\n\tif len(cache.retMapping) != 0 {\n\t\terr = exec.QueryRowContext(ctx, cache.query, vals...).Scan(returns...)\n\t\tif err == sql.ErrNoRows {\n\t\t\terr = nil // Postgres doesn't return anything when there's no update\n\t\t}\n\t} else {\n\t\t_, err = exec.ExecContext(ctx, cache.query, vals...)\n\t}\n\tif err != nil {\n\t\treturn errors.Wrap(err, \"models: unable to upsert employee\")\n\t}\n\n\tif !cached {\n\t\temployeeUpsertCacheMut.Lock()\n\t\temployeeUpsertCache[key] = cache\n\t\temployeeUpsertCacheMut.Unlock()\n\t}\n\n\treturn o.doAfterUpsertHooks(ctx, exec)\n}", "func (t AlterPartitionReassignmentsResponse) Encode(e *Encoder, version int16) {\n\te.PutInt32(t.ThrottleTimeMs) // ThrottleTimeMs\n\te.PutInt16(t.ErrorCode) // ErrorCode\n\te.PutString(t.ErrorMessage) // ErrorMessage\n\t// Responses\n\tlen3 := len(t.Responses)\n\te.PutArrayLength(len3)\n\tfor i := 0; i < len3; i++ {\n\t\tt.Responses[i].Encode(e, version)\n\t}\n}", "func (fwdclient *Client) SubmitEvent(evt EventRequest) error {\n\tlog.Debugf(\"%s: url=%s\", fwdclient.AppName, fwdclient.ActionUrls.Raw)\n\tb := new(bytes.Buffer)\n\tjson.NewEncoder(b).Encode(evt)\n\treq, err := http.NewRequest(\"POST\", fwdclient.ActionUrls.Raw, b)\n\tif err != nil {\n\t\treturn err\n\t}\n\treq.Header.Set(\"Authorization\", fmt.Sprintf(\"Splunk %s\", fwdclient.Token))\n\tresp, err := fwdclient.httpclient.Do(req)\n\tif err != nil {\n\t\treturn fmt.Errorf(\"%s: Authorization Token is inorrect. Error: %s\", fwdclient.AppName, err)\n\t}\n\tdefer resp.Body.Close()\n\tlog.Debugf(\"%s: status=%d %s\", fwdclient.AppName, resp.StatusCode, http.StatusText(resp.StatusCode))\n\n\trespBody, err := ioutil.ReadAll(resp.Body)\n\tif err != nil {\n\t\treturn fmt.Errorf(\"%s: failed: response : %s\", fwdclient.AppName, err)\n\t}\n\teventResponse := new(EventResponse)\n\tif err := json.Unmarshal(respBody, eventResponse); err != nil {\n\t\treturn fmt.Errorf(\"%s: failed: Response is not JSON formate: %s\", fwdclient.AppName, respBody)\n\t}\n\tif resp.StatusCode != 200 {\n\t\treturn fmt.Errorf(\"%s: failed: %d %s (%s)\", fwdclient.AppName, resp.StatusCode, http.StatusText(resp.StatusCode), eventResponse.Text)\n\t}\n\tlog.Debugf(\"%s: code=%d, text=%s\", fwdclient.AppName, eventResponse.Code, eventResponse.Text)\n\treturn nil\n}", "func (o *PostPunchInOKBody) Validate(formats strfmt.Registry) error {\n\tvar res []error\n\n\tif err := o.validateResponse(formats); err != nil {\n\t\tres = append(res, err)\n\t}\n\n\tif len(res) > 0 {\n\t\treturn errors.CompositeValidationError(res...)\n\t}\n\treturn nil\n}", "func (m *UpdateMegaParProfileResponse) Validate() error {\n\treturn nil\n}", "func CreateReleaseAnycastEipAddressResponse() (response *ReleaseAnycastEipAddressResponse) {\n\tresponse = &ReleaseAnycastEipAddressResponse{\n\t\tBaseResponse: &responses.BaseResponse{},\n\t}\n\treturn\n}", "func UpdateEvent(c *gin.Context) {\n\tvar inp model.Event\n\n\tc.BindJSON(&inp)\n\tc.JSON(http.StatusOK, serviceEvent.UpdateEvent(&inp))\n}", "func (e *Event) Encode(w io.Writer) error {\n\tresp := dialogflow.Event{\n\t\tFollowupEventInput: &dialogflow.FollowupEventInput{\n\t\t\tName: e.Name,\n\t\t\tParameters: e.Parameters,\n\t\t\t// LanguageCode: e.LanguageCode.String(),\n\t\t},\n\t}\n\tif e.LanguageCode != nil {\n\t\tresp.FollowupEventInput.LanguageCode = e.LanguageCode.String()\n\t}\n\treturn resp.Encode(w)\n}", "func (e *Event) UnmarshalJSON(b []byte) error {\n\tvar values 
map[string]interface{}\n\terr := json.Unmarshal(b, &values)\n\n\tif err != nil {\n\t\treturn err\n\t}\n\n\tfor item, value := range values {\n\t\tswitch item {\n\t\tcase \"eventId\":\n\t\t\te.EventID = int32(value.(float64))\n\t\tcase \"dateWithYear\":\n\t\t\te.DateWithYear = value.(string)\n\t\tcase \"eventNumber\":\n\t\t\te.EventNumber = int32(value.(float64))\n\t\tcase \"eventUrl\":\n\t\t\te.EventURL = value.(string)\n\t\tcase \"results\":\n\t\t\tif value == nil {\n\t\t\t\te.Results = \"\"\n\t\t\t} else if r, ok := value.(string); ok {\n\t\t\t\te.Results = r\n\t\t\t} else if r, ok := value.(int); ok {\n\t\t\t\te.Results = strconv.Itoa(r)\n\t\t\t}\n\t\tcase \"eventName\":\n\t\t\tif value == nil {\n\t\t\t\te.EventName = \"\"\n\t\t\t} else {\n\t\t\t\te.EventName = value.(string)\n\t\t\t}\n\t\tcase \"isImminent\":\n\t\t\tif value == nil {\n\t\t\t\te.IsImminent = false\n\t\t\t} else {\n\t\t\t\te.IsImminent = value.(bool)\n\t\t\t}\n\t\tcase \"startTime\":\n\t\t\te.StartTime = int64(value.(float64))\n\t\tcase \"resulted\":\n\t\t\tif value == nil {\n\t\t\t\te.Resulted = 0\n\t\t\t} else {\n\t\t\t\te.Resulted = int32(value.(float64))\n\t\t\t}\n\t\tcase \"isAbandoned\":\n\t\t\tif value == nil {\n\t\t\t\te.IsAbandoned = 0\n\t\t\t} else {\n\t\t\t\te.IsAbandoned = int32(value.(float64))\n\t\t\t}\n\t\t}\n\t}\n\n\treturn nil\n}", "func (m *RuleConfigManager) Upsert(key string, r *RuleConfig, opts ...RequestOption) (err error) {\n\treturn m.Request(\"PUT\", m.URI(\"rules-configs\", key), r, opts...)\n}", "func (o *DMessageEmbed) UpsertP(exec boil.Executor, updateOnConflict bool, conflictColumns []string, updateColumns []string, whitelist ...string) {\n\tif err := o.Upsert(exec, updateOnConflict, conflictColumns, updateColumns, whitelist...); err != nil {\n\t\tpanic(boil.WrapErr(err))\n\t}\n}", "func (o *DMessageEmbed) Upsert(exec boil.Executor, updateOnConflict bool, conflictColumns []string, updateColumns []string, whitelist ...string) error {\n\tif o == nil {\n\t\treturn errors.New(\"models: no d_message_embeds provided for upsert\")\n\t}\n\n\tnzDefaults := queries.NonZeroDefaultSet(dMessageEmbedColumnsWithDefault, o)\n\n\t// Build cache key in-line uglily - mysql vs postgres problems\n\tbuf := strmangle.GetBuffer()\n\tif updateOnConflict {\n\t\tbuf.WriteByte('t')\n\t} else {\n\t\tbuf.WriteByte('f')\n\t}\n\tbuf.WriteByte('.')\n\tfor _, c := range conflictColumns {\n\t\tbuf.WriteString(c)\n\t}\n\tbuf.WriteByte('.')\n\tfor _, c := range updateColumns {\n\t\tbuf.WriteString(c)\n\t}\n\tbuf.WriteByte('.')\n\tfor _, c := range whitelist {\n\t\tbuf.WriteString(c)\n\t}\n\tbuf.WriteByte('.')\n\tfor _, c := range nzDefaults {\n\t\tbuf.WriteString(c)\n\t}\n\tkey := buf.String()\n\tstrmangle.PutBuffer(buf)\n\n\tdMessageEmbedUpsertCacheMut.RLock()\n\tcache, cached := dMessageEmbedUpsertCache[key]\n\tdMessageEmbedUpsertCacheMut.RUnlock()\n\n\tvar err error\n\n\tif !cached {\n\t\tvar ret []string\n\t\twhitelist, ret = strmangle.InsertColumnSet(\n\t\t\tdMessageEmbedColumns,\n\t\t\tdMessageEmbedColumnsWithDefault,\n\t\t\tdMessageEmbedColumnsWithoutDefault,\n\t\t\tnzDefaults,\n\t\t\twhitelist,\n\t\t)\n\t\tupdate := strmangle.UpdateColumnSet(\n\t\t\tdMessageEmbedColumns,\n\t\t\tdMessageEmbedPrimaryKeyColumns,\n\t\t\tupdateColumns,\n\t\t)\n\t\tif len(update) == 0 {\n\t\t\treturn errors.New(\"models: unable to upsert d_message_embeds, could not build update column list\")\n\t\t}\n\n\t\tconflict := conflictColumns\n\t\tif len(conflict) == 0 {\n\t\t\tconflict = make([]string, 
len(dMessageEmbedPrimaryKeyColumns))\n\t\t\tcopy(conflict, dMessageEmbedPrimaryKeyColumns)\n\t\t}\n\t\tcache.query = queries.BuildUpsertQueryPostgres(dialect, \"\\\"d_message_embeds\\\"\", updateOnConflict, ret, update, conflict, whitelist)\n\n\t\tcache.valueMapping, err = queries.BindMapping(dMessageEmbedType, dMessageEmbedMapping, whitelist)\n\t\tif err != nil {\n\t\t\treturn err\n\t\t}\n\t\tif len(ret) != 0 {\n\t\t\tcache.retMapping, err = queries.BindMapping(dMessageEmbedType, dMessageEmbedMapping, ret)\n\t\t\tif err != nil {\n\t\t\t\treturn err\n\t\t\t}\n\t\t}\n\t}\n\n\tvalue := reflect.Indirect(reflect.ValueOf(o))\n\tvals := queries.ValuesFromMapping(value, cache.valueMapping)\n\tvar returns []interface{}\n\tif len(cache.retMapping) != 0 {\n\t\treturns = queries.PtrsFromMapping(value, cache.retMapping)\n\t}\n\n\tif boil.DebugMode {\n\t\tfmt.Fprintln(boil.DebugWriter, cache.query)\n\t\tfmt.Fprintln(boil.DebugWriter, vals)\n\t}\n\n\tif len(cache.retMapping) != 0 {\n\t\terr = exec.QueryRow(cache.query, vals...).Scan(returns...)\n\t\tif err == sql.ErrNoRows {\n\t\t\terr = nil // Postgres doesn't return anything when there's no update\n\t\t}\n\t} else {\n\t\t_, err = exec.Exec(cache.query, vals...)\n\t}\n\tif err != nil {\n\t\treturn errors.Wrap(err, \"models: unable to upsert d_message_embeds\")\n\t}\n\n\tif !cached {\n\t\tdMessageEmbedUpsertCacheMut.Lock()\n\t\tdMessageEmbedUpsertCache[key] = cache\n\t\tdMessageEmbedUpsertCacheMut.Unlock()\n\t}\n\n\treturn nil\n}", "func (upp *UpdatePlayerPayload) Validate() []string {\n\tv := NewValidation()\n\tv.validateRequiredString(\"name\", upp.Name)\n\tv.validateRequired(\"metadata\", upp.Metadata)\n\treturn v.Errors()\n}", "func (ocHandler *OrderConstraintsOverridesHandler) Upsert(pair *model.TradingPair, override *model.OrderConstraintsOverride) {\n\texistingOverride, exists := ocHandler.overrides[pair.String()]\n\tif !exists {\n\t\tocHandler.overrides[pair.String()] = override\n\t\treturn\n\t}\n\n\texistingOverride.Augment(override)\n\tocHandler.overrides[pair.String()] = existingOverride\n}", "func (e UpsertEventRequest_OptionValidationError) Reason() string { return e.reason }", "func (e VulnEvent) Validate() error {\n\tif e.SourceName == \"\" {\n\t\treturn fmt.Errorf(\"must set SourceName in event\")\n\t}\n\tif e.Asset.IPAddress == \"\" && e.Asset.Hostname == \"\" {\n\t\treturn fmt.Errorf(\"must set IPAddress or Hostname in event\")\n\t}\n\treturn nil\n}", "func NewSummaryEventResponseWithDefaults() *SummaryEventResponse {\n\tthis := SummaryEventResponse{}\n\treturn &this\n}", "func (e StreamEventsResponseValidationError) Cause() error { return e.cause }", "func (m *DeleteEmployeeResponse) Validate() error {\n\tif m == nil {\n\t\treturn nil\n\t}\n\n\t// no validation rules for EmpNo\n\n\treturn nil\n}", "func (service *EntriesService) Upsert(spaceID string, entry *Entry) error {\n\tfieldsOnly := map[string]interface{}{\n\t\t\"fields\": entry.Fields,\n\t}\n\n\tbytesArray, err := json.Marshal(fieldsOnly)\n\tif err != nil {\n\t\treturn err\n\t}\n\n\t// Creating/updating an entry requires a content type to be provided\n\tif entry.Sys.ContentType == nil {\n\t\treturn fmt.Errorf(\"creating/updating an entry requires a content type\")\n\t}\n\n\tvar path string\n\tvar method string\n\n\tif entry.Sys != nil && entry.Sys.ID != \"\" {\n\t\tpath = fmt.Sprintf(\"/spaces/%s%s/entries/%s\", spaceID, getEnvPath(service.c), entry.Sys.ID)\n\t\tmethod = \"PUT\"\n\t} else {\n\t\tpath = fmt.Sprintf(\"/spaces/%s%s/entries\", spaceID, 
getEnvPath(service.c))\n\t\tmethod = \"POST\"\n\t}\n\n\treq, err := service.c.newRequest(method, path, nil, bytes.NewReader(bytesArray))\n\tif err != nil {\n\t\treturn err\n\t}\n\n\treq.Header.Set(\"X-Contentful-Version\", strconv.Itoa(entry.GetVersion()))\n\treq.Header.Set(\"X-Contentful-Content-Type\", entry.Sys.ContentType.Sys.ID)\n\n\treturn service.c.do(req, entry)\n}", "func (r *Rest) PostEvent(url *types.URI, e event.Event) error {\n\tb, err := json.Marshal(e)\n\tif err != nil {\n\t\tlog.Errorf(\"error marshalling event %v\", e)\n\t\treturn err\n\t}\n\n\tif status := r.Post(url, b); status == http.StatusBadRequest {\n\t\treturn fmt.Errorf(\"post returned status %d\", status)\n\t}\n\treturn nil\n}", "func (h *EcrHandler) Upsert(obj interface{}) error {\n\tns := obj.(*v1.Namespace)\n\n\t// Don't process if excluded\n\tif skip := h.ifSkip(ns.GetName()); skip {\n\t\t// If exist, delete from excluded namespace\n\t\tif s, err := lc.GetSecret(h.client, ns.GetName(), h.SecretName, metav1.GetOptions{}); s.Name != \"\" {\n\t\t\terr = lc.DeleteSecret(h.client, ns.GetName(), h.SecretName, &metav1.DeleteOptions{})\n\t\t\tif err != nil {\n\t\t\t\tlog.Printf(\"Failed to delete existing secret %s in namespace %s\", h.SecretName, ns.GetName())\n\t\t\t}\n\t\t\tlog.Printf(\"Deleted existing secret %s in namespace %s\", h.SecretName, ns.GetName())\n\t\t}\n\n\t\tlog.Printf(\"Ignoring excluded namespace %s in update\", ns.GetName())\n\t\treturn nil\n\t}\n\n\tsecret, err := h.buildEcrSecret(ns.GetName())\n\tif err != nil {\n\t\treturn err\n\t}\n\n\t// Check if it's a new secret\n\ts, err := lc.GetSecret(h.client, ns.GetName(), h.SecretName, metav1.GetOptions{})\n\tif s.Name == \"\" {\n\t\t// Create\n\t\t_, err = lc.CreateSecret(h.client, ns.GetName(), secret)\n\t\tif err != nil {\n\t\t\tlog.Printf(\"Failed to create ECR secret %s for namespace %s\", h.SecretName, ns.GetName())\n\t\t\treturn err\n\t\t}\n\n\t\tlog.Printf(\"Successfully created ECR secret %s for namespace %s\", h.SecretName, ns.GetName())\n\t} else {\n\t\t// Update\n\t\t_, err = lc.UpdateSecret(h.client, ns.GetName(), secret)\n\t\tif err != nil {\n\t\t\tlog.Printf(\"Failed to update ECR secret %s for namespace %s\", h.SecretName, ns.GetName())\n\t\t\treturn err\n\t\t}\n\n\t\tlog.Printf(\"Successfully updated ECR secret %s for namespace %s\", h.SecretName, ns.GetName())\n\t}\n\n\treturn nil\n}", "func (m *V3DisruptionResponse) Validate(formats strfmt.Registry) error {\n\tvar res []error\n\n\tif err := m.validateDisruption(formats); err != nil {\n\t\tres = append(res, err)\n\t}\n\n\tif err := m.validateStatus(formats); err != nil {\n\t\tres = append(res, err)\n\t}\n\n\tif len(res) > 0 {\n\t\treturn errors.CompositeValidationError(res...)\n\t}\n\treturn nil\n}", "func (ut *updateUserPayload) Validate() (err error) {\n\tif ut.Name == nil {\n\t\terr = goa.MergeErrors(err, goa.MissingAttributeError(`request`, \"name\"))\n\t}\n\tif ut.Email == nil {\n\t\terr = goa.MergeErrors(err, goa.MissingAttributeError(`request`, \"email\"))\n\t}\n\tif ut.Bio == nil {\n\t\terr = goa.MergeErrors(err, goa.MissingAttributeError(`request`, \"bio\"))\n\t}\n\tif ut.Email != nil {\n\t\tif err2 := goa.ValidateFormat(goa.FormatEmail, *ut.Email); err2 != nil {\n\t\t\terr = goa.MergeErrors(err, goa.InvalidFormatError(`request.email`, *ut.Email, goa.FormatEmail, err2))\n\t\t}\n\t}\n\tif ut.Name != nil {\n\t\tif ok := goa.ValidatePattern(`\\S`, *ut.Name); !ok {\n\t\t\terr = goa.MergeErrors(err, goa.InvalidPatternError(`request.name`, *ut.Name, `\\S`))\n\t\t}\n\t}\n\tif 
ut.Name != nil {\n\t\tif utf8.RuneCountInString(*ut.Name) > 256 {\n\t\t\terr = goa.MergeErrors(err, goa.InvalidLengthError(`request.name`, *ut.Name, utf8.RuneCountInString(*ut.Name), 256, false))\n\t\t}\n\t}\n\treturn\n}", "func (r *EventTagsService) Update(profileId int64, eventtag *EventTag) *EventTagsUpdateCall {\n\tc := &EventTagsUpdateCall{s: r.s, urlParams_: make(gensupport.URLParams)}\n\tc.profileId = profileId\n\tc.eventtag = eventtag\n\treturn c\n}", "func (fc *FakeCollection) Upsert(col string, value interface{}, opts *gocb.UpsertOptions) (*gocb.MutationResult, error) {\n\tif fc.Force == \"error\" {\n\t\treturn &gocb.MutationResult{}, errors.New(\"Forced collection upsert error\")\n\t}\n\treturn &gocb.MutationResult{}, nil\n}", "func (m *EventsListResponse) Validate(formats strfmt.Registry) error {\n\tvar res []error\n\n\tif err := m.validateEvents(formats); err != nil {\n\t\t// prop\n\t\tres = append(res, err)\n\t}\n\n\tif len(res) > 0 {\n\t\treturn errors.CompositeValidationError(res...)\n\t}\n\treturn nil\n}", "func (o *AddAttendeeToTalkOK) WriteResponse(rw http.ResponseWriter, producer runtime.Producer) {\n\n\trw.WriteHeader(200)\n\tif o.Payload != nil {\n\t\tpayload := o.Payload\n\t\tif err := producer.Produce(rw, payload); err != nil {\n\t\t\tpanic(err) // let the recovery middleware deal with this\n\t\t}\n\t}\n}", "func (o *UpdateUserOK) WriteResponse(rw http.ResponseWriter, producer runtime.Producer) {\n\n\trw.WriteHeader(200)\n\tif o.Payload != nil {\n\t\tpayload := o.Payload\n\t\tif err := producer.Produce(rw, payload); err != nil {\n\t\t\tpanic(err) // let the recovery middleware deal with this\n\t\t}\n\t}\n}", "func (tsResp *TimestampResponse) SetErr(err error) *TimestampResponse {\n\ttsResp.Err = NewError(err)\n\treturn tsResp\n}", "func (o *Vendor) Upsert(ctx context.Context, exec boil.ContextExecutor, updateColumns, insertColumns boil.Columns) error {\n\tif o == nil {\n\t\treturn errors.New(\"models: no vendors provided for upsert\")\n\t}\n\tif !boil.TimestampsAreSkipped(ctx) {\n\t\tcurrTime := time.Now().In(boil.GetLocation())\n\n\t\tif queries.MustTime(o.CreatedAt).IsZero() {\n\t\t\tqueries.SetScanner(&o.CreatedAt, currTime)\n\t\t}\n\t\tqueries.SetScanner(&o.UpdatedAt, currTime)\n\t}\n\n\tif err := o.doBeforeUpsertHooks(ctx, exec); err != nil {\n\t\treturn err\n\t}\n\n\tnzDefaults := queries.NonZeroDefaultSet(vendorColumnsWithDefault, o)\n\tnzUniques := queries.NonZeroDefaultSet(mySQLVendorUniqueColumns, o)\n\n\tif len(nzUniques) == 0 {\n\t\treturn errors.New(\"cannot upsert with a table that cannot conflict on a unique column\")\n\t}\n\n\t// Build cache key in-line uglily - mysql vs psql problems\n\tbuf := strmangle.GetBuffer()\n\tbuf.WriteString(strconv.Itoa(updateColumns.Kind))\n\tfor _, c := range updateColumns.Cols {\n\t\tbuf.WriteString(c)\n\t}\n\tbuf.WriteByte('.')\n\tbuf.WriteString(strconv.Itoa(insertColumns.Kind))\n\tfor _, c := range insertColumns.Cols {\n\t\tbuf.WriteString(c)\n\t}\n\tbuf.WriteByte('.')\n\tfor _, c := range nzDefaults {\n\t\tbuf.WriteString(c)\n\t}\n\tbuf.WriteByte('.')\n\tfor _, c := range nzUniques {\n\t\tbuf.WriteString(c)\n\t}\n\tkey := buf.String()\n\tstrmangle.PutBuffer(buf)\n\n\tvendorUpsertCacheMut.RLock()\n\tcache, cached := vendorUpsertCache[key]\n\tvendorUpsertCacheMut.RUnlock()\n\n\tvar err error\n\n\tif !cached {\n\t\tinsert, ret := insertColumns.InsertColumnSet(\n\t\t\tvendorColumns,\n\t\t\tvendorColumnsWithDefault,\n\t\t\tvendorColumnsWithoutDefault,\n\t\t\tnzDefaults,\n\t\t)\n\t\tupdate := 
updateColumns.UpdateColumnSet(\n\t\t\tvendorColumns,\n\t\t\tvendorPrimaryKeyColumns,\n\t\t)\n\n\t\tif len(update) == 0 {\n\t\t\treturn errors.New(\"models: unable to upsert vendors, could not build update column list\")\n\t\t}\n\n\t\tret = strmangle.SetComplement(ret, nzUniques)\n\t\tcache.query = buildUpsertQueryMySQL(dialect, \"vendors\", update, insert)\n\t\tcache.retQuery = fmt.Sprintf(\n\t\t\t\"SELECT %s FROM `vendors` WHERE %s\",\n\t\t\tstrings.Join(strmangle.IdentQuoteSlice(dialect.LQ, dialect.RQ, ret), \",\"),\n\t\t\tstrmangle.WhereClause(\"`\", \"`\", 0, nzUniques),\n\t\t)\n\n\t\tcache.valueMapping, err = queries.BindMapping(vendorType, vendorMapping, insert)\n\t\tif err != nil {\n\t\t\treturn err\n\t\t}\n\t\tif len(ret) != 0 {\n\t\t\tcache.retMapping, err = queries.BindMapping(vendorType, vendorMapping, ret)\n\t\t\tif err != nil {\n\t\t\t\treturn err\n\t\t\t}\n\t\t}\n\t}\n\n\tvalue := reflect.Indirect(reflect.ValueOf(o))\n\tvals := queries.ValuesFromMapping(value, cache.valueMapping)\n\tvar returns []interface{}\n\tif len(cache.retMapping) != 0 {\n\t\treturns = queries.PtrsFromMapping(value, cache.retMapping)\n\t}\n\n\tif boil.DebugMode {\n\t\tfmt.Fprintln(boil.DebugWriter, cache.query)\n\t\tfmt.Fprintln(boil.DebugWriter, vals)\n\t}\n\n\tresult, err := exec.ExecContext(ctx, cache.query, vals...)\n\n\tif err != nil {\n\t\treturn errors.Wrap(err, \"models: unable to upsert for vendors\")\n\t}\n\n\tvar lastID int64\n\tvar uniqueMap []uint64\n\tvar nzUniqueCols []interface{}\n\n\tif len(cache.retMapping) == 0 {\n\t\tgoto CacheNoHooks\n\t}\n\n\tlastID, err = result.LastInsertId()\n\tif err != nil {\n\t\treturn ErrSyncFail\n\t}\n\n\to.ID = int(lastID)\n\tif lastID != 0 && len(cache.retMapping) == 1 && cache.retMapping[0] == vendorMapping[\"id\"] {\n\t\tgoto CacheNoHooks\n\t}\n\n\tuniqueMap, err = queries.BindMapping(vendorType, vendorMapping, nzUniques)\n\tif err != nil {\n\t\treturn errors.Wrap(err, \"models: unable to retrieve unique values for vendors\")\n\t}\n\tnzUniqueCols = queries.ValuesFromMapping(reflect.Indirect(reflect.ValueOf(o)), uniqueMap)\n\n\tif boil.DebugMode {\n\t\tfmt.Fprintln(boil.DebugWriter, cache.retQuery)\n\t\tfmt.Fprintln(boil.DebugWriter, nzUniqueCols...)\n\t}\n\n\terr = exec.QueryRowContext(ctx, cache.retQuery, nzUniqueCols...).Scan(returns...)\n\tif err != nil {\n\t\treturn errors.Wrap(err, \"models: unable to populate default values for vendors\")\n\t}\n\nCacheNoHooks:\n\tif !cached {\n\t\tvendorUpsertCacheMut.Lock()\n\t\tvendorUpsertCache[key] = cache\n\t\tvendorUpsertCacheMut.Unlock()\n\t}\n\n\treturn o.doAfterUpsertHooks(ctx, exec)\n}", "func (svc *Service) Update(ownerID string, event *calendar.Event) (*calendar.Event, error) {\n\tmsEvent := convertToOutlookEvent(event)\n\t_, err := svc.client.Patch(fmt.Sprintf(\"%s/%s\", eventsURL(ownerID), event.Id), msEvent)\n\tif err != nil {\n\t\treturn &calendar.Event{}, errors.Wrap(err, \"Unable to perform Update\")\n\t}\n\treturn event, nil\n}", "func (e UpdateMeetingV1ResponseValidationError) Cause() error { return e.cause }", "func (m *InvalidSessionResponseError) Validate(formats strfmt.Registry) error {\n\tvar res []error\n\n\tif err := m.validateParkingSessionID(formats); err != nil {\n\t\t// prop\n\t\tres = append(res, err)\n\t}\n\n\tif len(res) > 0 {\n\t\treturn errors.CompositeValidationError(res...)\n\t}\n\treturn nil\n}", "func (o *UserEditConflict) WriteResponse(rw http.ResponseWriter, producer runtime.Producer) {\n\n\trw.Header().Del(runtime.HeaderContentType) //Remove Content-Type on empty 
responses\n\n\trw.WriteHeader(409)\n}", "func (e UpsertEventRequest_QuestionValidationError) Cause() error { return e.cause }", "func (ut *UpdateUserPayload) Validate() (err error) {\n\tif ut.Name == \"\" {\n\t\terr = goa.MergeErrors(err, goa.MissingAttributeError(`type`, \"name\"))\n\t}\n\tif ut.Email == \"\" {\n\t\terr = goa.MergeErrors(err, goa.MissingAttributeError(`type`, \"email\"))\n\t}\n\tif ut.Bio == \"\" {\n\t\terr = goa.MergeErrors(err, goa.MissingAttributeError(`type`, \"bio\"))\n\t}\n\tif err2 := goa.ValidateFormat(goa.FormatEmail, ut.Email); err2 != nil {\n\t\terr = goa.MergeErrors(err, goa.InvalidFormatError(`type.email`, ut.Email, goa.FormatEmail, err2))\n\t}\n\tif ok := goa.ValidatePattern(`\\S`, ut.Name); !ok {\n\t\terr = goa.MergeErrors(err, goa.InvalidPatternError(`type.name`, ut.Name, `\\S`))\n\t}\n\tif utf8.RuneCountInString(ut.Name) > 256 {\n\t\terr = goa.MergeErrors(err, goa.InvalidLengthError(`type.name`, ut.Name, utf8.RuneCountInString(ut.Name), 256, false))\n\t}\n\treturn\n}", "func (o *Offer) Upsert(exec boil.Executor, updateOnConflict bool, conflictColumns []string, updateColumns, insertColumns boil.Columns) error {\n\tif o == nil {\n\t\treturn errors.New(\"stellarcore: no offers provided for upsert\")\n\t}\n\n\tif err := o.doBeforeUpsertHooks(exec); err != nil {\n\t\treturn err\n\t}\n\n\tnzDefaults := queries.NonZeroDefaultSet(offerColumnsWithDefault, o)\n\n\t// Build cache key in-line uglily - mysql vs psql problems\n\tbuf := strmangle.GetBuffer()\n\tif updateOnConflict {\n\t\tbuf.WriteByte('t')\n\t} else {\n\t\tbuf.WriteByte('f')\n\t}\n\tbuf.WriteByte('.')\n\tfor _, c := range conflictColumns {\n\t\tbuf.WriteString(c)\n\t}\n\tbuf.WriteByte('.')\n\tbuf.WriteString(strconv.Itoa(updateColumns.Kind))\n\tfor _, c := range updateColumns.Cols {\n\t\tbuf.WriteString(c)\n\t}\n\tbuf.WriteByte('.')\n\tbuf.WriteString(strconv.Itoa(insertColumns.Kind))\n\tfor _, c := range insertColumns.Cols {\n\t\tbuf.WriteString(c)\n\t}\n\tbuf.WriteByte('.')\n\tfor _, c := range nzDefaults {\n\t\tbuf.WriteString(c)\n\t}\n\tkey := buf.String()\n\tstrmangle.PutBuffer(buf)\n\n\tofferUpsertCacheMut.RLock()\n\tcache, cached := offerUpsertCache[key]\n\tofferUpsertCacheMut.RUnlock()\n\n\tvar err error\n\n\tif !cached {\n\t\tinsert, ret := insertColumns.InsertColumnSet(\n\t\t\tofferColumns,\n\t\t\tofferColumnsWithDefault,\n\t\t\tofferColumnsWithoutDefault,\n\t\t\tnzDefaults,\n\t\t)\n\t\tupdate := updateColumns.UpdateColumnSet(\n\t\t\tofferColumns,\n\t\t\tofferPrimaryKeyColumns,\n\t\t)\n\n\t\tif len(update) == 0 {\n\t\t\treturn errors.New(\"stellarcore: unable to upsert offers, could not build update column list\")\n\t\t}\n\n\t\tconflict := conflictColumns\n\t\tif len(conflict) == 0 {\n\t\t\tconflict = make([]string, len(offerPrimaryKeyColumns))\n\t\t\tcopy(conflict, offerPrimaryKeyColumns)\n\t\t}\n\t\tcache.query = buildUpsertQueryPostgres(dialect, \"\\\"offers\\\"\", updateOnConflict, ret, update, conflict, insert)\n\n\t\tcache.valueMapping, err = queries.BindMapping(offerType, offerMapping, insert)\n\t\tif err != nil {\n\t\t\treturn err\n\t\t}\n\t\tif len(ret) != 0 {\n\t\t\tcache.retMapping, err = queries.BindMapping(offerType, offerMapping, ret)\n\t\t\tif err != nil {\n\t\t\t\treturn err\n\t\t\t}\n\t\t}\n\t}\n\n\tvalue := reflect.Indirect(reflect.ValueOf(o))\n\tvals := queries.ValuesFromMapping(value, cache.valueMapping)\n\tvar returns []interface{}\n\tif len(cache.retMapping) != 0 {\n\t\treturns = queries.PtrsFromMapping(value, cache.retMapping)\n\t}\n\n\tif boil.DebugMode 
{\n\t\tfmt.Fprintln(boil.DebugWriter, cache.query)\n\t\tfmt.Fprintln(boil.DebugWriter, vals)\n\t}\n\n\tif len(cache.retMapping) != 0 {\n\t\terr = exec.QueryRow(cache.query, vals...).Scan(returns...)\n\t\tif err == sql.ErrNoRows {\n\t\t\terr = nil // Postgres doesn't return anything when there's no update\n\t\t}\n\t} else {\n\t\t_, err = exec.Exec(cache.query, vals...)\n\t}\n\tif err != nil {\n\t\treturn errors.Wrap(err, \"stellarcore: unable to upsert offers\")\n\t}\n\n\tif !cached {\n\t\tofferUpsertCacheMut.Lock()\n\t\tofferUpsertCache[key] = cache\n\t\tofferUpsertCacheMut.Unlock()\n\t}\n\n\treturn o.doAfterUpsertHooks(exec)\n}" ]
[ "0.6024578", "0.56647646", "0.5632995", "0.5198404", "0.51051587", "0.5098731", "0.5006001", "0.49907067", "0.49874976", "0.49772435", "0.49045727", "0.48979566", "0.48484412", "0.4822549", "0.4822338", "0.4765159", "0.4759106", "0.47494254", "0.472523", "0.4723187", "0.47133335", "0.47119385", "0.4686604", "0.4648566", "0.46397552", "0.46255565", "0.46168953", "0.46114546", "0.46033862", "0.45871323", "0.45730567", "0.4566306", "0.45652345", "0.4555141", "0.45393598", "0.45175603", "0.45166004", "0.4503666", "0.4496857", "0.4473625", "0.44659093", "0.44447356", "0.44437668", "0.44311738", "0.44124487", "0.44036615", "0.43917137", "0.4374467", "0.43736166", "0.435272", "0.43519005", "0.43432027", "0.4305602", "0.43034062", "0.4301063", "0.42965308", "0.42958838", "0.4289861", "0.4279306", "0.4270398", "0.42602006", "0.42579356", "0.4246845", "0.42385954", "0.42323983", "0.42266408", "0.4223883", "0.42216086", "0.42212734", "0.42184", "0.42178702", "0.4203639", "0.41958073", "0.41905433", "0.41897464", "0.4188156", "0.41865146", "0.4184593", "0.4183098", "0.4180362", "0.41750747", "0.41719165", "0.41677976", "0.4167729", "0.41617024", "0.41561213", "0.41545096", "0.41520405", "0.41404516", "0.4139314", "0.41372278", "0.41363576", "0.41347092", "0.41340503", "0.41332057", "0.4111496", "0.41081744", "0.41047442", "0.41010344", "0.41003984" ]
0.69944584
0
Field function returns field value.
func (e UpsertEventResponseValidationError) Field() string { return e.field }
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "func GetFieldValue(v interface{}, field string) (r string) {\n\tvar immutable reflect.Value\n\timmutable = GetReflectValue(v)\n\tval := immutable.FieldByName(field)\n\tswitch val.Kind() {\n\tcase reflect.Int64, reflect.Int32, reflect.Int:\n\t\tr = fmt.Sprintf(\"%d\", val.Int())\n\tcase reflect.Float64, reflect.Float32:\n\t\tr = fmt.Sprintf(\"%.2f\", val.Float())\n\tdefault:\n\t\t// process time\n\t\tvi := val.Interface()\n\t\tif vc, ok := vi.(time.Time); ok {\n\t\t\tr = FormatTime(vc)\n\t\t\tbreak\n\t\t}\n\t\tr = fmt.Sprintf(\"%v\", val)\n\t}\n\treturn\n}", "func (f *field) Val() interface{} {\n\treturn f.v\n}", "func (f Fields) ValueForField(fieldName string) string {\n\treturn f.ValueForFieldOfType(fieldName, \"\")\n}", "func (v *ClassValue) field(s *scope, name string) Value {\n\tfield, ok := v.Fields[name]\n\tif !ok {\n\t\tpanic(fmt.Errorf(\"ClassValue %v did not contain field %v\", v.Type().Name(), name))\n\t}\n\treturn field\n}", "func (f *Field) Value() interface{} {\n\treturn f.value.Interface()\n}", "func (f *Fieldx) Value() interface{} {\n\treturn f.value.Interface()\n}", "func (i Item) GetField(name string) interface{} {\n\treturn getField(name, i.Payload)\n}", "func FieldValue(field *InputField) string {\n\treturn field.value\n}", "func (e RanparameterValueValidationError) Field() string { return e.field }", "func (i I)Field(r,c int, value string)string{\n return value\n}", "func (s *StructField) Field(name string) (*StructField, error) {\n\treturn Field(s.Value(), name)\n}", "func (entry *Entry) Field(name string) (value string, err error) {\n\tvalue, ok := entry.fields[name]\n\tif !ok {\n\t\terr = fmt.Errorf(\"field '%v' does not found in record %+v\", name, *entry)\n\t}\n\treturn\n}", "func (m *NodeMutation) Field(name string) (ent.Value, bool) {\n\tswitch name {\n\tcase node.FieldValue:\n\t\treturn m.Value()\n\t}\n\treturn nil, false\n}", "func (u *User) GetFieldValue(field *mapping.StructField) (interface{}, error) {\n\tswitch field.Index[0] {\n\tcase 0: // ID\n\t\treturn u.ID, nil\n\tcase 1: // Name\n\t\treturn u.Name, nil\n\tcase 2: // CreatedAt\n\t\treturn u.CreatedAt, nil\n\tcase 3: // CreatedAtIso\n\t\treturn u.CreatedAtIso, nil\n\tcase 5: // MotherID\n\t\treturn u.MotherID, nil\n\tcase 7: // FatherID\n\t\treturn u.FatherID, nil\n\t}\n\treturn nil, errors.Wrapf(mapping.ErrInvalidModelField, \"provided invalid field: '%s' for given model: User'\", field.Name())\n}", "func (m *NumberTokenMutation) Field(name string) (ent.Value, bool) {\n\tswitch name {\n\tcase numbertoken.FieldValue:\n\t\treturn m.Value()\n\t}\n\treturn nil, false\n}", "func (f *Field) Field(name string) *Field {\n\tfield, ok := f.FieldOk(name)\n\tif !ok {\n\t\tpanic(\"field not found\")\n\t}\n\n\treturn field\n}", "func (e GetInstanceRequestValidationError) Field() string { return e.field }", "func (e RanparameterItemValidationError) Field() string { return e.field }", "func (e ApplicationPubSubValidationError) Field() string { return e.field }", "func (res Result) GetField(fields ...string) interface{} {\n\tif len(fields) == 0 {\n\t\treturn res\n\t}\n\n\treturn res.get(fields)\n}", "func (t *Type) Field(i int) *Field", "func (m *CarRepairrecordMutation) Field(name string) (ent.Value, bool) {\n\tswitch name {\n\tcase carrepairrecord.FieldDatetime:\n\t\treturn m.Datetime()\n\tcase carrepairrecord.FieldRepairdetail:\n\t\treturn m.Repairdetail()\n\tcase carrepairrecord.FieldRepaircost:\n\t\treturn m.Repaircost()\n\tcase carrepairrecord.FieldCarmaintenance:\n\t\treturn m.Carmaintenance()\n\t}\n\treturn 
nil, false\n}", "func (b *box) getFieldValue(x, y int) int {\n\treturn b.values[x+y*3]\n}", "func (e GetEventByIDRequestValidationError) Field() string { return e.field }", "func (msg *Message) Field(fieldName string) *Field {\n\treturn msg.fieldByName[fieldName]\n}", "func (e GetRequestValidationError) Field() string { return e.field }", "func (e GetRequestValidationError) Field() string { return e.field }", "func (e GetInstanceResponseValidationError) Field() string { return e.field }", "func (e BitStringValidationError) Field() string { return e.field }", "func (e GetResponseValidationError) Field() string { return e.field }", "func (e GetApplicationPubSubRequestValidationError) Field() string { return e.field }", "func (e ResultValidationError) Field() string { return e.field }", "func (e GetEventByIDResponseValidationError) Field() string { return e.field }", "func (e RanparameterDefItemValidationError) Field() string { return e.field }", "func (e ArfcnValidationError) Field() string { return e.field }", "func (p *Pet) GetFieldValue(field *mapping.StructField) (interface{}, error) {\n\tswitch field.Index[0] {\n\tcase 0: // ID\n\t\treturn p.ID, nil\n\tcase 1: // Name\n\t\treturn p.Name, nil\n\tcase 3: // OwnerID\n\t\treturn p.OwnerID, nil\n\t}\n\treturn nil, errors.Wrapf(mapping.ErrInvalidModelField, \"provided invalid field: '%s' for given model: Pet'\", field.Name())\n}", "func (e RanparameterIdValidationError) Field() string { return e.field }", "func (e RetrieveResponseValidationError) Field() string { return e.field }", "func (m *RepairinvoiceMutation) Field(name string) (ent.Value, bool) {\n\tswitch name {\n\tcase repairinvoice.FieldBequipment:\n\t\treturn m.Bequipment()\n\tcase repairinvoice.FieldEmtell:\n\t\treturn m.Emtell()\n\tcase repairinvoice.FieldNum:\n\t\treturn m.Num()\n\t}\n\treturn nil, false\n}", "func (m *CleaningroomMutation) Field(name string) (ent.Value, bool) {\n\tswitch name {\n\tcase cleaningroom.FieldNote:\n\t\treturn m.Note()\n\tcase cleaningroom.FieldDateandstarttime:\n\t\treturn m.Dateandstarttime()\n\tcase cleaningroom.FieldPhonenumber:\n\t\treturn m.Phonenumber()\n\tcase cleaningroom.FieldNumofem:\n\t\treturn m.Numofem()\n\t}\n\treturn nil, false\n}", "func Field(name, from, reference string) (string, error) {\n\treturn makeRequest(\"field\", name, from, reference)\n}", "func (e GetMovableObjectRequestValidationError) Field() string { return e.field }", "func (e ResolveResponseValidationError) Field() string { return e.field }", "func (e PublishResponseValidationError) Field() string { return e.field }", "func (e GetMessageRequestValidationError) Field() string { return e.field }", "func (e GetMessageResponseValidationError) Field() string { return e.field }", "func (m *RepairinvoiceMutation) Field(name string) (ent.Value, bool) {\n\tswitch name {\n\tcase repairinvoice.FieldSymptomid:\n\t\treturn m.Symptomid()\n\tcase repairinvoice.FieldDeviceid:\n\t\treturn m.Deviceid()\n\tcase repairinvoice.FieldUserid:\n\t\treturn m.Userid()\n\tcase repairinvoice.FieldStatusrepairid:\n\t\treturn m.Statusrepairid()\n\t}\n\treturn nil, false\n}", "func (e SimpleRequestValidationError) Field() string { return e.field }", "func (e CacheValidationError) Field() string { return e.field }", "func (e PciValidationError) Field() string { return e.field }", "func (e ChannelPayRequestValidationError) Field() string { return e.field }", "func (e GetMovableObjectResponseValidationError) Field() string { return e.field }", "func (e RetrieveRequestValidationError) Field() string { 
return e.field }", "func (m *ExchangeMutation) Field(name string) (ent.Value, bool) {\n\tswitch name {\n\tcase exchange.FieldCode:\n\t\treturn m.Code()\n\tcase exchange.FieldName:\n\t\treturn m.Name()\n\t}\n\treturn nil, false\n}", "func (e PublishRequestValidationError) Field() string { return e.field }", "func (m *PetruleMutation) Field(name string) (ent.Value, bool) {\n\tswitch name {\n\tcase petrule.FieldPetrule:\n\t\treturn m.Petrule()\n\t}\n\treturn nil, false\n}", "func (e GitopsCFValidationError) Field() string { return e.field }", "func (e SimpleResponseValidationError) Field() string { return e.field }", "func (e ChannelPayResponseValidationError) Field() string { return e.field }", "func (f *Field) Get(l *Location) (string, error) {\n\tif l.Comp == -1 {\n\t\treturn string(f.Value), nil\n\t}\n\tcomp, err := f.Component(l.Comp)\n\tif err != nil {\n\t\treturn \"\", err\n\t}\n\treturn comp.Get(l)\n}", "func (m *RepairingMutation) Field(name string) (ent.Value, bool) {\n\tswitch name {\n\tcase repairing.FieldRepairpart:\n\t\treturn m.Repairpart()\n\t}\n\treturn nil, false\n}", "func (e RanfunctionNameValidationError) Field() string { return e.field }", "func (m *BillMutation) Field(name string) (ent.Value, bool) {\n\tswitch name {\n\tcase bill.FieldPrice:\n\t\treturn m.Price()\n\tcase bill.FieldTime:\n\t\treturn m.Time()\n\t}\n\treturn nil, false\n}", "func (m *EventRSVPMutation) Field(name string) (ent.Value, bool) {\n\treturn nil, false\n}", "func Field(v interface{}, name string) (*Fieldx, bool) {\n\treturn New(v).Field(name)\n}", "func (e GetStreamRequestValidationError) Field() string { return e.field }", "func (e RdsValidationError) Field() string { return e.field }", "func (f *TagField) Value() string {\n\treturn f.value\n}", "func (m *LeaseMutation) Field(name string) (ent.Value, bool) {\n\tswitch name {\n\tcase lease.FieldAddedtime:\n\t\treturn m.Addedtime()\n\tcase lease.FieldTenant:\n\t\treturn m.Tenant()\n\tcase lease.FieldNumbtenant:\n\t\treturn m.Numbtenant()\n\tcase lease.FieldIdtenant:\n\t\treturn m.Idtenant()\n\tcase lease.FieldAgetenant:\n\t\treturn m.Agetenant()\n\t}\n\treturn nil, false\n}", "func (e RetrieveCurrentRequestValidationError) Field() string { return e.field }", "func (fn AdapterFunc) Field(fieldpath []string) (string, bool) {\n\treturn fn(fieldpath)\n}", "func (e EarfcnValidationError) Field() string { return e.field }", "func (e Response_DataValidationError) Field() string { return e.field }", "func (e ScopedRdsValidationError) Field() string { return e.field }", "func (e ResolveRequestValidationError) Field() string { return e.field }", "func (e PaymentInputValidationError) Field() string { return e.field }", "func (m *PatientrecordMutation) Field(name string) (ent.Value, bool) {\n\tswitch name {\n\tcase patientrecord.FieldName:\n\t\treturn m.Name()\n\t}\n\treturn nil, false\n}", "func (e BatchGetResponseValidationError) Field() string { return e.field }", "func (i *Item) GetValue(field string) string {\n\tif i == nil || len(i.Fields) == 0 {\n\t\treturn \"\"\n\t}\n\n\tsectionFilter := false\n\tsectionLabel := \"\"\n\tfieldLabel := field\n\tif strings.Contains(field, \".\") {\n\t\tparts := strings.Split(field, \".\")\n\n\t\t// Test to make sure the . 
isn't the last character\n\t\tif len(parts) == 2 {\n\t\t\tsectionFilter = true\n\t\t\tsectionLabel = parts[0]\n\t\t\tfieldLabel = parts[1]\n\t\t}\n\t}\n\n\tfor _, f := range i.Fields {\n\t\tif sectionFilter {\n\t\t\tif f.Section != nil {\n\t\t\t\tif sectionLabel != i.SectionLabelForID(f.Section.ID) {\n\t\t\t\t\tcontinue\n\t\t\t\t}\n\t\t\t}\n\t\t}\n\n\t\tif fieldLabel == f.Label {\n\t\t\treturn f.Value\n\t\t}\n\t}\n\n\treturn \"\"\n}", "func (m *RoomInfoMutation) Field(name string) (ent.Value, bool) {\n\tswitch name {\n\tcase roominfo.FieldInfo:\n\t\treturn m.Info()\n\t}\n\treturn nil, false\n}", "func (m *TokenMutation) Field(name string) (ent.Value, bool) {\n\tswitch name {\n\tcase token.FieldCreatedAt:\n\t\treturn m.CreatedAt()\n\tcase token.FieldUpdatedAt:\n\t\treturn m.UpdatedAt()\n\tcase token.FieldValue:\n\t\treturn m.Value()\n\t}\n\treturn nil, false\n}", "func (m *ResourceMutation) Field(name string) (ent.Value, bool) {\n\tswitch name {\n\tcase resource.FieldName:\n\t\treturn m.Name()\n\tcase resource.FieldType:\n\t\treturn m.GetType()\n\t}\n\treturn nil, false\n}", "func (e MovableObjectValidationError) Field() string { return e.field }", "func (m *CardMutation) Field(name string) (ent.Value, bool) {\n\tswitch name {\n\tcase card.FieldCreateTime:\n\t\treturn m.CreateTime()\n\tcase card.FieldUpdateTime:\n\t\treturn m.UpdateTime()\n\tcase card.FieldSuit:\n\t\treturn m.Suit()\n\tcase card.FieldValue:\n\t\treturn m.Value()\n\t}\n\treturn nil, false\n}", "func (m *EventMutation) Field(name string) (ent.Value, bool) {\n\tswitch name {\n\tcase event.FieldName:\n\t\treturn m.Name()\n\t}\n\treturn nil, false\n}", "func (m *BillMutation) Field(name string) (ent.Value, bool) {\n\tswitch name {\n\tcase bill.FieldQuantity:\n\t\treturn m.Quantity()\n\tcase bill.FieldAddedTime:\n\t\treturn m.AddedTime()\n\t}\n\treturn nil, false\n}", "func (m *StreetMutation) Field(name string) (ent.Value, bool) {\n\tswitch name {\n\tcase street.FieldName:\n\t\treturn m.Name()\n\t}\n\treturn nil, false\n}", "func (m *LengthtimeMutation) Field(name string) (ent.Value, bool) {\n\tswitch name {\n\tcase lengthtime.FieldLengthtime:\n\t\treturn m.Lengthtime()\n\t}\n\treturn nil, false\n}", "func (e AssessmentResultValidationError) Field() string { return e.field }", "func (s UserSet) FieldGet(field models.FieldName) *models.FieldInfo {\n\tres := s.Collection().Call(\"FieldGet\", field)\n\tresTyped, _ := res.(*models.FieldInfo)\n\treturn resTyped\n}", "func (e GetUserResponseValidationError) Field() string { return e.field }", "func (m *PatientrightsMutation) Field(name string) (ent.Value, bool) {\n\tswitch name {\n\tcase patientrights.FieldPermissionDate:\n\t\treturn m.PermissionDate()\n\t}\n\treturn nil, false\n}", "func (e GetStreamResponseValidationError) Field() string { return e.field }", "func (m *EquipmentrentalMutation) Field(name string) (ent.Value, bool) {\n\tswitch name {\n\tcase equipmentrental.FieldRENTALAMOUNT:\n\t\treturn m.RENTALAMOUNT()\n\tcase equipmentrental.FieldRENTALDATE:\n\t\treturn m.RENTALDATE()\n\tcase equipmentrental.FieldRETURNDATE:\n\t\treturn m.RETURNDATE()\n\t}\n\treturn nil, false\n}", "func (f *FieldHandler) Value(initZero bool) reflect.Value {\n\treturn f.field.reflectValueGetter(f.expr.ptr, initZero)\n}", "func (m *PurposeMutation) Field(name string) (ent.Value, bool) {\n\tswitch name {\n\tcase purpose.FieldObjective:\n\t\treturn m.Objective()\n\t}\n\treturn nil, false\n}", "func (e ApplicationPubSubsValidationError) Field() string { return e.field }", "func (f Unstructured) 
Field(field string) Fragment {\n\tif f.fields != nil {\n\t\treturn f.fields[field]\n\t}\n\treturn nil\n}", "func (e BodyResponseValidationError) Field() string { return e.field }", "func (m *CarMutation) Field(name string) (ent.Value, bool) {\n\tswitch name {\n\tcase car.FieldCreateTime:\n\t\treturn m.CreateTime()\n\tcase car.FieldUpdateTime:\n\t\treturn m.UpdateTime()\n\tcase car.FieldModel:\n\t\treturn m.Model()\n\tcase car.FieldRegisteredAt:\n\t\treturn m.RegisteredAt()\n\t}\n\treturn nil, false\n}", "func (m *CardMutation) Field(name string) (ent.Value, bool) {\n\tswitch name {\n\tcase card.FieldNumber:\n\t\treturn m.Number()\n\tcase card.FieldName:\n\t\treturn m.Name()\n\tcase card.FieldOwnerID:\n\t\treturn m.OwnerID()\n\t}\n\treturn nil, false\n}" ]
[ "0.71079886", "0.705458", "0.70306563", "0.70252305", "0.6945119", "0.69039124", "0.689789", "0.68854237", "0.68611896", "0.68137765", "0.6811531", "0.67632294", "0.6716657", "0.67018616", "0.66822076", "0.6671346", "0.66659707", "0.6661343", "0.66608155", "0.6660421", "0.665608", "0.6647752", "0.66360617", "0.6625801", "0.6617159", "0.66153616", "0.66153616", "0.661111", "0.6608895", "0.66083837", "0.6604208", "0.66008335", "0.65927887", "0.6587402", "0.65803015", "0.65671533", "0.6567071", "0.6564914", "0.65632343", "0.65630984", "0.654184", "0.6536053", "0.6530546", "0.6530526", "0.6528864", "0.65260595", "0.65179527", "0.6516745", "0.6516154", "0.6510159", "0.6510078", "0.65042776", "0.6501439", "0.6499975", "0.64988506", "0.649665", "0.6496221", "0.64947623", "0.649354", "0.6489089", "0.6488793", "0.64882225", "0.64859617", "0.6483642", "0.6479889", "0.64790434", "0.6472379", "0.6465228", "0.6459204", "0.6457627", "0.6452723", "0.64507645", "0.64495903", "0.64487314", "0.6448028", "0.64479464", "0.64474", "0.64456683", "0.64455897", "0.6444573", "0.64437336", "0.6443306", "0.6441888", "0.6441613", "0.6441039", "0.6439085", "0.6438874", "0.6434375", "0.64315784", "0.6430702", "0.6429934", "0.64209116", "0.6417538", "0.64174324", "0.6417134", "0.6411201", "0.64086837", "0.6406251", "0.6405251", "0.6404929", "0.64009386" ]
0.0
-1
Reason function returns reason value.
func (e UpsertEventResponseValidationError) Reason() string { return e.reason }
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "func GetReason(from Getter, t string) string {\n\tif c := Get(from, t); c != nil {\n\t\treturn c.Reason\n\t}\n\treturn \"\"\n}", "func GetReason(message report.IMessage) int32 {\n\tswitch message.MessageType() {\n\tcase \"+RSP\", \"+BSP\":\n\t\treturn getLocationReason(message)\n\tcase \"+EVT\", \"+BVT\":\n\t\treturn getEventCode(message)\n\n\t}\n\treturn int32(6)\n}", "func GetReason(message report.IMessage) int32 {\n\tswitch message.MessageType() {\n\tcase \"+RSP\", \"+BSP\":\n\t\treturn getLocationReason(message)\n\tcase \"+EVT\", \"+BVT\":\n\t\treturn getEventCode(message)\n\n\t}\n\treturn int32(6)\n}", "func (b *Base) GetReason() string {\n\treturn b.Reason\n}", "func (o ValidationOutput) Reason() pulumi.StringPtrOutput {\n\treturn o.ApplyT(func(v Validation) *string { return v.Reason }).(pulumi.StringPtrOutput)\n}", "func (s *Subscription) GetReason() string {\n\tif s == nil || s.Reason == nil {\n\t\treturn \"\"\n\t}\n\treturn *s.Reason\n}", "func GetReason(message report.IMessage) int32 {\n\tidt, found := message.GetValue(fields.DeviceType)\n\tif !found {\n\t\treturn 6 //periodical\n\t}\n\n\tdeviceType, valid := idt.(byte)\n\tif !valid {\n\t\treturn 6 //periodical\n\t}\n\n\tswitch deviceType {\n\tcase devicetypes.GV320:\n\t\treturn gv300.GetReason(message)\n\n\tcase devicetypes.GV55, devicetypes.GV55N:\n\t\treturn gv55.GetReason(message)\n\n\tcase devicetypes.GV55Lite, devicetypes.GV55NLite:\n\t\treturn gv55.GetReasonLite(message)\n\n\tcase devicetypes.GV75, devicetypes.GV75W:\n\t\treturn gv75.GetReason(message)\n\n\tcase devicetypes.GV55W:\n\t\treturn gv55w.GetReason(message)\n\n\tcase devicetypes.GV600W:\n\t\treturn gv600.GetReason(message)\n\tcase devicetypes.GV300W:\n\t\treturn gv300w.GetReason(message)\n\tdefault:\n\t\treturn gv55.GetReason(message)\n\t}\n}", "func (e MessageDValidationError) Reason() string { return e.reason }", "func (o LienOutput) Reason() pulumi.StringOutput {\n\treturn o.ApplyT(func(v *Lien) pulumi.StringOutput { return v.Reason }).(pulumi.StringOutput)\n}", "func (e BitStringValidationError) Reason() string { return e.reason }", "func (o JobConditionOutput) Reason() pulumi.StringPtrOutput {\n\treturn o.ApplyT(func(v JobCondition) *string { return v.Reason }).(pulumi.StringPtrOutput)\n}", "func Reason(v string) predicate.ProfileUKM {\n\treturn predicate.ProfileUKM(func(s *sql.Selector) {\n\t\ts.Where(sql.EQ(s.C(FieldReason), v))\n\t})\n}", "func (e MessageFValidationError) Reason() string { return e.reason }", "func (o ValidationPatchOutput) Reason() pulumi.StringPtrOutput {\n\treturn o.ApplyT(func(v ValidationPatch) *string { return v.Reason }).(pulumi.StringPtrOutput)\n}", "func (e ActiveHealthCheckValidationError) Reason() string { return e.reason }", "func (o *SecurityProblemEvent) GetReason() string {\n\tif o == nil || o.Reason == nil {\n\t\tvar ret string\n\t\treturn ret\n\t}\n\treturn *o.Reason\n}", "func (e EutracgiValidationError) Reason() string { return e.reason }", "func (resp *Response) Reason() string {\n\treturn resp.Status\n}", "func (n *Notification) GetReason() string {\n\tif n == nil || n.Reason == nil {\n\t\treturn \"\"\n\t}\n\treturn *n.Reason\n}", "func (s *SessionTrackerV1) GetReason() string {\n\treturn s.Spec.Reason\n}", "func (e MessageEValidationError) Reason() string { return e.reason }", "func (e RequirementRuleValidationError) Reason() string { return e.reason }", "func Reason(err error) string {\n\tif err == nil {\n\t\treturn \"\"\n\t}\n\tif reasoner, ok := err.(Reasoner); ok {\n\t\treturn reasoner.Reason()\n\t}\n\treturn 
\"\"\n}", "func (o MachineInstanceStatusConditionsOutput) Reason() pulumi.StringOutput {\n\treturn o.ApplyT(func(v MachineInstanceStatusConditions) string { return v.Reason }).(pulumi.StringOutput)\n}", "func (e NrtValidationError) Reason() string { return e.reason }", "func (o BuildStatusOutput) Reason() pulumi.StringPtrOutput {\n\treturn o.ApplyT(func(v BuildStatus) *string { return v.Reason }).(pulumi.StringPtrOutput)\n}", "func (e GetMessageResponseValidationError) Reason() string { return e.reason }", "func (o ApplicationStatusConditionsOutput) Reason() pulumi.StringOutput {\n\treturn o.ApplyT(func(v ApplicationStatusConditions) string { return v.Reason }).(pulumi.StringOutput)\n}", "func (e PassiveHealthCheckValidationError) Reason() string { return e.reason }", "func (e CardValidationError) Reason() string { return e.reason }", "func (e StatsdValidationError) Reason() string { return e.reason }", "func (e PciValidationError) Reason() string { return e.reason }", "func (o ApplicationStatusWorkflowStepsOutput) Reason() pulumi.StringPtrOutput {\n\treturn o.ApplyT(func(v ApplicationStatusWorkflowSteps) *string { return v.Reason }).(pulumi.StringPtrOutput)\n}", "func (o *AccessRequestData) GetReason() string {\n\tif o == nil || o.Reason == nil {\n\t\tvar ret string\n\t\treturn ret\n\t}\n\treturn *o.Reason\n}", "func (e LanguageValidationError) Reason() string { return e.reason }", "func (e CreditValidationError) Reason() string { return e.reason }", "func (e PaymentValidationError) Reason() string { return e.reason }", "func (e ResponseValidationError) Reason() string { return e.reason }", "func (e RdsValidationError) Reason() string { return e.reason }", "func (e CardHolderValidationError) Reason() string { return e.reason }", "func (e ActionValidationError) Reason() string { return e.reason }", "func (e SimpleResponseValidationError) Reason() string { return e.reason }", "func (e StatusResponseValidationError) Reason() string { return e.reason }", "func (o *V0037Node) GetReason() string {\n\tif o == nil || o.Reason == nil {\n\t\tvar ret string\n\t\treturn ret\n\t}\n\treturn *o.Reason\n}", "func (e ChannelPayRequestValidationError) Reason() string { return e.reason }", "func (e ChannelPayResponseValidationError) Reason() string { return e.reason }", "func (e RicControlMessagePriorityValidationError) Reason() string { return e.reason }", "func (e MaxPciValidationError) Reason() string { return e.reason }", "func (e LivenessResponseValidationError) Reason() string { return e.reason }", "func (e MaxPlmnValidationError) Reason() string { return e.reason }", "func (e SimpleRequestValidationError) Reason() string { return e.reason }", "func (e MessageCValidationError) Reason() string { return e.reason }", "func (se *StatusError) Reason() string {\n\treturn se.message\n}", "func (o *DeploymentsCondition) GetReason() string {\n\tif o == nil || o.Reason == nil {\n\t\tvar ret string\n\t\treturn ret\n\t}\n\treturn *o.Reason\n}", "func (e SkillValidationError) Reason() string { return e.reason }", "func (e GetDisscusRespValidationError) Reason() string { return e.reason }", "func (o BuildStatusPtrOutput) Reason() pulumi.StringPtrOutput {\n\treturn o.ApplyT(func(v *BuildStatus) *string {\n\t\tif v == nil {\n\t\t\treturn nil\n\t\t}\n\t\treturn v.Reason\n\t}).(pulumi.StringPtrOutput)\n}", "func (c *ContainerStatusResolver) Reason() *string {\n\treturn c.reason\n}", "func (e EarfcnValidationError) Reason() string { return e.reason }", "func (e CalculateComplianceRequestValidationError) Reason() string 
{ return e.reason }", "func (_this *CrashReportBody) Reason() *string {\n\tvar ret *string\n\tvalue := _this.Value_JS.Get(\"reason\")\n\tif value.Type() != js.TypeNull && value.Type() != js.TypeUndefined {\n\t\t__tmp := (value).String()\n\t\tret = &__tmp\n\t}\n\treturn ret\n}", "func (e HealthCheck_PayloadValidationError) Reason() string { return e.reason }", "func (e RetrieveMyCardsResponseValidationError) Reason() string { return e.reason }", "func (e CommonResponseValidationError) Reason() string { return e.reason }", "func (e GetMessageRequestValidationError) Reason() string { return e.reason }", "func (o StorageClusterStatusConditionsOutput) Reason() pulumi.StringPtrOutput {\n\treturn o.ApplyT(func(v StorageClusterStatusConditions) *string { return v.Reason }).(pulumi.StringPtrOutput)\n}", "func (e StateMachineResponseValidationError) Reason() string { return e.reason }", "func (e ArfcnValidationError) Reason() string { return e.reason }", "func (e NetworkPolicyValidationError) Reason() string { return e.reason }", "func (o *DataPlaneClusterUpdateStatusRequestConditions) GetReason() string {\n\tif o == nil || o.Reason == nil {\n\t\tvar ret string\n\t\treturn ret\n\t}\n\treturn *o.Reason\n}", "func (e MetricValidationError) Reason() string { return e.reason }", "func (o BuildRunStatusOutput) Reason() pulumi.StringPtrOutput {\n\treturn o.ApplyT(func(v BuildRunStatus) *string { return v.Reason }).(pulumi.StringPtrOutput)\n}", "func (e RecoverableError) Reason() string {\n\treturn e.reason\n}", "func (e MaxofMessageProtocolTestsValidationError) Reason() string { return e.reason }", "func (e ChannelNotifyResponseValidationError) Reason() string { return e.reason }", "func (e ResultValidationError) Reason() string { return e.reason }", "func (e TestSpecificationValidationError) Reason() string { return e.reason }", "func (e NonRecoverableError) Reason() string {\n\treturn e.reason\n}", "func (o JobStatusErrorOutput) Reason() pulumi.StringPtrOutput {\n\treturn o.ApplyT(func(v JobStatusError) *string { return v.Reason }).(pulumi.StringPtrOutput)\n}", "func (a Acknowledgement) Reason() error {\n\tswitch {\n\tcase a.State == ACK:\n\t\treturn nil\n\tcase a.State == NACK:\n\t\treturn errors.New(string(a.Message))\n\tdefault:\n\t\treturn errors.New(\"unknown acknowledgement status\")\n\t}\n}", "func (e UpdateMessageResponseValidationError) Reason() string { return e.reason }", "func (e WordValidationError) Reason() string { return e.reason }", "func (e GetDisscusReqValidationError) Reason() string { return e.reason }", "func (e CreatMessageResponseValidationError) Reason() string { return e.reason }", "func (o HorizontalPodAutoscalerConditionPatchOutput) Reason() pulumi.StringPtrOutput {\n\treturn o.ApplyT(func(v HorizontalPodAutoscalerConditionPatch) *string { return v.Reason }).(pulumi.StringPtrOutput)\n}", "func (o HorizontalPodAutoscalerConditionPatchOutput) Reason() pulumi.StringPtrOutput {\n\treturn o.ApplyT(func(v HorizontalPodAutoscalerConditionPatch) *string { return v.Reason }).(pulumi.StringPtrOutput)\n}", "func (e MetricImplementationValidationError) Reason() string { return e.reason }", "func (e CiliumCFValidationError) Reason() string { return e.reason }", "func (e FilterStateRuleValidationError) Reason() string { return e.reason }", "func (e CreateDisscusRespValidationError) Reason() string { return e.reason }", "func (e HealthCheckValidationError) Reason() string { return e.reason }", "func (e HealthCheckValidationError) Reason() string { return e.reason }", "func (e 
TwoOneofsValidationError) Reason() string { return e.reason }", "func (e AdminValidationError) Reason() string { return e.reason }", "func (o HorizontalPodAutoscalerConditionOutput) Reason() pulumi.StringPtrOutput {\n\treturn o.ApplyT(func(v HorizontalPodAutoscalerCondition) *string { return v.Reason }).(pulumi.StringPtrOutput)\n}", "func (o HorizontalPodAutoscalerConditionOutput) Reason() pulumi.StringPtrOutput {\n\treturn o.ApplyT(func(v HorizontalPodAutoscalerCondition) *string { return v.Reason }).(pulumi.StringPtrOutput)\n}", "func (e LivenessRequestValidationError) Reason() string { return e.reason }", "func (r *ReportStoryRequest) GetReason() (value ReportReasonClass) {\n\tif r == nil {\n\t\treturn\n\t}\n\treturn r.Reason\n}", "func (e AssessmentResultValidationError) Reason() string { return e.reason }", "func (e L7NetworkPolicyRuleValidationError) Reason() string { return e.reason }", "func (e NrarfcnValidationError) Reason() string { return e.reason }" ]
[ "0.78512263", "0.7759013", "0.7759013", "0.758723", "0.74332446", "0.74091107", "0.740494", "0.73673135", "0.73432285", "0.7330937", "0.7329657", "0.73138005", "0.72980094", "0.7293151", "0.72837216", "0.7275913", "0.7252345", "0.7230593", "0.72234565", "0.7222608", "0.7196587", "0.7186926", "0.7177811", "0.71720684", "0.71702856", "0.7168882", "0.7168033", "0.71623784", "0.7160162", "0.7157901", "0.7156796", "0.71499187", "0.71483266", "0.71435404", "0.7138927", "0.7134093", "0.7131485", "0.71212435", "0.7113703", "0.71134007", "0.7110416", "0.71102226", "0.71073544", "0.71044487", "0.7097571", "0.709562", "0.70931906", "0.7092116", "0.7085098", "0.70789874", "0.7077606", "0.707535", "0.7071573", "0.706842", "0.7067343", "0.70658314", "0.7065663", "0.70604813", "0.70554", "0.70413375", "0.7038985", "0.7036392", "0.70291436", "0.70268923", "0.7026706", "0.70261866", "0.7018986", "0.7011388", "0.70111495", "0.7009085", "0.7005406", "0.70025146", "0.7000965", "0.69991565", "0.6995616", "0.6992607", "0.6992276", "0.69910586", "0.6989737", "0.69873315", "0.6984515", "0.6983248", "0.6979003", "0.6976954", "0.69759", "0.69759", "0.6974406", "0.69741553", "0.6972589", "0.69723344", "0.69695055", "0.69695055", "0.69690573", "0.69686645", "0.69659555", "0.69659555", "0.69656986", "0.69630307", "0.69612694", "0.69515", "0.69511986" ]
0.0
-1
Cause function returns cause value.
func (e UpsertEventResponseValidationError) Cause() error { return e.cause }
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "func Cause(err error) error {\n\tswitch err.(type) {\n\tcase Causable:\n\t\treturn err.(Causable).Cause()\n\t}\n\treturn nil\n}", "func (e errWithCause) Cause() error {\n\treturn e.cause\n}", "func Cause(e error) error {\n\tswitch e := e.(type) {\n\tcase *wrap:\n\t\treturn e.Cause()\n\tcase UserError:\n\t\treturn e.Cause()\n\tdefault:\n\t\treturn e\n\t}\n}", "func (e *Error) Cause() error {\n\treturn e.Unwrap()\n}", "func (e *wrap) Cause() error {\n\treturn e.cause\n}", "func Cause(err error) error {\n\treturn errors.Cause(err)\n}", "func Cause(err error) error {\n\treturn errors.Cause(err)\n}", "func Cause(err error) error {\n\treturn errors.Cause(err)\n}", "func Cause(err error) error {\n\treturn errors.Cause(err)\n}", "func Cause(err error) error {\n\treturn errors.Cause(err)\n}", "func Cause(err error) error {\n\treturn errors.Cause(err)\n}", "func Cause(err error) error {\n\tif err == nil {\n\t\treturn nil\n\t}\n\tif e, ok := err.(iCause); ok {\n\t\treturn e.Cause()\n\t}\n\tif e, ok := err.(iNext); ok {\n\t\treturn Cause(e.Next())\n\t}\n\tif e, ok := err.(iUnwrap); ok {\n\t\treturn Cause(e.Unwrap())\n\t}\n\treturn err\n}", "func (e *Error) Cause() error {\n\treturn e.err\n}", "func (e *errorT) Cause() error {\n\treturn e.err\n}", "func (s *Error) Cause() error {\n\treturn s.underlying\n}", "func (e *Error) Cause() error {\n\treturn e.Err\n}", "func (ec Error) Cause() error {\n\treturn ec.error\n}", "func Cause(err error) error {\n\tif err, ok := err.(*wrappedError); ok {\n\t\treturn err.Cause()\n\t}\n\treturn err\n}", "func (e *Err) Cause() error {\n\treturn e.cause\n}", "func Cause(err error) error {\n\ttype causer interface {\n\t\tCause() error\n\t}\n\n\tfor err != nil {\n\t\te, ok := err.(causer)\n\t\tif !ok {\n\t\t\treturn err\n\t\t}\n\t\terr = e.Cause()\n\t}\n\treturn nil\n}", "func (e Error) Cause() error {\n\treturn e.cause\n}", "func (e *RunError) Cause() error {\n\tif e.Inner != nil {\n\t\treturn e.Inner\n\t}\n\treturn e\n}", "func (e *wrappedError) Cause() error {\n\tif e.previous == nil {\n\t\treturn e\n\t}\n\tswitch err := e.previous.(type) {\n\tcase *wrappedError:\n\t\treturn err.Cause()\n\tdefault:\n\t\treturn err\n\t}\n}", "func Cause(err error) error {\n\tvar (\n\t\tcauser Causer\n\t\tok bool\n\t)\n\tfor err != nil {\n\t\tcauser, ok = err.(Causer)\n\t\tif !ok {\n\t\t\tbreak\n\t\t}\n\t\terr = causer.Cause()\n\t}\n\treturn err\n}", "func (e *OpError) Cause() error {\n\treturn e.Err\n}", "func (err *gooseError) Cause() error {\n\treturn err.cause\n}", "func (e *detailedError) Cause() error {\n\treturn e.cause\n}", "func (err *ExitError) Cause() error {\n\treturn err.Err\n}", "func (ce *ClientError) Cause() error {\n\treturn ce.err\n}", "func Cause(err error) error {\n\tif w, ok := err.(*Wrapped); ok {\n\t\t// if root level error\n\t\tif len(w.Errors) > 0 {\n\t\t\treturn w.Errors[0]\n\t\t}\n\t\t// already extracted error\n\t\treturn w\n\t}\n\treturn err\n}", "func Cause(err error) (error, bool) { // nolint: golint, staticcheck, stylecheck\n\terrWithContext, ok := err.(ContextError)\n\tif !ok {\n\t\treturn nil, false\n\t}\n\treturn errWithContext.Cause(), true\n}", "func Cause(err error) error {\n\tfor err != nil {\n\t\te, ok := err.(causer)\n\t\tif !ok {\n\t\t\tbreak\n\t\t}\n\t\terr = e.Cause()\n\t}\n\treturn err\n}", "func Cause(err error) error {\n\tfor err != nil {\n\t\tcause, ok := err.(causer)\n\t\tif !ok {\n\t\t\tbreak\n\t\t}\n\t\terr = cause.Cause()\n\t}\n\treturn err\n}", "func (e UnencodableValue) Cause() error {\n\treturn e.Err\n}", "func Cause(err error) 
error {\n\ttype wrapper interface {\n\t\tUnwrap() error\n\t}\n\tfor err != nil {\n\t\tcause, ok := err.(wrapper)\n\t\tif !ok {\n\t\t\tbreak\n\t\t}\n\t\terr = cause.Unwrap()\n\t}\n\treturn err\n}", "func (w *pipeError) Cause() error { return errors.Cause(w.error) }", "func (e *opaqueWrapper) Cause() error { return e.cause }", "func (e *opaqueWrapper) Cause() error { return e.cause }", "func (e ResolveRequestValidationError) Cause() error { return e.cause }", "func Cause(err error) error {\n\tfor {\n\t\tuerr := Unwrap(err)\n\t\tif uerr == nil {\n\t\t\treturn err\n\t\t}\n\t\terr = uerr\n\t}\n}", "func Cause(err error) error {\n\tfor {\n\t\tif e, ok := err.(errorCause); ok {\n\t\t\tif cause := e.Cause(); cause != nil {\n\t\t\t\terr = cause\n\t\t\t\tcontinue\n\t\t\t}\n\t\t}\n\t\treturn err\n\t}\n}", "func (e InternalUpstreamTransportValidationError) Cause() error { return e.cause }", "func (e EutracgiValidationError) Cause() error { return e.cause }", "func (w *withCode) Cause() error { return w.cause }", "func (e UpsertEventRequestValidationError) Cause() error { return e.cause }", "func (e PciValidationError) Cause() error { return e.cause }", "func (e NoOneofsValidationError) Cause() error { return e.cause }", "func (e SimpleRequestValidationError) Cause() error { return e.cause }", "func Cause(err error) error {\n\tfor err != nil {\n\t\tmrpErr, ok := err.(Error)\n\t\tif !ok {\n\t\t\tbreak\n\t\t}\n\t\terr = gErrors.Cause(mrpErr.originalError)\n\t}\n\treturn err\n}", "func (e *withDomain) Cause() error { return e.cause }", "func (e LoggingValidationError) Cause() error { return e.cause }", "func (e CiliumCFValidationError) Cause() error { return e.cause }", "func (e AssessmentResultValidationError) Cause() error { return e.cause }", "func (e LoggingCFValidationError) Cause() error { return e.cause }", "func Cause(err error) error {\n\tfor err != nil {\n\t\tunwraped := errors.Unwrap(err)\n\t\tif unwraped == nil {\n\t\t\treturn err\n\t\t}\n\t\terr = unwraped\n\t}\n\treturn err\n}", "func (e NrtValidationError) Cause() error { return e.cause }", "func (e ResolveResponseValidationError) Cause() error { return e.cause }", "func (e StateChangeValidationError) Cause() error { return e.cause }", "func (e SXGValidationError) Cause() error { return e.cause }", "func (e EutracellIdentityValidationError) Cause() error { return e.cause }", "func (e WorkflowComponentValidationError) Cause() error { return e.cause }", "func (e MessageFValidationError) Cause() error { return e.cause }", "func (e EarfcnValidationError) Cause() error { return e.cause }", "func (e ActiveHealthCheckValidationError) Cause() error { return e.cause }", "func Cause(e interface{}) ECode {\n\tif e == nil {\n\t\treturn &ecode{code: 0}\n\t}\n\tif str, ok := e.(string); ok {\n\t\treturn &ecode{code: 500, message: str}\n\t}\n\terr, ok := e.(error)\n\tif !ok {\n\t\treturn &ecode{code: 500, message: reflect.TypeOf(e).Name()}\n\t}\n\tec, ok := errors.Cause(err).(ECode)\n\tif ok {\n\t\treturn ec\n\t}\n\treturn &ecode{code: 500, message: err.Error()}\n}", "func (e UpstreamValidationError) Cause() error { return e.cause }", "func (e UpstreamValidationError) Cause() error { return e.cause }", "func (e TransactionValidationError) Cause() error { return e.cause }", "func (e MessageCValidationError) Cause() error { return e.cause }", "func WithCause(err, cause error) error {\n\treturn errWithCause{\n\t\terror: err,\n\t\tcause: cause,\n\t}\n}", "func (e ActionValidationError) Cause() error { return e.cause }", "func (e 
AssessEvidenceRequestValidationError) Cause() error { return e.cause }", "func (e Upstream_TimeoutValidationError) Cause() error { return e.cause }", "func (e BootstrapValidationError) Cause() error { return e.cause }", "func (e TwoValidOneofsValidationError) Cause() error { return e.cause }", "func (e RdsValidationError) Cause() error { return e.cause }", "func (e MaxPciValidationError) Cause() error { return e.cause }", "func (e AdminValidationError) Cause() error { return e.cause }", "func (e RequirementRuleValidationError) Cause() error { return e.cause }", "func (e ResultValidationError) Cause() error { return e.cause }", "func (e InternalUpstreamTransport_MetadataValueSourceValidationError) Cause() error { return e.cause }", "func (e MaintemplateComponentValidationError) Cause() error { return e.cause }", "func (e RedactedValidationError) Cause() error { return e.cause }", "func (e CreatMessageRequestValidationError) Cause() error { return e.cause }", "func (e NrcgiValidationError) Cause() error { return e.cause }", "func (e NrarfcnValidationError) Cause() error { return e.cause }", "func (e TwoOneofsValidationError) Cause() error { return e.cause }", "func (e PassiveHealthCheckValidationError) Cause() error { return e.cause }", "func (e MessageEValidationError) Cause() error { return e.cause }", "func (e GetEventByIDRequestValidationError) Cause() error { return e.cause }", "func (e ArfcnValidationError) Cause() error { return e.cause }", "func (e TenantValidationError) Cause() error { return e.cause }", "func (e GetRequestValidationError) Cause() error { return e.cause }", "func (e GetRequestValidationError) Cause() error { return e.cause }", "func (e StateValidationError) Cause() error { return e.cause }", "func (e MinioComponentValidationError) Cause() error { return e.cause }", "func (e LatencyFaultValidationError) Cause() error { return e.cause }", "func (e GetDisscusReqValidationError) Cause() error { return e.cause }", "func (e UpdateTodoRequestValidationError) Cause() error { return e.cause }", "func (e ManifestProjectCFValidationError) Cause() error { return e.cause }" ]
[ "0.8261931", "0.79593104", "0.7896341", "0.7866004", "0.77969515", "0.7710267", "0.7710267", "0.7710267", "0.7710267", "0.7710267", "0.7710267", "0.7710245", "0.76848143", "0.7658625", "0.76571184", "0.7650075", "0.76476574", "0.7625474", "0.7623792", "0.7621357", "0.7582015", "0.74775916", "0.74656785", "0.7424877", "0.7423645", "0.7384076", "0.73215586", "0.7306271", "0.7286286", "0.72688353", "0.7258698", "0.7210708", "0.7192562", "0.7107885", "0.7104621", "0.7038758", "0.701369", "0.701369", "0.69629866", "0.6927608", "0.692207", "0.69208515", "0.68938124", "0.6858123", "0.684976", "0.6846449", "0.6830235", "0.6825922", "0.68016034", "0.6800864", "0.6791525", "0.6778742", "0.67324674", "0.673176", "0.67316306", "0.6729585", "0.67155087", "0.6714904", "0.67148", "0.66955864", "0.668878", "0.66879916", "0.66822165", "0.66821957", "0.66791916", "0.6673011", "0.6673011", "0.6668595", "0.66512465", "0.66507614", "0.66484874", "0.6636346", "0.6633876", "0.66313785", "0.66304046", "0.6622965", "0.66204447", "0.6618046", "0.6617173", "0.66125673", "0.66055393", "0.6603956", "0.66004616", "0.6600119", "0.6587435", "0.6578089", "0.6569218", "0.656675", "0.65664583", "0.6565433", "0.6560722", "0.65606016", "0.6553194", "0.6553194", "0.65503496", "0.6549731", "0.6546909", "0.6544467", "0.65359867", "0.6531173" ]
0.6580937
85
Key function returns key value.
func (e UpsertEventResponseValidationError) Key() bool { return e.key }
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "func (m *KeyValue) GetKey()(*string) {\n return m.key\n}", "func (f binaryEqualsFunc) key() Key {\n\treturn f.k\n}", "func (m *KeyUint) Key() driver.Value { return driver.Value(m.ID) }", "func (m *OMap) Key(n int) string {\n\treturn m.keys[n]\n}", "func (t *Type) Key() *Type", "func (f nullFunc) key() Key {\n\treturn f.k\n}", "func (v Variable) Key() string {\n\treturn (string)(v)\n}", "func (i GinJwtSignAlgorithm) Key() string {\n\tif val, ok := _GinJwtSignAlgorithmValueToKeyMap[i]; ok {\n\t\t// found\n\t\treturn val\n\t} else {\n\t\t// not found\n\t\treturn \"\"\n\t}\n}", "func (g *Generator) GetKey(K string) interface{} {\n\treturn g.data[K]\n}", "func (m *SearchBucket) GetKey()(*string) {\n return m.key\n}", "func (f *Filter) getKey(key string) string {\n\tif f.HashKeys {\n\t\th := sha1.New()\n\t\ts := h.Sum([]byte(key))\n\t\treturn fmt.Sprintf(\"%x\", s)\n\t}\n\treturn key\n}", "func getKey(ing *extensions.Ingress, t *testing.T) string {\n\tkey, err := keyFunc(ing)\n\tif err != nil {\n\t\tt.Fatalf(\"Unexpected error getting key for Ingress %v: %v\", ing.Name, err)\n\t}\n\treturn key\n}", "func (f *field) Key() string {\n\treturn f.k\n}", "func (i GinBindType) Key() string {\n\tif val, ok := _GinBindTypeValueToKeyMap[i]; ok {\n\t\t// found\n\t\treturn val\n\t} else {\n\t\t// not found\n\t\treturn \"\"\n\t}\n}", "func (c Node) GetKey() string {\n\treturn c.key\n}", "func (m *RegistryKeyState) GetKey()(*string) {\n return m.key\n}", "func (akv StringKeyValue) Key() string {\n\treturn akv.orig.Key\n}", "func (a AddItem) Key() string { return string(a) }", "func (area *MineArea) GetKey() string {\n\treturn GetKey(area.X, area.Y)\n}", "func (d *Disk) getKey(p *DiskParams) []byte {\n\treturn []byte(time_util.TimeToName(time.Unix(p.ExicutionTime, 0), fmt.Sprintf(\"%x\", d.hasher.Sum(nil))))\n}", "func (e *OrderedMapElement[K, V]) Key() K {\n\treturn e.element.key\n}", "func getKey(cluster *clusteroperator.Cluster, t *testing.T) string {\n\tif key, err := controller.KeyFunc(cluster); err != nil {\n\t\tt.Errorf(\"Unexpected error getting key for Cluster %v: %v\", cluster.Name, err)\n\t\treturn \"\"\n\t} else {\n\t\treturn key\n\t}\n}", "func cacheKeyFunc(obj interface{}) (string, error) {\n\tkey := obj.(*cacheEntry).key\n\treturn key, nil\n}", "func (node *Node) Key() interface{} {\n\treturn fmt.Sprintf(\"%v\", node.contents)\n}", "func (s *Mem) Key(key interface{}) string {\n\treturn fmt.Sprintf(\"%v-%v\", s.prefix, key)\n}", "func (vrfs *VRFShare) GetKey() datastore.Key {\n\treturn datastore.ToKey(fmt.Sprintf(\"%v\", vrfs.Round))\n}", "func stringKeyFunc(obj interface{}) (string, error) {\n\tkey := obj.(*nodeidentity.Info).InstanceID\n\treturn key, nil\n}", "func (e Enum) GetKey(value any) string {\n\tfor k, v := range e {\n\t\tif reflect.DeepEqual(v, value) {\n\t\t\treturn k\n\t\t}\n\t}\n\treturn \"\"\n}", "func (m *Map) Key() Type { return m.key }", "func getKey(w http.ResponseWriter, ps httprouter.Params) (string, bool){\n\treturn ps.ByName(\"id\"), true\n}", "func (v *Value) GetKey() *string {\n\tret := C.zj_GetKey(v.V)\n\tif ret == nil {\n\t\treturn nil\n\t}\n\tretStr := C.GoString(ret)\n\treturn &retStr\n}", "func (f *Factor) Key() string { return f.ID }", "func (c *KeyValueChanger) Key() (string, error) {\n\tif c.err != nil {\n\t\treturn \"\", c.err\n\t}\n\treturn c.node.content.key().(string), nil\n}", "func (a DataNodeKV) Key() string {\n\treturn a.K\n}", "func GetKey(allkeys [][]byte, loc Where) []byte {\n\tif loc == Left {\n\t\treturn allkeys[0]\n\t}\n\tif loc == Right 
{\n\t\treturn allkeys[len(allkeys)-1]\n\t}\n\t// select a random index between 1 and allkeys-2\n\t// nolint:gosec\n\tidx := rand.Int()%(len(allkeys)-2) + 1\n\treturn allkeys[idx]\n}", "func KeyFunc(name, namespace string) string {\n\tif len(namespace) == 0 {\n\t\treturn name\n\t}\n\treturn namespace + \"/\" + name\n}", "func (it *Iterator) Key() string { return it.n.k }", "func (s *session) getKey() string {\n\treturn s.uuid\n}", "func (o SchedulingNodeAffinityOutput) Key() pulumi.StringPtrOutput {\n\treturn o.ApplyT(func(v SchedulingNodeAffinity) *string { return v.Key }).(pulumi.StringPtrOutput)\n}", "func (i SNSProtocol) Key() string {\n\tif val, ok := _SNSProtocolValueToKeyMap[i]; ok {\n\t\t// found\n\t\treturn val\n\t} else {\n\t\t// not found\n\t\treturn \"\"\n\t}\n}", "func (it *Iterator) Key() interface{} { return it.n.k }", "func getkey(key ...interface{}) interface{} {\n\tif len(key) > 0 {\n\t\treturn key[0]\n\t}\n\n\treturn nil\n}", "func (i SNSSubscribeAttribute) Key() string {\n\tif val, ok := _SNSSubscribeAttributeValueToKeyMap[i]; ok {\n\t\t// found\n\t\treturn val\n\t} else {\n\t\t// not found\n\t\treturn \"\"\n\t}\n}", "func (it *iterator) Key() []byte {\n\tif len(it.keys) > 0 {\n\t\treturn []byte(it.keys[0])\n\t}\n\treturn nil\n}", "func (this *DefaultHandler) GetKey(xesRedis redo.XesRedisBase) (ret string) {\n\tdefer func() {\n\t\tif xesRedis.GetCtx() == nil {\n\t\t\treturn\n\t\t}\n\t\tbench := xesRedis.GetCtx().Value(\"IS_BENCHMARK\")\n\t\tif cast.ToString(bench) == \"1\" {\n\t\t\tret = \"benchmark_\" + ret\n\t\t}\n\t}()\n\n\tkeyInfo := this.getKeyInfo(xesRedis)\n\tkey := cast.ToString(keyInfo[\"key\"])\n\tif key == \"\" {\n\t\tret = xesRedis.GetKeyName()\n\t\treturn\n\t}\n\tret = fmt.Sprintf(key, (xesRedis.GetKeyParams())...)\n\treturn\n}", "func (st *MemStorage) GetKey(gun, role string) (algorithm string, public []byte, err error) {\n\t// no need for lock. 
It's ok to return nil if an update\n\t// wasn't observed\n\tg, ok := st.keys[gun]\n\tif !ok {\n\t\treturn \"\", nil, &ErrNoKey{gun: gun}\n\t}\n\tk, ok := g[role]\n\tif !ok {\n\t\treturn \"\", nil, &ErrNoKey{gun: gun}\n\t}\n\n\treturn k.algorithm, k.public, nil\n}", "func (e *EntrySet) Get(key string) string {\n return e.keys[key]\n}", "func (v *V) Key() string {\n\treturn v.key\n}", "func (it *Iter) Key() byte { return it.top().key }", "func (s Stash) Key() string {\n\tvals := utils.MapValues(s.payload)\n\tif len(vals) < 1 {\n\t\treturn \"\"\n\t}\n\n\treturn fmt.Sprintf(\"$%s\", vals[0])\n}", "func (i SNSPlatformApplicationAttribute) Key() string {\n\tif val, ok := _SNSPlatformApplicationAttributeValueToKeyMap[i]; ok {\n\t\t// found\n\t\treturn val\n\t} else {\n\t\t// not found\n\t\treturn \"\"\n\t}\n}", "func (o Operator) Key() string {\n\treturn fmt.Sprintf(\"operator.%s\", o.Aid)\n}", "func (i *StringIterator) Key() Object {\n\treturn &Int{Value: int64(i.i - 1)}\n}", "func (mci *XMCacheIterator) Key() []byte {\n\tif mci.err != nil || mci.dir == dirReleased {\n\t\treturn nil\n\t}\n\tswitch mci.index {\n\tcase 0, 1:\n\t\treturn mci.iters[mci.index].Key()\n\tcase 2:\n\t\tif mci.mc.isPenetrate {\n\t\t\treturn mci.mIter.Key()\n\t\t}\n\t\treturn nil\n\t}\n\treturn nil\n}", "func (s *Arena) getKey(offset uint32, size uint16) []byte {\n\treturn s.data[offset : offset+uint32(size)]\n}", "func (o ReservationAffinityOutput) Key() pulumi.StringPtrOutput {\n\treturn o.ApplyT(func(v ReservationAffinity) *string { return v.Key }).(pulumi.StringPtrOutput)\n}", "func (f DefaultField) Key() string {\n\treturn f.K\n}", "func Key(v string) predicate.Blob {\n\treturn predicate.Blob(\n\t\tfunc(s *sql.Selector) {\n\t\t\ts.Where(sql.EQ(s.C(FieldKey), v))\n\t\t},\n\t)\n}", "func (m Match) Key() string {\n\treturn fmt.Sprintf(\"match:%s\", m.ID())\n}", "func (d *Activity) KeyVal() string {\n\treturn d.ExteralID\n}", "func (key twofishKey) Key() []byte {\n\treturn key[:]\n}", "func getKey(data string) string {\n\tsign := md5.Sum([]byte(data))\n\tsignStr := fmt.Sprintf(\"%x\", sign)\n\treturn signStr[:7]\n}", "func (l *LangPackStringPluralized) GetKey() (value string) {\n\tif l == nil {\n\t\treturn\n\t}\n\treturn l.Key\n}", "func (t Task) Key() string {\n\treturn fmt.Sprintf(\"%s:%s\", t.Name, t.ID)\n}", "func (k Keys) RangeKey() interface{} { return k[1] }", "func (d *DStarLite) keyFor(s *dStarLiteNode) key {\n\t/*\n\t procedure CalculateKey(s)\n\t {01”} return [min(g(s), rhs(s)) + h(s_start, s) + k_m; min(g(s), rhs(s))];\n\t*/\n\tk := key{1: math.Min(s.g, s.rhs)}\n\tk[0] = k[1] + d.heuristic(d.s.Node, s.Node) + d.keyModifier\n\treturn k\n}", "func (stateID StateID) Key() string {\n\treturn string(stateID.LastAppHash)\n}", "func (m *Metric) GetKey() string {\n\tif m == nil || m.Key == nil {\n\t\treturn \"\"\n\t}\n\treturn *m.Key\n}", "func (u User) Key() interface{} {\n\treturn u.ID\n}", "func (b *BitSet) Key() string {\n\tif b == nil {\n\t\treturn \"\"\n\t} else {\n\t\treturn string(b.Bits.Bytes())\n\t}\n}", "func (e EnumByte) Key() EnumByteKey {\n return EnumByteKey(e)\n}", "func (n *lnode) key() []byte {\n\tbuf := (*[maxAllocSize]byte)(unsafe.Pointer(n))\n\treturn buf[n.pos : n.pos+n.ksize]\n}", "func (p *pv) key() pvKey {\n\treturn newPVKey(p.Cluster, p.Name)\n}", "func (i *MapIterator) Key() Object {\n\tk := i.k[i.i-1]\n\treturn &String{Value: k}\n}", "func (k *KVItem) Key() (interface{}, error) {\n\tvar cKey unsafe.Pointer\n\tvar keySize C.uint64_t\n\tvar keyType C.tiledb_datatype_t\n\tret := 
C.tiledb_kv_item_get_key(k.context.tiledbContext, k.tiledbKVItem, &cKey, &keyType, &keySize)\n\n\tif ret != C.TILEDB_OK {\n\t\treturn nil, fmt.Errorf(\"Error getting key for KVItem: %s\", k.context.LastError())\n\t}\n\n\tswitch Datatype(keyType) {\n\tcase TILEDB_INT8:\n\t\t// If the key size is greater than the size of a single value in bytes it is an array\n\t\telements := int(keySize) / C.sizeof_int8_t\n\t\tif elements > 1 {\n\t\t\ttmpslice := (*[1 << 30]C.int8_t)(unsafe.Pointer(cKey))[:elements:elements]\n\t\t\tretSlice := make([]int8, elements)\n\t\t\tfor i, s := range tmpslice {\n\t\t\t\tretSlice[i] = int8(s)\n\t\t\t}\n\t\t\treturn retSlice, nil\n\t\t} else {\n\t\t\treturn int8(*(*C.int8_t)(cKey)), nil\n\t\t}\n\tcase TILEDB_INT16:\n\t\t// If the key size is greater than the size of a single value in bytes it is an array\n\t\telements := int(keySize) / C.sizeof_int16_t\n\t\tif elements > 1 {\n\t\t\ttmpslice := (*[1 << 30]C.int16_t)(unsafe.Pointer(cKey))[:elements:elements]\n\t\t\tretSlice := make([]int16, elements)\n\t\t\tfor i, s := range tmpslice {\n\t\t\t\tretSlice[i] = int16(s)\n\t\t\t}\n\t\t\treturn retSlice, nil\n\t\t} else {\n\t\t\treturn int16(*(*C.int16_t)(cKey)), nil\n\t\t}\n\tcase TILEDB_INT32:\n\t\t// If the key size is greater than the size of a single value in bytes it is an array\n\t\telements := int(keySize) / C.sizeof_int32_t\n\t\tif elements > 1 {\n\t\t\ttmpslice := (*[1 << 30]C.int32_t)(unsafe.Pointer(cKey))[:elements:elements]\n\t\t\tretSlice := make([]int32, elements)\n\t\t\tfor i, s := range tmpslice {\n\t\t\t\tretSlice[i] = int32(s)\n\t\t\t}\n\t\t\treturn retSlice, nil\n\t\t} else {\n\t\t\treturn int32(*(*C.int32_t)(cKey)), nil\n\t\t}\n\tcase TILEDB_INT64:\n\t\t// If the key size is greater than the size of a single value in bytes it is an array\n\t\telements := int(keySize) / C.sizeof_int64_t\n\t\tif elements > 1 {\n\t\t\ttmpslice := (*[1 << 30]C.int64_t)(unsafe.Pointer(cKey))[:elements:elements]\n\t\t\tretSlice := make([]int64, elements)\n\t\t\tfor i, s := range tmpslice {\n\t\t\t\tretSlice[i] = int64(s)\n\t\t\t}\n\t\t\treturn retSlice, nil\n\t\t} else {\n\t\t\treturn int64(*(*C.int64_t)(cKey)), nil\n\t\t}\n\tcase TILEDB_UINT8:\n\t\t// If the key size is greater than the size of a single value in bytes it is an array\n\t\telements := int(keySize) / C.sizeof_uint8_t\n\t\tif elements > 1 {\n\t\t\ttmpslice := (*[1 << 30]C.uint8_t)(unsafe.Pointer(cKey))[:elements:elements]\n\t\t\tretSlice := make([]uint8, elements)\n\t\t\tfor i, s := range tmpslice {\n\t\t\t\tretSlice[i] = uint8(s)\n\t\t\t}\n\t\t\treturn retSlice, nil\n\t\t} else {\n\t\t\treturn int8(*(*C.uint8_t)(cKey)), nil\n\t\t}\n\tcase TILEDB_UINT16:\n\t\t// If the key size is greater than the size of a single value in bytes it is an array\n\t\telements := int(keySize) / C.sizeof_uint16_t\n\t\tif elements > 1 {\n\t\t\ttmpslice := (*[1 << 30]C.uint16_t)(unsafe.Pointer(cKey))[:elements:elements]\n\t\t\tretSlice := make([]uint16, elements)\n\t\t\tfor i, s := range tmpslice {\n\t\t\t\tretSlice[i] = uint16(s)\n\t\t\t}\n\t\t\treturn retSlice, nil\n\t\t} else {\n\t\t\treturn int16(*(*C.uint16_t)(cKey)), nil\n\t\t}\n\tcase TILEDB_UINT32:\n\t\t// If the key size is greater than the size of a single value in bytes it is an array\n\t\telements := int(keySize) / C.sizeof_uint32_t\n\t\tif elements > 1 {\n\t\t\ttmpslice := (*[1 << 30]C.uint32_t)(unsafe.Pointer(cKey))[:elements:elements]\n\t\t\tretSlice := make([]uint32, elements)\n\t\t\tfor i, s := range tmpslice {\n\t\t\t\tretSlice[i] = uint32(s)\n\t\t\t}\n\t\t\treturn 
retSlice, nil\n\t\t} else {\n\t\t\treturn int32(*(*C.uint32_t)(cKey)), nil\n\t\t}\n\tcase TILEDB_UINT64:\n\t\t// If the key size is greater than the size of a single value in bytes it is an array\n\t\telements := int(keySize) / C.sizeof_uint64_t\n\t\tif elements > 1 {\n\t\t\ttmpslice := (*[1 << 30]C.uint64_t)(unsafe.Pointer(cKey))[:elements:elements]\n\t\t\tretSlice := make([]uint64, elements)\n\t\t\tfor i, s := range tmpslice {\n\t\t\t\tretSlice[i] = uint64(s)\n\t\t\t}\n\t\t\treturn retSlice, nil\n\t\t} else {\n\t\t\treturn int64(*(*C.uint64_t)(cKey)), nil\n\t\t}\n\tcase TILEDB_FLOAT32:\n\t\t// If the key size is greater than the size of a single value in bytes it is an array\n\t\telements := int(keySize) / C.sizeof_float\n\t\tif elements > 1 {\n\t\t\ttmpslice := (*[1 << 30]C.float)(unsafe.Pointer(cKey))[:elements:elements]\n\t\t\tretSlice := make([]float32, elements)\n\t\t\tfor i, s := range tmpslice {\n\t\t\t\tretSlice[i] = float32(s)\n\t\t\t}\n\t\t\treturn retSlice, nil\n\t\t} else {\n\t\t\treturn float32(*(*C.float)(cKey)), nil\n\t\t}\n\tcase TILEDB_FLOAT64:\n\t\t// If the key size is greater than the size of a single value in bytes it is an array\n\t\telements := int(keySize) / C.sizeof_double\n\t\tif elements > 1 {\n\t\t\ttmpslice := (*[1 << 30]C.double)(unsafe.Pointer(cKey))[:elements:elements]\n\t\t\tretSlice := make([]float64, elements)\n\t\t\tfor i, s := range tmpslice {\n\t\t\t\tretSlice[i] = float64(s)\n\t\t\t}\n\t\t\treturn retSlice, nil\n\t\t} else {\n\t\t\treturn float64(*(*C.double)(cKey)), nil\n\t\t}\n\tcase TILEDB_CHAR:\n\t\telements := int(keySize) / C.sizeof_char\n\t\treturn C.GoStringN((*C.char)(cKey), C.int(elements)), nil\n\n\tdefault:\n\t\treturn nil, fmt.Errorf(\"Unsupported tiledb key type: %v\", keyType)\n\t}\n\n\treturn nil, fmt.Errorf(\"Error getting key for KVItem\")\n}", "func (u Users) Key(luid *windows.LUID) (int64, error) {\r\n\tif luid == nil {\r\n\t\treturn 0, errors.New(\"got empty LUID pointer\")\r\n\t}\r\n\tkey := int64(int64(luid.HighPart<<32) + int64(luid.LowPart))\r\n\treturn key, nil\r\n}", "func (a *Anime) Key() string {\n\treturn fmt.Sprintf(\"anime:%d\", a.ID)\n}", "func (m MapEntry) Key() interface{} {\n\treturn m.key\n}", "func (f KeyMakerFunc) KeyFor(r *http.Request) string {\n\treturn f(r)\n}", "func (t *TimeSeries) GetKey() string {\n\treturn t.key\n}", "func (m *Map) Get(key string) string {\n\tif m.IsEmpty() {\n\t\treturn \"\"\n\t}\n\thash := m.hash([]byte(key))\n\tn := node{hash: hash, key: key}\n\titer := floor(&m.nodes.Tree, &n)\n\tif iter == m.nodes.End() {\n\t\titer = m.nodes.Begin()\n\t}\n\treturn iter.Node().Key.(*node).key\n}", "func (t *ScheduledTask) Key() string {\n\treturn fmt.Sprintf(taskKeyFormat, keyPrefixScheduled, t.ID, t.score)\n}", "func (it *iterator) Key() []byte {\n\treturn it.current.key\n}", "func (eln *EmptyLeafNode) GetKey() []byte {\n\treturn nil\n}", "func (h dataUsageHash) Key() string {\n\treturn string(h)\n}", "func (c *Container) Key() string {\n\tc.Lock()\n\tdefer c.Unlock()\n\treturn c.ID\n}", "func (c Repository) GetKey(key string) string {\n\tval, err := c.Client.Get(key).Result()\n\n\tif err != nil {\n\t\tlog.Println(err)\n\t}\n\n\treturn val\n}", "func (f Base) Key() string {\n\treturn f.key\n}", "func (o StudioComponentScriptParameterKeyValueOutput) Key() pulumi.StringPtrOutput {\n\treturn o.ApplyT(func(v StudioComponentScriptParameterKeyValue) *string { return v.Key }).(pulumi.StringPtrOutput)\n}", "func (o *ResourceDefinitionFilter) GetKey() string {\n\tif o == nil {\n\t\tvar ret 
string\n\t\treturn ret\n\t}\n\n\treturn o.Key\n}", "func (it *KeyAccess_Iterator) Key() interface{} {\n\treturn it.node.key\n}", "func (b Bucket) Key() interface{} {\n\treturn b[\"key\"]\n}", "func (m *Map) Get(key string) string {\n\tif m.IsEmpty() {\n\t\treturn \"\"\n\t}\n\n\thash := int(m.hash([]byte(key)))\n\n\t// Binary search for appropriate replica.\n\tidx := sort.Search(len(m.keys), func(i int) bool { return m.keys[i] >= hash })\n\n\t// Means we have cycled back to the first replica.\n\tif idx == len(m.keys) {\n\t\tidx = 0\n\t}\n\n\treturn m.hashMap[m.keys[idx]]\n}", "func (c *Counter) GetKey() string {\n\treturn c.key\n}", "func Key(id string, fallback string) Reference {\n\treturn key{id, fallback}\n}", "func (a *PositionalAttribute) Key() string {\n\treturn AttrPositionalIndex + strconv.Itoa(a.Index)\n}", "func (n *Node) Key() interface{} {\n\treturn n.key\n}", "func (e Timing) Key() string {\n\treturn e.Name\n}", "func Key(key string) query.Extractor {\n\treturn &keyExtractor{key}\n}", "func (i *Iterator) Key() []byte {\n\treturn i.iterator.Item().KeyCopy(nil)\n}", "func (m *Metric) Key() string {\n\treturn fmt.Sprintf(\"<%s%d%s>\", m.Name, m.Timestamp, m.Tags)\n}" ]
[ "0.7397974", "0.703695", "0.7026126", "0.69730234", "0.69701165", "0.69472975", "0.682121", "0.67752403", "0.6702173", "0.6691155", "0.66223186", "0.6602185", "0.66009104", "0.65937275", "0.65673846", "0.6555592", "0.65304273", "0.6521155", "0.6511681", "0.65062934", "0.64982766", "0.64867014", "0.6477575", "0.6462233", "0.6456774", "0.6456152", "0.6448241", "0.6435275", "0.6423325", "0.6412427", "0.64096636", "0.6403262", "0.6395327", "0.63929945", "0.6382585", "0.6378694", "0.63715774", "0.63671046", "0.635377", "0.63430053", "0.63418114", "0.6339266", "0.63258415", "0.6319039", "0.630293", "0.6300368", "0.6298253", "0.6296133", "0.6295445", "0.6281786", "0.6279424", "0.6277453", "0.6277033", "0.62735796", "0.6269087", "0.6262938", "0.62600297", "0.6259835", "0.6242855", "0.62427336", "0.6239893", "0.6226979", "0.62228185", "0.6216291", "0.62118614", "0.6209014", "0.62075627", "0.619765", "0.6197426", "0.61971486", "0.6196739", "0.6192416", "0.6191223", "0.6183839", "0.6179522", "0.6177141", "0.6172575", "0.61719537", "0.6170614", "0.6162783", "0.61570954", "0.6154456", "0.6152929", "0.615149", "0.61509156", "0.61395836", "0.6138672", "0.61365676", "0.613636", "0.61338246", "0.6133771", "0.6129422", "0.61284614", "0.612092", "0.6119081", "0.61121005", "0.611087", "0.6106958", "0.6106701", "0.61020154", "0.6100722" ]
0.0
-1
Validate checks the field values on GetEventByIDRequest with the rules defined in the proto definition for this message. If any rules are violated, an error is returned.
func (m *GetEventByIDRequest) Validate() error { if m == nil { return nil } // no validation rules for Id return nil }
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "func (m *GetEventByIDResponse) Validate() error {\n\tif m == nil {\n\t\treturn nil\n\t}\n\n\t// no validation rules for Id\n\n\t// no validation rules for Name\n\n\t// no validation rules for Participants\n\n\tfor idx, item := range m.GetSections() {\n\t\t_, _ = idx, item\n\n\t\tif v, ok := interface{}(item).(interface{ Validate() error }); ok {\n\t\t\tif err := v.Validate(); err != nil {\n\t\t\t\treturn GetEventByIDResponseValidationError{\n\t\t\t\t\tfield: fmt.Sprintf(\"Sections[%v]\", idx),\n\t\t\t\t\treason: \"embedded message failed validation\",\n\t\t\t\t\tcause: err,\n\t\t\t\t}\n\t\t\t}\n\t\t}\n\n\t}\n\n\tif v, ok := interface{}(m.GetUpdatedAt()).(interface{ Validate() error }); ok {\n\t\tif err := v.Validate(); err != nil {\n\t\t\treturn GetEventByIDResponseValidationError{\n\t\t\t\tfield: \"UpdatedAt\",\n\t\t\t\treason: \"embedded message failed validation\",\n\t\t\t\tcause: err,\n\t\t\t}\n\t\t}\n\t}\n\n\t// no validation rules for IsOpened\n\n\t// no validation rules for IsApproved\n\n\treturn nil\n}", "func GetByID(rw http.ResponseWriter, r *http.Request) {\n\tuserID := r.Header.Get(\"userid\")\n\n\turlParams := strings.Replace(r.URL.String(), \"/api/getEvent/\", \"\", 1)\n\teventID := strings.Split(urlParams, \"/\")[0]\n\n\tif strings.TrimSpace(eventID) == \"\" {\n\t\tlog.Printf(\"Missing event id\\n\")\n\t\trw.WriteHeader(http.StatusBadRequest)\n\t\trw.Write([]byte(\"Event ID must be provided\"))\n\t\treturn\n\t}\n\n\tquery := `SELECT * FROM events\n\t\tWHERE owner_id = $1 \n\t\tAND id = $2`\n\n\te := Event{}\n\tloc := sql.NullString{}\n\tnotes := sql.NullString{}\n\n\terr := conn.DB.QueryRow(query, userID, eventID).Scan(&e.EventID, &e.Title, &e.StartTime, &e.EndTime, &loc, &notes, &e.OwnerID)\n\tif err != nil {\n\t\tlog.Printf(\"DB error: %s\\n\", err)\n\t\trw.WriteHeader(http.StatusNoContent)\n\t\trw.Write([]byte(\"Event not found\"))\n\t\treturn\n\t}\n\n\te.Location = loc.String\n\te.Notes = notes.String\n\n\trw.WriteHeader(http.StatusOK)\n\trw.Header().Set(\"Content-Type\", \"application/json\")\n\tjson.NewEncoder(rw).Encode(e)\n}", "func (g *GetChatEventLogRequest) GetFromEventID() (value int64) {\n\tif g == nil {\n\t\treturn\n\t}\n\treturn g.FromEventID\n}", "func (g *Github) GetEventID(ctx context.Context, r *http.Request) (string, error) {\n\tid := r.Header.Get(\"X-GitHub-Delivery\")\n\tif id == \"\" {\n\t\treturn \"\", errors.New(\"event id not found for request\")\n\t}\n\treturn id, nil\n}", "func (e EventHandler) GetEventbyID(ctx context.Context, params api.GetEventbyIDParams) middleware.Responder {\n\n\teventResponse := &models.EventResponse{}\n\tevent, err := e.eventService.GetEventByID(params.ID)\n\tif err != nil {\n\t\tpostErr := fmt.Errorf(\"failed to get the event: %w\", err)\n\t\tlogrus.Warnf(postErr.Error())\n\t\tgetEventErr := &models.Response{\n\t\t\tStatus: \"Failed\",\n\t\t\tCode: 400,\n\t\t\tMessage: postErr.Error(),\n\t\t}\n\t\treturn api.NewGetEventbyIDBadRequest().WithPayload(getEventErr)\n\n\t}\n\teventResponse.Response = &models.Response{\n\t\tCode: 200,\n\t\tStatus: \"Success\",\n\t\tMessage: \"Event has been fetched Successfully\",\n\t}\n\teventResponse.Event = event\n\n\tlogrus.Infoln(\"The event has been fetched:\", event)\n\n\treturn api.NewGetEventbyIDOK().WithPayload(eventResponse)\n}", "func (e GetEventByIDRequestValidationError) Cause() error { return e.cause }", "func decodeGetByIDRequest(_ context.Context, r *http1.Request) (interface{}, error) {\n\tvars := mux.Vars(r)\n\tid, ok := vars[\"id\"]\n\tif !ok {\n\t\treturn nil, 
errors.New(\"not a valid ID\")\n\t}\n\treq := endpoint.GetByIDRequest{\n\t\tId: id,\n\t}\n\treturn req, nil\n}", "func decodeGetByIDRequest(_ context.Context, r *http1.Request) (interface{}, error) {\n\tvars := mux.Vars(r)\n\tid, ok := vars[\"id\"]\n\tif !ok {\n\t\treturn nil, errors.New(\"not a valid ID\")\n\t}\n\treq := endpoint.GetByIDRequest{\n\t\tId: id,\n\t}\n\treturn req, nil\n}", "func NewGetEventRequest(server string, id string) (*http.Request, error) {\n\tvar err error\n\n\tvar pathParam0 string\n\n\tpathParam0, err = runtime.StyleParam(\"simple\", false, \"id\", id)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\n\tqueryUrl, err := url.Parse(server)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\n\tbasePath := fmt.Sprintf(\"/events/%s\", pathParam0)\n\tif basePath[0] == '/' {\n\t\tbasePath = basePath[1:]\n\t}\n\n\tqueryUrl, err = queryUrl.Parse(basePath)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\n\treq, err := http.NewRequest(\"GET\", queryUrl.String(), nil)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\n\treturn req, nil\n}", "func (ec *EventController) GetByID(ctx context.Context, id primitive.ObjectID) (*Event, error) {\n\tvar event Event\n\teventResult := ec.collection.FindOne(ctx, bson.M{\"_id\": id})\n\tif eventResult.Err() != nil {\n\t\tif eventResult.Err() == mongo.ErrNoDocuments {\n\t\t\treturn nil, errors.NewServerError(\"No events found\", http.StatusNotFound)\n\t\t}\n\t\treturn nil, eventResult.Err()\n\t}\n\n\teventResult.Decode(&event)\n\n\treturn &event, nil\n}", "func GetByID(id int) (entities.Event, error) {\r\n\treturn entities.Event{}, nil\r\n}", "func (o *GetEventLogsUsingGETParams) SetEventID(eventID strfmt.UUID) {\n\to.EventID = eventID\n}", "func (m *StreamEventsRequest_Identifier) Validate() error {\n\treturn m.validate(false)\n}", "func (service EventService) GetEventByID(id uint64) ([]byte, error) {\n\tEvent := service.repository.GetEventByID(id)\n\treturn json.Marshal(Event)\n}", "func (o *GetEventsEventIDOK) WithPayload(payload *GetEventsEventIDOKBody) *GetEventsEventIDOK {\n\to.Payload = payload\n\treturn o\n}", "func GetEventByID(id string) (EventCacheResponse, error) {\n\tdb, err := getDatabase()\n\tvar r EventCacheResponse\n\n\tif err != nil {\n\t\treturn r, err\n\t}\n\n\tdefer db.Close()\n\n\tstmt, err := db.Prepare(\"select id, json, transport, event from events where id = ?\")\n\tif err != nil {\n\t\treturn r, err\n\t}\n\tdefer stmt.Close()\n\n\terr = stmt.QueryRow(id).Scan(&r.ID, &r.JSON, &r.Transport, &r.Event)\n\tif err != nil {\n\t\treturn r, err\n\t}\n\n\treturn r, err\n}", "func NewGetaspecificEventRequest(server string, id string) (*http.Request, error) {\n\tvar err error\n\n\tvar pathParam0 string\n\n\tpathParam0, err = runtime.StyleParam(\"simple\", false, \"id\", id)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\n\tqueryUrl, err := url.Parse(server)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\n\tbasePath := fmt.Sprintf(\"/events/%s\", pathParam0)\n\tif basePath[0] == '/' {\n\t\tbasePath = basePath[1:]\n\t}\n\n\tqueryUrl, err = queryUrl.Parse(basePath)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\n\treq, err := http.NewRequest(\"GET\", queryUrl.String(), nil)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\n\treturn req, nil\n}", "func (m *Request) GetID() uint64 {\n\treturn m.RequestID\n}", "func (s *ValidationException) RequestID() string {\n\treturn s.RespMetadata.RequestID\n}", "func (s *ValidationException) RequestID() string {\n\treturn s.RespMetadata.RequestID\n}", "func (s *ValidationException) RequestID() string 
{\n\treturn s.RespMetadata.RequestID\n}", "func (s *ValidationException) RequestID() string {\n\treturn s.RespMetadata.RequestID\n}", "func (s *ValidationException) RequestID() string {\n\treturn s.RespMetadata.RequestID\n}", "func (s *ValidationException) RequestID() string {\n\treturn s.RespMetadata.RequestID\n}", "func (s *ValidationException) RequestID() string {\n\treturn s.RespMetadata.RequestID\n}", "func (s *ValidationException) RequestID() string {\n\treturn s.RespMetadata.RequestID\n}", "func (s *ValidationException) RequestID() string {\n\treturn s.RespMetadata.RequestID\n}", "func (s *ValidationException) RequestID() string {\n\treturn s.RespMetadata.RequestID\n}", "func (s *ValidationException) RequestID() string {\n\treturn s.RespMetadata.RequestID\n}", "func (s *ValidationException) RequestID() string {\n\treturn s.RespMetadata.RequestID\n}", "func (s *ValidationException) RequestID() string {\n\treturn s.RespMetadata.RequestID\n}", "func (s *ValidationException) RequestID() string {\n\treturn s.RespMetadata.RequestID\n}", "func (s *ValidationException) RequestID() string {\n\treturn s.RespMetadata.RequestID\n}", "func (s *ValidationException) RequestID() string {\n\treturn s.RespMetadata.RequestID\n}", "func (s *ValidationException) RequestID() string {\n\treturn s.RespMetadata.RequestID\n}", "func (s *ValidationException) RequestID() string {\n\treturn s.RespMetadata.RequestID\n}", "func (s *ValidationException) RequestID() string {\n\treturn s.RespMetadata.RequestID\n}", "func (s *PropertyValidationException) RequestID() string {\n\treturn s.RespMetadata.RequestID\n}", "func (s *ValidationException) RequestID() string {\n\treturn s.RespMetadata.RequestID\n}", "func NewGetEventsEventIDOK() *GetEventsEventIDOK {\n\n\treturn &GetEventsEventIDOK{}\n}", "func (_InboxHelperTester *InboxHelperTesterCaller) RequestID(opts *bind.CallOpts, messageNum *big.Int, rollup common.Address) ([32]byte, error) {\n\tvar out []interface{}\n\terr := _InboxHelperTester.contract.Call(opts, &out, \"requestID\", messageNum, rollup)\n\n\tif err != nil {\n\t\treturn *new([32]byte), err\n\t}\n\n\tout0 := *abi.ConvertType(out[0], new([32]byte)).(*[32]byte)\n\n\treturn out0, err\n\n}", "func (o *SubmitReplayRequestEntity) GetEventId() int64 {\n\tif o == nil || o.EventId == nil {\n\t\tvar ret int64\n\t\treturn ret\n\t}\n\treturn *o.EventId\n}", "func GetEvent(ID string) (Event, error) {\n\tintID, err := strconv.ParseInt(ID, 10, 64)\n\tif err != nil {\n\t\treturn Event{}, errors.New(fmt.Sprintln(\"Can not convert ID:\", err))\n\t}\n\n\trows, err := db.Query(\"SELECT * FROM events WHERE id=?\", intID)\n\tif err != nil {\n\t\treturn Event{}, err\n\t}\n\tdefer rows.Close()\n\n\te := Event{}\n\tif rows.Next() {\n\t\tvar s sql.NullString\n\t\tvar intDate int64\n\t\tvar intID int64\n\t\terr = rows.Scan(&intID, &e.Type, &e.User, &e.Topic, &intDate, &e.Data, &s)\n\t\tif err != nil {\n\t\t\treturn e, err\n\t\t}\n\t\te.ID = strconv.FormatInt(intID, 10)\n\t\te.Date = time.Unix(intDate, 0)\n\t\tif s.Valid {\n\t\t\te.AffectedUser = s.String\n\t\t}\n\t} else {\n\t\treturn e, errors.New(\"Can not read topic data\")\n\t}\n\treturn e, nil\n}", "func (o *GetEventsEventIDInternalServerError) WithPayload(payload *GetEventsEventIDInternalServerErrorBody) *GetEventsEventIDInternalServerError {\n\to.Payload = payload\n\treturn o\n}", "func (s *OrganizationalUnitNotEmptyException) RequestID() string {\n\treturn s.RespMetadata.RequestID\n}", "func (c *EventClient) Get(ctx context.Context, id int) (*Event, error) {\n\treturn 
c.Query().Where(event.ID(id)).Only(ctx)\n}", "func (o *SubmitReplayRequestEntity) GetEventIdOk() (*int64, bool) {\n\tif o == nil || o.EventId == nil {\n\t\treturn nil, false\n\t}\n\treturn o.EventId, true\n}", "func (e GetEventByIDResponseValidationError) Cause() error { return e.cause }", "func NewGetEmployeesIdRequest(server string, id string) (*http.Request, error) {\n\tvar err error\n\n\tvar pathParam0 string\n\n\tpathParam0, err = runtime.StyleParam(\"simple\", false, \"id\", id)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\n\tqueryUrl, err := url.Parse(server)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\n\tbasePath := fmt.Sprintf(\"/employees/%s\", pathParam0)\n\tif basePath[0] == '/' {\n\t\tbasePath = basePath[1:]\n\t}\n\n\tqueryUrl, err = queryUrl.Parse(basePath)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\n\treq, err := http.NewRequest(\"GET\", queryUrl.String(), nil)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\n\treturn req, nil\n}", "func (m *EventItemRequestBuilder) Get(ctx context.Context, requestConfiguration *EventItemRequestBuilderGetRequestConfiguration)(iadcd81124412c61e647227ecfc4449d8bba17de0380ddda76f641a29edf2b242.Eventable, error) {\n requestInfo, err := m.CreateGetRequestInformation(ctx, requestConfiguration);\n if err != nil {\n return nil, err\n }\n errorMapping := i2ae4187f7daee263371cb1c977df639813ab50ffa529013b7437480d1ec0158f.ErrorMappings {\n \"4XX\": ia572726a95efa92ddd544552cd950653dc691023836923576b2f4bf716cf204a.CreateODataErrorFromDiscriminatorValue,\n \"5XX\": ia572726a95efa92ddd544552cd950653dc691023836923576b2f4bf716cf204a.CreateODataErrorFromDiscriminatorValue,\n }\n res, err := m.requestAdapter.SendAsync(ctx, requestInfo, iadcd81124412c61e647227ecfc4449d8bba17de0380ddda76f641a29edf2b242.CreateEventFromDiscriminatorValue, errorMapping)\n if err != nil {\n return nil, err\n }\n if res == nil {\n return nil, nil\n }\n return res.(iadcd81124412c61e647227ecfc4449d8bba17de0380ddda76f641a29edf2b242.Eventable), nil\n}", "func (s *OrganizationNotEmptyException) RequestID() string {\n\treturn s.RespMetadata.RequestID\n}", "func (s *MissingRequiredParameterException) RequestID() string {\n\treturn s.RespMetadata.RequestID\n}", "func (s *MissingRequiredParameterException) RequestID() string {\n\treturn s.RespMetadata.RequestID\n}", "func (s *InvalidInputException) RequestID() string {\n\treturn s.RespMetadata.RequestID\n}", "func NewGetInstancesEventByEventIDParamsWithHTTPClient(client *http.Client) *GetInstancesEventByEventIDParams {\n\tvar ()\n\treturn &GetInstancesEventByEventIDParams{\n\t\tHTTPClient: client,\n\t}\n}", "func (es *EventService) Get(eventID string) (e Event, err error) {\n\t// GET: /event/:eventID\n\tvar req *http.Request\n\treq, err = es.c.NewRequest(\"GET\", \"/event/\"+eventID, nil)\n\tif err != nil {\n\t\treturn\n\t}\n\n\tresp := struct {\n\t\tStatus string\n\t\tData Event\n\t\tMessage string\n\t}{}\n\n\terr = es.c.Do(req, &resp)\n\treturn resp.Data, err\n}", "func (c *EventService) Get(id string) (Event, *http.Response, error) {\n\toutput := &struct {\n\t\tData Event `json:\"data\"`\n\t}{}\n\tpath := fmt.Sprintf(\"%s/%s\", c.endpoint, id)\n\tresp, err := doGet(c.sling, path, output)\n\treturn output.Data, resp, err\n}", "func (h RequestMessageHeader) RequestID() interface{} {\n\treturn h.ID\n}", "func (client *RoleDefinitionsClient) getByIDCreateRequest(ctx context.Context, roleID string, options *RoleDefinitionsGetByIDOptions) (*policy.Request, error) {\n\turlPath := \"/{roleId}\"\n\turlPath = strings.ReplaceAll(urlPath, 
\"{roleId}\", roleID)\n\treq, err := runtime.NewRequest(ctx, http.MethodGet, runtime.JoinPaths(client.ep, urlPath))\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\treqQP := req.Raw().URL.Query()\n\treqQP.Set(\"api-version\", \"2018-01-01-preview\")\n\treq.Raw().URL.RawQuery = reqQP.Encode()\n\treq.Raw().Header.Set(\"Accept\", \"application/json\")\n\treturn req, nil\n}", "func (s *InvalidParameterException) RequestID() string {\n\treturn s.RespMetadata.RequestID\n}", "func (s *InvalidParameterException) RequestID() string {\n\treturn s.RespMetadata.RequestID\n}", "func (s *InvalidParameterException) RequestID() string {\n\treturn s.RespMetadata.RequestID\n}", "func (s *InvalidParameterException) RequestID() string {\n\treturn s.RespMetadata.RequestID\n}", "func (s *InvalidParameterException) RequestID() string {\n\treturn s.RespMetadata.RequestID\n}", "func (s *InvalidParameterException) RequestID() string {\n\treturn s.RespMetadata.RequestID\n}", "func (s *InvalidParameterException) RequestID() string {\n\treturn s.RespMetadata.RequestID\n}", "func (s *NonEmptyEntityException) RequestID() string {\n\treturn s.RespMetadata.RequestID\n}", "func (s *InvalidParameterException) RequestID() string {\n\treturn s.RespMetadata.RequestID\n}", "func (s *ConstraintViolationException) RequestID() string {\n\treturn s.RespMetadata.RequestID\n}", "func (_VinTracker *VinTrackerTransactor) GetEventById(opts *bind.TransactOpts, history *big.Int) (*types.Transaction, error) {\n\treturn _VinTracker.contract.Transact(opts, \"getEventById\", history)\n}", "func (s *InvalidEndpointException) RequestID() string {\n\treturn s.RespMetadata.RequestID\n}", "func (c *Client) BuildByIDRequest(ctx context.Context, v interface{}) (*http.Request, error) {\n\tvar (\n\t\tid uint\n\t)\n\t{\n\t\tp, ok := v.(*resource.ByIDPayload)\n\t\tif !ok {\n\t\t\treturn nil, goahttp.ErrInvalidType(\"resource\", \"ById\", \"*resource.ByIDPayload\", v)\n\t\t}\n\t\tid = p.ID\n\t}\n\tu := &url.URL{Scheme: c.scheme, Host: c.host, Path: ByIDResourcePath(id)}\n\treq, err := http.NewRequest(\"GET\", u.String(), nil)\n\tif err != nil {\n\t\treturn nil, goahttp.ErrInvalidURL(\"resource\", \"ById\", u.String(), err)\n\t}\n\tif ctx != nil {\n\t\treq = req.WithContext(ctx)\n\t}\n\n\treturn req, nil\n}", "func (s *MasterCannotLeaveOrganizationException) RequestID() string {\n\treturn s.RespMetadata.RequestID\n}", "func (s *InternalServiceException) RequestID() string {\n\treturn s.RespMetadata.RequestID\n}", "func (s *InternalServiceException) RequestID() string {\n\treturn s.RespMetadata.RequestID\n}", "func (s *InternalServiceException) RequestID() string {\n\treturn s.RespMetadata.RequestID\n}", "func (s *InvalidPolicyDocument) RequestID() string {\n\treturn s.RespMetadata.RequestID\n}", "func GetEvent(id uint) (*entity.Event,error){\n\tclient := &http.Client{}\n\tURL := fmt.Sprintf(\"%s%s%d\",baseEventURL,\"event/\",id)\n\treq,_ := http.NewRequest(\"GET\",URL,nil)\n\n\t//DO return an http response\n\tres,err := client.Do(req)\n\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\teventdata := &entity.Event{}\n\n\tbody, err := ioutil.ReadAll(res.Body)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\terr = json.Unmarshal(body,eventdata)\n\tif err != nil{\n\t\treturn nil,err\n\t}\n\treturn eventdata,nil\n\n}", "func (s *ServiceException) RequestID() string {\n\treturn s.RespMetadata.RequestID\n}", "func (h *eventServiceHTTPHandler) GetEventInfo(c echo.Context) error {\n\tlogCtx := fmt.Sprintf(\"%T.GetEventInfo\", *h)\n\n\teventID, err := 
strconv.ParseUint(c.QueryParam(\"id\"), 10, 64)\n\tif err != nil {\n\t\thelper.Log(logrus.ErrorLevel, err.Error(), logCtx, \"error_parse_uint\")\n\t\treturn helper.NewResponse(http.StatusInternalServerError, http.StatusInternalServerError, err.Error(), nil).WriteResponse(c)\n\t}\n\n\tif eventID <= 0 {\n\t\terr := errors.New(\"invalid event id\")\n\t\thelper.Log(logrus.ErrorLevel, err.Error(), logCtx, \"error_event_id\")\n\t\treturn helper.NewResponse(http.StatusBadRequest, http.StatusBadRequest, err.Error(), nil).WriteResponse(c)\n\t}\n\n\tresp, err := h.eventUseCase.GetEventInformation(eventID)\n\tif err != nil {\n\t\thelper.Log(logrus.ErrorLevel, err.Error(), logCtx, \"error_get_event_information\")\n\t\treturn helper.NewResponse(http.StatusBadRequest, http.StatusBadRequest, err.Error(), nil).WriteResponse(c)\n\t}\n\n\tdata := make(map[string]interface{})\n\tdata[\"event\"] = resp\n\treturn helper.NewResponse(http.StatusOK, http.StatusOK, \"Success\", data).WriteResponse(c)\n}", "func (e GetEventByIDRequestValidationError) Reason() string { return e.reason }", "func (s *HandshakeConstraintViolationException) RequestID() string {\n\treturn s.RespMetadata.RequestID\n}", "func (g *MessagesGetScheduledMessagesRequest) GetID() (value []int) {\n\tif g == nil {\n\t\treturn\n\t}\n\treturn g.ID\n}", "func (s *ResourceShareInvitationExpiredException) RequestID() string {\n\treturn s.RespMetadata.RequestID\n}", "func (_InboxHelperTester *InboxHelperTesterSession) RequestID(messageNum *big.Int, rollup common.Address) ([32]byte, error) {\n\treturn _InboxHelperTester.Contract.RequestID(&_InboxHelperTester.CallOpts, messageNum, rollup)\n}", "func (o *GetEventLogsUsingGETParams) WithEventID(eventID strfmt.UUID) *GetEventLogsUsingGETParams {\n\to.SetEventID(eventID)\n\treturn o\n}", "func (r *DeviceManagementAutopilotEventRequest) Get(ctx context.Context) (resObj *DeviceManagementAutopilotEvent, err error) {\n\tvar query string\n\tif r.query != nil {\n\t\tquery = \"?\" + r.query.Encode()\n\t}\n\terr = r.JSONRequest(ctx, \"GET\", query, nil, &resObj)\n\treturn\n}", "func (e StreamEventsRequest_IdentifierValidationError) Cause() error { return e.cause }", "func (pub *Publisher) GetInputByID(inputID string) *types.InputDiscoveryMessage {\n\treturn pub.registeredInputs.GetInputByID(inputID)\n}", "func (s *RangeNotSatisfiableException) RequestID() string {\n\treturn s.RespMetadata.RequestID\n}", "func (s *OutboundContactNotPermittedException) RequestID() string {\n\treturn s.RespMetadata.RequestID\n}", "func (e ExecuteServiceEvent) EventID() uint64 {\n\treturn e.ID\n}", "func (s *ContactFlowNotPublishedException) RequestID() string {\n\treturn s.RespMetadata.RequestID\n}", "func getID(event *types.Event) string {\n\tvar output string\n\t// if len(event.ID)>0{\n\t// \toutput = string(event.ID)\n\t// } else{\n\t\toutput = event.Check.Name\n\t// }\n\treturn output\n}", "func NewGetEventsRequest(\n\tserver string,\n\tparams *GetEventsParams,\n) (*http.Request, error) {\n\tvar err error\n\n\tqueryUrl, err := url.Parse(server)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\n\tbasePath := fmt.Sprintf(\"/events\")\n\tif basePath[0] == '/' {\n\t\tbasePath = basePath[1:]\n\t}\n\n\tqueryUrl, err = queryUrl.Parse(basePath)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\n\tqueryValues := queryUrl.Query()\n\n\tif params.Limit != nil {\n\n\t\tif queryFrag, err := runtime.StyleParam(\"form\", true, \"limit\", *params.Limit); err != nil {\n\t\t\treturn nil, err\n\t\t} else if parsed, err := url.ParseQuery(queryFrag); err != 
nil {\n\t\t\treturn nil, err\n\t\t} else {\n\t\t\tfor k, v := range parsed {\n\t\t\t\tfor _, v2 := range v {\n\t\t\t\t\tqueryValues.Add(k, v2)\n\t\t\t\t}\n\t\t\t}\n\t\t}\n\n\t}\n\n\tif params.StartingAfter != nil {\n\n\t\tif queryFrag, err := runtime.StyleParam(\"form\", true, \"starting_after\", *params.StartingAfter); err != nil {\n\t\t\treturn nil, err\n\t\t} else if parsed, err := url.ParseQuery(queryFrag); err != nil {\n\t\t\treturn nil, err\n\t\t} else {\n\t\t\tfor k, v := range parsed {\n\t\t\t\tfor _, v2 := range v {\n\t\t\t\t\tqueryValues.Add(k, v2)\n\t\t\t\t}\n\t\t\t}\n\t\t}\n\n\t}\n\n\tif params.EndingBefore != nil {\n\n\t\tif queryFrag, err := runtime.StyleParam(\"form\", true, \"ending_before\", *params.EndingBefore); err != nil {\n\t\t\treturn nil, err\n\t\t} else if parsed, err := url.ParseQuery(queryFrag); err != nil {\n\t\t\treturn nil, err\n\t\t} else {\n\t\t\tfor k, v := range parsed {\n\t\t\t\tfor _, v2 := range v {\n\t\t\t\t\tqueryValues.Add(k, v2)\n\t\t\t\t}\n\t\t\t}\n\t\t}\n\n\t}\n\n\tif params.School != nil {\n\n\t\tif queryFrag, err := runtime.StyleParam(\"form\", true, \"school\", *params.School); err != nil {\n\t\t\treturn nil, err\n\t\t} else if parsed, err := url.ParseQuery(queryFrag); err != nil {\n\t\t\treturn nil, err\n\t\t} else {\n\t\t\tfor k, v := range parsed {\n\t\t\t\tfor _, v2 := range v {\n\t\t\t\t\tqueryValues.Add(k, v2)\n\t\t\t\t}\n\t\t\t}\n\t\t}\n\n\t}\n\n\tif params.RecordType != nil {\n\n\t\tif queryFrag, err := runtime.StyleParam(\"form\", true, \"record_type\", *params.RecordType); err != nil {\n\t\t\treturn nil, err\n\t\t} else if parsed, err := url.ParseQuery(queryFrag); err != nil {\n\t\t\treturn nil, err\n\t\t} else {\n\t\t\tfor k, v := range parsed {\n\t\t\t\tfor _, v2 := range v {\n\t\t\t\t\tqueryValues.Add(k, v2)\n\t\t\t\t}\n\t\t\t}\n\t\t}\n\n\t}\n\n\tqueryUrl.RawQuery = queryValues.Encode()\n\n\treq, err := http.NewRequest(\"GET\", queryUrl.String(), nil)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\n\treturn req, nil\n}", "func (s *RejectedRecordsException) RequestID() string {\n\treturn s.RespMetadata.RequestID\n}", "func (_InboxHelperTester *InboxHelperTesterCallerSession) RequestID(messageNum *big.Int, rollup common.Address) ([32]byte, error) {\n\treturn _InboxHelperTester.Contract.RequestID(&_InboxHelperTester.CallOpts, messageNum, rollup)\n}", "func (s *InvalidRequestException) RequestID() string {\n\treturn s.RespMetadata.RequestID\n}", "func (s *InvalidRequestException) RequestID() string {\n\treturn s.RespMetadata.RequestID\n}", "func (s *InvalidRequestException) RequestID() string {\n\treturn s.RespMetadata.RequestID\n}" ]
[ "0.69761455", "0.62794083", "0.61478055", "0.6009446", "0.5853159", "0.5708041", "0.56953514", "0.56953514", "0.5529281", "0.5498734", "0.5492505", "0.5488298", "0.5448844", "0.5447197", "0.54463845", "0.54295224", "0.54211223", "0.5369132", "0.53529", "0.53529", "0.53529", "0.53529", "0.53529", "0.53529", "0.53529", "0.53529", "0.53529", "0.53529", "0.53529", "0.53529", "0.53529", "0.53529", "0.53529", "0.53529", "0.53529", "0.53529", "0.53529", "0.53520113", "0.5350367", "0.5340261", "0.5333542", "0.5323618", "0.53071976", "0.52373725", "0.52264315", "0.5216449", "0.5212382", "0.51904243", "0.5188866", "0.5182151", "0.5169765", "0.5167805", "0.5167805", "0.5164345", "0.51413816", "0.51392627", "0.51280034", "0.51153916", "0.50959", "0.5093184", "0.5093184", "0.5093184", "0.5093184", "0.5093184", "0.5093184", "0.5093184", "0.509114", "0.5091005", "0.5081473", "0.50809455", "0.5077736", "0.50731015", "0.5054287", "0.50381714", "0.5037409", "0.5037409", "0.50357616", "0.50355345", "0.5033295", "0.50214607", "0.5000517", "0.49881876", "0.49827036", "0.4975306", "0.49744254", "0.4967815", "0.49622935", "0.4949204", "0.49399355", "0.49367613", "0.4932925", "0.49314788", "0.4931022", "0.4929021", "0.49246904", "0.49240857", "0.49228346", "0.49214467", "0.4920832", "0.4920832" ]
0.76998556
0
Field function returns field value.
func (e GetEventByIDRequestValidationError) Field() string { return e.field }
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "func GetFieldValue(v interface{}, field string) (r string) {\n\tvar immutable reflect.Value\n\timmutable = GetReflectValue(v)\n\tval := immutable.FieldByName(field)\n\tswitch val.Kind() {\n\tcase reflect.Int64, reflect.Int32, reflect.Int:\n\t\tr = fmt.Sprintf(\"%d\", val.Int())\n\tcase reflect.Float64, reflect.Float32:\n\t\tr = fmt.Sprintf(\"%.2f\", val.Float())\n\tdefault:\n\t\t// process time\n\t\tvi := val.Interface()\n\t\tif vc, ok := vi.(time.Time); ok {\n\t\t\tr = FormatTime(vc)\n\t\t\tbreak\n\t\t}\n\t\tr = fmt.Sprintf(\"%v\", val)\n\t}\n\treturn\n}", "func (f *field) Val() interface{} {\n\treturn f.v\n}", "func (f Fields) ValueForField(fieldName string) string {\n\treturn f.ValueForFieldOfType(fieldName, \"\")\n}", "func (v *ClassValue) field(s *scope, name string) Value {\n\tfield, ok := v.Fields[name]\n\tif !ok {\n\t\tpanic(fmt.Errorf(\"ClassValue %v did not contain field %v\", v.Type().Name(), name))\n\t}\n\treturn field\n}", "func (f *Field) Value() interface{} {\n\treturn f.value.Interface()\n}", "func (f *Fieldx) Value() interface{} {\n\treturn f.value.Interface()\n}", "func (i Item) GetField(name string) interface{} {\n\treturn getField(name, i.Payload)\n}", "func FieldValue(field *InputField) string {\n\treturn field.value\n}", "func (e RanparameterValueValidationError) Field() string { return e.field }", "func (i I)Field(r,c int, value string)string{\n return value\n}", "func (s *StructField) Field(name string) (*StructField, error) {\n\treturn Field(s.Value(), name)\n}", "func (entry *Entry) Field(name string) (value string, err error) {\n\tvalue, ok := entry.fields[name]\n\tif !ok {\n\t\terr = fmt.Errorf(\"field '%v' does not found in record %+v\", name, *entry)\n\t}\n\treturn\n}", "func (m *NodeMutation) Field(name string) (ent.Value, bool) {\n\tswitch name {\n\tcase node.FieldValue:\n\t\treturn m.Value()\n\t}\n\treturn nil, false\n}", "func (u *User) GetFieldValue(field *mapping.StructField) (interface{}, error) {\n\tswitch field.Index[0] {\n\tcase 0: // ID\n\t\treturn u.ID, nil\n\tcase 1: // Name\n\t\treturn u.Name, nil\n\tcase 2: // CreatedAt\n\t\treturn u.CreatedAt, nil\n\tcase 3: // CreatedAtIso\n\t\treturn u.CreatedAtIso, nil\n\tcase 5: // MotherID\n\t\treturn u.MotherID, nil\n\tcase 7: // FatherID\n\t\treturn u.FatherID, nil\n\t}\n\treturn nil, errors.Wrapf(mapping.ErrInvalidModelField, \"provided invalid field: '%s' for given model: User'\", field.Name())\n}", "func (m *NumberTokenMutation) Field(name string) (ent.Value, bool) {\n\tswitch name {\n\tcase numbertoken.FieldValue:\n\t\treturn m.Value()\n\t}\n\treturn nil, false\n}", "func (f *Field) Field(name string) *Field {\n\tfield, ok := f.FieldOk(name)\n\tif !ok {\n\t\tpanic(\"field not found\")\n\t}\n\n\treturn field\n}", "func (e GetInstanceRequestValidationError) Field() string { return e.field }", "func (e RanparameterItemValidationError) Field() string { return e.field }", "func (e ApplicationPubSubValidationError) Field() string { return e.field }", "func (res Result) GetField(fields ...string) interface{} {\n\tif len(fields) == 0 {\n\t\treturn res\n\t}\n\n\treturn res.get(fields)\n}", "func (t *Type) Field(i int) *Field", "func (m *CarRepairrecordMutation) Field(name string) (ent.Value, bool) {\n\tswitch name {\n\tcase carrepairrecord.FieldDatetime:\n\t\treturn m.Datetime()\n\tcase carrepairrecord.FieldRepairdetail:\n\t\treturn m.Repairdetail()\n\tcase carrepairrecord.FieldRepaircost:\n\t\treturn m.Repaircost()\n\tcase carrepairrecord.FieldCarmaintenance:\n\t\treturn m.Carmaintenance()\n\t}\n\treturn 
nil, false\n}", "func (b *box) getFieldValue(x, y int) int {\n\treturn b.values[x+y*3]\n}", "func (msg *Message) Field(fieldName string) *Field {\n\treturn msg.fieldByName[fieldName]\n}", "func (e GetRequestValidationError) Field() string { return e.field }", "func (e GetRequestValidationError) Field() string { return e.field }", "func (e GetInstanceResponseValidationError) Field() string { return e.field }", "func (e BitStringValidationError) Field() string { return e.field }", "func (e GetResponseValidationError) Field() string { return e.field }", "func (e GetApplicationPubSubRequestValidationError) Field() string { return e.field }", "func (e ResultValidationError) Field() string { return e.field }", "func (e GetEventByIDResponseValidationError) Field() string { return e.field }", "func (e RanparameterDefItemValidationError) Field() string { return e.field }", "func (e ArfcnValidationError) Field() string { return e.field }", "func (p *Pet) GetFieldValue(field *mapping.StructField) (interface{}, error) {\n\tswitch field.Index[0] {\n\tcase 0: // ID\n\t\treturn p.ID, nil\n\tcase 1: // Name\n\t\treturn p.Name, nil\n\tcase 3: // OwnerID\n\t\treturn p.OwnerID, nil\n\t}\n\treturn nil, errors.Wrapf(mapping.ErrInvalidModelField, \"provided invalid field: '%s' for given model: Pet'\", field.Name())\n}", "func (e RanparameterIdValidationError) Field() string { return e.field }", "func (e RetrieveResponseValidationError) Field() string { return e.field }", "func (m *RepairinvoiceMutation) Field(name string) (ent.Value, bool) {\n\tswitch name {\n\tcase repairinvoice.FieldBequipment:\n\t\treturn m.Bequipment()\n\tcase repairinvoice.FieldEmtell:\n\t\treturn m.Emtell()\n\tcase repairinvoice.FieldNum:\n\t\treturn m.Num()\n\t}\n\treturn nil, false\n}", "func (m *CleaningroomMutation) Field(name string) (ent.Value, bool) {\n\tswitch name {\n\tcase cleaningroom.FieldNote:\n\t\treturn m.Note()\n\tcase cleaningroom.FieldDateandstarttime:\n\t\treturn m.Dateandstarttime()\n\tcase cleaningroom.FieldPhonenumber:\n\t\treturn m.Phonenumber()\n\tcase cleaningroom.FieldNumofem:\n\t\treturn m.Numofem()\n\t}\n\treturn nil, false\n}", "func Field(name, from, reference string) (string, error) {\n\treturn makeRequest(\"field\", name, from, reference)\n}", "func (e GetMovableObjectRequestValidationError) Field() string { return e.field }", "func (e ResolveResponseValidationError) Field() string { return e.field }", "func (e PublishResponseValidationError) Field() string { return e.field }", "func (e GetMessageRequestValidationError) Field() string { return e.field }", "func (e GetMessageResponseValidationError) Field() string { return e.field }", "func (m *RepairinvoiceMutation) Field(name string) (ent.Value, bool) {\n\tswitch name {\n\tcase repairinvoice.FieldSymptomid:\n\t\treturn m.Symptomid()\n\tcase repairinvoice.FieldDeviceid:\n\t\treturn m.Deviceid()\n\tcase repairinvoice.FieldUserid:\n\t\treturn m.Userid()\n\tcase repairinvoice.FieldStatusrepairid:\n\t\treturn m.Statusrepairid()\n\t}\n\treturn nil, false\n}", "func (e SimpleRequestValidationError) Field() string { return e.field }", "func (e CacheValidationError) Field() string { return e.field }", "func (e PciValidationError) Field() string { return e.field }", "func (e ChannelPayRequestValidationError) Field() string { return e.field }", "func (e GetMovableObjectResponseValidationError) Field() string { return e.field }", "func (e RetrieveRequestValidationError) Field() string { return e.field }", "func (m *ExchangeMutation) Field(name string) (ent.Value, 
bool) {\n\tswitch name {\n\tcase exchange.FieldCode:\n\t\treturn m.Code()\n\tcase exchange.FieldName:\n\t\treturn m.Name()\n\t}\n\treturn nil, false\n}", "func (e PublishRequestValidationError) Field() string { return e.field }", "func (m *PetruleMutation) Field(name string) (ent.Value, bool) {\n\tswitch name {\n\tcase petrule.FieldPetrule:\n\t\treturn m.Petrule()\n\t}\n\treturn nil, false\n}", "func (e GitopsCFValidationError) Field() string { return e.field }", "func (e SimpleResponseValidationError) Field() string { return e.field }", "func (e ChannelPayResponseValidationError) Field() string { return e.field }", "func (f *Field) Get(l *Location) (string, error) {\n\tif l.Comp == -1 {\n\t\treturn string(f.Value), nil\n\t}\n\tcomp, err := f.Component(l.Comp)\n\tif err != nil {\n\t\treturn \"\", err\n\t}\n\treturn comp.Get(l)\n}", "func (m *RepairingMutation) Field(name string) (ent.Value, bool) {\n\tswitch name {\n\tcase repairing.FieldRepairpart:\n\t\treturn m.Repairpart()\n\t}\n\treturn nil, false\n}", "func (e RanfunctionNameValidationError) Field() string { return e.field }", "func (m *BillMutation) Field(name string) (ent.Value, bool) {\n\tswitch name {\n\tcase bill.FieldPrice:\n\t\treturn m.Price()\n\tcase bill.FieldTime:\n\t\treturn m.Time()\n\t}\n\treturn nil, false\n}", "func (m *EventRSVPMutation) Field(name string) (ent.Value, bool) {\n\treturn nil, false\n}", "func Field(v interface{}, name string) (*Fieldx, bool) {\n\treturn New(v).Field(name)\n}", "func (e GetStreamRequestValidationError) Field() string { return e.field }", "func (e RdsValidationError) Field() string { return e.field }", "func (f *TagField) Value() string {\n\treturn f.value\n}", "func (m *LeaseMutation) Field(name string) (ent.Value, bool) {\n\tswitch name {\n\tcase lease.FieldAddedtime:\n\t\treturn m.Addedtime()\n\tcase lease.FieldTenant:\n\t\treturn m.Tenant()\n\tcase lease.FieldNumbtenant:\n\t\treturn m.Numbtenant()\n\tcase lease.FieldIdtenant:\n\t\treturn m.Idtenant()\n\tcase lease.FieldAgetenant:\n\t\treturn m.Agetenant()\n\t}\n\treturn nil, false\n}", "func (e RetrieveCurrentRequestValidationError) Field() string { return e.field }", "func (fn AdapterFunc) Field(fieldpath []string) (string, bool) {\n\treturn fn(fieldpath)\n}", "func (e EarfcnValidationError) Field() string { return e.field }", "func (e Response_DataValidationError) Field() string { return e.field }", "func (e ScopedRdsValidationError) Field() string { return e.field }", "func (e ResolveRequestValidationError) Field() string { return e.field }", "func (e PaymentInputValidationError) Field() string { return e.field }", "func (m *PatientrecordMutation) Field(name string) (ent.Value, bool) {\n\tswitch name {\n\tcase patientrecord.FieldName:\n\t\treturn m.Name()\n\t}\n\treturn nil, false\n}", "func (e BatchGetResponseValidationError) Field() string { return e.field }", "func (i *Item) GetValue(field string) string {\n\tif i == nil || len(i.Fields) == 0 {\n\t\treturn \"\"\n\t}\n\n\tsectionFilter := false\n\tsectionLabel := \"\"\n\tfieldLabel := field\n\tif strings.Contains(field, \".\") {\n\t\tparts := strings.Split(field, \".\")\n\n\t\t// Test to make sure the . 
isn't the last character\n\t\tif len(parts) == 2 {\n\t\t\tsectionFilter = true\n\t\t\tsectionLabel = parts[0]\n\t\t\tfieldLabel = parts[1]\n\t\t}\n\t}\n\n\tfor _, f := range i.Fields {\n\t\tif sectionFilter {\n\t\t\tif f.Section != nil {\n\t\t\t\tif sectionLabel != i.SectionLabelForID(f.Section.ID) {\n\t\t\t\t\tcontinue\n\t\t\t\t}\n\t\t\t}\n\t\t}\n\n\t\tif fieldLabel == f.Label {\n\t\t\treturn f.Value\n\t\t}\n\t}\n\n\treturn \"\"\n}", "func (m *RoomInfoMutation) Field(name string) (ent.Value, bool) {\n\tswitch name {\n\tcase roominfo.FieldInfo:\n\t\treturn m.Info()\n\t}\n\treturn nil, false\n}", "func (m *TokenMutation) Field(name string) (ent.Value, bool) {\n\tswitch name {\n\tcase token.FieldCreatedAt:\n\t\treturn m.CreatedAt()\n\tcase token.FieldUpdatedAt:\n\t\treturn m.UpdatedAt()\n\tcase token.FieldValue:\n\t\treturn m.Value()\n\t}\n\treturn nil, false\n}", "func (m *ResourceMutation) Field(name string) (ent.Value, bool) {\n\tswitch name {\n\tcase resource.FieldName:\n\t\treturn m.Name()\n\tcase resource.FieldType:\n\t\treturn m.GetType()\n\t}\n\treturn nil, false\n}", "func (e MovableObjectValidationError) Field() string { return e.field }", "func (m *CardMutation) Field(name string) (ent.Value, bool) {\n\tswitch name {\n\tcase card.FieldCreateTime:\n\t\treturn m.CreateTime()\n\tcase card.FieldUpdateTime:\n\t\treturn m.UpdateTime()\n\tcase card.FieldSuit:\n\t\treturn m.Suit()\n\tcase card.FieldValue:\n\t\treturn m.Value()\n\t}\n\treturn nil, false\n}", "func (m *EventMutation) Field(name string) (ent.Value, bool) {\n\tswitch name {\n\tcase event.FieldName:\n\t\treturn m.Name()\n\t}\n\treturn nil, false\n}", "func (m *BillMutation) Field(name string) (ent.Value, bool) {\n\tswitch name {\n\tcase bill.FieldQuantity:\n\t\treturn m.Quantity()\n\tcase bill.FieldAddedTime:\n\t\treturn m.AddedTime()\n\t}\n\treturn nil, false\n}", "func (m *StreetMutation) Field(name string) (ent.Value, bool) {\n\tswitch name {\n\tcase street.FieldName:\n\t\treturn m.Name()\n\t}\n\treturn nil, false\n}", "func (m *LengthtimeMutation) Field(name string) (ent.Value, bool) {\n\tswitch name {\n\tcase lengthtime.FieldLengthtime:\n\t\treturn m.Lengthtime()\n\t}\n\treturn nil, false\n}", "func (e AssessmentResultValidationError) Field() string { return e.field }", "func (s UserSet) FieldGet(field models.FieldName) *models.FieldInfo {\n\tres := s.Collection().Call(\"FieldGet\", field)\n\tresTyped, _ := res.(*models.FieldInfo)\n\treturn resTyped\n}", "func (e GetUserResponseValidationError) Field() string { return e.field }", "func (m *PatientrightsMutation) Field(name string) (ent.Value, bool) {\n\tswitch name {\n\tcase patientrights.FieldPermissionDate:\n\t\treturn m.PermissionDate()\n\t}\n\treturn nil, false\n}", "func (e GetStreamResponseValidationError) Field() string { return e.field }", "func (m *EquipmentrentalMutation) Field(name string) (ent.Value, bool) {\n\tswitch name {\n\tcase equipmentrental.FieldRENTALAMOUNT:\n\t\treturn m.RENTALAMOUNT()\n\tcase equipmentrental.FieldRENTALDATE:\n\t\treturn m.RENTALDATE()\n\tcase equipmentrental.FieldRETURNDATE:\n\t\treturn m.RETURNDATE()\n\t}\n\treturn nil, false\n}", "func (f *FieldHandler) Value(initZero bool) reflect.Value {\n\treturn f.field.reflectValueGetter(f.expr.ptr, initZero)\n}", "func (m *PurposeMutation) Field(name string) (ent.Value, bool) {\n\tswitch name {\n\tcase purpose.FieldObjective:\n\t\treturn m.Objective()\n\t}\n\treturn nil, false\n}", "func (e ApplicationPubSubsValidationError) Field() string { return e.field }", "func (f Unstructured) 
Field(field string) Fragment {\n\tif f.fields != nil {\n\t\treturn f.fields[field]\n\t}\n\treturn nil\n}", "func (e BodyResponseValidationError) Field() string { return e.field }", "func (m *CarMutation) Field(name string) (ent.Value, bool) {\n\tswitch name {\n\tcase car.FieldCreateTime:\n\t\treturn m.CreateTime()\n\tcase car.FieldUpdateTime:\n\t\treturn m.UpdateTime()\n\tcase car.FieldModel:\n\t\treturn m.Model()\n\tcase car.FieldRegisteredAt:\n\t\treturn m.RegisteredAt()\n\t}\n\treturn nil, false\n}", "func (m *CardMutation) Field(name string) (ent.Value, bool) {\n\tswitch name {\n\tcase card.FieldNumber:\n\t\treturn m.Number()\n\tcase card.FieldName:\n\t\treturn m.Name()\n\tcase card.FieldOwnerID:\n\t\treturn m.OwnerID()\n\t}\n\treturn nil, false\n}" ]
[ "0.71079886", "0.705458", "0.70306563", "0.70252305", "0.6945119", "0.69039124", "0.689789", "0.68854237", "0.68611896", "0.68137765", "0.6811531", "0.67632294", "0.6716657", "0.67018616", "0.66822076", "0.6671346", "0.66659707", "0.6661343", "0.66608155", "0.6660421", "0.665608", "0.6647752", "0.66360617", "0.6617159", "0.66153616", "0.66153616", "0.661111", "0.6608895", "0.66083837", "0.6604208", "0.66008335", "0.65927887", "0.6587402", "0.65803015", "0.65671533", "0.6567071", "0.6564914", "0.65632343", "0.65630984", "0.654184", "0.6536053", "0.6530546", "0.6530526", "0.6528864", "0.65260595", "0.65179527", "0.6516745", "0.6516154", "0.6510159", "0.6510078", "0.65042776", "0.6501439", "0.6499975", "0.64988506", "0.649665", "0.6496221", "0.64947623", "0.649354", "0.6489089", "0.6488793", "0.64882225", "0.64859617", "0.6483642", "0.6479889", "0.64790434", "0.6472379", "0.6465228", "0.6459204", "0.6457627", "0.6452723", "0.64507645", "0.64495903", "0.64487314", "0.6448028", "0.64479464", "0.64474", "0.64456683", "0.64455897", "0.6444573", "0.64437336", "0.6443306", "0.6441888", "0.6441613", "0.6441039", "0.6439085", "0.6438874", "0.6434375", "0.64315784", "0.6430702", "0.6429934", "0.64209116", "0.6417538", "0.64174324", "0.6417134", "0.6411201", "0.64086837", "0.6406251", "0.6405251", "0.6404929", "0.64009386" ]
0.6625801
23
Reason function returns reason value.
func (e GetEventByIDRequestValidationError) Reason() string { return e.reason }
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "func GetReason(from Getter, t string) string {\n\tif c := Get(from, t); c != nil {\n\t\treturn c.Reason\n\t}\n\treturn \"\"\n}", "func GetReason(message report.IMessage) int32 {\n\tswitch message.MessageType() {\n\tcase \"+RSP\", \"+BSP\":\n\t\treturn getLocationReason(message)\n\tcase \"+EVT\", \"+BVT\":\n\t\treturn getEventCode(message)\n\n\t}\n\treturn int32(6)\n}", "func GetReason(message report.IMessage) int32 {\n\tswitch message.MessageType() {\n\tcase \"+RSP\", \"+BSP\":\n\t\treturn getLocationReason(message)\n\tcase \"+EVT\", \"+BVT\":\n\t\treturn getEventCode(message)\n\n\t}\n\treturn int32(6)\n}", "func (b *Base) GetReason() string {\n\treturn b.Reason\n}", "func (o ValidationOutput) Reason() pulumi.StringPtrOutput {\n\treturn o.ApplyT(func(v Validation) *string { return v.Reason }).(pulumi.StringPtrOutput)\n}", "func (s *Subscription) GetReason() string {\n\tif s == nil || s.Reason == nil {\n\t\treturn \"\"\n\t}\n\treturn *s.Reason\n}", "func GetReason(message report.IMessage) int32 {\n\tidt, found := message.GetValue(fields.DeviceType)\n\tif !found {\n\t\treturn 6 //periodical\n\t}\n\n\tdeviceType, valid := idt.(byte)\n\tif !valid {\n\t\treturn 6 //periodical\n\t}\n\n\tswitch deviceType {\n\tcase devicetypes.GV320:\n\t\treturn gv300.GetReason(message)\n\n\tcase devicetypes.GV55, devicetypes.GV55N:\n\t\treturn gv55.GetReason(message)\n\n\tcase devicetypes.GV55Lite, devicetypes.GV55NLite:\n\t\treturn gv55.GetReasonLite(message)\n\n\tcase devicetypes.GV75, devicetypes.GV75W:\n\t\treturn gv75.GetReason(message)\n\n\tcase devicetypes.GV55W:\n\t\treturn gv55w.GetReason(message)\n\n\tcase devicetypes.GV600W:\n\t\treturn gv600.GetReason(message)\n\tcase devicetypes.GV300W:\n\t\treturn gv300w.GetReason(message)\n\tdefault:\n\t\treturn gv55.GetReason(message)\n\t}\n}", "func (e MessageDValidationError) Reason() string { return e.reason }", "func (o LienOutput) Reason() pulumi.StringOutput {\n\treturn o.ApplyT(func(v *Lien) pulumi.StringOutput { return v.Reason }).(pulumi.StringOutput)\n}", "func (e BitStringValidationError) Reason() string { return e.reason }", "func (o JobConditionOutput) Reason() pulumi.StringPtrOutput {\n\treturn o.ApplyT(func(v JobCondition) *string { return v.Reason }).(pulumi.StringPtrOutput)\n}", "func Reason(v string) predicate.ProfileUKM {\n\treturn predicate.ProfileUKM(func(s *sql.Selector) {\n\t\ts.Where(sql.EQ(s.C(FieldReason), v))\n\t})\n}", "func (e MessageFValidationError) Reason() string { return e.reason }", "func (o ValidationPatchOutput) Reason() pulumi.StringPtrOutput {\n\treturn o.ApplyT(func(v ValidationPatch) *string { return v.Reason }).(pulumi.StringPtrOutput)\n}", "func (e ActiveHealthCheckValidationError) Reason() string { return e.reason }", "func (o *SecurityProblemEvent) GetReason() string {\n\tif o == nil || o.Reason == nil {\n\t\tvar ret string\n\t\treturn ret\n\t}\n\treturn *o.Reason\n}", "func (e EutracgiValidationError) Reason() string { return e.reason }", "func (resp *Response) Reason() string {\n\treturn resp.Status\n}", "func (n *Notification) GetReason() string {\n\tif n == nil || n.Reason == nil {\n\t\treturn \"\"\n\t}\n\treturn *n.Reason\n}", "func (s *SessionTrackerV1) GetReason() string {\n\treturn s.Spec.Reason\n}", "func (e MessageEValidationError) Reason() string { return e.reason }", "func (e RequirementRuleValidationError) Reason() string { return e.reason }", "func Reason(err error) string {\n\tif err == nil {\n\t\treturn \"\"\n\t}\n\tif reasoner, ok := err.(Reasoner); ok {\n\t\treturn reasoner.Reason()\n\t}\n\treturn 
\"\"\n}", "func (o MachineInstanceStatusConditionsOutput) Reason() pulumi.StringOutput {\n\treturn o.ApplyT(func(v MachineInstanceStatusConditions) string { return v.Reason }).(pulumi.StringOutput)\n}", "func (e NrtValidationError) Reason() string { return e.reason }", "func (o BuildStatusOutput) Reason() pulumi.StringPtrOutput {\n\treturn o.ApplyT(func(v BuildStatus) *string { return v.Reason }).(pulumi.StringPtrOutput)\n}", "func (e GetMessageResponseValidationError) Reason() string { return e.reason }", "func (o ApplicationStatusConditionsOutput) Reason() pulumi.StringOutput {\n\treturn o.ApplyT(func(v ApplicationStatusConditions) string { return v.Reason }).(pulumi.StringOutput)\n}", "func (e PassiveHealthCheckValidationError) Reason() string { return e.reason }", "func (e CardValidationError) Reason() string { return e.reason }", "func (e StatsdValidationError) Reason() string { return e.reason }", "func (e PciValidationError) Reason() string { return e.reason }", "func (o ApplicationStatusWorkflowStepsOutput) Reason() pulumi.StringPtrOutput {\n\treturn o.ApplyT(func(v ApplicationStatusWorkflowSteps) *string { return v.Reason }).(pulumi.StringPtrOutput)\n}", "func (o *AccessRequestData) GetReason() string {\n\tif o == nil || o.Reason == nil {\n\t\tvar ret string\n\t\treturn ret\n\t}\n\treturn *o.Reason\n}", "func (e LanguageValidationError) Reason() string { return e.reason }", "func (e CreditValidationError) Reason() string { return e.reason }", "func (e PaymentValidationError) Reason() string { return e.reason }", "func (e ResponseValidationError) Reason() string { return e.reason }", "func (e RdsValidationError) Reason() string { return e.reason }", "func (e CardHolderValidationError) Reason() string { return e.reason }", "func (e ActionValidationError) Reason() string { return e.reason }", "func (e SimpleResponseValidationError) Reason() string { return e.reason }", "func (e StatusResponseValidationError) Reason() string { return e.reason }", "func (o *V0037Node) GetReason() string {\n\tif o == nil || o.Reason == nil {\n\t\tvar ret string\n\t\treturn ret\n\t}\n\treturn *o.Reason\n}", "func (e ChannelPayRequestValidationError) Reason() string { return e.reason }", "func (e ChannelPayResponseValidationError) Reason() string { return e.reason }", "func (e RicControlMessagePriorityValidationError) Reason() string { return e.reason }", "func (e MaxPciValidationError) Reason() string { return e.reason }", "func (e LivenessResponseValidationError) Reason() string { return e.reason }", "func (e MaxPlmnValidationError) Reason() string { return e.reason }", "func (e SimpleRequestValidationError) Reason() string { return e.reason }", "func (e MessageCValidationError) Reason() string { return e.reason }", "func (se *StatusError) Reason() string {\n\treturn se.message\n}", "func (o *DeploymentsCondition) GetReason() string {\n\tif o == nil || o.Reason == nil {\n\t\tvar ret string\n\t\treturn ret\n\t}\n\treturn *o.Reason\n}", "func (e SkillValidationError) Reason() string { return e.reason }", "func (e GetDisscusRespValidationError) Reason() string { return e.reason }", "func (o BuildStatusPtrOutput) Reason() pulumi.StringPtrOutput {\n\treturn o.ApplyT(func(v *BuildStatus) *string {\n\t\tif v == nil {\n\t\t\treturn nil\n\t\t}\n\t\treturn v.Reason\n\t}).(pulumi.StringPtrOutput)\n}", "func (c *ContainerStatusResolver) Reason() *string {\n\treturn c.reason\n}", "func (e EarfcnValidationError) Reason() string { return e.reason }", "func (e CalculateComplianceRequestValidationError) Reason() string 
{ return e.reason }", "func (_this *CrashReportBody) Reason() *string {\n\tvar ret *string\n\tvalue := _this.Value_JS.Get(\"reason\")\n\tif value.Type() != js.TypeNull && value.Type() != js.TypeUndefined {\n\t\t__tmp := (value).String()\n\t\tret = &__tmp\n\t}\n\treturn ret\n}", "func (e HealthCheck_PayloadValidationError) Reason() string { return e.reason }", "func (e RetrieveMyCardsResponseValidationError) Reason() string { return e.reason }", "func (e CommonResponseValidationError) Reason() string { return e.reason }", "func (e GetMessageRequestValidationError) Reason() string { return e.reason }", "func (o StorageClusterStatusConditionsOutput) Reason() pulumi.StringPtrOutput {\n\treturn o.ApplyT(func(v StorageClusterStatusConditions) *string { return v.Reason }).(pulumi.StringPtrOutput)\n}", "func (e StateMachineResponseValidationError) Reason() string { return e.reason }", "func (e ArfcnValidationError) Reason() string { return e.reason }", "func (e NetworkPolicyValidationError) Reason() string { return e.reason }", "func (o *DataPlaneClusterUpdateStatusRequestConditions) GetReason() string {\n\tif o == nil || o.Reason == nil {\n\t\tvar ret string\n\t\treturn ret\n\t}\n\treturn *o.Reason\n}", "func (e MetricValidationError) Reason() string { return e.reason }", "func (o BuildRunStatusOutput) Reason() pulumi.StringPtrOutput {\n\treturn o.ApplyT(func(v BuildRunStatus) *string { return v.Reason }).(pulumi.StringPtrOutput)\n}", "func (e RecoverableError) Reason() string {\n\treturn e.reason\n}", "func (e MaxofMessageProtocolTestsValidationError) Reason() string { return e.reason }", "func (e ChannelNotifyResponseValidationError) Reason() string { return e.reason }", "func (e ResultValidationError) Reason() string { return e.reason }", "func (e TestSpecificationValidationError) Reason() string { return e.reason }", "func (e NonRecoverableError) Reason() string {\n\treturn e.reason\n}", "func (o JobStatusErrorOutput) Reason() pulumi.StringPtrOutput {\n\treturn o.ApplyT(func(v JobStatusError) *string { return v.Reason }).(pulumi.StringPtrOutput)\n}", "func (a Acknowledgement) Reason() error {\n\tswitch {\n\tcase a.State == ACK:\n\t\treturn nil\n\tcase a.State == NACK:\n\t\treturn errors.New(string(a.Message))\n\tdefault:\n\t\treturn errors.New(\"unknown acknowledgement status\")\n\t}\n}", "func (e UpdateMessageResponseValidationError) Reason() string { return e.reason }", "func (e WordValidationError) Reason() string { return e.reason }", "func (e GetDisscusReqValidationError) Reason() string { return e.reason }", "func (e CreatMessageResponseValidationError) Reason() string { return e.reason }", "func (o HorizontalPodAutoscalerConditionPatchOutput) Reason() pulumi.StringPtrOutput {\n\treturn o.ApplyT(func(v HorizontalPodAutoscalerConditionPatch) *string { return v.Reason }).(pulumi.StringPtrOutput)\n}", "func (o HorizontalPodAutoscalerConditionPatchOutput) Reason() pulumi.StringPtrOutput {\n\treturn o.ApplyT(func(v HorizontalPodAutoscalerConditionPatch) *string { return v.Reason }).(pulumi.StringPtrOutput)\n}", "func (e MetricImplementationValidationError) Reason() string { return e.reason }", "func (e CiliumCFValidationError) Reason() string { return e.reason }", "func (e FilterStateRuleValidationError) Reason() string { return e.reason }", "func (e CreateDisscusRespValidationError) Reason() string { return e.reason }", "func (e HealthCheckValidationError) Reason() string { return e.reason }", "func (e HealthCheckValidationError) Reason() string { return e.reason }", "func (e 
TwoOneofsValidationError) Reason() string { return e.reason }", "func (e AdminValidationError) Reason() string { return e.reason }", "func (o HorizontalPodAutoscalerConditionOutput) Reason() pulumi.StringPtrOutput {\n\treturn o.ApplyT(func(v HorizontalPodAutoscalerCondition) *string { return v.Reason }).(pulumi.StringPtrOutput)\n}", "func (o HorizontalPodAutoscalerConditionOutput) Reason() pulumi.StringPtrOutput {\n\treturn o.ApplyT(func(v HorizontalPodAutoscalerCondition) *string { return v.Reason }).(pulumi.StringPtrOutput)\n}", "func (e LivenessRequestValidationError) Reason() string { return e.reason }", "func (r *ReportStoryRequest) GetReason() (value ReportReasonClass) {\n\tif r == nil {\n\t\treturn\n\t}\n\treturn r.Reason\n}", "func (e AssessmentResultValidationError) Reason() string { return e.reason }", "func (e L7NetworkPolicyRuleValidationError) Reason() string { return e.reason }", "func (e NrarfcnValidationError) Reason() string { return e.reason }" ]
[ "0.78512263", "0.7759013", "0.7759013", "0.758723", "0.74332446", "0.74091107", "0.740494", "0.73673135", "0.73432285", "0.7330937", "0.7329657", "0.73138005", "0.72980094", "0.7293151", "0.72837216", "0.7275913", "0.7252345", "0.7230593", "0.72234565", "0.7222608", "0.7196587", "0.7186926", "0.7177811", "0.71720684", "0.71702856", "0.7168882", "0.7168033", "0.71623784", "0.7160162", "0.7157901", "0.7156796", "0.71499187", "0.71483266", "0.71435404", "0.7138927", "0.7134093", "0.7131485", "0.71212435", "0.7113703", "0.71134007", "0.7110416", "0.71102226", "0.71073544", "0.71044487", "0.7097571", "0.709562", "0.70931906", "0.7092116", "0.7085098", "0.70789874", "0.7077606", "0.707535", "0.7071573", "0.706842", "0.7067343", "0.70658314", "0.7065663", "0.70604813", "0.70554", "0.70413375", "0.7038985", "0.7036392", "0.70291436", "0.70268923", "0.7026706", "0.70261866", "0.7018986", "0.7011388", "0.70111495", "0.7009085", "0.7005406", "0.70025146", "0.7000965", "0.69991565", "0.6995616", "0.6992607", "0.6992276", "0.69910586", "0.6989737", "0.69873315", "0.6984515", "0.6983248", "0.6979003", "0.6976954", "0.69759", "0.69759", "0.6974406", "0.69741553", "0.6972589", "0.69723344", "0.69695055", "0.69695055", "0.69690573", "0.69686645", "0.69659555", "0.69659555", "0.69656986", "0.69630307", "0.69612694", "0.69515", "0.69511986" ]
0.0
-1
Cause function returns cause value.
func (e GetEventByIDRequestValidationError) Cause() error { return e.cause }
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "func Cause(err error) error {\n\tswitch err.(type) {\n\tcase Causable:\n\t\treturn err.(Causable).Cause()\n\t}\n\treturn nil\n}", "func (e errWithCause) Cause() error {\n\treturn e.cause\n}", "func Cause(e error) error {\n\tswitch e := e.(type) {\n\tcase *wrap:\n\t\treturn e.Cause()\n\tcase UserError:\n\t\treturn e.Cause()\n\tdefault:\n\t\treturn e\n\t}\n}", "func (e *Error) Cause() error {\n\treturn e.Unwrap()\n}", "func (e *wrap) Cause() error {\n\treturn e.cause\n}", "func Cause(err error) error {\n\treturn errors.Cause(err)\n}", "func Cause(err error) error {\n\treturn errors.Cause(err)\n}", "func Cause(err error) error {\n\treturn errors.Cause(err)\n}", "func Cause(err error) error {\n\treturn errors.Cause(err)\n}", "func Cause(err error) error {\n\treturn errors.Cause(err)\n}", "func Cause(err error) error {\n\treturn errors.Cause(err)\n}", "func Cause(err error) error {\n\tif err == nil {\n\t\treturn nil\n\t}\n\tif e, ok := err.(iCause); ok {\n\t\treturn e.Cause()\n\t}\n\tif e, ok := err.(iNext); ok {\n\t\treturn Cause(e.Next())\n\t}\n\tif e, ok := err.(iUnwrap); ok {\n\t\treturn Cause(e.Unwrap())\n\t}\n\treturn err\n}", "func (e *Error) Cause() error {\n\treturn e.err\n}", "func (e *errorT) Cause() error {\n\treturn e.err\n}", "func (s *Error) Cause() error {\n\treturn s.underlying\n}", "func (e *Error) Cause() error {\n\treturn e.Err\n}", "func (ec Error) Cause() error {\n\treturn ec.error\n}", "func Cause(err error) error {\n\tif err, ok := err.(*wrappedError); ok {\n\t\treturn err.Cause()\n\t}\n\treturn err\n}", "func (e *Err) Cause() error {\n\treturn e.cause\n}", "func Cause(err error) error {\n\ttype causer interface {\n\t\tCause() error\n\t}\n\n\tfor err != nil {\n\t\te, ok := err.(causer)\n\t\tif !ok {\n\t\t\treturn err\n\t\t}\n\t\terr = e.Cause()\n\t}\n\treturn nil\n}", "func (e Error) Cause() error {\n\treturn e.cause\n}", "func (e *RunError) Cause() error {\n\tif e.Inner != nil {\n\t\treturn e.Inner\n\t}\n\treturn e\n}", "func (e *wrappedError) Cause() error {\n\tif e.previous == nil {\n\t\treturn e\n\t}\n\tswitch err := e.previous.(type) {\n\tcase *wrappedError:\n\t\treturn err.Cause()\n\tdefault:\n\t\treturn err\n\t}\n}", "func Cause(err error) error {\n\tvar (\n\t\tcauser Causer\n\t\tok bool\n\t)\n\tfor err != nil {\n\t\tcauser, ok = err.(Causer)\n\t\tif !ok {\n\t\t\tbreak\n\t\t}\n\t\terr = causer.Cause()\n\t}\n\treturn err\n}", "func (e *OpError) Cause() error {\n\treturn e.Err\n}", "func (err *gooseError) Cause() error {\n\treturn err.cause\n}", "func (e *detailedError) Cause() error {\n\treturn e.cause\n}", "func (err *ExitError) Cause() error {\n\treturn err.Err\n}", "func (ce *ClientError) Cause() error {\n\treturn ce.err\n}", "func Cause(err error) error {\n\tif w, ok := err.(*Wrapped); ok {\n\t\t// if root level error\n\t\tif len(w.Errors) > 0 {\n\t\t\treturn w.Errors[0]\n\t\t}\n\t\t// already extracted error\n\t\treturn w\n\t}\n\treturn err\n}", "func Cause(err error) (error, bool) { // nolint: golint, staticcheck, stylecheck\n\terrWithContext, ok := err.(ContextError)\n\tif !ok {\n\t\treturn nil, false\n\t}\n\treturn errWithContext.Cause(), true\n}", "func Cause(err error) error {\n\tfor err != nil {\n\t\te, ok := err.(causer)\n\t\tif !ok {\n\t\t\tbreak\n\t\t}\n\t\terr = e.Cause()\n\t}\n\treturn err\n}", "func Cause(err error) error {\n\tfor err != nil {\n\t\tcause, ok := err.(causer)\n\t\tif !ok {\n\t\t\tbreak\n\t\t}\n\t\terr = cause.Cause()\n\t}\n\treturn err\n}", "func (e UnencodableValue) Cause() error {\n\treturn e.Err\n}", "func Cause(err error) 
error {\n\ttype wrapper interface {\n\t\tUnwrap() error\n\t}\n\tfor err != nil {\n\t\tcause, ok := err.(wrapper)\n\t\tif !ok {\n\t\t\tbreak\n\t\t}\n\t\terr = cause.Unwrap()\n\t}\n\treturn err\n}", "func (w *pipeError) Cause() error { return errors.Cause(w.error) }", "func (e *opaqueWrapper) Cause() error { return e.cause }", "func (e *opaqueWrapper) Cause() error { return e.cause }", "func (e ResolveRequestValidationError) Cause() error { return e.cause }", "func Cause(err error) error {\n\tfor {\n\t\tuerr := Unwrap(err)\n\t\tif uerr == nil {\n\t\t\treturn err\n\t\t}\n\t\terr = uerr\n\t}\n}", "func Cause(err error) error {\n\tfor {\n\t\tif e, ok := err.(errorCause); ok {\n\t\t\tif cause := e.Cause(); cause != nil {\n\t\t\t\terr = cause\n\t\t\t\tcontinue\n\t\t\t}\n\t\t}\n\t\treturn err\n\t}\n}", "func (e InternalUpstreamTransportValidationError) Cause() error { return e.cause }", "func (e EutracgiValidationError) Cause() error { return e.cause }", "func (w *withCode) Cause() error { return w.cause }", "func (e UpsertEventRequestValidationError) Cause() error { return e.cause }", "func (e PciValidationError) Cause() error { return e.cause }", "func (e NoOneofsValidationError) Cause() error { return e.cause }", "func (e SimpleRequestValidationError) Cause() error { return e.cause }", "func Cause(err error) error {\n\tfor err != nil {\n\t\tmrpErr, ok := err.(Error)\n\t\tif !ok {\n\t\t\tbreak\n\t\t}\n\t\terr = gErrors.Cause(mrpErr.originalError)\n\t}\n\treturn err\n}", "func (e *withDomain) Cause() error { return e.cause }", "func (e LoggingValidationError) Cause() error { return e.cause }", "func (e CiliumCFValidationError) Cause() error { return e.cause }", "func (e AssessmentResultValidationError) Cause() error { return e.cause }", "func (e LoggingCFValidationError) Cause() error { return e.cause }", "func Cause(err error) error {\n\tfor err != nil {\n\t\tunwraped := errors.Unwrap(err)\n\t\tif unwraped == nil {\n\t\t\treturn err\n\t\t}\n\t\terr = unwraped\n\t}\n\treturn err\n}", "func (e NrtValidationError) Cause() error { return e.cause }", "func (e ResolveResponseValidationError) Cause() error { return e.cause }", "func (e StateChangeValidationError) Cause() error { return e.cause }", "func (e SXGValidationError) Cause() error { return e.cause }", "func (e EutracellIdentityValidationError) Cause() error { return e.cause }", "func (e WorkflowComponentValidationError) Cause() error { return e.cause }", "func (e MessageFValidationError) Cause() error { return e.cause }", "func (e EarfcnValidationError) Cause() error { return e.cause }", "func (e ActiveHealthCheckValidationError) Cause() error { return e.cause }", "func Cause(e interface{}) ECode {\n\tif e == nil {\n\t\treturn &ecode{code: 0}\n\t}\n\tif str, ok := e.(string); ok {\n\t\treturn &ecode{code: 500, message: str}\n\t}\n\terr, ok := e.(error)\n\tif !ok {\n\t\treturn &ecode{code: 500, message: reflect.TypeOf(e).Name()}\n\t}\n\tec, ok := errors.Cause(err).(ECode)\n\tif ok {\n\t\treturn ec\n\t}\n\treturn &ecode{code: 500, message: err.Error()}\n}", "func (e UpstreamValidationError) Cause() error { return e.cause }", "func (e UpstreamValidationError) Cause() error { return e.cause }", "func (e TransactionValidationError) Cause() error { return e.cause }", "func (e MessageCValidationError) Cause() error { return e.cause }", "func WithCause(err, cause error) error {\n\treturn errWithCause{\n\t\terror: err,\n\t\tcause: cause,\n\t}\n}", "func (e ActionValidationError) Cause() error { return e.cause }", "func (e 
AssessEvidenceRequestValidationError) Cause() error { return e.cause }", "func (e Upstream_TimeoutValidationError) Cause() error { return e.cause }", "func (e BootstrapValidationError) Cause() error { return e.cause }", "func (e TwoValidOneofsValidationError) Cause() error { return e.cause }", "func (e RdsValidationError) Cause() error { return e.cause }", "func (e MaxPciValidationError) Cause() error { return e.cause }", "func (e AdminValidationError) Cause() error { return e.cause }", "func (e RequirementRuleValidationError) Cause() error { return e.cause }", "func (e ResultValidationError) Cause() error { return e.cause }", "func (e InternalUpstreamTransport_MetadataValueSourceValidationError) Cause() error { return e.cause }", "func (e MaintemplateComponentValidationError) Cause() error { return e.cause }", "func (e RedactedValidationError) Cause() error { return e.cause }", "func (e CreatMessageRequestValidationError) Cause() error { return e.cause }", "func (e NrcgiValidationError) Cause() error { return e.cause }", "func (e UpsertEventResponseValidationError) Cause() error { return e.cause }", "func (e NrarfcnValidationError) Cause() error { return e.cause }", "func (e TwoOneofsValidationError) Cause() error { return e.cause }", "func (e PassiveHealthCheckValidationError) Cause() error { return e.cause }", "func (e MessageEValidationError) Cause() error { return e.cause }", "func (e ArfcnValidationError) Cause() error { return e.cause }", "func (e TenantValidationError) Cause() error { return e.cause }", "func (e GetRequestValidationError) Cause() error { return e.cause }", "func (e GetRequestValidationError) Cause() error { return e.cause }", "func (e StateValidationError) Cause() error { return e.cause }", "func (e MinioComponentValidationError) Cause() error { return e.cause }", "func (e LatencyFaultValidationError) Cause() error { return e.cause }", "func (e GetDisscusReqValidationError) Cause() error { return e.cause }", "func (e UpdateTodoRequestValidationError) Cause() error { return e.cause }", "func (e ManifestProjectCFValidationError) Cause() error { return e.cause }" ]
[ "0.8261931", "0.79593104", "0.7896341", "0.7866004", "0.77969515", "0.7710267", "0.7710267", "0.7710267", "0.7710267", "0.7710267", "0.7710267", "0.7710245", "0.76848143", "0.7658625", "0.76571184", "0.7650075", "0.76476574", "0.7625474", "0.7623792", "0.7621357", "0.7582015", "0.74775916", "0.74656785", "0.7424877", "0.7423645", "0.7384076", "0.73215586", "0.7306271", "0.7286286", "0.72688353", "0.7258698", "0.7210708", "0.7192562", "0.7107885", "0.7104621", "0.7038758", "0.701369", "0.701369", "0.69629866", "0.6927608", "0.692207", "0.69208515", "0.68938124", "0.6858123", "0.684976", "0.6846449", "0.6830235", "0.6825922", "0.68016034", "0.6800864", "0.6791525", "0.6778742", "0.67324674", "0.673176", "0.67316306", "0.6729585", "0.67155087", "0.6714904", "0.67148", "0.66955864", "0.668878", "0.66879916", "0.66822165", "0.66821957", "0.66791916", "0.6673011", "0.6673011", "0.6668595", "0.66512465", "0.66507614", "0.66484874", "0.6636346", "0.6633876", "0.66313785", "0.66304046", "0.6622965", "0.66204447", "0.6618046", "0.6617173", "0.66125673", "0.66055393", "0.6603956", "0.66004616", "0.6600119", "0.6587435", "0.6580937", "0.6578089", "0.6569218", "0.656675", "0.65664583", "0.6560722", "0.65606016", "0.6553194", "0.6553194", "0.65503496", "0.6549731", "0.6546909", "0.6544467", "0.65359867", "0.6531173" ]
0.6565433
90
Key function returns key value.
func (e GetEventByIDRequestValidationError) Key() bool { return e.key }
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "func (m *KeyValue) GetKey()(*string) {\n return m.key\n}", "func (f binaryEqualsFunc) key() Key {\n\treturn f.k\n}", "func (m *KeyUint) Key() driver.Value { return driver.Value(m.ID) }", "func (m *OMap) Key(n int) string {\n\treturn m.keys[n]\n}", "func (t *Type) Key() *Type", "func (f nullFunc) key() Key {\n\treturn f.k\n}", "func (v Variable) Key() string {\n\treturn (string)(v)\n}", "func (i GinJwtSignAlgorithm) Key() string {\n\tif val, ok := _GinJwtSignAlgorithmValueToKeyMap[i]; ok {\n\t\t// found\n\t\treturn val\n\t} else {\n\t\t// not found\n\t\treturn \"\"\n\t}\n}", "func (g *Generator) GetKey(K string) interface{} {\n\treturn g.data[K]\n}", "func (m *SearchBucket) GetKey()(*string) {\n return m.key\n}", "func (f *Filter) getKey(key string) string {\n\tif f.HashKeys {\n\t\th := sha1.New()\n\t\ts := h.Sum([]byte(key))\n\t\treturn fmt.Sprintf(\"%x\", s)\n\t}\n\treturn key\n}", "func getKey(ing *extensions.Ingress, t *testing.T) string {\n\tkey, err := keyFunc(ing)\n\tif err != nil {\n\t\tt.Fatalf(\"Unexpected error getting key for Ingress %v: %v\", ing.Name, err)\n\t}\n\treturn key\n}", "func (f *field) Key() string {\n\treturn f.k\n}", "func (i GinBindType) Key() string {\n\tif val, ok := _GinBindTypeValueToKeyMap[i]; ok {\n\t\t// found\n\t\treturn val\n\t} else {\n\t\t// not found\n\t\treturn \"\"\n\t}\n}", "func (c Node) GetKey() string {\n\treturn c.key\n}", "func (m *RegistryKeyState) GetKey()(*string) {\n return m.key\n}", "func (akv StringKeyValue) Key() string {\n\treturn akv.orig.Key\n}", "func (a AddItem) Key() string { return string(a) }", "func (area *MineArea) GetKey() string {\n\treturn GetKey(area.X, area.Y)\n}", "func (d *Disk) getKey(p *DiskParams) []byte {\n\treturn []byte(time_util.TimeToName(time.Unix(p.ExicutionTime, 0), fmt.Sprintf(\"%x\", d.hasher.Sum(nil))))\n}", "func (e *OrderedMapElement[K, V]) Key() K {\n\treturn e.element.key\n}", "func getKey(cluster *clusteroperator.Cluster, t *testing.T) string {\n\tif key, err := controller.KeyFunc(cluster); err != nil {\n\t\tt.Errorf(\"Unexpected error getting key for Cluster %v: %v\", cluster.Name, err)\n\t\treturn \"\"\n\t} else {\n\t\treturn key\n\t}\n}", "func cacheKeyFunc(obj interface{}) (string, error) {\n\tkey := obj.(*cacheEntry).key\n\treturn key, nil\n}", "func (node *Node) Key() interface{} {\n\treturn fmt.Sprintf(\"%v\", node.contents)\n}", "func (s *Mem) Key(key interface{}) string {\n\treturn fmt.Sprintf(\"%v-%v\", s.prefix, key)\n}", "func (vrfs *VRFShare) GetKey() datastore.Key {\n\treturn datastore.ToKey(fmt.Sprintf(\"%v\", vrfs.Round))\n}", "func stringKeyFunc(obj interface{}) (string, error) {\n\tkey := obj.(*nodeidentity.Info).InstanceID\n\treturn key, nil\n}", "func (e Enum) GetKey(value any) string {\n\tfor k, v := range e {\n\t\tif reflect.DeepEqual(v, value) {\n\t\t\treturn k\n\t\t}\n\t}\n\treturn \"\"\n}", "func (m *Map) Key() Type { return m.key }", "func getKey(w http.ResponseWriter, ps httprouter.Params) (string, bool){\n\treturn ps.ByName(\"id\"), true\n}", "func (v *Value) GetKey() *string {\n\tret := C.zj_GetKey(v.V)\n\tif ret == nil {\n\t\treturn nil\n\t}\n\tretStr := C.GoString(ret)\n\treturn &retStr\n}", "func (f *Factor) Key() string { return f.ID }", "func (c *KeyValueChanger) Key() (string, error) {\n\tif c.err != nil {\n\t\treturn \"\", c.err\n\t}\n\treturn c.node.content.key().(string), nil\n}", "func (a DataNodeKV) Key() string {\n\treturn a.K\n}", "func GetKey(allkeys [][]byte, loc Where) []byte {\n\tif loc == Left {\n\t\treturn allkeys[0]\n\t}\n\tif loc == Right 
{\n\t\treturn allkeys[len(allkeys)-1]\n\t}\n\t// select a random index between 1 and allkeys-2\n\t// nolint:gosec\n\tidx := rand.Int()%(len(allkeys)-2) + 1\n\treturn allkeys[idx]\n}", "func KeyFunc(name, namespace string) string {\n\tif len(namespace) == 0 {\n\t\treturn name\n\t}\n\treturn namespace + \"/\" + name\n}", "func (it *Iterator) Key() string { return it.n.k }", "func (s *session) getKey() string {\n\treturn s.uuid\n}", "func (o SchedulingNodeAffinityOutput) Key() pulumi.StringPtrOutput {\n\treturn o.ApplyT(func(v SchedulingNodeAffinity) *string { return v.Key }).(pulumi.StringPtrOutput)\n}", "func (i SNSProtocol) Key() string {\n\tif val, ok := _SNSProtocolValueToKeyMap[i]; ok {\n\t\t// found\n\t\treturn val\n\t} else {\n\t\t// not found\n\t\treturn \"\"\n\t}\n}", "func (it *Iterator) Key() interface{} { return it.n.k }", "func getkey(key ...interface{}) interface{} {\n\tif len(key) > 0 {\n\t\treturn key[0]\n\t}\n\n\treturn nil\n}", "func (i SNSSubscribeAttribute) Key() string {\n\tif val, ok := _SNSSubscribeAttributeValueToKeyMap[i]; ok {\n\t\t// found\n\t\treturn val\n\t} else {\n\t\t// not found\n\t\treturn \"\"\n\t}\n}", "func (it *iterator) Key() []byte {\n\tif len(it.keys) > 0 {\n\t\treturn []byte(it.keys[0])\n\t}\n\treturn nil\n}", "func (this *DefaultHandler) GetKey(xesRedis redo.XesRedisBase) (ret string) {\n\tdefer func() {\n\t\tif xesRedis.GetCtx() == nil {\n\t\t\treturn\n\t\t}\n\t\tbench := xesRedis.GetCtx().Value(\"IS_BENCHMARK\")\n\t\tif cast.ToString(bench) == \"1\" {\n\t\t\tret = \"benchmark_\" + ret\n\t\t}\n\t}()\n\n\tkeyInfo := this.getKeyInfo(xesRedis)\n\tkey := cast.ToString(keyInfo[\"key\"])\n\tif key == \"\" {\n\t\tret = xesRedis.GetKeyName()\n\t\treturn\n\t}\n\tret = fmt.Sprintf(key, (xesRedis.GetKeyParams())...)\n\treturn\n}", "func (st *MemStorage) GetKey(gun, role string) (algorithm string, public []byte, err error) {\n\t// no need for lock. 
It's ok to return nil if an update\n\t// wasn't observed\n\tg, ok := st.keys[gun]\n\tif !ok {\n\t\treturn \"\", nil, &ErrNoKey{gun: gun}\n\t}\n\tk, ok := g[role]\n\tif !ok {\n\t\treturn \"\", nil, &ErrNoKey{gun: gun}\n\t}\n\n\treturn k.algorithm, k.public, nil\n}", "func (e *EntrySet) Get(key string) string {\n return e.keys[key]\n}", "func (v *V) Key() string {\n\treturn v.key\n}", "func (it *Iter) Key() byte { return it.top().key }", "func (s Stash) Key() string {\n\tvals := utils.MapValues(s.payload)\n\tif len(vals) < 1 {\n\t\treturn \"\"\n\t}\n\n\treturn fmt.Sprintf(\"$%s\", vals[0])\n}", "func (i SNSPlatformApplicationAttribute) Key() string {\n\tif val, ok := _SNSPlatformApplicationAttributeValueToKeyMap[i]; ok {\n\t\t// found\n\t\treturn val\n\t} else {\n\t\t// not found\n\t\treturn \"\"\n\t}\n}", "func (o Operator) Key() string {\n\treturn fmt.Sprintf(\"operator.%s\", o.Aid)\n}", "func (i *StringIterator) Key() Object {\n\treturn &Int{Value: int64(i.i - 1)}\n}", "func (mci *XMCacheIterator) Key() []byte {\n\tif mci.err != nil || mci.dir == dirReleased {\n\t\treturn nil\n\t}\n\tswitch mci.index {\n\tcase 0, 1:\n\t\treturn mci.iters[mci.index].Key()\n\tcase 2:\n\t\tif mci.mc.isPenetrate {\n\t\t\treturn mci.mIter.Key()\n\t\t}\n\t\treturn nil\n\t}\n\treturn nil\n}", "func (s *Arena) getKey(offset uint32, size uint16) []byte {\n\treturn s.data[offset : offset+uint32(size)]\n}", "func (o ReservationAffinityOutput) Key() pulumi.StringPtrOutput {\n\treturn o.ApplyT(func(v ReservationAffinity) *string { return v.Key }).(pulumi.StringPtrOutput)\n}", "func (f DefaultField) Key() string {\n\treturn f.K\n}", "func Key(v string) predicate.Blob {\n\treturn predicate.Blob(\n\t\tfunc(s *sql.Selector) {\n\t\t\ts.Where(sql.EQ(s.C(FieldKey), v))\n\t\t},\n\t)\n}", "func (m Match) Key() string {\n\treturn fmt.Sprintf(\"match:%s\", m.ID())\n}", "func (d *Activity) KeyVal() string {\n\treturn d.ExteralID\n}", "func (key twofishKey) Key() []byte {\n\treturn key[:]\n}", "func getKey(data string) string {\n\tsign := md5.Sum([]byte(data))\n\tsignStr := fmt.Sprintf(\"%x\", sign)\n\treturn signStr[:7]\n}", "func (l *LangPackStringPluralized) GetKey() (value string) {\n\tif l == nil {\n\t\treturn\n\t}\n\treturn l.Key\n}", "func (t Task) Key() string {\n\treturn fmt.Sprintf(\"%s:%s\", t.Name, t.ID)\n}", "func (k Keys) RangeKey() interface{} { return k[1] }", "func (d *DStarLite) keyFor(s *dStarLiteNode) key {\n\t/*\n\t procedure CalculateKey(s)\n\t {01”} return [min(g(s), rhs(s)) + h(s_start, s) + k_m; min(g(s), rhs(s))];\n\t*/\n\tk := key{1: math.Min(s.g, s.rhs)}\n\tk[0] = k[1] + d.heuristic(d.s.Node, s.Node) + d.keyModifier\n\treturn k\n}", "func (stateID StateID) Key() string {\n\treturn string(stateID.LastAppHash)\n}", "func (m *Metric) GetKey() string {\n\tif m == nil || m.Key == nil {\n\t\treturn \"\"\n\t}\n\treturn *m.Key\n}", "func (u User) Key() interface{} {\n\treturn u.ID\n}", "func (b *BitSet) Key() string {\n\tif b == nil {\n\t\treturn \"\"\n\t} else {\n\t\treturn string(b.Bits.Bytes())\n\t}\n}", "func (e EnumByte) Key() EnumByteKey {\n return EnumByteKey(e)\n}", "func (n *lnode) key() []byte {\n\tbuf := (*[maxAllocSize]byte)(unsafe.Pointer(n))\n\treturn buf[n.pos : n.pos+n.ksize]\n}", "func (p *pv) key() pvKey {\n\treturn newPVKey(p.Cluster, p.Name)\n}", "func (i *MapIterator) Key() Object {\n\tk := i.k[i.i-1]\n\treturn &String{Value: k}\n}", "func (k *KVItem) Key() (interface{}, error) {\n\tvar cKey unsafe.Pointer\n\tvar keySize C.uint64_t\n\tvar keyType C.tiledb_datatype_t\n\tret := 
C.tiledb_kv_item_get_key(k.context.tiledbContext, k.tiledbKVItem, &cKey, &keyType, &keySize)\n\n\tif ret != C.TILEDB_OK {\n\t\treturn nil, fmt.Errorf(\"Error getting key for KVItem: %s\", k.context.LastError())\n\t}\n\n\tswitch Datatype(keyType) {\n\tcase TILEDB_INT8:\n\t\t// If the key size is greater than the size of a single value in bytes it is an array\n\t\telements := int(keySize) / C.sizeof_int8_t\n\t\tif elements > 1 {\n\t\t\ttmpslice := (*[1 << 30]C.int8_t)(unsafe.Pointer(cKey))[:elements:elements]\n\t\t\tretSlice := make([]int8, elements)\n\t\t\tfor i, s := range tmpslice {\n\t\t\t\tretSlice[i] = int8(s)\n\t\t\t}\n\t\t\treturn retSlice, nil\n\t\t} else {\n\t\t\treturn int8(*(*C.int8_t)(cKey)), nil\n\t\t}\n\tcase TILEDB_INT16:\n\t\t// If the key size is greater than the size of a single value in bytes it is an array\n\t\telements := int(keySize) / C.sizeof_int16_t\n\t\tif elements > 1 {\n\t\t\ttmpslice := (*[1 << 30]C.int16_t)(unsafe.Pointer(cKey))[:elements:elements]\n\t\t\tretSlice := make([]int16, elements)\n\t\t\tfor i, s := range tmpslice {\n\t\t\t\tretSlice[i] = int16(s)\n\t\t\t}\n\t\t\treturn retSlice, nil\n\t\t} else {\n\t\t\treturn int16(*(*C.int16_t)(cKey)), nil\n\t\t}\n\tcase TILEDB_INT32:\n\t\t// If the key size is greater than the size of a single value in bytes it is an array\n\t\telements := int(keySize) / C.sizeof_int32_t\n\t\tif elements > 1 {\n\t\t\ttmpslice := (*[1 << 30]C.int32_t)(unsafe.Pointer(cKey))[:elements:elements]\n\t\t\tretSlice := make([]int32, elements)\n\t\t\tfor i, s := range tmpslice {\n\t\t\t\tretSlice[i] = int32(s)\n\t\t\t}\n\t\t\treturn retSlice, nil\n\t\t} else {\n\t\t\treturn int32(*(*C.int32_t)(cKey)), nil\n\t\t}\n\tcase TILEDB_INT64:\n\t\t// If the key size is greater than the size of a single value in bytes it is an array\n\t\telements := int(keySize) / C.sizeof_int64_t\n\t\tif elements > 1 {\n\t\t\ttmpslice := (*[1 << 30]C.int64_t)(unsafe.Pointer(cKey))[:elements:elements]\n\t\t\tretSlice := make([]int64, elements)\n\t\t\tfor i, s := range tmpslice {\n\t\t\t\tretSlice[i] = int64(s)\n\t\t\t}\n\t\t\treturn retSlice, nil\n\t\t} else {\n\t\t\treturn int64(*(*C.int64_t)(cKey)), nil\n\t\t}\n\tcase TILEDB_UINT8:\n\t\t// If the key size is greater than the size of a single value in bytes it is an array\n\t\telements := int(keySize) / C.sizeof_uint8_t\n\t\tif elements > 1 {\n\t\t\ttmpslice := (*[1 << 30]C.uint8_t)(unsafe.Pointer(cKey))[:elements:elements]\n\t\t\tretSlice := make([]uint8, elements)\n\t\t\tfor i, s := range tmpslice {\n\t\t\t\tretSlice[i] = uint8(s)\n\t\t\t}\n\t\t\treturn retSlice, nil\n\t\t} else {\n\t\t\treturn int8(*(*C.uint8_t)(cKey)), nil\n\t\t}\n\tcase TILEDB_UINT16:\n\t\t// If the key size is greater than the size of a single value in bytes it is an array\n\t\telements := int(keySize) / C.sizeof_uint16_t\n\t\tif elements > 1 {\n\t\t\ttmpslice := (*[1 << 30]C.uint16_t)(unsafe.Pointer(cKey))[:elements:elements]\n\t\t\tretSlice := make([]uint16, elements)\n\t\t\tfor i, s := range tmpslice {\n\t\t\t\tretSlice[i] = uint16(s)\n\t\t\t}\n\t\t\treturn retSlice, nil\n\t\t} else {\n\t\t\treturn int16(*(*C.uint16_t)(cKey)), nil\n\t\t}\n\tcase TILEDB_UINT32:\n\t\t// If the key size is greater than the size of a single value in bytes it is an array\n\t\telements := int(keySize) / C.sizeof_uint32_t\n\t\tif elements > 1 {\n\t\t\ttmpslice := (*[1 << 30]C.uint32_t)(unsafe.Pointer(cKey))[:elements:elements]\n\t\t\tretSlice := make([]uint32, elements)\n\t\t\tfor i, s := range tmpslice {\n\t\t\t\tretSlice[i] = uint32(s)\n\t\t\t}\n\t\t\treturn 
retSlice, nil\n\t\t} else {\n\t\t\treturn int32(*(*C.uint32_t)(cKey)), nil\n\t\t}\n\tcase TILEDB_UINT64:\n\t\t// If the key size is greater than the size of a single value in bytes it is an array\n\t\telements := int(keySize) / C.sizeof_uint64_t\n\t\tif elements > 1 {\n\t\t\ttmpslice := (*[1 << 30]C.uint64_t)(unsafe.Pointer(cKey))[:elements:elements]\n\t\t\tretSlice := make([]uint64, elements)\n\t\t\tfor i, s := range tmpslice {\n\t\t\t\tretSlice[i] = uint64(s)\n\t\t\t}\n\t\t\treturn retSlice, nil\n\t\t} else {\n\t\t\treturn int64(*(*C.uint64_t)(cKey)), nil\n\t\t}\n\tcase TILEDB_FLOAT32:\n\t\t// If the key size is greater than the size of a single value in bytes it is an array\n\t\telements := int(keySize) / C.sizeof_float\n\t\tif elements > 1 {\n\t\t\ttmpslice := (*[1 << 30]C.float)(unsafe.Pointer(cKey))[:elements:elements]\n\t\t\tretSlice := make([]float32, elements)\n\t\t\tfor i, s := range tmpslice {\n\t\t\t\tretSlice[i] = float32(s)\n\t\t\t}\n\t\t\treturn retSlice, nil\n\t\t} else {\n\t\t\treturn float32(*(*C.float)(cKey)), nil\n\t\t}\n\tcase TILEDB_FLOAT64:\n\t\t// If the key size is greater than the size of a single value in bytes it is an array\n\t\telements := int(keySize) / C.sizeof_double\n\t\tif elements > 1 {\n\t\t\ttmpslice := (*[1 << 30]C.double)(unsafe.Pointer(cKey))[:elements:elements]\n\t\t\tretSlice := make([]float64, elements)\n\t\t\tfor i, s := range tmpslice {\n\t\t\t\tretSlice[i] = float64(s)\n\t\t\t}\n\t\t\treturn retSlice, nil\n\t\t} else {\n\t\t\treturn float64(*(*C.double)(cKey)), nil\n\t\t}\n\tcase TILEDB_CHAR:\n\t\telements := int(keySize) / C.sizeof_char\n\t\treturn C.GoStringN((*C.char)(cKey), C.int(elements)), nil\n\n\tdefault:\n\t\treturn nil, fmt.Errorf(\"Unsupported tiledb key type: %v\", keyType)\n\t}\n\n\treturn nil, fmt.Errorf(\"Error getting key for KVItem\")\n}", "func (u Users) Key(luid *windows.LUID) (int64, error) {\r\n\tif luid == nil {\r\n\t\treturn 0, errors.New(\"got empty LUID pointer\")\r\n\t}\r\n\tkey := int64(int64(luid.HighPart<<32) + int64(luid.LowPart))\r\n\treturn key, nil\r\n}", "func (a *Anime) Key() string {\n\treturn fmt.Sprintf(\"anime:%d\", a.ID)\n}", "func (m MapEntry) Key() interface{} {\n\treturn m.key\n}", "func (f KeyMakerFunc) KeyFor(r *http.Request) string {\n\treturn f(r)\n}", "func (t *TimeSeries) GetKey() string {\n\treturn t.key\n}", "func (m *Map) Get(key string) string {\n\tif m.IsEmpty() {\n\t\treturn \"\"\n\t}\n\thash := m.hash([]byte(key))\n\tn := node{hash: hash, key: key}\n\titer := floor(&m.nodes.Tree, &n)\n\tif iter == m.nodes.End() {\n\t\titer = m.nodes.Begin()\n\t}\n\treturn iter.Node().Key.(*node).key\n}", "func (t *ScheduledTask) Key() string {\n\treturn fmt.Sprintf(taskKeyFormat, keyPrefixScheduled, t.ID, t.score)\n}", "func (it *iterator) Key() []byte {\n\treturn it.current.key\n}", "func (eln *EmptyLeafNode) GetKey() []byte {\n\treturn nil\n}", "func (h dataUsageHash) Key() string {\n\treturn string(h)\n}", "func (c *Container) Key() string {\n\tc.Lock()\n\tdefer c.Unlock()\n\treturn c.ID\n}", "func (c Repository) GetKey(key string) string {\n\tval, err := c.Client.Get(key).Result()\n\n\tif err != nil {\n\t\tlog.Println(err)\n\t}\n\n\treturn val\n}", "func (f Base) Key() string {\n\treturn f.key\n}", "func (o StudioComponentScriptParameterKeyValueOutput) Key() pulumi.StringPtrOutput {\n\treturn o.ApplyT(func(v StudioComponentScriptParameterKeyValue) *string { return v.Key }).(pulumi.StringPtrOutput)\n}", "func (o *ResourceDefinitionFilter) GetKey() string {\n\tif o == nil {\n\t\tvar ret 
string\n\t\treturn ret\n\t}\n\n\treturn o.Key\n}", "func (it *KeyAccess_Iterator) Key() interface{} {\n\treturn it.node.key\n}", "func (b Bucket) Key() interface{} {\n\treturn b[\"key\"]\n}", "func (m *Map) Get(key string) string {\n\tif m.IsEmpty() {\n\t\treturn \"\"\n\t}\n\n\thash := int(m.hash([]byte(key)))\n\n\t// Binary search for appropriate replica.\n\tidx := sort.Search(len(m.keys), func(i int) bool { return m.keys[i] >= hash })\n\n\t// Means we have cycled back to the first replica.\n\tif idx == len(m.keys) {\n\t\tidx = 0\n\t}\n\n\treturn m.hashMap[m.keys[idx]]\n}", "func (c *Counter) GetKey() string {\n\treturn c.key\n}", "func Key(id string, fallback string) Reference {\n\treturn key{id, fallback}\n}", "func (a *PositionalAttribute) Key() string {\n\treturn AttrPositionalIndex + strconv.Itoa(a.Index)\n}", "func (n *Node) Key() interface{} {\n\treturn n.key\n}", "func (e Timing) Key() string {\n\treturn e.Name\n}", "func Key(key string) query.Extractor {\n\treturn &keyExtractor{key}\n}", "func (i *Iterator) Key() []byte {\n\treturn i.iterator.Item().KeyCopy(nil)\n}", "func (m *Metric) Key() string {\n\treturn fmt.Sprintf(\"<%s%d%s>\", m.Name, m.Timestamp, m.Tags)\n}" ]
[ "0.7397974", "0.703695", "0.7026126", "0.69730234", "0.69701165", "0.69472975", "0.682121", "0.67752403", "0.6702173", "0.6691155", "0.66223186", "0.6602185", "0.66009104", "0.65937275", "0.65673846", "0.6555592", "0.65304273", "0.6521155", "0.6511681", "0.65062934", "0.64982766", "0.64867014", "0.6477575", "0.6462233", "0.6456774", "0.6456152", "0.6448241", "0.6435275", "0.6423325", "0.6412427", "0.64096636", "0.6403262", "0.6395327", "0.63929945", "0.6382585", "0.6378694", "0.63715774", "0.63671046", "0.635377", "0.63430053", "0.63418114", "0.6339266", "0.63258415", "0.6319039", "0.630293", "0.6300368", "0.6298253", "0.6296133", "0.6295445", "0.6281786", "0.6279424", "0.6277453", "0.6277033", "0.62735796", "0.6269087", "0.6262938", "0.62600297", "0.6259835", "0.6242855", "0.62427336", "0.6239893", "0.6226979", "0.62228185", "0.6216291", "0.62118614", "0.6209014", "0.62075627", "0.619765", "0.6197426", "0.61971486", "0.6196739", "0.6192416", "0.6191223", "0.6183839", "0.6179522", "0.6177141", "0.6172575", "0.61719537", "0.6170614", "0.6162783", "0.61570954", "0.6154456", "0.6152929", "0.615149", "0.61509156", "0.61395836", "0.6138672", "0.61365676", "0.613636", "0.61338246", "0.6133771", "0.6129422", "0.61284614", "0.612092", "0.6119081", "0.61121005", "0.611087", "0.6106958", "0.6106701", "0.61020154", "0.6100722" ]
0.0
-1
Validate checks the field values on GetEventByIDResponse with the rules defined in the proto definition for this message. If any rules are violated, an error is returned.
func (m *GetEventByIDResponse) Validate() error { if m == nil { return nil } // no validation rules for Id // no validation rules for Name // no validation rules for Participants for idx, item := range m.GetSections() { _, _ = idx, item if v, ok := interface{}(item).(interface{ Validate() error }); ok { if err := v.Validate(); err != nil { return GetEventByIDResponseValidationError{ field: fmt.Sprintf("Sections[%v]", idx), reason: "embedded message failed validation", cause: err, } } } } if v, ok := interface{}(m.GetUpdatedAt()).(interface{ Validate() error }); ok { if err := v.Validate(); err != nil { return GetEventByIDResponseValidationError{ field: "UpdatedAt", reason: "embedded message failed validation", cause: err, } } } // no validation rules for IsOpened // no validation rules for IsApproved return nil }
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "func (m *GetEventByIDRequest) Validate() error {\n\tif m == nil {\n\t\treturn nil\n\t}\n\n\t// no validation rules for Id\n\n\treturn nil\n}", "func (e EventHandler) GetEventbyID(ctx context.Context, params api.GetEventbyIDParams) middleware.Responder {\n\n\teventResponse := &models.EventResponse{}\n\tevent, err := e.eventService.GetEventByID(params.ID)\n\tif err != nil {\n\t\tpostErr := fmt.Errorf(\"failed to get the event: %w\", err)\n\t\tlogrus.Warnf(postErr.Error())\n\t\tgetEventErr := &models.Response{\n\t\t\tStatus: \"Failed\",\n\t\t\tCode: 400,\n\t\t\tMessage: postErr.Error(),\n\t\t}\n\t\treturn api.NewGetEventbyIDBadRequest().WithPayload(getEventErr)\n\n\t}\n\teventResponse.Response = &models.Response{\n\t\tCode: 200,\n\t\tStatus: \"Success\",\n\t\tMessage: \"Event has been fetched Successfully\",\n\t}\n\teventResponse.Event = event\n\n\tlogrus.Infoln(\"The event has been fetched:\", event)\n\n\treturn api.NewGetEventbyIDOK().WithPayload(eventResponse)\n}", "func GetByID(rw http.ResponseWriter, r *http.Request) {\n\tuserID := r.Header.Get(\"userid\")\n\n\turlParams := strings.Replace(r.URL.String(), \"/api/getEvent/\", \"\", 1)\n\teventID := strings.Split(urlParams, \"/\")[0]\n\n\tif strings.TrimSpace(eventID) == \"\" {\n\t\tlog.Printf(\"Missing event id\\n\")\n\t\trw.WriteHeader(http.StatusBadRequest)\n\t\trw.Write([]byte(\"Event ID must be provided\"))\n\t\treturn\n\t}\n\n\tquery := `SELECT * FROM events\n\t\tWHERE owner_id = $1 \n\t\tAND id = $2`\n\n\te := Event{}\n\tloc := sql.NullString{}\n\tnotes := sql.NullString{}\n\n\terr := conn.DB.QueryRow(query, userID, eventID).Scan(&e.EventID, &e.Title, &e.StartTime, &e.EndTime, &loc, &notes, &e.OwnerID)\n\tif err != nil {\n\t\tlog.Printf(\"DB error: %s\\n\", err)\n\t\trw.WriteHeader(http.StatusNoContent)\n\t\trw.Write([]byte(\"Event not found\"))\n\t\treturn\n\t}\n\n\te.Location = loc.String\n\te.Notes = notes.String\n\n\trw.WriteHeader(http.StatusOK)\n\trw.Header().Set(\"Content-Type\", \"application/json\")\n\tjson.NewEncoder(rw).Encode(e)\n}", "func NewGetEventsEventIDOK() *GetEventsEventIDOK {\n\n\treturn &GetEventsEventIDOK{}\n}", "func (m *UpsertEventResponse) Validate() error {\n\tif m == nil {\n\t\treturn nil\n\t}\n\n\t// no validation rules for Id\n\n\treturn nil\n}", "func (h *GenericEventHandler) GetEventResponse(event *github.Event) *EventResponse {\n\treturn &EventResponse{Message: \"Request received. 
Doing nothing.\"}\n}", "func (c *ClientWithResponses) GetEventWithResponse(\n\tctx context.Context,\n\tid string,\n) (*getEventResponse, error) {\n\trsp, err := c.GetEvent(ctx, id)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\treturn ParseGetEventResponse(rsp)\n}", "func (service EventService) GetEventByID(id uint64) ([]byte, error) {\n\tEvent := service.repository.GetEventByID(id)\n\treturn json.Marshal(Event)\n}", "func (o *SubmitReplayRequestEntity) GetEventIdOk() (*int64, bool) {\n\tif o == nil || o.EventId == nil {\n\t\treturn nil, false\n\t}\n\treturn o.EventId, true\n}", "func (c *ClientWithResponses) GetEmployeesIdWithResponse(ctx context.Context, id string) (*GetEmployeesIdResponse, error) {\n\trsp, err := c.GetEmployeesId(ctx, id)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\treturn ParseGetEmployeesIdResponse(rsp)\n}", "func (m *GetEventByIDResponse_Option) Validate() error {\n\tif m == nil {\n\t\treturn nil\n\t}\n\n\t// no validation rules for Id\n\n\tif utf8.RuneCountInString(m.GetContent()) < 1 {\n\t\treturn GetEventByIDResponse_OptionValidationError{\n\t\t\tfield: \"Content\",\n\t\t\treason: \"value length must be at least 1 runes\",\n\t\t}\n\t}\n\n\treturn nil\n}", "func (o *PatchEventsEventIDReader) ReadResponse(response runtime.ClientResponse, consumer runtime.Consumer) (interface{}, error) {\n\tswitch response.Code() {\n\n\tcase 204:\n\t\tresult := NewPatchEventsEventIDNoContent()\n\t\tif err := result.readResponse(response, consumer, o.formats); err != nil {\n\t\t\treturn nil, err\n\t\t}\n\t\treturn result, nil\n\n\tcase 401:\n\t\tresult := NewPatchEventsEventIDUnauthorized()\n\t\tif err := result.readResponse(response, consumer, o.formats); err != nil {\n\t\t\treturn nil, err\n\t\t}\n\t\treturn nil, result\n\n\tcase 404:\n\t\tresult := NewPatchEventsEventIDNotFound()\n\t\tif err := result.readResponse(response, consumer, o.formats); err != nil {\n\t\t\treturn nil, err\n\t\t}\n\t\treturn nil, result\n\n\tcase 422:\n\t\tresult := NewPatchEventsEventIDUnprocessableEntity()\n\t\tif err := result.readResponse(response, consumer, o.formats); err != nil {\n\t\t\treturn nil, err\n\t\t}\n\t\treturn nil, result\n\n\tdefault:\n\t\treturn nil, runtime.NewAPIError(\"unknown error\", response, response.Code())\n\t}\n}", "func GetEventByID(id string) (EventCacheResponse, error) {\n\tdb, err := getDatabase()\n\tvar r EventCacheResponse\n\n\tif err != nil {\n\t\treturn r, err\n\t}\n\n\tdefer db.Close()\n\n\tstmt, err := db.Prepare(\"select id, json, transport, event from events where id = ?\")\n\tif err != nil {\n\t\treturn r, err\n\t}\n\tdefer stmt.Close()\n\n\terr = stmt.QueryRow(id).Scan(&r.ID, &r.JSON, &r.Transport, &r.Event)\n\tif err != nil {\n\t\treturn r, err\n\t}\n\n\treturn r, err\n}", "func (o *GetEventsEventIDOK) WithPayload(payload *GetEventsEventIDOKBody) *GetEventsEventIDOK {\n\to.Payload = payload\n\treturn o\n}", "func (c *ClientWithResponses) GetaspecificEventWithResponse(ctx context.Context, id string) (*GetaspecificEventResponse, error) {\n\trsp, err := c.GetaspecificEvent(ctx, id)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\treturn ParseGetaspecificEventResponse(rsp)\n}", "func ParseGetEmployeesIdResponse(rsp *http.Response) (*GetEmployeesIdResponse, error) {\n\tbodyBytes, err := ioutil.ReadAll(rsp.Body)\n\tdefer rsp.Body.Close()\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\n\tresponse := &GetEmployeesIdResponse{\n\t\tBody: bodyBytes,\n\t\tHTTPResponse: rsp,\n\t}\n\n\tswitch {\n\tcase strings.Contains(rsp.Header.Get(\"Content-Type\"), \"json\") && 
rsp.StatusCode == 200:\n\t\tvar dest Party\n\t\tif err := json.Unmarshal(bodyBytes, &dest); err != nil {\n\t\t\treturn nil, err\n\t\t}\n\t\tresponse.JSON200 = &dest\n\n\t}\n\n\treturn response, nil\n}", "func (c *EventService) Get(id string) (Event, *http.Response, error) {\n\toutput := &struct {\n\t\tData Event `json:\"data\"`\n\t}{}\n\tpath := fmt.Sprintf(\"%s/%s\", c.endpoint, id)\n\tresp, err := doGet(c.sling, path, output)\n\treturn output.Data, resp, err\n}", "func (o *DeleteAPIV2EventsEventSubscriptionIDReader) ReadResponse(response runtime.ClientResponse, consumer runtime.Consumer) (interface{}, error) {\n\tswitch response.Code() {\n\tcase 204:\n\t\tresult := NewDeleteAPIV2EventsEventSubscriptionIDNoContent()\n\t\tif err := result.readResponse(response, consumer, o.formats); err != nil {\n\t\t\treturn nil, err\n\t\t}\n\t\treturn result, nil\n\tcase 403:\n\t\tresult := NewDeleteAPIV2EventsEventSubscriptionIDForbidden()\n\t\tif err := result.readResponse(response, consumer, o.formats); err != nil {\n\t\t\treturn nil, err\n\t\t}\n\t\treturn nil, result\n\tcase 404:\n\t\tresult := NewDeleteAPIV2EventsEventSubscriptionIDNotFound()\n\t\tif err := result.readResponse(response, consumer, o.formats); err != nil {\n\t\t\treturn nil, err\n\t\t}\n\t\treturn nil, result\n\n\tdefault:\n\t\treturn nil, runtime.NewAPIError(\"response status code does not match any response statuses defined for this endpoint in the swagger spec\", response, response.Code())\n\t}\n}", "func encodeGetByIDResponse(ctx context.Context, w http1.ResponseWriter, response interface{}) (err error) {\n\tif f, ok := response.(endpoint.Failure); ok && f.Failed() != nil {\n\t\tErrorEncoder(ctx, f.Failed(), w)\n\t\treturn nil\n\t}\n\tw.Header().Set(\"Content-Type\", \"application/json; charset=utf-8\")\n\terr = json.NewEncoder(w).Encode(response)\n\treturn\n}", "func encodeGetByIDResponse(ctx context.Context, w http1.ResponseWriter, response interface{}) (err error) {\n\tif f, ok := response.(endpoint.Failure); ok && f.Failed() != nil {\n\t\tErrorEncoder(ctx, f.Failed(), w)\n\t\treturn nil\n\t}\n\tw.Header().Set(\"Content-Type\", \"application/json; charset=utf-8\")\n\terr = json.NewEncoder(w).Encode(response)\n\treturn\n}", "func (o *GetEventLogsUsingGETParams) SetEventID(eventID strfmt.UUID) {\n\to.EventID = eventID\n}", "func (e GetEventByIDResponseValidationError) Reason() string { return e.reason }", "func (o *DeleteEventsEventIDReader) ReadResponse(response runtime.ClientResponse, consumer runtime.Consumer) (interface{}, error) {\n\tswitch response.Code() {\n\n\tcase 204:\n\t\tresult := NewDeleteEventsEventIDNoContent()\n\t\tif err := result.readResponse(response, consumer, o.formats); err != nil {\n\t\t\treturn nil, err\n\t\t}\n\t\treturn result, nil\n\n\tcase 401:\n\t\tresult := NewDeleteEventsEventIDUnauthorized()\n\t\tif err := result.readResponse(response, consumer, o.formats); err != nil {\n\t\t\treturn nil, err\n\t\t}\n\t\treturn nil, result\n\n\tcase 404:\n\t\tresult := NewDeleteEventsEventIDNotFound()\n\t\tif err := result.readResponse(response, consumer, o.formats); err != nil {\n\t\t\treturn nil, err\n\t\t}\n\t\treturn nil, result\n\n\tdefault:\n\t\treturn nil, runtime.NewAPIError(\"unknown error\", response, response.Code())\n\t}\n}", "func (m *Response) GetID() uint64 {\n\treturn m.RequestID\n}", "func (o *GetCharactersCharacterIDCalendarEventIDReader) ReadResponse(response runtime.ClientResponse, consumer runtime.Consumer) (interface{}, error) {\n\tswitch response.Code() {\n\n\tcase 200:\n\t\tresult := 
NewGetCharactersCharacterIDCalendarEventIDOK()\n\t\tif err := result.readResponse(response, consumer, o.formats); err != nil {\n\t\t\treturn nil, err\n\t\t}\n\t\treturn result, nil\n\n\tcase 304:\n\t\tresult := NewGetCharactersCharacterIDCalendarEventIDNotModified()\n\t\tif err := result.readResponse(response, consumer, o.formats); err != nil {\n\t\t\treturn nil, err\n\t\t}\n\t\treturn nil, result\n\n\tcase 400:\n\t\tresult := NewGetCharactersCharacterIDCalendarEventIDBadRequest()\n\t\tif err := result.readResponse(response, consumer, o.formats); err != nil {\n\t\t\treturn nil, err\n\t\t}\n\t\treturn nil, result\n\n\tcase 401:\n\t\tresult := NewGetCharactersCharacterIDCalendarEventIDUnauthorized()\n\t\tif err := result.readResponse(response, consumer, o.formats); err != nil {\n\t\t\treturn nil, err\n\t\t}\n\t\treturn nil, result\n\n\tcase 403:\n\t\tresult := NewGetCharactersCharacterIDCalendarEventIDForbidden()\n\t\tif err := result.readResponse(response, consumer, o.formats); err != nil {\n\t\t\treturn nil, err\n\t\t}\n\t\treturn nil, result\n\n\tcase 404:\n\t\tresult := NewGetCharactersCharacterIDCalendarEventIDNotFound()\n\t\tif err := result.readResponse(response, consumer, o.formats); err != nil {\n\t\t\treturn nil, err\n\t\t}\n\t\treturn nil, result\n\n\tcase 420:\n\t\tresult := NewGetCharactersCharacterIDCalendarEventIDEnhanceYourCalm()\n\t\tif err := result.readResponse(response, consumer, o.formats); err != nil {\n\t\t\treturn nil, err\n\t\t}\n\t\treturn nil, result\n\n\tcase 500:\n\t\tresult := NewGetCharactersCharacterIDCalendarEventIDInternalServerError()\n\t\tif err := result.readResponse(response, consumer, o.formats); err != nil {\n\t\t\treturn nil, err\n\t\t}\n\t\treturn nil, result\n\n\tcase 503:\n\t\tresult := NewGetCharactersCharacterIDCalendarEventIDServiceUnavailable()\n\t\tif err := result.readResponse(response, consumer, o.formats); err != nil {\n\t\t\treturn nil, err\n\t\t}\n\t\treturn nil, result\n\n\tcase 504:\n\t\tresult := NewGetCharactersCharacterIDCalendarEventIDGatewayTimeout()\n\t\tif err := result.readResponse(response, consumer, o.formats); err != nil {\n\t\t\treturn nil, err\n\t\t}\n\t\treturn nil, result\n\n\tdefault:\n\t\treturn nil, runtime.NewAPIError(\"unknown error\", response, response.Code())\n\t}\n}", "func (g *Github) GetEventID(ctx context.Context, r *http.Request) (string, error) {\n\tid := r.Header.Get(\"X-GitHub-Delivery\")\n\tif id == \"\" {\n\t\treturn \"\", errors.New(\"event id not found for request\")\n\t}\n\treturn id, nil\n}", "func (o *GetEventByUserEventIDAndEventTypeHandlerReader) ReadResponse(response runtime.ClientResponse, consumer runtime.Consumer) (interface{}, error) {\n\tswitch response.Code() {\n\tcase 200:\n\t\tresult := NewGetEventByUserEventIDAndEventTypeHandlerOK()\n\t\tif err := result.readResponse(response, consumer, o.formats); err != nil {\n\t\t\treturn nil, err\n\t\t}\n\t\treturn result, nil\n\tcase 400:\n\t\tresult := NewGetEventByUserEventIDAndEventTypeHandlerBadRequest()\n\t\tif err := result.readResponse(response, consumer, o.formats); err != nil {\n\t\t\treturn nil, err\n\t\t}\n\t\treturn result, nil\n\tcase 401:\n\t\tresult := NewGetEventByUserEventIDAndEventTypeHandlerUnauthorized()\n\t\tif err := result.readResponse(response, consumer, o.formats); err != nil {\n\t\t\treturn nil, err\n\t\t}\n\t\treturn result, nil\n\tcase 403:\n\t\tresult := NewGetEventByUserEventIDAndEventTypeHandlerForbidden()\n\t\tif err := result.readResponse(response, consumer, o.formats); err != nil {\n\t\t\treturn nil, 
err\n\t\t}\n\t\treturn result, nil\n\tcase 404:\n\t\tresult := NewGetEventByUserEventIDAndEventTypeHandlerNotFound()\n\t\tif err := result.readResponse(response, consumer, o.formats); err != nil {\n\t\t\treturn nil, err\n\t\t}\n\t\treturn result, nil\n\tcase 500:\n\t\tresult := NewGetEventByUserEventIDAndEventTypeHandlerInternalServerError()\n\t\tif err := result.readResponse(response, consumer, o.formats); err != nil {\n\t\t\treturn nil, err\n\t\t}\n\t\treturn result, nil\n\n\tdefault:\n\t\tdata, err := ioutil.ReadAll(response.Body())\n\t\tif err != nil {\n\t\t\treturn nil, err\n\t\t}\n\n\t\treturn nil, fmt.Errorf(\"Requested GET /event/namespaces/{namespace}/users/{userId}/eventType/{eventType}/eventId/{eventId} returns an error %d: %s\", response.Code(), string(data))\n\t}\n}", "func (e GetEventByIDResponseValidationError) Cause() error { return e.cause }", "func (o *EventoDTO) GetIdOk() (*int64, bool) {\n\tif o == nil || o.Id == nil {\n\t\treturn nil, false\n\t}\n\treturn o.Id, true\n}", "func GetByID(id int) (entities.Event, error) {\r\n\treturn entities.Event{}, nil\r\n}", "func (g *GetChatEventLogRequest) GetFromEventID() (value int64) {\n\tif g == nil {\n\t\treturn\n\t}\n\treturn g.FromEventID\n}", "func (e ExecuteServiceEvent) EventID() uint64 {\n\treturn e.ID\n}", "func GetEventDetail(id string) EventResponse {\n\tlog.Println(\"Get event detail service initialize\")\n\t// call model to get a events\n\teventDetail := model.GetEvent(id)\n\n\t// create event response struct\n\teventsResponse := EventResponse{true, eventDetail}\n\n\t// sending a response\n\treturn eventsResponse\n}", "func (o *GetEventsEventIDInternalServerError) WithPayload(payload *GetEventsEventIDInternalServerErrorBody) *GetEventsEventIDInternalServerError {\n\to.Payload = payload\n\treturn o\n}", "func (client *RoleDefinitionsClient) getByIDHandleResponse(resp *http.Response) (RoleDefinitionsGetByIDResponse, error) {\n\tresult := RoleDefinitionsGetByIDResponse{RawResponse: resp}\n\tif err := runtime.UnmarshalAsJSON(resp, &result.RoleDefinition); err != nil {\n\t\treturn RoleDefinitionsGetByIDResponse{}, runtime.NewResponseError(err, resp)\n\t}\n\treturn result, nil\n}", "func GetEvent(id uint) (*entity.Event,error){\n\tclient := &http.Client{}\n\tURL := fmt.Sprintf(\"%s%s%d\",baseEventURL,\"event/\",id)\n\treq,_ := http.NewRequest(\"GET\",URL,nil)\n\n\t//DO return an http response\n\tres,err := client.Do(req)\n\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\teventdata := &entity.Event{}\n\n\tbody, err := ioutil.ReadAll(res.Body)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\terr = json.Unmarshal(body,eventdata)\n\tif err != nil{\n\t\treturn nil,err\n\t}\n\treturn eventdata,nil\n\n}", "func ParseGetEventResponse(rsp *http.Response) (*getEventResponse, error) {\n\tbodyBytes, err := ioutil.ReadAll(rsp.Body)\n\tdefer rsp.Body.Close()\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\n\tresponse := &getEventResponse{\n\t\tBody: bodyBytes,\n\t\tHTTPResponse: rsp,\n\t}\n\n\tswitch {\n\tcase strings.Contains(rsp.Header.Get(\"Content-Type\"), \"json\") && rsp.StatusCode == 200:\n\t\tvar dest EventResponse\n\t\tif err := json.Unmarshal(bodyBytes, &dest); err != nil {\n\t\t\treturn nil, err\n\t\t}\n\t\tresponse.JSON200 = &dest\n\n\tcase strings.Contains(rsp.Header.Get(\"Content-Type\"), \"json\") && rsp.StatusCode == 404:\n\t\tvar dest NotFound\n\t\tif err := json.Unmarshal(bodyBytes, &dest); err != nil {\n\t\t\treturn nil, err\n\t\t}\n\t\tresponse.JSON404 = &dest\n\n\t}\n\n\treturn response, nil\n}", "func (es 
*EventService) Get(eventID string) (e Event, err error) {\n\t// GET: /event/:eventID\n\tvar req *http.Request\n\treq, err = es.c.NewRequest(\"GET\", \"/event/\"+eventID, nil)\n\tif err != nil {\n\t\treturn\n\t}\n\n\tresp := struct {\n\t\tStatus string\n\t\tData Event\n\t\tMessage string\n\t}{}\n\n\terr = es.c.Do(req, &resp)\n\treturn resp.Data, err\n}", "func (r *Response) ID() string { return r.id }", "func (r *Response) ID() string { return r.id }", "func (o *SubmitReplayRequestEntity) GetEventId() int64 {\n\tif o == nil || o.EventId == nil {\n\t\tvar ret int64\n\t\treturn ret\n\t}\n\treturn *o.EventId\n}", "func (this *ResponseEvent) GetResponse() message.Response {\n\treturn this.m_response\n}", "func (m *StreamEventsResponse) Validate() error {\n\treturn m.validate(false)\n}", "func (o *SubmitReplayRequestEntity) HasEventId() bool {\n\tif o != nil && o.EventId != nil {\n\t\treturn true\n\t}\n\n\treturn false\n}", "func NewGetEventsEventIDNotFound() *GetEventsEventIDNotFound {\n\n\treturn &GetEventsEventIDNotFound{}\n}", "func (c *EventClient) Get(ctx context.Context, id int) (*Event, error) {\n\treturn c.Query().Where(event.ID(id)).Only(ctx)\n}", "func (o *GetEventsEventIDOK) WriteResponse(rw http.ResponseWriter, producer runtime.Producer) {\n\n\trw.WriteHeader(200)\n\tif o.Payload != nil {\n\t\tpayload := o.Payload\n\t\tif err := producer.Produce(rw, payload); err != nil {\n\t\t\tpanic(err) // let the recovery middleware deal with this\n\t\t}\n\t}\n}", "func (c Client) Event(id int) (*data.Event, error) {\n\turl := fmt.Sprintf(\"%v%v/%v\", c.BaseURL, \"events\", fmt.Sprint(id))\n\teventResponse := data.Event{}\n\terr := c.getResponse(url, &eventResponse)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\treturn &eventResponse, nil\n}", "func (e EventHandler) DeleteEventbyID(ctx context.Context, params api.DeleteEventbyIDParams) middleware.Responder {\n\t// validateErr := u.userService.Validate(params.HTTPRequest.Header.Get(\"X-API-Key\"), params.ID)\n\t// if validateErr != nil {\n\t// \terr := models.Error{\n\t// \t\tMessage: validateErr.Error(),\n\t// \t}\n\t// \treturn api.NewRegisterUserUnauthorized().WithPayload(&err)\n\t// }\n\tstatus, err := e.eventService.DeleteEventByID(params.ID)\n\tif err != nil {\n\t\tpostErr := fmt.Errorf(\"Deletion of the event failed: %w\", err)\n\t\tlogrus.Warnf(postErr.Error())\n\t\tErrstatus := models.Response{}\n\t\tErrstatus.Code = 500\n\t\tErrstatus.Status = \"Failed\"\n\t\tErrstatus.Message = postErr.Error()\n\t\treturn api.NewDeleteEventbyIDInternalServerError().WithPayload(&Errstatus)\n\t}\n\tmodelStatus := models.Response{}\n\tif status {\n\t\tlogrus.Infoln(\"The event has been deleted in the DB:\", status)\n\t\tmodelStatus.Code = 200\n\t\tmodelStatus.Status = \"Success\"\n\t\tmodelStatus.Message = \"The event has been deleted from the DB\"\n\t}\n\n\treturn api.NewDeleteEventbyIDOK().WithPayload(&modelStatus)\n}", "func (m *GetEventByIDResponse_Section) Validate() error {\n\tif m == nil {\n\t\treturn nil\n\t}\n\n\t// no validation rules for Id\n\n\t// no validation rules for Name\n\n\t// no validation rules for Description\n\n\t// no validation rules for Position\n\n\tfor idx, item := range m.GetQuestions() {\n\t\t_, _ = idx, item\n\n\t\tif v, ok := interface{}(item).(interface{ Validate() error }); ok {\n\t\t\tif err := v.Validate(); err != nil {\n\t\t\t\treturn GetEventByIDResponse_SectionValidationError{\n\t\t\t\t\tfield: fmt.Sprintf(\"Questions[%v]\", idx),\n\t\t\t\t\treason: \"embedded message failed validation\",\n\t\t\t\t\tcause: 
err,\n\t\t\t\t}\n\t\t\t}\n\t\t}\n\n\t}\n\n\treturn nil\n}", "func (m *DeleteEmployeeResponse) Validate() error {\n\tif m == nil {\n\t\treturn nil\n\t}\n\n\t// no validation rules for EmpNo\n\n\treturn nil\n}", "func GetEvent(ID string) (Event, error) {\n\tintID, err := strconv.ParseInt(ID, 10, 64)\n\tif err != nil {\n\t\treturn Event{}, errors.New(fmt.Sprintln(\"Can not convert ID:\", err))\n\t}\n\n\trows, err := db.Query(\"SELECT * FROM events WHERE id=?\", intID)\n\tif err != nil {\n\t\treturn Event{}, err\n\t}\n\tdefer rows.Close()\n\n\te := Event{}\n\tif rows.Next() {\n\t\tvar s sql.NullString\n\t\tvar intDate int64\n\t\tvar intID int64\n\t\terr = rows.Scan(&intID, &e.Type, &e.User, &e.Topic, &intDate, &e.Data, &s)\n\t\tif err != nil {\n\t\t\treturn e, err\n\t\t}\n\t\te.ID = strconv.FormatInt(intID, 10)\n\t\te.Date = time.Unix(intDate, 0)\n\t\tif s.Valid {\n\t\t\te.AffectedUser = s.String\n\t\t}\n\t} else {\n\t\treturn e, errors.New(\"Can not read topic data\")\n\t}\n\treturn e, nil\n}", "func (h *eventServiceHTTPHandler) GetEventInfo(c echo.Context) error {\n\tlogCtx := fmt.Sprintf(\"%T.GetEventInfo\", *h)\n\n\teventID, err := strconv.ParseUint(c.QueryParam(\"id\"), 10, 64)\n\tif err != nil {\n\t\thelper.Log(logrus.ErrorLevel, err.Error(), logCtx, \"error_parse_uint\")\n\t\treturn helper.NewResponse(http.StatusInternalServerError, http.StatusInternalServerError, err.Error(), nil).WriteResponse(c)\n\t}\n\n\tif eventID <= 0 {\n\t\terr := errors.New(\"invalid event id\")\n\t\thelper.Log(logrus.ErrorLevel, err.Error(), logCtx, \"error_event_id\")\n\t\treturn helper.NewResponse(http.StatusBadRequest, http.StatusBadRequest, err.Error(), nil).WriteResponse(c)\n\t}\n\n\tresp, err := h.eventUseCase.GetEventInformation(eventID)\n\tif err != nil {\n\t\thelper.Log(logrus.ErrorLevel, err.Error(), logCtx, \"error_get_event_information\")\n\t\treturn helper.NewResponse(http.StatusBadRequest, http.StatusBadRequest, err.Error(), nil).WriteResponse(c)\n\t}\n\n\tdata := make(map[string]interface{})\n\tdata[\"event\"] = resp\n\treturn helper.NewResponse(http.StatusOK, http.StatusOK, \"Success\", data).WriteResponse(c)\n}", "func (c *ClientWithResponses) GetExpensesIdWithResponse(ctx context.Context, id string) (*GetExpensesIdResponse, error) {\n\trsp, err := c.GetExpensesId(ctx, id)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\treturn ParseGetExpensesIdResponse(rsp)\n}", "func (e DeployServiceEvent) EventID() uint64 {\n\treturn e.ID\n}", "func (m Message) GetSecurityResponseID(f *field.SecurityResponseIDField) quickfix.MessageRejectError {\n\treturn m.Body.Get(f)\n}", "func (a *Campaigns_ChallengesApiService) GetChallengeEvent(ctx context.Context, id int64) (ChallengeEventResource, *http.Response, error) {\n\tvar (\n\t\tlocalVarHttpMethod = strings.ToUpper(\"Get\")\n\t\tlocalVarPostBody interface{}\n\t\tlocalVarFileName string\n\t\tlocalVarFileBytes []byte\n\t \tsuccessPayload ChallengeEventResource\n\t)\n\n\t// create path and map variables\n\tlocalVarPath := a.client.cfg.BasePath + \"/challenges/events/{id}\"\n\tlocalVarPath = strings.Replace(localVarPath, \"{\"+\"id\"+\"}\", fmt.Sprintf(\"%v\", id), -1)\n\n\tlocalVarHeaderParams := make(map[string]string)\n\tlocalVarQueryParams := url.Values{}\n\tlocalVarFormParams := url.Values{}\n\n\n\t// to determine the Content-Type header\n\tlocalVarHttpContentTypes := []string{ }\n\n\t// set Content-Type header\n\tlocalVarHttpContentType := selectHeaderContentType(localVarHttpContentTypes)\n\tif localVarHttpContentType != \"\" 
{\n\t\tlocalVarHeaderParams[\"Content-Type\"] = localVarHttpContentType\n\t}\n\n\t// to determine the Accept header\n\tlocalVarHttpHeaderAccepts := []string{\n\t\t\"application/json\",\n\t\t}\n\n\t// set Accept header\n\tlocalVarHttpHeaderAccept := selectHeaderAccept(localVarHttpHeaderAccepts)\n\tif localVarHttpHeaderAccept != \"\" {\n\t\tlocalVarHeaderParams[\"Accept\"] = localVarHttpHeaderAccept\n\t}\n\tr, err := a.client.prepareRequest(ctx, localVarPath, localVarHttpMethod, localVarPostBody, localVarHeaderParams, localVarQueryParams, localVarFormParams, localVarFileName, localVarFileBytes)\n\tif err != nil {\n\t\treturn successPayload, nil, err\n\t}\n\n\t localVarHttpResponse, err := a.client.callAPI(r)\n\t if err != nil || localVarHttpResponse == nil {\n\t\t return successPayload, localVarHttpResponse, err\n\t }\n\t defer localVarHttpResponse.Body.Close()\n\t if localVarHttpResponse.StatusCode >= 300 {\n\t\treturn successPayload, localVarHttpResponse, reportError(localVarHttpResponse.Status)\n\t }\n\t\n\tif err = json.NewDecoder(localVarHttpResponse.Body).Decode(&successPayload); err != nil {\n\t \treturn successPayload, localVarHttpResponse, err\n\t}\n\n\n\treturn successPayload, localVarHttpResponse, err\n}", "func GetOneEvent(c *gin.Context) {\n\tc.JSON(http.StatusOK, serviceEvent.GetOneEvent(c.Param(\"id\")))\n}", "func ParseGetEventsResponse(rsp *http.Response) (*getEventsResponse, error) {\n\tbodyBytes, err := ioutil.ReadAll(rsp.Body)\n\tdefer rsp.Body.Close()\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\n\tresponse := &getEventsResponse{\n\t\tBody: bodyBytes,\n\t\tHTTPResponse: rsp,\n\t}\n\n\tswitch {\n\tcase strings.Contains(rsp.Header.Get(\"Content-Type\"), \"json\") && rsp.StatusCode == 200:\n\t\tvar dest EventsResponse\n\t\tif err := json.Unmarshal(bodyBytes, &dest); err != nil {\n\t\t\treturn nil, err\n\t\t}\n\t\tresponse.JSON200 = &dest\n\n\tcase strings.Contains(rsp.Header.Get(\"Content-Type\"), \"json\") && rsp.StatusCode == 404:\n\t\tvar dest NotFound\n\t\tif err := json.Unmarshal(bodyBytes, &dest); err != nil {\n\t\t\treturn nil, err\n\t\t}\n\t\tresponse.JSON404 = &dest\n\n\t}\n\n\treturn response, nil\n}", "func (o *WatchlistScreeningIndividualUpdateResponse) GetId() string {\n\tif o == nil {\n\t\tvar ret string\n\t\treturn ret\n\t}\n\n\treturn o.Id\n}", "func (c *ClientWithResponses) GetEventsWithResponse(\n\tctx context.Context,\n\tparams *GetEventsParams,\n) (*getEventsResponse, error) {\n\trsp, err := c.GetEvents(ctx, params)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\treturn ParseGetEventsResponse(rsp)\n}", "func (o *WatchlistScreeningIndividualUpdateResponse) GetIdOk() (*string, bool) {\n\tif o == nil {\n\t\treturn nil, false\n\t}\n\treturn &o.Id, true\n}", "func (o *UserInvitationResponseData) GetId() string {\n\tif o == nil || o.Id == nil {\n\t\tvar ret string\n\t\treturn ret\n\t}\n\treturn *o.Id\n}", "func (o *MemberResponse) GetId() string {\n\tif o == nil {\n\t\tvar ret string\n\t\treturn ret\n\t}\n\n\treturn o.Id\n}", "func (m *EventRSVPMutation) EventID() (id int, exists bool) {\n\tif m.event != nil {\n\t\treturn *m.event, true\n\t}\n\treturn\n}", "func ParseGetaspecificEventResponse(rsp *http.Response) (*GetaspecificEventResponse, error) {\n\tbodyBytes, err := ioutil.ReadAll(rsp.Body)\n\tdefer rsp.Body.Close()\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\n\tresponse := &GetaspecificEventResponse{\n\t\tBody: bodyBytes,\n\t\tHTTPResponse: rsp,\n\t}\n\n\tswitch {\n\tcase strings.Contains(rsp.Header.Get(\"Content-Type\"), \"json\") && rsp.StatusCode 
== 200:\n\t\tvar dest Events\n\t\tif err := json.Unmarshal(bodyBytes, &dest); err != nil {\n\t\t\treturn nil, err\n\t\t}\n\t\tresponse.JSON200 = &dest\n\n\t}\n\n\treturn response, nil\n}", "func (ec *EventController) GetByID(ctx context.Context, id primitive.ObjectID) (*Event, error) {\n\tvar event Event\n\teventResult := ec.collection.FindOne(ctx, bson.M{\"_id\": id})\n\tif eventResult.Err() != nil {\n\t\tif eventResult.Err() == mongo.ErrNoDocuments {\n\t\t\treturn nil, errors.NewServerError(\"No events found\", http.StatusNotFound)\n\t\t}\n\t\treturn nil, eventResult.Err()\n\t}\n\n\teventResult.Decode(&event)\n\n\treturn &event, nil\n}", "func (o *EventAttributes) GetRelatedEventIdOk() (*int64, bool) {\n\tif o == nil || o.RelatedEventId == nil {\n\t\treturn nil, false\n\t}\n\treturn o.RelatedEventId, true\n}", "func NewGetEventsEventIDMethodNotAllowed() *GetEventsEventIDMethodNotAllowed {\n\n\treturn &GetEventsEventIDMethodNotAllowed{}\n}", "func (m *EventRSVPMutation) SetEventID(id int) {\n\tm.event = &id\n}", "func (e *TodoRegistered) GetEventID() string {\n\treturn e.EventID\n}", "func (o *GetEventsEventIDNotFound) WriteResponse(rw http.ResponseWriter, producer runtime.Producer) {\n\n\trw.WriteHeader(404)\n\tif o.Payload != nil {\n\t\tpayload := o.Payload\n\t\tif err := producer.Produce(rw, payload); err != nil {\n\t\t\tpanic(err) // let the recovery middleware deal with this\n\t\t}\n\t}\n}", "func (m *ContainerGetStateResponse) Validate(formats strfmt.Registry) error {\n\tvar res []error\n\n\tif err := m.validateHandle(formats); err != nil {\n\t\t// prop\n\t\tres = append(res, err)\n\t}\n\n\tif err := m.validateState(formats); err != nil {\n\t\t// prop\n\t\tres = append(res, err)\n\t}\n\n\tif len(res) > 0 {\n\t\treturn errors.CompositeValidationError(res...)\n\t}\n\treturn nil\n}", "func (client SmartGroupsClient) GetByIDResponder(resp *http.Response) (result SmartGroup, err error) {\n\terr = autorest.Respond(\n\t\tresp,\n\t\tazure.WithErrorUnlessStatusCode(http.StatusOK),\n\t\tautorest.ByUnmarshallingJSON(&result),\n\t\tautorest.ByClosing())\n\tresult.Response = autorest.Response{Response: resp}\n\treturn\n}", "func (w *ServerInterfaceWrapper) GetEmployeesId(ctx echo.Context) error {\n\tvar err error\n\t// ------------- Path parameter \"id\" -------------\n\tvar id string\n\n\terr = runtime.BindStyledParameter(\"simple\", false, \"id\", ctx.Param(\"id\"), &id)\n\tif err != nil {\n\t\treturn echo.NewHTTPError(http.StatusBadRequest, fmt.Sprintf(\"Invalid format for parameter id: %s\", err))\n\t}\n\n\t// Invoke the callback with all the unmarshalled arguments\n\terr = w.Handler.GetEmployeesId(ctx, id)\n\treturn err\n}", "func NewEmployeesByIDGetOK() *EmployeesByIDGetOK {\n\treturn &EmployeesByIDGetOK{}\n}", "func (o *MemberResponse) GetIdOk() (*string, bool) {\n\tif o == nil {\n\t\treturn nil, false\n\t}\n\treturn &o.Id, true\n}", "func (a *EventsApiService) GetEvent(ctx _context.Context, eventId string, localVarOptionals *GetEventOpts) (EventResponse, *_nethttp.Response, error) {\n\tvar (\n\t\tlocalVarHTTPMethod = _nethttp.MethodGet\n\t\tlocalVarPostBody interface{}\n\t\tlocalVarFormFileName string\n\t\tlocalVarFileName string\n\t\tlocalVarFileBytes []byte\n\t\tlocalVarReturnValue EventResponse\n\t)\n\n\t// create path and map variables\n\tlocalVarPath := a.client.cfg.BasePath + \"/events/{eventId}\"\n\tlocalVarPath = strings.Replace(localVarPath, \"{\"+\"eventId\"+\"}\", _neturl.QueryEscape(parameterToString(eventId, \"\")) , -1)\n\n\tlocalVarHeaderParams := 
make(map[string]string)\n\tlocalVarQueryParams := _neturl.Values{}\n\tlocalVarFormParams := _neturl.Values{}\n\n\t// to determine the Content-Type header\n\tlocalVarHTTPContentTypes := []string{}\n\n\t// set Content-Type header\n\tlocalVarHTTPContentType := selectHeaderContentType(localVarHTTPContentTypes)\n\tif localVarHTTPContentType != \"\" {\n\t\tlocalVarHeaderParams[\"Content-Type\"] = localVarHTTPContentType\n\t}\n\n\t// to determine the Accept header\n\tlocalVarHTTPHeaderAccepts := []string{\"application/json\"}\n\n\t// set Accept header\n\tlocalVarHTTPHeaderAccept := selectHeaderAccept(localVarHTTPHeaderAccepts)\n\tif localVarHTTPHeaderAccept != \"\" {\n\t\tlocalVarHeaderParams[\"Accept\"] = localVarHTTPHeaderAccept\n\t}\n\tif localVarOptionals != nil && localVarOptionals.XAnchoreAccount.IsSet() {\n\t\tlocalVarHeaderParams[\"x-anchore-account\"] = parameterToString(localVarOptionals.XAnchoreAccount.Value(), \"\")\n\t}\n\tr, err := a.client.prepareRequest(ctx, localVarPath, localVarHTTPMethod, localVarPostBody, localVarHeaderParams, localVarQueryParams, localVarFormParams, localVarFormFileName, localVarFileName, localVarFileBytes)\n\tif err != nil {\n\t\treturn localVarReturnValue, nil, err\n\t}\n\n\tlocalVarHTTPResponse, err := a.client.callAPI(r)\n\tif err != nil || localVarHTTPResponse == nil {\n\t\treturn localVarReturnValue, localVarHTTPResponse, err\n\t}\n\n\tlocalVarBody, err := _ioutil.ReadAll(localVarHTTPResponse.Body)\n\tlocalVarHTTPResponse.Body.Close()\n\tif err != nil {\n\t\treturn localVarReturnValue, localVarHTTPResponse, err\n\t}\n\n\tif localVarHTTPResponse.StatusCode >= 300 {\n\t\tnewErr := GenericOpenAPIError{\n\t\t\tbody: localVarBody,\n\t\t\terror: localVarHTTPResponse.Status,\n\t\t}\n\t\treturn localVarReturnValue, localVarHTTPResponse, newErr\n\t}\n\n\terr = a.client.decode(&localVarReturnValue, localVarBody, localVarHTTPResponse.Header.Get(\"Content-Type\"))\n\tif err != nil {\n\t\tnewErr := GenericOpenAPIError{\n\t\t\tbody: localVarBody,\n\t\t\terror: err.Error(),\n\t\t}\n\t\treturn localVarReturnValue, localVarHTTPResponse, newErr\n\t}\n\n\treturn localVarReturnValue, localVarHTTPResponse, nil\n}", "func (m *GetEventByIDResponse_Question) Validate() error {\n\tif m == nil {\n\t\treturn nil\n\t}\n\n\t// no validation rules for Id\n\n\t// no validation rules for Content\n\n\t// no validation rules for Position\n\n\t// no validation rules for Type\n\n\t// no validation rules for IsRequired\n\n\t// no validation rules for LimitedChoice\n\n\tfor idx, item := range m.GetOptions() {\n\t\t_, _ = idx, item\n\n\t\tif v, ok := interface{}(item).(interface{ Validate() error }); ok {\n\t\t\tif err := v.Validate(); err != nil {\n\t\t\t\treturn GetEventByIDResponse_QuestionValidationError{\n\t\t\t\t\tfield: fmt.Sprintf(\"Options[%v]\", idx),\n\t\t\t\t\treason: \"embedded message failed validation\",\n\t\t\t\t\tcause: err,\n\t\t\t\t}\n\t\t\t}\n\t\t}\n\n\t}\n\n\treturn nil\n}", "func (msg *InformResponse) GetID() string {\n\tif len(msg.ID) < 1 {\n\t\tmsg.ID = fmt.Sprintf(\"ID:intrnl.unset.id.%s%d.%d\", msg.GetName(), time.Now().Unix(), time.Now().UnixNano())\n\t}\n\treturn msg.ID\n}", "func (o *AuthenticationResponse) GetId() string {\n\tif o == nil {\n\t\tvar ret string\n\t\treturn ret\n\t}\n\n\treturn o.Id\n}", "func NewGetValidationByIDOK() *GetValidationByIDOK {\n\n\treturn &GetValidationByIDOK{}\n}", "func (_VinTracker *VinTrackerTransactor) GetEventById(opts *bind.TransactOpts, history *big.Int) (*types.Transaction, error) {\n\treturn 
_VinTracker.contract.Transact(opts, \"getEventById\", history)\n}", "func (e GetEventByIDRequestValidationError) Reason() string { return e.reason }", "func (o *UserInvitationResponseData) GetIdOk() (*string, bool) {\n\tif o == nil || o.Id == nil {\n\t\treturn nil, false\n\t}\n\treturn o.Id, true\n}", "func (d *segmentationDescriptor) SetEventID(value uint32) {\n\td.eventID = value\n}", "func (m *EventsListResponse) Validate(formats strfmt.Registry) error {\n\tvar res []error\n\n\tif err := m.validateEvents(formats); err != nil {\n\t\t// prop\n\t\tres = append(res, err)\n\t}\n\n\tif len(res) > 0 {\n\t\treturn errors.CompositeValidationError(res...)\n\t}\n\treturn nil\n}", "func (client *WebhooksClient) listEventsHandleResponse(resp *http.Response) (WebhooksClientListEventsResponse, error) {\n\tresult := WebhooksClientListEventsResponse{}\n\tif err := runtime.UnmarshalAsJSON(resp, &result.EventListResult); err != nil {\n\t\treturn WebhooksClientListEventsResponse{}, err\n\t}\n\treturn result, nil\n}", "func (o *UserInvitationResponseData) HasId() bool {\n\treturn o != nil && o.Id != nil\n}", "func ParseDeleteEmployeesIdResponse(rsp *http.Response) (*DeleteEmployeesIdResponse, error) {\n\tbodyBytes, err := ioutil.ReadAll(rsp.Body)\n\tdefer rsp.Body.Close()\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\n\tresponse := &DeleteEmployeesIdResponse{\n\t\tBody: bodyBytes,\n\t\tHTTPResponse: rsp,\n\t}\n\n\tswitch {\n\t}\n\n\treturn response, nil\n}", "func (e GetEventByIDResponseValidationError) Field() string { return e.field }", "func (o *TokenResponse) GetId() string {\n\tif o == nil {\n\t\tvar ret string\n\t\treturn ret\n\t}\n\n\treturn o.Id\n}", "func (m *CreatMessageResponse) Validate() error {\n\tif m == nil {\n\t\treturn nil\n\t}\n\n\t// no validation rules for Id\n\n\treturn nil\n}", "func (r *Response) Validate(lastID int64) error {\n\tif !(r.StatusManager == ResponseStatusReview ||\n\t\tr.StatusManager == ResponseStatusDenied ||\n\t\tr.StatusManager == ResponseStatusAccepted) {\n\t\treturn errors.New(\"wrong manager response status\")\n\t}\n\tif !(r.StatusFreelancer == ResponseStatusReview ||\n\t\tr.StatusFreelancer == ResponseStatusDenied ||\n\t\tr.StatusFreelancer == ResponseStatusAccepted ||\n\t\tr.StatusFreelancer == ResponseStatusBlock) {\n\t\treturn errors.New(\"wrong freelancer response status\")\n\t}\n\tif r.Date.IsZero() {\n\t\treturn errors.New(\"wrong date\")\n\t}\n\tif r.ID != lastID {\n\t\treturn errors.New(\"current id does not match last id\")\n\t}\n\tif r.FreelancerId == 0 || r.JobId == 0 {\n\t\treturn errors.New(\"wrong relationships between tables\")\n\t}\n\treturn nil\n}", "func (o *EventoDTO) HasId() bool {\n\tif o != nil && o.Id != nil {\n\t\treturn true\n\t}\n\n\treturn false\n}", "func ParseGetInvoicesIdResponse(rsp *http.Response) (*GetInvoicesIdResponse, error) {\n\tbodyBytes, err := ioutil.ReadAll(rsp.Body)\n\tdefer rsp.Body.Close()\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\n\tresponse := &GetInvoicesIdResponse{\n\t\tBody: bodyBytes,\n\t\tHTTPResponse: rsp,\n\t}\n\n\tswitch {\n\tcase strings.Contains(rsp.Header.Get(\"Content-Type\"), \"json\") && rsp.StatusCode == 200:\n\t\tvar dest Invoice\n\t\tif err := json.Unmarshal(bodyBytes, &dest); err != nil {\n\t\t\treturn nil, err\n\t\t}\n\t\tresponse.JSON200 = &dest\n\n\t}\n\n\treturn response, nil\n}", "func (o *TeamSubjectResponse) GetIdOk() (*int32, bool) {\n\tif o == nil {\n\t\treturn nil, false\n\t}\n\treturn &o.Id, true\n}", "func (o *GetInstancesEventByEventIDParams) SetEventID(eventID int64) 
{\n\to.EventID = eventID\n}", "func (o *AuthenticationResponse) GetIdOk() (*string, bool) {\n\tif o == nil {\n\t\treturn nil, false\n\t}\n\treturn &o.Id, true\n}", "func (o *ListEventsV1NamespacedEventReader) ReadResponse(response runtime.ClientResponse, consumer runtime.Consumer) (interface{}, error) {\n\tswitch response.Code() {\n\tcase 200:\n\t\tresult := NewListEventsV1NamespacedEventOK()\n\t\tif err := result.readResponse(response, consumer, o.formats); err != nil {\n\t\t\treturn nil, err\n\t\t}\n\t\treturn result, nil\n\tcase 401:\n\t\tresult := NewListEventsV1NamespacedEventUnauthorized()\n\t\tif err := result.readResponse(response, consumer, o.formats); err != nil {\n\t\t\treturn nil, err\n\t\t}\n\t\treturn nil, result\n\n\tdefault:\n\t\treturn nil, runtime.NewAPIError(\"response status code does not match any response statuses defined for this endpoint in the swagger spec\", response, response.Code())\n\t}\n}" ]
[ "0.62451345", "0.6210858", "0.5926266", "0.5824581", "0.580919", "0.5765843", "0.5701005", "0.5687636", "0.5674573", "0.56442136", "0.5635718", "0.5560808", "0.5514524", "0.5449539", "0.5447963", "0.54305404", "0.542107", "0.5401912", "0.53833896", "0.53833896", "0.53666943", "0.53559136", "0.5308598", "0.5308019", "0.530585", "0.5303316", "0.52874786", "0.5274724", "0.5257052", "0.52474964", "0.5247132", "0.52372515", "0.52311796", "0.52275383", "0.52240413", "0.5203669", "0.5182165", "0.5154812", "0.5145864", "0.5145864", "0.5120999", "0.5107926", "0.5105412", "0.5096604", "0.5083356", "0.50747955", "0.5040717", "0.5039951", "0.50299895", "0.50121295", "0.5010037", "0.49994817", "0.49974787", "0.49971882", "0.49841246", "0.49754566", "0.4970961", "0.49684098", "0.49611372", "0.49604467", "0.4947696", "0.4943076", "0.49406448", "0.49200583", "0.4916704", "0.49166366", "0.49148935", "0.4909724", "0.49012768", "0.48951373", "0.48804855", "0.4873957", "0.48661992", "0.48630834", "0.48560855", "0.48461473", "0.48453897", "0.4842802", "0.48384002", "0.4835945", "0.4830588", "0.48280826", "0.48234686", "0.48223084", "0.48188972", "0.4806765", "0.4802247", "0.4792849", "0.47823682", "0.47627154", "0.4760237", "0.47581783", "0.4756418", "0.47550672", "0.4747364", "0.4743824", "0.47296163", "0.47187907", "0.47157988", "0.47130302" ]
0.8033496
0
Field function returns field value.
func (e GetEventByIDResponseValidationError) Field() string { return e.field }
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "func GetFieldValue(v interface{}, field string) (r string) {\n\tvar immutable reflect.Value\n\timmutable = GetReflectValue(v)\n\tval := immutable.FieldByName(field)\n\tswitch val.Kind() {\n\tcase reflect.Int64, reflect.Int32, reflect.Int:\n\t\tr = fmt.Sprintf(\"%d\", val.Int())\n\tcase reflect.Float64, reflect.Float32:\n\t\tr = fmt.Sprintf(\"%.2f\", val.Float())\n\tdefault:\n\t\t// process time\n\t\tvi := val.Interface()\n\t\tif vc, ok := vi.(time.Time); ok {\n\t\t\tr = FormatTime(vc)\n\t\t\tbreak\n\t\t}\n\t\tr = fmt.Sprintf(\"%v\", val)\n\t}\n\treturn\n}", "func (f *field) Val() interface{} {\n\treturn f.v\n}", "func (f Fields) ValueForField(fieldName string) string {\n\treturn f.ValueForFieldOfType(fieldName, \"\")\n}", "func (v *ClassValue) field(s *scope, name string) Value {\n\tfield, ok := v.Fields[name]\n\tif !ok {\n\t\tpanic(fmt.Errorf(\"ClassValue %v did not contain field %v\", v.Type().Name(), name))\n\t}\n\treturn field\n}", "func (f *Field) Value() interface{} {\n\treturn f.value.Interface()\n}", "func (f *Fieldx) Value() interface{} {\n\treturn f.value.Interface()\n}", "func (i Item) GetField(name string) interface{} {\n\treturn getField(name, i.Payload)\n}", "func FieldValue(field *InputField) string {\n\treturn field.value\n}", "func (e RanparameterValueValidationError) Field() string { return e.field }", "func (i I)Field(r,c int, value string)string{\n return value\n}", "func (s *StructField) Field(name string) (*StructField, error) {\n\treturn Field(s.Value(), name)\n}", "func (entry *Entry) Field(name string) (value string, err error) {\n\tvalue, ok := entry.fields[name]\n\tif !ok {\n\t\terr = fmt.Errorf(\"field '%v' does not found in record %+v\", name, *entry)\n\t}\n\treturn\n}", "func (m *NodeMutation) Field(name string) (ent.Value, bool) {\n\tswitch name {\n\tcase node.FieldValue:\n\t\treturn m.Value()\n\t}\n\treturn nil, false\n}", "func (u *User) GetFieldValue(field *mapping.StructField) (interface{}, error) {\n\tswitch field.Index[0] {\n\tcase 0: // ID\n\t\treturn u.ID, nil\n\tcase 1: // Name\n\t\treturn u.Name, nil\n\tcase 2: // CreatedAt\n\t\treturn u.CreatedAt, nil\n\tcase 3: // CreatedAtIso\n\t\treturn u.CreatedAtIso, nil\n\tcase 5: // MotherID\n\t\treturn u.MotherID, nil\n\tcase 7: // FatherID\n\t\treturn u.FatherID, nil\n\t}\n\treturn nil, errors.Wrapf(mapping.ErrInvalidModelField, \"provided invalid field: '%s' for given model: User'\", field.Name())\n}", "func (m *NumberTokenMutation) Field(name string) (ent.Value, bool) {\n\tswitch name {\n\tcase numbertoken.FieldValue:\n\t\treturn m.Value()\n\t}\n\treturn nil, false\n}", "func (f *Field) Field(name string) *Field {\n\tfield, ok := f.FieldOk(name)\n\tif !ok {\n\t\tpanic(\"field not found\")\n\t}\n\n\treturn field\n}", "func (e GetInstanceRequestValidationError) Field() string { return e.field }", "func (e RanparameterItemValidationError) Field() string { return e.field }", "func (e ApplicationPubSubValidationError) Field() string { return e.field }", "func (res Result) GetField(fields ...string) interface{} {\n\tif len(fields) == 0 {\n\t\treturn res\n\t}\n\n\treturn res.get(fields)\n}", "func (t *Type) Field(i int) *Field", "func (m *CarRepairrecordMutation) Field(name string) (ent.Value, bool) {\n\tswitch name {\n\tcase carrepairrecord.FieldDatetime:\n\t\treturn m.Datetime()\n\tcase carrepairrecord.FieldRepairdetail:\n\t\treturn m.Repairdetail()\n\tcase carrepairrecord.FieldRepaircost:\n\t\treturn m.Repaircost()\n\tcase carrepairrecord.FieldCarmaintenance:\n\t\treturn m.Carmaintenance()\n\t}\n\treturn 
nil, false\n}", "func (b *box) getFieldValue(x, y int) int {\n\treturn b.values[x+y*3]\n}", "func (e GetEventByIDRequestValidationError) Field() string { return e.field }", "func (msg *Message) Field(fieldName string) *Field {\n\treturn msg.fieldByName[fieldName]\n}", "func (e GetRequestValidationError) Field() string { return e.field }", "func (e GetRequestValidationError) Field() string { return e.field }", "func (e GetInstanceResponseValidationError) Field() string { return e.field }", "func (e BitStringValidationError) Field() string { return e.field }", "func (e GetResponseValidationError) Field() string { return e.field }", "func (e GetApplicationPubSubRequestValidationError) Field() string { return e.field }", "func (e ResultValidationError) Field() string { return e.field }", "func (e RanparameterDefItemValidationError) Field() string { return e.field }", "func (e ArfcnValidationError) Field() string { return e.field }", "func (p *Pet) GetFieldValue(field *mapping.StructField) (interface{}, error) {\n\tswitch field.Index[0] {\n\tcase 0: // ID\n\t\treturn p.ID, nil\n\tcase 1: // Name\n\t\treturn p.Name, nil\n\tcase 3: // OwnerID\n\t\treturn p.OwnerID, nil\n\t}\n\treturn nil, errors.Wrapf(mapping.ErrInvalidModelField, \"provided invalid field: '%s' for given model: Pet'\", field.Name())\n}", "func (e RanparameterIdValidationError) Field() string { return e.field }", "func (e RetrieveResponseValidationError) Field() string { return e.field }", "func (m *RepairinvoiceMutation) Field(name string) (ent.Value, bool) {\n\tswitch name {\n\tcase repairinvoice.FieldBequipment:\n\t\treturn m.Bequipment()\n\tcase repairinvoice.FieldEmtell:\n\t\treturn m.Emtell()\n\tcase repairinvoice.FieldNum:\n\t\treturn m.Num()\n\t}\n\treturn nil, false\n}", "func (m *CleaningroomMutation) Field(name string) (ent.Value, bool) {\n\tswitch name {\n\tcase cleaningroom.FieldNote:\n\t\treturn m.Note()\n\tcase cleaningroom.FieldDateandstarttime:\n\t\treturn m.Dateandstarttime()\n\tcase cleaningroom.FieldPhonenumber:\n\t\treturn m.Phonenumber()\n\tcase cleaningroom.FieldNumofem:\n\t\treturn m.Numofem()\n\t}\n\treturn nil, false\n}", "func Field(name, from, reference string) (string, error) {\n\treturn makeRequest(\"field\", name, from, reference)\n}", "func (e GetMovableObjectRequestValidationError) Field() string { return e.field }", "func (e ResolveResponseValidationError) Field() string { return e.field }", "func (e PublishResponseValidationError) Field() string { return e.field }", "func (e GetMessageRequestValidationError) Field() string { return e.field }", "func (e GetMessageResponseValidationError) Field() string { return e.field }", "func (m *RepairinvoiceMutation) Field(name string) (ent.Value, bool) {\n\tswitch name {\n\tcase repairinvoice.FieldSymptomid:\n\t\treturn m.Symptomid()\n\tcase repairinvoice.FieldDeviceid:\n\t\treturn m.Deviceid()\n\tcase repairinvoice.FieldUserid:\n\t\treturn m.Userid()\n\tcase repairinvoice.FieldStatusrepairid:\n\t\treturn m.Statusrepairid()\n\t}\n\treturn nil, false\n}", "func (e SimpleRequestValidationError) Field() string { return e.field }", "func (e CacheValidationError) Field() string { return e.field }", "func (e PciValidationError) Field() string { return e.field }", "func (e ChannelPayRequestValidationError) Field() string { return e.field }", "func (e GetMovableObjectResponseValidationError) Field() string { return e.field }", "func (e RetrieveRequestValidationError) Field() string { return e.field }", "func (m *ExchangeMutation) Field(name string) (ent.Value, bool) 
{\n\tswitch name {\n\tcase exchange.FieldCode:\n\t\treturn m.Code()\n\tcase exchange.FieldName:\n\t\treturn m.Name()\n\t}\n\treturn nil, false\n}", "func (e PublishRequestValidationError) Field() string { return e.field }", "func (m *PetruleMutation) Field(name string) (ent.Value, bool) {\n\tswitch name {\n\tcase petrule.FieldPetrule:\n\t\treturn m.Petrule()\n\t}\n\treturn nil, false\n}", "func (e GitopsCFValidationError) Field() string { return e.field }", "func (e SimpleResponseValidationError) Field() string { return e.field }", "func (e ChannelPayResponseValidationError) Field() string { return e.field }", "func (f *Field) Get(l *Location) (string, error) {\n\tif l.Comp == -1 {\n\t\treturn string(f.Value), nil\n\t}\n\tcomp, err := f.Component(l.Comp)\n\tif err != nil {\n\t\treturn \"\", err\n\t}\n\treturn comp.Get(l)\n}", "func (m *RepairingMutation) Field(name string) (ent.Value, bool) {\n\tswitch name {\n\tcase repairing.FieldRepairpart:\n\t\treturn m.Repairpart()\n\t}\n\treturn nil, false\n}", "func (e RanfunctionNameValidationError) Field() string { return e.field }", "func (m *BillMutation) Field(name string) (ent.Value, bool) {\n\tswitch name {\n\tcase bill.FieldPrice:\n\t\treturn m.Price()\n\tcase bill.FieldTime:\n\t\treturn m.Time()\n\t}\n\treturn nil, false\n}", "func (m *EventRSVPMutation) Field(name string) (ent.Value, bool) {\n\treturn nil, false\n}", "func Field(v interface{}, name string) (*Fieldx, bool) {\n\treturn New(v).Field(name)\n}", "func (e GetStreamRequestValidationError) Field() string { return e.field }", "func (e RdsValidationError) Field() string { return e.field }", "func (f *TagField) Value() string {\n\treturn f.value\n}", "func (m *LeaseMutation) Field(name string) (ent.Value, bool) {\n\tswitch name {\n\tcase lease.FieldAddedtime:\n\t\treturn m.Addedtime()\n\tcase lease.FieldTenant:\n\t\treturn m.Tenant()\n\tcase lease.FieldNumbtenant:\n\t\treturn m.Numbtenant()\n\tcase lease.FieldIdtenant:\n\t\treturn m.Idtenant()\n\tcase lease.FieldAgetenant:\n\t\treturn m.Agetenant()\n\t}\n\treturn nil, false\n}", "func (e RetrieveCurrentRequestValidationError) Field() string { return e.field }", "func (fn AdapterFunc) Field(fieldpath []string) (string, bool) {\n\treturn fn(fieldpath)\n}", "func (e EarfcnValidationError) Field() string { return e.field }", "func (e Response_DataValidationError) Field() string { return e.field }", "func (e ScopedRdsValidationError) Field() string { return e.field }", "func (e ResolveRequestValidationError) Field() string { return e.field }", "func (e PaymentInputValidationError) Field() string { return e.field }", "func (m *PatientrecordMutation) Field(name string) (ent.Value, bool) {\n\tswitch name {\n\tcase patientrecord.FieldName:\n\t\treturn m.Name()\n\t}\n\treturn nil, false\n}", "func (e BatchGetResponseValidationError) Field() string { return e.field }", "func (i *Item) GetValue(field string) string {\n\tif i == nil || len(i.Fields) == 0 {\n\t\treturn \"\"\n\t}\n\n\tsectionFilter := false\n\tsectionLabel := \"\"\n\tfieldLabel := field\n\tif strings.Contains(field, \".\") {\n\t\tparts := strings.Split(field, \".\")\n\n\t\t// Test to make sure the . 
isn't the last character\n\t\tif len(parts) == 2 {\n\t\t\tsectionFilter = true\n\t\t\tsectionLabel = parts[0]\n\t\t\tfieldLabel = parts[1]\n\t\t}\n\t}\n\n\tfor _, f := range i.Fields {\n\t\tif sectionFilter {\n\t\t\tif f.Section != nil {\n\t\t\t\tif sectionLabel != i.SectionLabelForID(f.Section.ID) {\n\t\t\t\t\tcontinue\n\t\t\t\t}\n\t\t\t}\n\t\t}\n\n\t\tif fieldLabel == f.Label {\n\t\t\treturn f.Value\n\t\t}\n\t}\n\n\treturn \"\"\n}", "func (m *RoomInfoMutation) Field(name string) (ent.Value, bool) {\n\tswitch name {\n\tcase roominfo.FieldInfo:\n\t\treturn m.Info()\n\t}\n\treturn nil, false\n}", "func (m *TokenMutation) Field(name string) (ent.Value, bool) {\n\tswitch name {\n\tcase token.FieldCreatedAt:\n\t\treturn m.CreatedAt()\n\tcase token.FieldUpdatedAt:\n\t\treturn m.UpdatedAt()\n\tcase token.FieldValue:\n\t\treturn m.Value()\n\t}\n\treturn nil, false\n}", "func (m *ResourceMutation) Field(name string) (ent.Value, bool) {\n\tswitch name {\n\tcase resource.FieldName:\n\t\treturn m.Name()\n\tcase resource.FieldType:\n\t\treturn m.GetType()\n\t}\n\treturn nil, false\n}", "func (e MovableObjectValidationError) Field() string { return e.field }", "func (m *CardMutation) Field(name string) (ent.Value, bool) {\n\tswitch name {\n\tcase card.FieldCreateTime:\n\t\treturn m.CreateTime()\n\tcase card.FieldUpdateTime:\n\t\treturn m.UpdateTime()\n\tcase card.FieldSuit:\n\t\treturn m.Suit()\n\tcase card.FieldValue:\n\t\treturn m.Value()\n\t}\n\treturn nil, false\n}", "func (m *EventMutation) Field(name string) (ent.Value, bool) {\n\tswitch name {\n\tcase event.FieldName:\n\t\treturn m.Name()\n\t}\n\treturn nil, false\n}", "func (m *BillMutation) Field(name string) (ent.Value, bool) {\n\tswitch name {\n\tcase bill.FieldQuantity:\n\t\treturn m.Quantity()\n\tcase bill.FieldAddedTime:\n\t\treturn m.AddedTime()\n\t}\n\treturn nil, false\n}", "func (m *StreetMutation) Field(name string) (ent.Value, bool) {\n\tswitch name {\n\tcase street.FieldName:\n\t\treturn m.Name()\n\t}\n\treturn nil, false\n}", "func (m *LengthtimeMutation) Field(name string) (ent.Value, bool) {\n\tswitch name {\n\tcase lengthtime.FieldLengthtime:\n\t\treturn m.Lengthtime()\n\t}\n\treturn nil, false\n}", "func (e AssessmentResultValidationError) Field() string { return e.field }", "func (s UserSet) FieldGet(field models.FieldName) *models.FieldInfo {\n\tres := s.Collection().Call(\"FieldGet\", field)\n\tresTyped, _ := res.(*models.FieldInfo)\n\treturn resTyped\n}", "func (e GetUserResponseValidationError) Field() string { return e.field }", "func (m *PatientrightsMutation) Field(name string) (ent.Value, bool) {\n\tswitch name {\n\tcase patientrights.FieldPermissionDate:\n\t\treturn m.PermissionDate()\n\t}\n\treturn nil, false\n}", "func (e GetStreamResponseValidationError) Field() string { return e.field }", "func (m *EquipmentrentalMutation) Field(name string) (ent.Value, bool) {\n\tswitch name {\n\tcase equipmentrental.FieldRENTALAMOUNT:\n\t\treturn m.RENTALAMOUNT()\n\tcase equipmentrental.FieldRENTALDATE:\n\t\treturn m.RENTALDATE()\n\tcase equipmentrental.FieldRETURNDATE:\n\t\treturn m.RETURNDATE()\n\t}\n\treturn nil, false\n}", "func (f *FieldHandler) Value(initZero bool) reflect.Value {\n\treturn f.field.reflectValueGetter(f.expr.ptr, initZero)\n}", "func (m *PurposeMutation) Field(name string) (ent.Value, bool) {\n\tswitch name {\n\tcase purpose.FieldObjective:\n\t\treturn m.Objective()\n\t}\n\treturn nil, false\n}", "func (e ApplicationPubSubsValidationError) Field() string { return e.field }", "func (f Unstructured) 
Field(field string) Fragment {\n\tif f.fields != nil {\n\t\treturn f.fields[field]\n\t}\n\treturn nil\n}", "func (e BodyResponseValidationError) Field() string { return e.field }", "func (m *CarMutation) Field(name string) (ent.Value, bool) {\n\tswitch name {\n\tcase car.FieldCreateTime:\n\t\treturn m.CreateTime()\n\tcase car.FieldUpdateTime:\n\t\treturn m.UpdateTime()\n\tcase car.FieldModel:\n\t\treturn m.Model()\n\tcase car.FieldRegisteredAt:\n\t\treturn m.RegisteredAt()\n\t}\n\treturn nil, false\n}", "func (m *CardMutation) Field(name string) (ent.Value, bool) {\n\tswitch name {\n\tcase card.FieldNumber:\n\t\treturn m.Number()\n\tcase card.FieldName:\n\t\treturn m.Name()\n\tcase card.FieldOwnerID:\n\t\treturn m.OwnerID()\n\t}\n\treturn nil, false\n}" ]
[ "0.71079886", "0.705458", "0.70306563", "0.70252305", "0.6945119", "0.69039124", "0.689789", "0.68854237", "0.68611896", "0.68137765", "0.6811531", "0.67632294", "0.6716657", "0.67018616", "0.66822076", "0.6671346", "0.66659707", "0.6661343", "0.66608155", "0.6660421", "0.665608", "0.6647752", "0.66360617", "0.6625801", "0.6617159", "0.66153616", "0.66153616", "0.661111", "0.6608895", "0.66083837", "0.6604208", "0.66008335", "0.6587402", "0.65803015", "0.65671533", "0.6567071", "0.6564914", "0.65632343", "0.65630984", "0.654184", "0.6536053", "0.6530546", "0.6530526", "0.6528864", "0.65260595", "0.65179527", "0.6516745", "0.6516154", "0.6510159", "0.6510078", "0.65042776", "0.6501439", "0.6499975", "0.64988506", "0.649665", "0.6496221", "0.64947623", "0.649354", "0.6489089", "0.6488793", "0.64882225", "0.64859617", "0.6483642", "0.6479889", "0.64790434", "0.6472379", "0.6465228", "0.6459204", "0.6457627", "0.6452723", "0.64507645", "0.64495903", "0.64487314", "0.6448028", "0.64479464", "0.64474", "0.64456683", "0.64455897", "0.6444573", "0.64437336", "0.6443306", "0.6441888", "0.6441613", "0.6441039", "0.6439085", "0.6438874", "0.6434375", "0.64315784", "0.6430702", "0.6429934", "0.64209116", "0.6417538", "0.64174324", "0.6417134", "0.6411201", "0.64086837", "0.6406251", "0.6405251", "0.6404929", "0.64009386" ]
0.65927887
32
Reason function returns reason value.
func (e GetEventByIDResponseValidationError) Reason() string { return e.reason }
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "func GetReason(from Getter, t string) string {\n\tif c := Get(from, t); c != nil {\n\t\treturn c.Reason\n\t}\n\treturn \"\"\n}", "func GetReason(message report.IMessage) int32 {\n\tswitch message.MessageType() {\n\tcase \"+RSP\", \"+BSP\":\n\t\treturn getLocationReason(message)\n\tcase \"+EVT\", \"+BVT\":\n\t\treturn getEventCode(message)\n\n\t}\n\treturn int32(6)\n}", "func GetReason(message report.IMessage) int32 {\n\tswitch message.MessageType() {\n\tcase \"+RSP\", \"+BSP\":\n\t\treturn getLocationReason(message)\n\tcase \"+EVT\", \"+BVT\":\n\t\treturn getEventCode(message)\n\n\t}\n\treturn int32(6)\n}", "func (b *Base) GetReason() string {\n\treturn b.Reason\n}", "func (o ValidationOutput) Reason() pulumi.StringPtrOutput {\n\treturn o.ApplyT(func(v Validation) *string { return v.Reason }).(pulumi.StringPtrOutput)\n}", "func (s *Subscription) GetReason() string {\n\tif s == nil || s.Reason == nil {\n\t\treturn \"\"\n\t}\n\treturn *s.Reason\n}", "func GetReason(message report.IMessage) int32 {\n\tidt, found := message.GetValue(fields.DeviceType)\n\tif !found {\n\t\treturn 6 //periodical\n\t}\n\n\tdeviceType, valid := idt.(byte)\n\tif !valid {\n\t\treturn 6 //periodical\n\t}\n\n\tswitch deviceType {\n\tcase devicetypes.GV320:\n\t\treturn gv300.GetReason(message)\n\n\tcase devicetypes.GV55, devicetypes.GV55N:\n\t\treturn gv55.GetReason(message)\n\n\tcase devicetypes.GV55Lite, devicetypes.GV55NLite:\n\t\treturn gv55.GetReasonLite(message)\n\n\tcase devicetypes.GV75, devicetypes.GV75W:\n\t\treturn gv75.GetReason(message)\n\n\tcase devicetypes.GV55W:\n\t\treturn gv55w.GetReason(message)\n\n\tcase devicetypes.GV600W:\n\t\treturn gv600.GetReason(message)\n\tcase devicetypes.GV300W:\n\t\treturn gv300w.GetReason(message)\n\tdefault:\n\t\treturn gv55.GetReason(message)\n\t}\n}", "func (e MessageDValidationError) Reason() string { return e.reason }", "func (o LienOutput) Reason() pulumi.StringOutput {\n\treturn o.ApplyT(func(v *Lien) pulumi.StringOutput { return v.Reason }).(pulumi.StringOutput)\n}", "func (e BitStringValidationError) Reason() string { return e.reason }", "func (o JobConditionOutput) Reason() pulumi.StringPtrOutput {\n\treturn o.ApplyT(func(v JobCondition) *string { return v.Reason }).(pulumi.StringPtrOutput)\n}", "func Reason(v string) predicate.ProfileUKM {\n\treturn predicate.ProfileUKM(func(s *sql.Selector) {\n\t\ts.Where(sql.EQ(s.C(FieldReason), v))\n\t})\n}", "func (e MessageFValidationError) Reason() string { return e.reason }", "func (o ValidationPatchOutput) Reason() pulumi.StringPtrOutput {\n\treturn o.ApplyT(func(v ValidationPatch) *string { return v.Reason }).(pulumi.StringPtrOutput)\n}", "func (e ActiveHealthCheckValidationError) Reason() string { return e.reason }", "func (o *SecurityProblemEvent) GetReason() string {\n\tif o == nil || o.Reason == nil {\n\t\tvar ret string\n\t\treturn ret\n\t}\n\treturn *o.Reason\n}", "func (e EutracgiValidationError) Reason() string { return e.reason }", "func (resp *Response) Reason() string {\n\treturn resp.Status\n}", "func (n *Notification) GetReason() string {\n\tif n == nil || n.Reason == nil {\n\t\treturn \"\"\n\t}\n\treturn *n.Reason\n}", "func (s *SessionTrackerV1) GetReason() string {\n\treturn s.Spec.Reason\n}", "func (e MessageEValidationError) Reason() string { return e.reason }", "func (e RequirementRuleValidationError) Reason() string { return e.reason }", "func Reason(err error) string {\n\tif err == nil {\n\t\treturn \"\"\n\t}\n\tif reasoner, ok := err.(Reasoner); ok {\n\t\treturn reasoner.Reason()\n\t}\n\treturn 
\"\"\n}", "func (o MachineInstanceStatusConditionsOutput) Reason() pulumi.StringOutput {\n\treturn o.ApplyT(func(v MachineInstanceStatusConditions) string { return v.Reason }).(pulumi.StringOutput)\n}", "func (e NrtValidationError) Reason() string { return e.reason }", "func (o BuildStatusOutput) Reason() pulumi.StringPtrOutput {\n\treturn o.ApplyT(func(v BuildStatus) *string { return v.Reason }).(pulumi.StringPtrOutput)\n}", "func (e GetMessageResponseValidationError) Reason() string { return e.reason }", "func (o ApplicationStatusConditionsOutput) Reason() pulumi.StringOutput {\n\treturn o.ApplyT(func(v ApplicationStatusConditions) string { return v.Reason }).(pulumi.StringOutput)\n}", "func (e PassiveHealthCheckValidationError) Reason() string { return e.reason }", "func (e CardValidationError) Reason() string { return e.reason }", "func (e StatsdValidationError) Reason() string { return e.reason }", "func (e PciValidationError) Reason() string { return e.reason }", "func (o ApplicationStatusWorkflowStepsOutput) Reason() pulumi.StringPtrOutput {\n\treturn o.ApplyT(func(v ApplicationStatusWorkflowSteps) *string { return v.Reason }).(pulumi.StringPtrOutput)\n}", "func (o *AccessRequestData) GetReason() string {\n\tif o == nil || o.Reason == nil {\n\t\tvar ret string\n\t\treturn ret\n\t}\n\treturn *o.Reason\n}", "func (e LanguageValidationError) Reason() string { return e.reason }", "func (e CreditValidationError) Reason() string { return e.reason }", "func (e PaymentValidationError) Reason() string { return e.reason }", "func (e ResponseValidationError) Reason() string { return e.reason }", "func (e RdsValidationError) Reason() string { return e.reason }", "func (e CardHolderValidationError) Reason() string { return e.reason }", "func (e ActionValidationError) Reason() string { return e.reason }", "func (e SimpleResponseValidationError) Reason() string { return e.reason }", "func (e StatusResponseValidationError) Reason() string { return e.reason }", "func (o *V0037Node) GetReason() string {\n\tif o == nil || o.Reason == nil {\n\t\tvar ret string\n\t\treturn ret\n\t}\n\treturn *o.Reason\n}", "func (e ChannelPayRequestValidationError) Reason() string { return e.reason }", "func (e ChannelPayResponseValidationError) Reason() string { return e.reason }", "func (e RicControlMessagePriorityValidationError) Reason() string { return e.reason }", "func (e MaxPciValidationError) Reason() string { return e.reason }", "func (e LivenessResponseValidationError) Reason() string { return e.reason }", "func (e MaxPlmnValidationError) Reason() string { return e.reason }", "func (e SimpleRequestValidationError) Reason() string { return e.reason }", "func (e MessageCValidationError) Reason() string { return e.reason }", "func (se *StatusError) Reason() string {\n\treturn se.message\n}", "func (o *DeploymentsCondition) GetReason() string {\n\tif o == nil || o.Reason == nil {\n\t\tvar ret string\n\t\treturn ret\n\t}\n\treturn *o.Reason\n}", "func (e SkillValidationError) Reason() string { return e.reason }", "func (e GetDisscusRespValidationError) Reason() string { return e.reason }", "func (o BuildStatusPtrOutput) Reason() pulumi.StringPtrOutput {\n\treturn o.ApplyT(func(v *BuildStatus) *string {\n\t\tif v == nil {\n\t\t\treturn nil\n\t\t}\n\t\treturn v.Reason\n\t}).(pulumi.StringPtrOutput)\n}", "func (c *ContainerStatusResolver) Reason() *string {\n\treturn c.reason\n}", "func (e EarfcnValidationError) Reason() string { return e.reason }", "func (e CalculateComplianceRequestValidationError) Reason() string 
{ return e.reason }", "func (_this *CrashReportBody) Reason() *string {\n\tvar ret *string\n\tvalue := _this.Value_JS.Get(\"reason\")\n\tif value.Type() != js.TypeNull && value.Type() != js.TypeUndefined {\n\t\t__tmp := (value).String()\n\t\tret = &__tmp\n\t}\n\treturn ret\n}", "func (e HealthCheck_PayloadValidationError) Reason() string { return e.reason }", "func (e RetrieveMyCardsResponseValidationError) Reason() string { return e.reason }", "func (e CommonResponseValidationError) Reason() string { return e.reason }", "func (e GetMessageRequestValidationError) Reason() string { return e.reason }", "func (o StorageClusterStatusConditionsOutput) Reason() pulumi.StringPtrOutput {\n\treturn o.ApplyT(func(v StorageClusterStatusConditions) *string { return v.Reason }).(pulumi.StringPtrOutput)\n}", "func (e StateMachineResponseValidationError) Reason() string { return e.reason }", "func (e ArfcnValidationError) Reason() string { return e.reason }", "func (e NetworkPolicyValidationError) Reason() string { return e.reason }", "func (o *DataPlaneClusterUpdateStatusRequestConditions) GetReason() string {\n\tif o == nil || o.Reason == nil {\n\t\tvar ret string\n\t\treturn ret\n\t}\n\treturn *o.Reason\n}", "func (e MetricValidationError) Reason() string { return e.reason }", "func (o BuildRunStatusOutput) Reason() pulumi.StringPtrOutput {\n\treturn o.ApplyT(func(v BuildRunStatus) *string { return v.Reason }).(pulumi.StringPtrOutput)\n}", "func (e RecoverableError) Reason() string {\n\treturn e.reason\n}", "func (e MaxofMessageProtocolTestsValidationError) Reason() string { return e.reason }", "func (e ChannelNotifyResponseValidationError) Reason() string { return e.reason }", "func (e ResultValidationError) Reason() string { return e.reason }", "func (e TestSpecificationValidationError) Reason() string { return e.reason }", "func (e NonRecoverableError) Reason() string {\n\treturn e.reason\n}", "func (o JobStatusErrorOutput) Reason() pulumi.StringPtrOutput {\n\treturn o.ApplyT(func(v JobStatusError) *string { return v.Reason }).(pulumi.StringPtrOutput)\n}", "func (a Acknowledgement) Reason() error {\n\tswitch {\n\tcase a.State == ACK:\n\t\treturn nil\n\tcase a.State == NACK:\n\t\treturn errors.New(string(a.Message))\n\tdefault:\n\t\treturn errors.New(\"unknown acknowledgement status\")\n\t}\n}", "func (e UpdateMessageResponseValidationError) Reason() string { return e.reason }", "func (e WordValidationError) Reason() string { return e.reason }", "func (e GetDisscusReqValidationError) Reason() string { return e.reason }", "func (e CreatMessageResponseValidationError) Reason() string { return e.reason }", "func (o HorizontalPodAutoscalerConditionPatchOutput) Reason() pulumi.StringPtrOutput {\n\treturn o.ApplyT(func(v HorizontalPodAutoscalerConditionPatch) *string { return v.Reason }).(pulumi.StringPtrOutput)\n}", "func (o HorizontalPodAutoscalerConditionPatchOutput) Reason() pulumi.StringPtrOutput {\n\treturn o.ApplyT(func(v HorizontalPodAutoscalerConditionPatch) *string { return v.Reason }).(pulumi.StringPtrOutput)\n}", "func (e MetricImplementationValidationError) Reason() string { return e.reason }", "func (e CiliumCFValidationError) Reason() string { return e.reason }", "func (e FilterStateRuleValidationError) Reason() string { return e.reason }", "func (e CreateDisscusRespValidationError) Reason() string { return e.reason }", "func (e HealthCheckValidationError) Reason() string { return e.reason }", "func (e HealthCheckValidationError) Reason() string { return e.reason }", "func (e 
TwoOneofsValidationError) Reason() string { return e.reason }", "func (e AdminValidationError) Reason() string { return e.reason }", "func (o HorizontalPodAutoscalerConditionOutput) Reason() pulumi.StringPtrOutput {\n\treturn o.ApplyT(func(v HorizontalPodAutoscalerCondition) *string { return v.Reason }).(pulumi.StringPtrOutput)\n}", "func (o HorizontalPodAutoscalerConditionOutput) Reason() pulumi.StringPtrOutput {\n\treturn o.ApplyT(func(v HorizontalPodAutoscalerCondition) *string { return v.Reason }).(pulumi.StringPtrOutput)\n}", "func (e LivenessRequestValidationError) Reason() string { return e.reason }", "func (r *ReportStoryRequest) GetReason() (value ReportReasonClass) {\n\tif r == nil {\n\t\treturn\n\t}\n\treturn r.Reason\n}", "func (e AssessmentResultValidationError) Reason() string { return e.reason }", "func (e L7NetworkPolicyRuleValidationError) Reason() string { return e.reason }", "func (e NrarfcnValidationError) Reason() string { return e.reason }" ]
[ "0.78512263", "0.7759013", "0.7759013", "0.758723", "0.74332446", "0.74091107", "0.740494", "0.73673135", "0.73432285", "0.7330937", "0.7329657", "0.73138005", "0.72980094", "0.7293151", "0.72837216", "0.7275913", "0.7252345", "0.7230593", "0.72234565", "0.7222608", "0.7196587", "0.7186926", "0.7177811", "0.71720684", "0.71702856", "0.7168882", "0.7168033", "0.71623784", "0.7160162", "0.7157901", "0.7156796", "0.71499187", "0.71483266", "0.71435404", "0.7138927", "0.7134093", "0.7131485", "0.71212435", "0.7113703", "0.71134007", "0.7110416", "0.71102226", "0.71073544", "0.71044487", "0.7097571", "0.709562", "0.70931906", "0.7092116", "0.7085098", "0.70789874", "0.7077606", "0.707535", "0.7071573", "0.706842", "0.7067343", "0.70658314", "0.7065663", "0.70604813", "0.70554", "0.70413375", "0.7038985", "0.7036392", "0.70291436", "0.70268923", "0.7026706", "0.70261866", "0.7018986", "0.7011388", "0.70111495", "0.7009085", "0.7005406", "0.70025146", "0.7000965", "0.69991565", "0.6995616", "0.6992607", "0.6992276", "0.69910586", "0.6989737", "0.69873315", "0.6984515", "0.6983248", "0.6979003", "0.6976954", "0.69759", "0.69759", "0.6974406", "0.69741553", "0.6972589", "0.69723344", "0.69695055", "0.69695055", "0.69690573", "0.69686645", "0.69659555", "0.69659555", "0.69656986", "0.69630307", "0.69612694", "0.69515", "0.69511986" ]
0.0
-1
Cause function returns cause value.
func (e GetEventByIDResponseValidationError) Cause() error { return e.cause }
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "func Cause(err error) error {\n\tswitch err.(type) {\n\tcase Causable:\n\t\treturn err.(Causable).Cause()\n\t}\n\treturn nil\n}", "func (e errWithCause) Cause() error {\n\treturn e.cause\n}", "func Cause(e error) error {\n\tswitch e := e.(type) {\n\tcase *wrap:\n\t\treturn e.Cause()\n\tcase UserError:\n\t\treturn e.Cause()\n\tdefault:\n\t\treturn e\n\t}\n}", "func (e *Error) Cause() error {\n\treturn e.Unwrap()\n}", "func (e *wrap) Cause() error {\n\treturn e.cause\n}", "func Cause(err error) error {\n\treturn errors.Cause(err)\n}", "func Cause(err error) error {\n\treturn errors.Cause(err)\n}", "func Cause(err error) error {\n\treturn errors.Cause(err)\n}", "func Cause(err error) error {\n\treturn errors.Cause(err)\n}", "func Cause(err error) error {\n\treturn errors.Cause(err)\n}", "func Cause(err error) error {\n\treturn errors.Cause(err)\n}", "func Cause(err error) error {\n\tif err == nil {\n\t\treturn nil\n\t}\n\tif e, ok := err.(iCause); ok {\n\t\treturn e.Cause()\n\t}\n\tif e, ok := err.(iNext); ok {\n\t\treturn Cause(e.Next())\n\t}\n\tif e, ok := err.(iUnwrap); ok {\n\t\treturn Cause(e.Unwrap())\n\t}\n\treturn err\n}", "func (e *Error) Cause() error {\n\treturn e.err\n}", "func (e *errorT) Cause() error {\n\treturn e.err\n}", "func (s *Error) Cause() error {\n\treturn s.underlying\n}", "func (e *Error) Cause() error {\n\treturn e.Err\n}", "func (ec Error) Cause() error {\n\treturn ec.error\n}", "func Cause(err error) error {\n\tif err, ok := err.(*wrappedError); ok {\n\t\treturn err.Cause()\n\t}\n\treturn err\n}", "func (e *Err) Cause() error {\n\treturn e.cause\n}", "func Cause(err error) error {\n\ttype causer interface {\n\t\tCause() error\n\t}\n\n\tfor err != nil {\n\t\te, ok := err.(causer)\n\t\tif !ok {\n\t\t\treturn err\n\t\t}\n\t\terr = e.Cause()\n\t}\n\treturn nil\n}", "func (e Error) Cause() error {\n\treturn e.cause\n}", "func (e *RunError) Cause() error {\n\tif e.Inner != nil {\n\t\treturn e.Inner\n\t}\n\treturn e\n}", "func (e *wrappedError) Cause() error {\n\tif e.previous == nil {\n\t\treturn e\n\t}\n\tswitch err := e.previous.(type) {\n\tcase *wrappedError:\n\t\treturn err.Cause()\n\tdefault:\n\t\treturn err\n\t}\n}", "func Cause(err error) error {\n\tvar (\n\t\tcauser Causer\n\t\tok bool\n\t)\n\tfor err != nil {\n\t\tcauser, ok = err.(Causer)\n\t\tif !ok {\n\t\t\tbreak\n\t\t}\n\t\terr = causer.Cause()\n\t}\n\treturn err\n}", "func (e *OpError) Cause() error {\n\treturn e.Err\n}", "func (err *gooseError) Cause() error {\n\treturn err.cause\n}", "func (e *detailedError) Cause() error {\n\treturn e.cause\n}", "func (err *ExitError) Cause() error {\n\treturn err.Err\n}", "func (ce *ClientError) Cause() error {\n\treturn ce.err\n}", "func Cause(err error) error {\n\tif w, ok := err.(*Wrapped); ok {\n\t\t// if root level error\n\t\tif len(w.Errors) > 0 {\n\t\t\treturn w.Errors[0]\n\t\t}\n\t\t// already extracted error\n\t\treturn w\n\t}\n\treturn err\n}", "func Cause(err error) (error, bool) { // nolint: golint, staticcheck, stylecheck\n\terrWithContext, ok := err.(ContextError)\n\tif !ok {\n\t\treturn nil, false\n\t}\n\treturn errWithContext.Cause(), true\n}", "func Cause(err error) error {\n\tfor err != nil {\n\t\te, ok := err.(causer)\n\t\tif !ok {\n\t\t\tbreak\n\t\t}\n\t\terr = e.Cause()\n\t}\n\treturn err\n}", "func Cause(err error) error {\n\tfor err != nil {\n\t\tcause, ok := err.(causer)\n\t\tif !ok {\n\t\t\tbreak\n\t\t}\n\t\terr = cause.Cause()\n\t}\n\treturn err\n}", "func (e UnencodableValue) Cause() error {\n\treturn e.Err\n}", "func Cause(err error) 
error {\n\ttype wrapper interface {\n\t\tUnwrap() error\n\t}\n\tfor err != nil {\n\t\tcause, ok := err.(wrapper)\n\t\tif !ok {\n\t\t\tbreak\n\t\t}\n\t\terr = cause.Unwrap()\n\t}\n\treturn err\n}", "func (w *pipeError) Cause() error { return errors.Cause(w.error) }", "func (e *opaqueWrapper) Cause() error { return e.cause }", "func (e *opaqueWrapper) Cause() error { return e.cause }", "func (e ResolveRequestValidationError) Cause() error { return e.cause }", "func Cause(err error) error {\n\tfor {\n\t\tuerr := Unwrap(err)\n\t\tif uerr == nil {\n\t\t\treturn err\n\t\t}\n\t\terr = uerr\n\t}\n}", "func Cause(err error) error {\n\tfor {\n\t\tif e, ok := err.(errorCause); ok {\n\t\t\tif cause := e.Cause(); cause != nil {\n\t\t\t\terr = cause\n\t\t\t\tcontinue\n\t\t\t}\n\t\t}\n\t\treturn err\n\t}\n}", "func (e InternalUpstreamTransportValidationError) Cause() error { return e.cause }", "func (e EutracgiValidationError) Cause() error { return e.cause }", "func (w *withCode) Cause() error { return w.cause }", "func (e UpsertEventRequestValidationError) Cause() error { return e.cause }", "func (e PciValidationError) Cause() error { return e.cause }", "func (e NoOneofsValidationError) Cause() error { return e.cause }", "func (e SimpleRequestValidationError) Cause() error { return e.cause }", "func Cause(err error) error {\n\tfor err != nil {\n\t\tmrpErr, ok := err.(Error)\n\t\tif !ok {\n\t\t\tbreak\n\t\t}\n\t\terr = gErrors.Cause(mrpErr.originalError)\n\t}\n\treturn err\n}", "func (e *withDomain) Cause() error { return e.cause }", "func (e LoggingValidationError) Cause() error { return e.cause }", "func (e CiliumCFValidationError) Cause() error { return e.cause }", "func (e AssessmentResultValidationError) Cause() error { return e.cause }", "func (e LoggingCFValidationError) Cause() error { return e.cause }", "func Cause(err error) error {\n\tfor err != nil {\n\t\tunwraped := errors.Unwrap(err)\n\t\tif unwraped == nil {\n\t\t\treturn err\n\t\t}\n\t\terr = unwraped\n\t}\n\treturn err\n}", "func (e NrtValidationError) Cause() error { return e.cause }", "func (e ResolveResponseValidationError) Cause() error { return e.cause }", "func (e StateChangeValidationError) Cause() error { return e.cause }", "func (e SXGValidationError) Cause() error { return e.cause }", "func (e EutracellIdentityValidationError) Cause() error { return e.cause }", "func (e WorkflowComponentValidationError) Cause() error { return e.cause }", "func (e MessageFValidationError) Cause() error { return e.cause }", "func (e EarfcnValidationError) Cause() error { return e.cause }", "func (e ActiveHealthCheckValidationError) Cause() error { return e.cause }", "func Cause(e interface{}) ECode {\n\tif e == nil {\n\t\treturn &ecode{code: 0}\n\t}\n\tif str, ok := e.(string); ok {\n\t\treturn &ecode{code: 500, message: str}\n\t}\n\terr, ok := e.(error)\n\tif !ok {\n\t\treturn &ecode{code: 500, message: reflect.TypeOf(e).Name()}\n\t}\n\tec, ok := errors.Cause(err).(ECode)\n\tif ok {\n\t\treturn ec\n\t}\n\treturn &ecode{code: 500, message: err.Error()}\n}", "func (e UpstreamValidationError) Cause() error { return e.cause }", "func (e UpstreamValidationError) Cause() error { return e.cause }", "func (e TransactionValidationError) Cause() error { return e.cause }", "func (e MessageCValidationError) Cause() error { return e.cause }", "func WithCause(err, cause error) error {\n\treturn errWithCause{\n\t\terror: err,\n\t\tcause: cause,\n\t}\n}", "func (e ActionValidationError) Cause() error { return e.cause }", "func (e 
AssessEvidenceRequestValidationError) Cause() error { return e.cause }", "func (e Upstream_TimeoutValidationError) Cause() error { return e.cause }", "func (e BootstrapValidationError) Cause() error { return e.cause }", "func (e TwoValidOneofsValidationError) Cause() error { return e.cause }", "func (e RdsValidationError) Cause() error { return e.cause }", "func (e MaxPciValidationError) Cause() error { return e.cause }", "func (e AdminValidationError) Cause() error { return e.cause }", "func (e RequirementRuleValidationError) Cause() error { return e.cause }", "func (e ResultValidationError) Cause() error { return e.cause }", "func (e InternalUpstreamTransport_MetadataValueSourceValidationError) Cause() error { return e.cause }", "func (e MaintemplateComponentValidationError) Cause() error { return e.cause }", "func (e RedactedValidationError) Cause() error { return e.cause }", "func (e CreatMessageRequestValidationError) Cause() error { return e.cause }", "func (e NrcgiValidationError) Cause() error { return e.cause }", "func (e UpsertEventResponseValidationError) Cause() error { return e.cause }", "func (e NrarfcnValidationError) Cause() error { return e.cause }", "func (e TwoOneofsValidationError) Cause() error { return e.cause }", "func (e PassiveHealthCheckValidationError) Cause() error { return e.cause }", "func (e MessageEValidationError) Cause() error { return e.cause }", "func (e GetEventByIDRequestValidationError) Cause() error { return e.cause }", "func (e ArfcnValidationError) Cause() error { return e.cause }", "func (e TenantValidationError) Cause() error { return e.cause }", "func (e GetRequestValidationError) Cause() error { return e.cause }", "func (e GetRequestValidationError) Cause() error { return e.cause }", "func (e StateValidationError) Cause() error { return e.cause }", "func (e MinioComponentValidationError) Cause() error { return e.cause }", "func (e LatencyFaultValidationError) Cause() error { return e.cause }", "func (e GetDisscusReqValidationError) Cause() error { return e.cause }", "func (e UpdateTodoRequestValidationError) Cause() error { return e.cause }", "func (e ManifestProjectCFValidationError) Cause() error { return e.cause }" ]
[ "0.8261931", "0.79593104", "0.7896341", "0.7866004", "0.77969515", "0.7710267", "0.7710267", "0.7710267", "0.7710267", "0.7710267", "0.7710267", "0.7710245", "0.76848143", "0.7658625", "0.76571184", "0.7650075", "0.76476574", "0.7625474", "0.7623792", "0.7621357", "0.7582015", "0.74775916", "0.74656785", "0.7424877", "0.7423645", "0.7384076", "0.73215586", "0.7306271", "0.7286286", "0.72688353", "0.7258698", "0.7210708", "0.7192562", "0.7107885", "0.7104621", "0.7038758", "0.701369", "0.701369", "0.69629866", "0.6927608", "0.692207", "0.69208515", "0.68938124", "0.6858123", "0.684976", "0.6846449", "0.6830235", "0.6825922", "0.68016034", "0.6800864", "0.6791525", "0.6778742", "0.67324674", "0.673176", "0.67316306", "0.6729585", "0.67155087", "0.6714904", "0.67148", "0.66955864", "0.668878", "0.66879916", "0.66822165", "0.66821957", "0.66791916", "0.6673011", "0.6673011", "0.6668595", "0.66512465", "0.66507614", "0.66484874", "0.6636346", "0.6633876", "0.66313785", "0.66304046", "0.6622965", "0.66204447", "0.6618046", "0.6617173", "0.66125673", "0.66055393", "0.6603956", "0.66004616", "0.6600119", "0.6587435", "0.6580937", "0.6578089", "0.6569218", "0.656675", "0.65664583", "0.6565433", "0.6560722", "0.65606016", "0.6553194", "0.6553194", "0.65503496", "0.6549731", "0.6546909", "0.6544467", "0.65359867", "0.6531173" ]
0.0
-1
Key function returns key value.
func (e GetEventByIDResponseValidationError) Key() bool { return e.key }
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "func (m *KeyValue) GetKey()(*string) {\n return m.key\n}", "func (f binaryEqualsFunc) key() Key {\n\treturn f.k\n}", "func (m *KeyUint) Key() driver.Value { return driver.Value(m.ID) }", "func (m *OMap) Key(n int) string {\n\treturn m.keys[n]\n}", "func (t *Type) Key() *Type", "func (f nullFunc) key() Key {\n\treturn f.k\n}", "func (v Variable) Key() string {\n\treturn (string)(v)\n}", "func (i GinJwtSignAlgorithm) Key() string {\n\tif val, ok := _GinJwtSignAlgorithmValueToKeyMap[i]; ok {\n\t\t// found\n\t\treturn val\n\t} else {\n\t\t// not found\n\t\treturn \"\"\n\t}\n}", "func (g *Generator) GetKey(K string) interface{} {\n\treturn g.data[K]\n}", "func (m *SearchBucket) GetKey()(*string) {\n return m.key\n}", "func (f *Filter) getKey(key string) string {\n\tif f.HashKeys {\n\t\th := sha1.New()\n\t\ts := h.Sum([]byte(key))\n\t\treturn fmt.Sprintf(\"%x\", s)\n\t}\n\treturn key\n}", "func getKey(ing *extensions.Ingress, t *testing.T) string {\n\tkey, err := keyFunc(ing)\n\tif err != nil {\n\t\tt.Fatalf(\"Unexpected error getting key for Ingress %v: %v\", ing.Name, err)\n\t}\n\treturn key\n}", "func (f *field) Key() string {\n\treturn f.k\n}", "func (i GinBindType) Key() string {\n\tif val, ok := _GinBindTypeValueToKeyMap[i]; ok {\n\t\t// found\n\t\treturn val\n\t} else {\n\t\t// not found\n\t\treturn \"\"\n\t}\n}", "func (c Node) GetKey() string {\n\treturn c.key\n}", "func (m *RegistryKeyState) GetKey()(*string) {\n return m.key\n}", "func (akv StringKeyValue) Key() string {\n\treturn akv.orig.Key\n}", "func (a AddItem) Key() string { return string(a) }", "func (area *MineArea) GetKey() string {\n\treturn GetKey(area.X, area.Y)\n}", "func (d *Disk) getKey(p *DiskParams) []byte {\n\treturn []byte(time_util.TimeToName(time.Unix(p.ExicutionTime, 0), fmt.Sprintf(\"%x\", d.hasher.Sum(nil))))\n}", "func (e *OrderedMapElement[K, V]) Key() K {\n\treturn e.element.key\n}", "func getKey(cluster *clusteroperator.Cluster, t *testing.T) string {\n\tif key, err := controller.KeyFunc(cluster); err != nil {\n\t\tt.Errorf(\"Unexpected error getting key for Cluster %v: %v\", cluster.Name, err)\n\t\treturn \"\"\n\t} else {\n\t\treturn key\n\t}\n}", "func cacheKeyFunc(obj interface{}) (string, error) {\n\tkey := obj.(*cacheEntry).key\n\treturn key, nil\n}", "func (node *Node) Key() interface{} {\n\treturn fmt.Sprintf(\"%v\", node.contents)\n}", "func (s *Mem) Key(key interface{}) string {\n\treturn fmt.Sprintf(\"%v-%v\", s.prefix, key)\n}", "func (vrfs *VRFShare) GetKey() datastore.Key {\n\treturn datastore.ToKey(fmt.Sprintf(\"%v\", vrfs.Round))\n}", "func stringKeyFunc(obj interface{}) (string, error) {\n\tkey := obj.(*nodeidentity.Info).InstanceID\n\treturn key, nil\n}", "func (e Enum) GetKey(value any) string {\n\tfor k, v := range e {\n\t\tif reflect.DeepEqual(v, value) {\n\t\t\treturn k\n\t\t}\n\t}\n\treturn \"\"\n}", "func (m *Map) Key() Type { return m.key }", "func getKey(w http.ResponseWriter, ps httprouter.Params) (string, bool){\n\treturn ps.ByName(\"id\"), true\n}", "func (v *Value) GetKey() *string {\n\tret := C.zj_GetKey(v.V)\n\tif ret == nil {\n\t\treturn nil\n\t}\n\tretStr := C.GoString(ret)\n\treturn &retStr\n}", "func (f *Factor) Key() string { return f.ID }", "func (c *KeyValueChanger) Key() (string, error) {\n\tif c.err != nil {\n\t\treturn \"\", c.err\n\t}\n\treturn c.node.content.key().(string), nil\n}", "func (a DataNodeKV) Key() string {\n\treturn a.K\n}", "func GetKey(allkeys [][]byte, loc Where) []byte {\n\tif loc == Left {\n\t\treturn allkeys[0]\n\t}\n\tif loc == Right 
{\n\t\treturn allkeys[len(allkeys)-1]\n\t}\n\t// select a random index between 1 and allkeys-2\n\t// nolint:gosec\n\tidx := rand.Int()%(len(allkeys)-2) + 1\n\treturn allkeys[idx]\n}", "func KeyFunc(name, namespace string) string {\n\tif len(namespace) == 0 {\n\t\treturn name\n\t}\n\treturn namespace + \"/\" + name\n}", "func (it *Iterator) Key() string { return it.n.k }", "func (s *session) getKey() string {\n\treturn s.uuid\n}", "func (o SchedulingNodeAffinityOutput) Key() pulumi.StringPtrOutput {\n\treturn o.ApplyT(func(v SchedulingNodeAffinity) *string { return v.Key }).(pulumi.StringPtrOutput)\n}", "func (i SNSProtocol) Key() string {\n\tif val, ok := _SNSProtocolValueToKeyMap[i]; ok {\n\t\t// found\n\t\treturn val\n\t} else {\n\t\t// not found\n\t\treturn \"\"\n\t}\n}", "func (it *Iterator) Key() interface{} { return it.n.k }", "func getkey(key ...interface{}) interface{} {\n\tif len(key) > 0 {\n\t\treturn key[0]\n\t}\n\n\treturn nil\n}", "func (i SNSSubscribeAttribute) Key() string {\n\tif val, ok := _SNSSubscribeAttributeValueToKeyMap[i]; ok {\n\t\t// found\n\t\treturn val\n\t} else {\n\t\t// not found\n\t\treturn \"\"\n\t}\n}", "func (it *iterator) Key() []byte {\n\tif len(it.keys) > 0 {\n\t\treturn []byte(it.keys[0])\n\t}\n\treturn nil\n}", "func (this *DefaultHandler) GetKey(xesRedis redo.XesRedisBase) (ret string) {\n\tdefer func() {\n\t\tif xesRedis.GetCtx() == nil {\n\t\t\treturn\n\t\t}\n\t\tbench := xesRedis.GetCtx().Value(\"IS_BENCHMARK\")\n\t\tif cast.ToString(bench) == \"1\" {\n\t\t\tret = \"benchmark_\" + ret\n\t\t}\n\t}()\n\n\tkeyInfo := this.getKeyInfo(xesRedis)\n\tkey := cast.ToString(keyInfo[\"key\"])\n\tif key == \"\" {\n\t\tret = xesRedis.GetKeyName()\n\t\treturn\n\t}\n\tret = fmt.Sprintf(key, (xesRedis.GetKeyParams())...)\n\treturn\n}", "func (st *MemStorage) GetKey(gun, role string) (algorithm string, public []byte, err error) {\n\t// no need for lock. 
It's ok to return nil if an update\n\t// wasn't observed\n\tg, ok := st.keys[gun]\n\tif !ok {\n\t\treturn \"\", nil, &ErrNoKey{gun: gun}\n\t}\n\tk, ok := g[role]\n\tif !ok {\n\t\treturn \"\", nil, &ErrNoKey{gun: gun}\n\t}\n\n\treturn k.algorithm, k.public, nil\n}", "func (e *EntrySet) Get(key string) string {\n return e.keys[key]\n}", "func (v *V) Key() string {\n\treturn v.key\n}", "func (it *Iter) Key() byte { return it.top().key }", "func (s Stash) Key() string {\n\tvals := utils.MapValues(s.payload)\n\tif len(vals) < 1 {\n\t\treturn \"\"\n\t}\n\n\treturn fmt.Sprintf(\"$%s\", vals[0])\n}", "func (i SNSPlatformApplicationAttribute) Key() string {\n\tif val, ok := _SNSPlatformApplicationAttributeValueToKeyMap[i]; ok {\n\t\t// found\n\t\treturn val\n\t} else {\n\t\t// not found\n\t\treturn \"\"\n\t}\n}", "func (o Operator) Key() string {\n\treturn fmt.Sprintf(\"operator.%s\", o.Aid)\n}", "func (i *StringIterator) Key() Object {\n\treturn &Int{Value: int64(i.i - 1)}\n}", "func (mci *XMCacheIterator) Key() []byte {\n\tif mci.err != nil || mci.dir == dirReleased {\n\t\treturn nil\n\t}\n\tswitch mci.index {\n\tcase 0, 1:\n\t\treturn mci.iters[mci.index].Key()\n\tcase 2:\n\t\tif mci.mc.isPenetrate {\n\t\t\treturn mci.mIter.Key()\n\t\t}\n\t\treturn nil\n\t}\n\treturn nil\n}", "func (s *Arena) getKey(offset uint32, size uint16) []byte {\n\treturn s.data[offset : offset+uint32(size)]\n}", "func (o ReservationAffinityOutput) Key() pulumi.StringPtrOutput {\n\treturn o.ApplyT(func(v ReservationAffinity) *string { return v.Key }).(pulumi.StringPtrOutput)\n}", "func (f DefaultField) Key() string {\n\treturn f.K\n}", "func Key(v string) predicate.Blob {\n\treturn predicate.Blob(\n\t\tfunc(s *sql.Selector) {\n\t\t\ts.Where(sql.EQ(s.C(FieldKey), v))\n\t\t},\n\t)\n}", "func (m Match) Key() string {\n\treturn fmt.Sprintf(\"match:%s\", m.ID())\n}", "func (d *Activity) KeyVal() string {\n\treturn d.ExteralID\n}", "func (key twofishKey) Key() []byte {\n\treturn key[:]\n}", "func getKey(data string) string {\n\tsign := md5.Sum([]byte(data))\n\tsignStr := fmt.Sprintf(\"%x\", sign)\n\treturn signStr[:7]\n}", "func (l *LangPackStringPluralized) GetKey() (value string) {\n\tif l == nil {\n\t\treturn\n\t}\n\treturn l.Key\n}", "func (t Task) Key() string {\n\treturn fmt.Sprintf(\"%s:%s\", t.Name, t.ID)\n}", "func (k Keys) RangeKey() interface{} { return k[1] }", "func (d *DStarLite) keyFor(s *dStarLiteNode) key {\n\t/*\n\t procedure CalculateKey(s)\n\t {01”} return [min(g(s), rhs(s)) + h(s_start, s) + k_m; min(g(s), rhs(s))];\n\t*/\n\tk := key{1: math.Min(s.g, s.rhs)}\n\tk[0] = k[1] + d.heuristic(d.s.Node, s.Node) + d.keyModifier\n\treturn k\n}", "func (stateID StateID) Key() string {\n\treturn string(stateID.LastAppHash)\n}", "func (m *Metric) GetKey() string {\n\tif m == nil || m.Key == nil {\n\t\treturn \"\"\n\t}\n\treturn *m.Key\n}", "func (u User) Key() interface{} {\n\treturn u.ID\n}", "func (b *BitSet) Key() string {\n\tif b == nil {\n\t\treturn \"\"\n\t} else {\n\t\treturn string(b.Bits.Bytes())\n\t}\n}", "func (e EnumByte) Key() EnumByteKey {\n return EnumByteKey(e)\n}", "func (n *lnode) key() []byte {\n\tbuf := (*[maxAllocSize]byte)(unsafe.Pointer(n))\n\treturn buf[n.pos : n.pos+n.ksize]\n}", "func (p *pv) key() pvKey {\n\treturn newPVKey(p.Cluster, p.Name)\n}", "func (i *MapIterator) Key() Object {\n\tk := i.k[i.i-1]\n\treturn &String{Value: k}\n}", "func (k *KVItem) Key() (interface{}, error) {\n\tvar cKey unsafe.Pointer\n\tvar keySize C.uint64_t\n\tvar keyType C.tiledb_datatype_t\n\tret := 
C.tiledb_kv_item_get_key(k.context.tiledbContext, k.tiledbKVItem, &cKey, &keyType, &keySize)\n\n\tif ret != C.TILEDB_OK {\n\t\treturn nil, fmt.Errorf(\"Error getting key for KVItem: %s\", k.context.LastError())\n\t}\n\n\tswitch Datatype(keyType) {\n\tcase TILEDB_INT8:\n\t\t// If the key size is greater than the size of a single value in bytes it is an array\n\t\telements := int(keySize) / C.sizeof_int8_t\n\t\tif elements > 1 {\n\t\t\ttmpslice := (*[1 << 30]C.int8_t)(unsafe.Pointer(cKey))[:elements:elements]\n\t\t\tretSlice := make([]int8, elements)\n\t\t\tfor i, s := range tmpslice {\n\t\t\t\tretSlice[i] = int8(s)\n\t\t\t}\n\t\t\treturn retSlice, nil\n\t\t} else {\n\t\t\treturn int8(*(*C.int8_t)(cKey)), nil\n\t\t}\n\tcase TILEDB_INT16:\n\t\t// If the key size is greater than the size of a single value in bytes it is an array\n\t\telements := int(keySize) / C.sizeof_int16_t\n\t\tif elements > 1 {\n\t\t\ttmpslice := (*[1 << 30]C.int16_t)(unsafe.Pointer(cKey))[:elements:elements]\n\t\t\tretSlice := make([]int16, elements)\n\t\t\tfor i, s := range tmpslice {\n\t\t\t\tretSlice[i] = int16(s)\n\t\t\t}\n\t\t\treturn retSlice, nil\n\t\t} else {\n\t\t\treturn int16(*(*C.int16_t)(cKey)), nil\n\t\t}\n\tcase TILEDB_INT32:\n\t\t// If the key size is greater than the size of a single value in bytes it is an array\n\t\telements := int(keySize) / C.sizeof_int32_t\n\t\tif elements > 1 {\n\t\t\ttmpslice := (*[1 << 30]C.int32_t)(unsafe.Pointer(cKey))[:elements:elements]\n\t\t\tretSlice := make([]int32, elements)\n\t\t\tfor i, s := range tmpslice {\n\t\t\t\tretSlice[i] = int32(s)\n\t\t\t}\n\t\t\treturn retSlice, nil\n\t\t} else {\n\t\t\treturn int32(*(*C.int32_t)(cKey)), nil\n\t\t}\n\tcase TILEDB_INT64:\n\t\t// If the key size is greater than the size of a single value in bytes it is an array\n\t\telements := int(keySize) / C.sizeof_int64_t\n\t\tif elements > 1 {\n\t\t\ttmpslice := (*[1 << 30]C.int64_t)(unsafe.Pointer(cKey))[:elements:elements]\n\t\t\tretSlice := make([]int64, elements)\n\t\t\tfor i, s := range tmpslice {\n\t\t\t\tretSlice[i] = int64(s)\n\t\t\t}\n\t\t\treturn retSlice, nil\n\t\t} else {\n\t\t\treturn int64(*(*C.int64_t)(cKey)), nil\n\t\t}\n\tcase TILEDB_UINT8:\n\t\t// If the key size is greater than the size of a single value in bytes it is an array\n\t\telements := int(keySize) / C.sizeof_uint8_t\n\t\tif elements > 1 {\n\t\t\ttmpslice := (*[1 << 30]C.uint8_t)(unsafe.Pointer(cKey))[:elements:elements]\n\t\t\tretSlice := make([]uint8, elements)\n\t\t\tfor i, s := range tmpslice {\n\t\t\t\tretSlice[i] = uint8(s)\n\t\t\t}\n\t\t\treturn retSlice, nil\n\t\t} else {\n\t\t\treturn int8(*(*C.uint8_t)(cKey)), nil\n\t\t}\n\tcase TILEDB_UINT16:\n\t\t// If the key size is greater than the size of a single value in bytes it is an array\n\t\telements := int(keySize) / C.sizeof_uint16_t\n\t\tif elements > 1 {\n\t\t\ttmpslice := (*[1 << 30]C.uint16_t)(unsafe.Pointer(cKey))[:elements:elements]\n\t\t\tretSlice := make([]uint16, elements)\n\t\t\tfor i, s := range tmpslice {\n\t\t\t\tretSlice[i] = uint16(s)\n\t\t\t}\n\t\t\treturn retSlice, nil\n\t\t} else {\n\t\t\treturn int16(*(*C.uint16_t)(cKey)), nil\n\t\t}\n\tcase TILEDB_UINT32:\n\t\t// If the key size is greater than the size of a single value in bytes it is an array\n\t\telements := int(keySize) / C.sizeof_uint32_t\n\t\tif elements > 1 {\n\t\t\ttmpslice := (*[1 << 30]C.uint32_t)(unsafe.Pointer(cKey))[:elements:elements]\n\t\t\tretSlice := make([]uint32, elements)\n\t\t\tfor i, s := range tmpslice {\n\t\t\t\tretSlice[i] = uint32(s)\n\t\t\t}\n\t\t\treturn 
retSlice, nil\n\t\t} else {\n\t\t\treturn int32(*(*C.uint32_t)(cKey)), nil\n\t\t}\n\tcase TILEDB_UINT64:\n\t\t// If the key size is greater than the size of a single value in bytes it is an array\n\t\telements := int(keySize) / C.sizeof_uint64_t\n\t\tif elements > 1 {\n\t\t\ttmpslice := (*[1 << 30]C.uint64_t)(unsafe.Pointer(cKey))[:elements:elements]\n\t\t\tretSlice := make([]uint64, elements)\n\t\t\tfor i, s := range tmpslice {\n\t\t\t\tretSlice[i] = uint64(s)\n\t\t\t}\n\t\t\treturn retSlice, nil\n\t\t} else {\n\t\t\treturn int64(*(*C.uint64_t)(cKey)), nil\n\t\t}\n\tcase TILEDB_FLOAT32:\n\t\t// If the key size is greater than the size of a single value in bytes it is an array\n\t\telements := int(keySize) / C.sizeof_float\n\t\tif elements > 1 {\n\t\t\ttmpslice := (*[1 << 30]C.float)(unsafe.Pointer(cKey))[:elements:elements]\n\t\t\tretSlice := make([]float32, elements)\n\t\t\tfor i, s := range tmpslice {\n\t\t\t\tretSlice[i] = float32(s)\n\t\t\t}\n\t\t\treturn retSlice, nil\n\t\t} else {\n\t\t\treturn float32(*(*C.float)(cKey)), nil\n\t\t}\n\tcase TILEDB_FLOAT64:\n\t\t// If the key size is greater than the size of a single value in bytes it is an array\n\t\telements := int(keySize) / C.sizeof_double\n\t\tif elements > 1 {\n\t\t\ttmpslice := (*[1 << 30]C.double)(unsafe.Pointer(cKey))[:elements:elements]\n\t\t\tretSlice := make([]float64, elements)\n\t\t\tfor i, s := range tmpslice {\n\t\t\t\tretSlice[i] = float64(s)\n\t\t\t}\n\t\t\treturn retSlice, nil\n\t\t} else {\n\t\t\treturn float64(*(*C.double)(cKey)), nil\n\t\t}\n\tcase TILEDB_CHAR:\n\t\telements := int(keySize) / C.sizeof_char\n\t\treturn C.GoStringN((*C.char)(cKey), C.int(elements)), nil\n\n\tdefault:\n\t\treturn nil, fmt.Errorf(\"Unsupported tiledb key type: %v\", keyType)\n\t}\n\n\treturn nil, fmt.Errorf(\"Error getting key for KVItem\")\n}", "func (u Users) Key(luid *windows.LUID) (int64, error) {\r\n\tif luid == nil {\r\n\t\treturn 0, errors.New(\"got empty LUID pointer\")\r\n\t}\r\n\tkey := int64(int64(luid.HighPart<<32) + int64(luid.LowPart))\r\n\treturn key, nil\r\n}", "func (a *Anime) Key() string {\n\treturn fmt.Sprintf(\"anime:%d\", a.ID)\n}", "func (m MapEntry) Key() interface{} {\n\treturn m.key\n}", "func (f KeyMakerFunc) KeyFor(r *http.Request) string {\n\treturn f(r)\n}", "func (t *TimeSeries) GetKey() string {\n\treturn t.key\n}", "func (m *Map) Get(key string) string {\n\tif m.IsEmpty() {\n\t\treturn \"\"\n\t}\n\thash := m.hash([]byte(key))\n\tn := node{hash: hash, key: key}\n\titer := floor(&m.nodes.Tree, &n)\n\tif iter == m.nodes.End() {\n\t\titer = m.nodes.Begin()\n\t}\n\treturn iter.Node().Key.(*node).key\n}", "func (t *ScheduledTask) Key() string {\n\treturn fmt.Sprintf(taskKeyFormat, keyPrefixScheduled, t.ID, t.score)\n}", "func (it *iterator) Key() []byte {\n\treturn it.current.key\n}", "func (eln *EmptyLeafNode) GetKey() []byte {\n\treturn nil\n}", "func (h dataUsageHash) Key() string {\n\treturn string(h)\n}", "func (c *Container) Key() string {\n\tc.Lock()\n\tdefer c.Unlock()\n\treturn c.ID\n}", "func (c Repository) GetKey(key string) string {\n\tval, err := c.Client.Get(key).Result()\n\n\tif err != nil {\n\t\tlog.Println(err)\n\t}\n\n\treturn val\n}", "func (f Base) Key() string {\n\treturn f.key\n}", "func (o StudioComponentScriptParameterKeyValueOutput) Key() pulumi.StringPtrOutput {\n\treturn o.ApplyT(func(v StudioComponentScriptParameterKeyValue) *string { return v.Key }).(pulumi.StringPtrOutput)\n}", "func (o *ResourceDefinitionFilter) GetKey() string {\n\tif o == nil {\n\t\tvar ret 
string\n\t\treturn ret\n\t}\n\n\treturn o.Key\n}", "func (it *KeyAccess_Iterator) Key() interface{} {\n\treturn it.node.key\n}", "func (b Bucket) Key() interface{} {\n\treturn b[\"key\"]\n}", "func (m *Map) Get(key string) string {\n\tif m.IsEmpty() {\n\t\treturn \"\"\n\t}\n\n\thash := int(m.hash([]byte(key)))\n\n\t// Binary search for appropriate replica.\n\tidx := sort.Search(len(m.keys), func(i int) bool { return m.keys[i] >= hash })\n\n\t// Means we have cycled back to the first replica.\n\tif idx == len(m.keys) {\n\t\tidx = 0\n\t}\n\n\treturn m.hashMap[m.keys[idx]]\n}", "func (c *Counter) GetKey() string {\n\treturn c.key\n}", "func Key(id string, fallback string) Reference {\n\treturn key{id, fallback}\n}", "func (a *PositionalAttribute) Key() string {\n\treturn AttrPositionalIndex + strconv.Itoa(a.Index)\n}", "func (n *Node) Key() interface{} {\n\treturn n.key\n}", "func (e Timing) Key() string {\n\treturn e.Name\n}", "func Key(key string) query.Extractor {\n\treturn &keyExtractor{key}\n}", "func (i *Iterator) Key() []byte {\n\treturn i.iterator.Item().KeyCopy(nil)\n}", "func (m *Metric) Key() string {\n\treturn fmt.Sprintf(\"<%s%d%s>\", m.Name, m.Timestamp, m.Tags)\n}" ]
[ "0.7397974", "0.703695", "0.7026126", "0.69730234", "0.69701165", "0.69472975", "0.682121", "0.67752403", "0.6702173", "0.6691155", "0.66223186", "0.6602185", "0.66009104", "0.65937275", "0.65673846", "0.6555592", "0.65304273", "0.6521155", "0.6511681", "0.65062934", "0.64982766", "0.64867014", "0.6477575", "0.6462233", "0.6456774", "0.6456152", "0.6448241", "0.6435275", "0.6423325", "0.6412427", "0.64096636", "0.6403262", "0.6395327", "0.63929945", "0.6382585", "0.6378694", "0.63715774", "0.63671046", "0.635377", "0.63430053", "0.63418114", "0.6339266", "0.63258415", "0.6319039", "0.630293", "0.6300368", "0.6298253", "0.6296133", "0.6295445", "0.6281786", "0.6279424", "0.6277453", "0.6277033", "0.62735796", "0.6269087", "0.6262938", "0.62600297", "0.6259835", "0.6242855", "0.62427336", "0.6239893", "0.6226979", "0.62228185", "0.6216291", "0.62118614", "0.6209014", "0.62075627", "0.619765", "0.6197426", "0.61971486", "0.6196739", "0.6192416", "0.6191223", "0.6183839", "0.6179522", "0.6177141", "0.6172575", "0.61719537", "0.6170614", "0.6162783", "0.61570954", "0.6154456", "0.6152929", "0.615149", "0.61509156", "0.61395836", "0.6138672", "0.61365676", "0.613636", "0.61338246", "0.6133771", "0.6129422", "0.61284614", "0.612092", "0.6119081", "0.61121005", "0.611087", "0.6106958", "0.6106701", "0.61020154", "0.6100722" ]
0.0
-1
Validate checks the field values on UpsertEventRequest_Section with the rules defined in the proto definition for this message. If any rules are violated, an error is returned.
func (m *UpsertEventRequest_Section) Validate() error { if m == nil { return nil } if utf8.RuneCountInString(m.GetName()) < 1 { return UpsertEventRequest_SectionValidationError{ field: "Name", reason: "value length must be at least 1 runes", } } if utf8.RuneCountInString(m.GetDescription()) < 1 { return UpsertEventRequest_SectionValidationError{ field: "Description", reason: "value length must be at least 1 runes", } } // no validation rules for Position for idx, item := range m.GetQuestions() { _, _ = idx, item if v, ok := interface{}(item).(interface{ Validate() error }); ok { if err := v.Validate(); err != nil { return UpsertEventRequest_SectionValidationError{ field: fmt.Sprintf("Questions[%v]", idx), reason: "embedded message failed validation", cause: err, } } } } return nil }
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "func (m *UpsertEventRequest) Validate() error {\n\tif m == nil {\n\t\treturn nil\n\t}\n\n\t// no validation rules for Id\n\n\tif utf8.RuneCountInString(m.GetName()) < 1 {\n\t\treturn UpsertEventRequestValidationError{\n\t\t\tfield: \"Name\",\n\t\t\treason: \"value length must be at least 1 runes\",\n\t\t}\n\t}\n\n\tif utf8.RuneCountInString(m.GetParticipants()) < 4 {\n\t\treturn UpsertEventRequestValidationError{\n\t\t\tfield: \"Participants\",\n\t\t\treason: \"value length must be at least 4 runes\",\n\t\t}\n\t}\n\n\tfor idx, item := range m.GetSections() {\n\t\t_, _ = idx, item\n\n\t\tif v, ok := interface{}(item).(interface{ Validate() error }); ok {\n\t\t\tif err := v.Validate(); err != nil {\n\t\t\t\treturn UpsertEventRequestValidationError{\n\t\t\t\t\tfield: fmt.Sprintf(\"Sections[%v]\", idx),\n\t\t\t\t\treason: \"embedded message failed validation\",\n\t\t\t\t\tcause: err,\n\t\t\t\t}\n\t\t\t}\n\t\t}\n\n\t}\n\n\treturn nil\n}", "func (e UpsertEventRequest_SectionValidationError) Cause() error { return e.cause }", "func (m *GetEventByIDResponse_Section) Validate() error {\n\tif m == nil {\n\t\treturn nil\n\t}\n\n\t// no validation rules for Id\n\n\t// no validation rules for Name\n\n\t// no validation rules for Description\n\n\t// no validation rules for Position\n\n\tfor idx, item := range m.GetQuestions() {\n\t\t_, _ = idx, item\n\n\t\tif v, ok := interface{}(item).(interface{ Validate() error }); ok {\n\t\t\tif err := v.Validate(); err != nil {\n\t\t\t\treturn GetEventByIDResponse_SectionValidationError{\n\t\t\t\t\tfield: fmt.Sprintf(\"Questions[%v]\", idx),\n\t\t\t\t\treason: \"embedded message failed validation\",\n\t\t\t\t\tcause: err,\n\t\t\t\t}\n\t\t\t}\n\t\t}\n\n\t}\n\n\treturn nil\n}", "func (e UpsertEventRequest_SectionValidationError) Reason() string { return e.reason }", "func (e GetEventByIDResponse_SectionValidationError) Cause() error { return e.cause }", "func (e UpsertEventRequest_SectionValidationError) Key() bool { return e.key }", "func (a *UsersOnenoteSectionGroupsOnenoteSectionApiService) UsersOnenoteSectionGroupsSectionsUpdateParentSectionGroup(ctx _context.Context, userId string, sectionGroupId string, onenoteSectionId string, microsoftGraphSectionGroup MicrosoftGraphSectionGroup) (*_nethttp.Response, error) {\n\tvar (\n\t\tlocalVarHTTPMethod = _nethttp.MethodPatch\n\t\tlocalVarPostBody interface{}\n\t\tlocalVarFormFileName string\n\t\tlocalVarFileName string\n\t\tlocalVarFileBytes []byte\n\t)\n\n\t// create path and map variables\n\tlocalVarPath := a.client.cfg.BasePath + \"/users({user-id})/onenote/sectionGroups({sectionGroup-id})/sections({onenoteSection-id})/parentSectionGroup\"\n\tlocalVarPath = strings.Replace(localVarPath, \"{\"+\"user-id\"+\"}\", _neturl.QueryEscape(fmt.Sprintf(\"%v\", userId)), -1)\n\tlocalVarPath = strings.Replace(localVarPath, \"{\"+\"sectionGroup-id\"+\"}\", _neturl.QueryEscape(fmt.Sprintf(\"%v\", sectionGroupId)), -1)\n\tlocalVarPath = strings.Replace(localVarPath, \"{\"+\"onenoteSection-id\"+\"}\", _neturl.QueryEscape(fmt.Sprintf(\"%v\", onenoteSectionId)), -1)\n\n\tlocalVarHeaderParams := make(map[string]string)\n\tlocalVarQueryParams := _neturl.Values{}\n\tlocalVarFormParams := _neturl.Values{}\n\n\t// to determine the Content-Type header\n\tlocalVarHTTPContentTypes := []string{\"application/json\"}\n\n\t// set Content-Type header\n\tlocalVarHTTPContentType := selectHeaderContentType(localVarHTTPContentTypes)\n\tif localVarHTTPContentType != \"\" {\n\t\tlocalVarHeaderParams[\"Content-Type\"] = 
localVarHTTPContentType\n\t}\n\n\t// to determine the Accept header\n\tlocalVarHTTPHeaderAccepts := []string{\"application/json\"}\n\n\t// set Accept header\n\tlocalVarHTTPHeaderAccept := selectHeaderAccept(localVarHTTPHeaderAccepts)\n\tif localVarHTTPHeaderAccept != \"\" {\n\t\tlocalVarHeaderParams[\"Accept\"] = localVarHTTPHeaderAccept\n\t}\n\t// body params\n\tlocalVarPostBody = &microsoftGraphSectionGroup\n\tr, err := a.client.prepareRequest(ctx, localVarPath, localVarHTTPMethod, localVarPostBody, localVarHeaderParams, localVarQueryParams, localVarFormParams, localVarFormFileName, localVarFileName, localVarFileBytes)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\n\tlocalVarHTTPResponse, err := a.client.callAPI(r)\n\tif err != nil || localVarHTTPResponse == nil {\n\t\treturn localVarHTTPResponse, err\n\t}\n\n\tlocalVarBody, err := _ioutil.ReadAll(localVarHTTPResponse.Body)\n\tlocalVarHTTPResponse.Body.Close()\n\tif err != nil {\n\t\treturn localVarHTTPResponse, err\n\t}\n\n\tif localVarHTTPResponse.StatusCode >= 300 {\n\t\tnewErr := GenericOpenAPIError{\n\t\t\tbody: localVarBody,\n\t\t\terror: localVarHTTPResponse.Status,\n\t\t}\n\t\tif localVarHTTPResponse.StatusCode == 0 {\n\t\t\tvar v OdataError\n\t\t\terr = a.client.decode(&v, localVarBody, localVarHTTPResponse.Header.Get(\"Content-Type\"))\n\t\t\tif err != nil {\n\t\t\t\tnewErr.error = err.Error()\n\t\t\t\treturn localVarHTTPResponse, newErr\n\t\t\t}\n\t\t\tnewErr.model = v\n\t\t\treturn localVarHTTPResponse, newErr\n\t\t}\n\t\treturn localVarHTTPResponse, newErr\n\t}\n\n\treturn localVarHTTPResponse, nil\n}", "func (txb *Builder) AddRequestSection(req *sctransaction.RequestSection) error {\n\ttargetAddr := (address.Address)(req.Target().ChainID())\n\tif err := txb.MintColor(targetAddr, balance.ColorIOTA, 1); err != nil {\n\t\treturn err\n\t}\n\tvar err error\n\ttran := req.Transfer()\n\tif tran != nil {\n\t\ttran.Iterate(func(col balance.Color, bal int64) bool {\n\t\t\tif err = txb.MoveTokensToAddress(targetAddr, col, bal); err != nil {\n\t\t\t\treturn false\n\t\t\t}\n\t\t\treturn true\n\t\t})\n\t}\n\ttxb.requestBlocks = append(txb.requestBlocks, req)\n\treturn nil\n}", "func (a *UsersOnenoteNotebooksOnenoteSectionApiService) UsersOnenoteNotebooksSectionsParentSectionGroupUpdateSections(ctx _context.Context, userId string, notebookId string, onenoteSectionId string, onenoteSectionId1 string, microsoftGraphOnenoteSection MicrosoftGraphOnenoteSection) (*_nethttp.Response, error) {\n\tvar (\n\t\tlocalVarHTTPMethod = _nethttp.MethodPatch\n\t\tlocalVarPostBody interface{}\n\t\tlocalVarFormFileName string\n\t\tlocalVarFileName string\n\t\tlocalVarFileBytes []byte\n\t)\n\n\t// create path and map variables\n\tlocalVarPath := a.client.cfg.BasePath + \"/users({user-id})/onenote/notebooks({notebook-id})/sections({onenoteSection-id})/parentSectionGroup/sections({onenoteSection-id1})\"\n\tlocalVarPath = strings.Replace(localVarPath, \"{\"+\"user-id\"+\"}\", _neturl.QueryEscape(fmt.Sprintf(\"%v\", userId)), -1)\n\tlocalVarPath = strings.Replace(localVarPath, \"{\"+\"notebook-id\"+\"}\", _neturl.QueryEscape(fmt.Sprintf(\"%v\", notebookId)), -1)\n\tlocalVarPath = strings.Replace(localVarPath, \"{\"+\"onenoteSection-id\"+\"}\", _neturl.QueryEscape(fmt.Sprintf(\"%v\", onenoteSectionId)), -1)\n\tlocalVarPath = strings.Replace(localVarPath, \"{\"+\"onenoteSection-id1\"+\"}\", _neturl.QueryEscape(fmt.Sprintf(\"%v\", onenoteSectionId1)), -1)\n\n\tlocalVarHeaderParams := make(map[string]string)\n\tlocalVarQueryParams := 
_neturl.Values{}\n\tlocalVarFormParams := _neturl.Values{}\n\n\t// to determine the Content-Type header\n\tlocalVarHTTPContentTypes := []string{\"application/json\"}\n\n\t// set Content-Type header\n\tlocalVarHTTPContentType := selectHeaderContentType(localVarHTTPContentTypes)\n\tif localVarHTTPContentType != \"\" {\n\t\tlocalVarHeaderParams[\"Content-Type\"] = localVarHTTPContentType\n\t}\n\n\t// to determine the Accept header\n\tlocalVarHTTPHeaderAccepts := []string{\"application/json\"}\n\n\t// set Accept header\n\tlocalVarHTTPHeaderAccept := selectHeaderAccept(localVarHTTPHeaderAccepts)\n\tif localVarHTTPHeaderAccept != \"\" {\n\t\tlocalVarHeaderParams[\"Accept\"] = localVarHTTPHeaderAccept\n\t}\n\t// body params\n\tlocalVarPostBody = &microsoftGraphOnenoteSection\n\tr, err := a.client.prepareRequest(ctx, localVarPath, localVarHTTPMethod, localVarPostBody, localVarHeaderParams, localVarQueryParams, localVarFormParams, localVarFormFileName, localVarFileName, localVarFileBytes)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\n\tlocalVarHTTPResponse, err := a.client.callAPI(r)\n\tif err != nil || localVarHTTPResponse == nil {\n\t\treturn localVarHTTPResponse, err\n\t}\n\n\tlocalVarBody, err := _ioutil.ReadAll(localVarHTTPResponse.Body)\n\tlocalVarHTTPResponse.Body.Close()\n\tif err != nil {\n\t\treturn localVarHTTPResponse, err\n\t}\n\n\tif localVarHTTPResponse.StatusCode >= 300 {\n\t\tnewErr := GenericOpenAPIError{\n\t\t\tbody: localVarBody,\n\t\t\terror: localVarHTTPResponse.Status,\n\t\t}\n\t\tif localVarHTTPResponse.StatusCode == 0 {\n\t\t\tvar v OdataError\n\t\t\terr = a.client.decode(&v, localVarBody, localVarHTTPResponse.Header.Get(\"Content-Type\"))\n\t\t\tif err != nil {\n\t\t\t\tnewErr.error = err.Error()\n\t\t\t\treturn localVarHTTPResponse, newErr\n\t\t\t}\n\t\t\tnewErr.model = v\n\t\t\treturn localVarHTTPResponse, newErr\n\t\t}\n\t\treturn localVarHTTPResponse, newErr\n\t}\n\n\treturn localVarHTTPResponse, nil\n}", "func (a *UsersOnenoteNotebooksOnenoteSectionApiService) UsersOnenoteNotebooksSectionsUpdateParentSectionGroup(ctx _context.Context, userId string, notebookId string, onenoteSectionId string, microsoftGraphSectionGroup MicrosoftGraphSectionGroup) (*_nethttp.Response, error) {\n\tvar (\n\t\tlocalVarHTTPMethod = _nethttp.MethodPatch\n\t\tlocalVarPostBody interface{}\n\t\tlocalVarFormFileName string\n\t\tlocalVarFileName string\n\t\tlocalVarFileBytes []byte\n\t)\n\n\t// create path and map variables\n\tlocalVarPath := a.client.cfg.BasePath + \"/users({user-id})/onenote/notebooks({notebook-id})/sections({onenoteSection-id})/parentSectionGroup\"\n\tlocalVarPath = strings.Replace(localVarPath, \"{\"+\"user-id\"+\"}\", _neturl.QueryEscape(fmt.Sprintf(\"%v\", userId)), -1)\n\tlocalVarPath = strings.Replace(localVarPath, \"{\"+\"notebook-id\"+\"}\", _neturl.QueryEscape(fmt.Sprintf(\"%v\", notebookId)), -1)\n\tlocalVarPath = strings.Replace(localVarPath, \"{\"+\"onenoteSection-id\"+\"}\", _neturl.QueryEscape(fmt.Sprintf(\"%v\", onenoteSectionId)), -1)\n\n\tlocalVarHeaderParams := make(map[string]string)\n\tlocalVarQueryParams := _neturl.Values{}\n\tlocalVarFormParams := _neturl.Values{}\n\n\t// to determine the Content-Type header\n\tlocalVarHTTPContentTypes := []string{\"application/json\"}\n\n\t// set Content-Type header\n\tlocalVarHTTPContentType := selectHeaderContentType(localVarHTTPContentTypes)\n\tif localVarHTTPContentType != \"\" {\n\t\tlocalVarHeaderParams[\"Content-Type\"] = localVarHTTPContentType\n\t}\n\n\t// to determine the Accept 
header\n\tlocalVarHTTPHeaderAccepts := []string{\"application/json\"}\n\n\t// set Accept header\n\tlocalVarHTTPHeaderAccept := selectHeaderAccept(localVarHTTPHeaderAccepts)\n\tif localVarHTTPHeaderAccept != \"\" {\n\t\tlocalVarHeaderParams[\"Accept\"] = localVarHTTPHeaderAccept\n\t}\n\t// body params\n\tlocalVarPostBody = &microsoftGraphSectionGroup\n\tr, err := a.client.prepareRequest(ctx, localVarPath, localVarHTTPMethod, localVarPostBody, localVarHeaderParams, localVarQueryParams, localVarFormParams, localVarFormFileName, localVarFileName, localVarFileBytes)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\n\tlocalVarHTTPResponse, err := a.client.callAPI(r)\n\tif err != nil || localVarHTTPResponse == nil {\n\t\treturn localVarHTTPResponse, err\n\t}\n\n\tlocalVarBody, err := _ioutil.ReadAll(localVarHTTPResponse.Body)\n\tlocalVarHTTPResponse.Body.Close()\n\tif err != nil {\n\t\treturn localVarHTTPResponse, err\n\t}\n\n\tif localVarHTTPResponse.StatusCode >= 300 {\n\t\tnewErr := GenericOpenAPIError{\n\t\t\tbody: localVarBody,\n\t\t\terror: localVarHTTPResponse.Status,\n\t\t}\n\t\tif localVarHTTPResponse.StatusCode == 0 {\n\t\t\tvar v OdataError\n\t\t\terr = a.client.decode(&v, localVarBody, localVarHTTPResponse.Header.Get(\"Content-Type\"))\n\t\t\tif err != nil {\n\t\t\t\tnewErr.error = err.Error()\n\t\t\t\treturn localVarHTTPResponse, newErr\n\t\t\t}\n\t\t\tnewErr.model = v\n\t\t\treturn localVarHTTPResponse, newErr\n\t\t}\n\t\treturn localVarHTTPResponse, newErr\n\t}\n\n\treturn localVarHTTPResponse, nil\n}", "func (r *SectionGroupRequest) Update(ctx context.Context, reqObj *SectionGroup) error {\n\treturn r.JSONRequest(ctx, \"PATCH\", \"\", reqObj, nil)\n}", "func (m *UpsertEventRequest_Option) Validate() error {\n\tif m == nil {\n\t\treturn nil\n\t}\n\n\tif utf8.RuneCountInString(m.GetContent()) < 1 {\n\t\treturn UpsertEventRequest_OptionValidationError{\n\t\t\tfield: \"Content\",\n\t\t\treason: \"value length must be at least 1 runes\",\n\t\t}\n\t}\n\n\treturn nil\n}", "func UpdateSectionValidity(section MessageSectionWithSig, pkeyValidSince, pkeyValidUntil, sigValidSince, sigValidUntil int64, maxVal MaxCacheValidity) {\n\tif section != nil {\n\t\tvar maxValidity time.Duration\n\t\tswitch section.(type) {\n\t\tcase *AssertionSection:\n\t\t\tmaxValidity = maxVal.AssertionValidity\n\t\tcase *ShardSection:\n\t\t\tmaxValidity = maxVal.ShardValidity\n\t\tcase *ZoneSection:\n\t\t\tmaxValidity = maxVal.ZoneValidity\n\t\tcase *AddressAssertionSection:\n\t\t\tmaxValidity = maxVal.AddressAssertionValidity\n\t\tcase *AddressZoneSection:\n\t\t\tmaxValidity = maxVal.AddressZoneValidity\n\t\tdefault:\n\t\t\tlog.Warn(\"Not supported section\", \"type\", fmt.Sprintf(\"%T\", section))\n\t\t\treturn\n\t\t}\n\t\tif pkeyValidSince < sigValidSince {\n\t\t\tif pkeyValidUntil < sigValidUntil {\n\t\t\t\tsection.UpdateValidity(sigValidSince, pkeyValidUntil, maxValidity)\n\t\t\t} else {\n\t\t\t\tsection.UpdateValidity(sigValidSince, sigValidUntil, maxValidity)\n\t\t\t}\n\n\t\t} else {\n\t\t\tif pkeyValidUntil < sigValidUntil {\n\t\t\t\tsection.UpdateValidity(pkeyValidSince, pkeyValidUntil, maxValidity)\n\t\t\t} else {\n\t\t\t\tsection.UpdateValidity(pkeyValidSince, sigValidUntil, maxValidity)\n\t\t\t}\n\t\t}\n\t}\n}", "func (a *UsersOnenoteSectionGroupsOnenoteSectionApiService) UsersOnenoteSectionGroupsSectionsParentNotebookUpdateSections(ctx _context.Context, userId string, sectionGroupId string, onenoteSectionId string, onenoteSectionId1 string, microsoftGraphOnenoteSection MicrosoftGraphOnenoteSection) 
(*_nethttp.Response, error) {\n\tvar (\n\t\tlocalVarHTTPMethod = _nethttp.MethodPatch\n\t\tlocalVarPostBody interface{}\n\t\tlocalVarFormFileName string\n\t\tlocalVarFileName string\n\t\tlocalVarFileBytes []byte\n\t)\n\n\t// create path and map variables\n\tlocalVarPath := a.client.cfg.BasePath + \"/users({user-id})/onenote/sectionGroups({sectionGroup-id})/sections({onenoteSection-id})/parentNotebook/sections({onenoteSection-id1})\"\n\tlocalVarPath = strings.Replace(localVarPath, \"{\"+\"user-id\"+\"}\", _neturl.QueryEscape(fmt.Sprintf(\"%v\", userId)), -1)\n\tlocalVarPath = strings.Replace(localVarPath, \"{\"+\"sectionGroup-id\"+\"}\", _neturl.QueryEscape(fmt.Sprintf(\"%v\", sectionGroupId)), -1)\n\tlocalVarPath = strings.Replace(localVarPath, \"{\"+\"onenoteSection-id\"+\"}\", _neturl.QueryEscape(fmt.Sprintf(\"%v\", onenoteSectionId)), -1)\n\tlocalVarPath = strings.Replace(localVarPath, \"{\"+\"onenoteSection-id1\"+\"}\", _neturl.QueryEscape(fmt.Sprintf(\"%v\", onenoteSectionId1)), -1)\n\n\tlocalVarHeaderParams := make(map[string]string)\n\tlocalVarQueryParams := _neturl.Values{}\n\tlocalVarFormParams := _neturl.Values{}\n\n\t// to determine the Content-Type header\n\tlocalVarHTTPContentTypes := []string{\"application/json\"}\n\n\t// set Content-Type header\n\tlocalVarHTTPContentType := selectHeaderContentType(localVarHTTPContentTypes)\n\tif localVarHTTPContentType != \"\" {\n\t\tlocalVarHeaderParams[\"Content-Type\"] = localVarHTTPContentType\n\t}\n\n\t// to determine the Accept header\n\tlocalVarHTTPHeaderAccepts := []string{\"application/json\"}\n\n\t// set Accept header\n\tlocalVarHTTPHeaderAccept := selectHeaderAccept(localVarHTTPHeaderAccepts)\n\tif localVarHTTPHeaderAccept != \"\" {\n\t\tlocalVarHeaderParams[\"Accept\"] = localVarHTTPHeaderAccept\n\t}\n\t// body params\n\tlocalVarPostBody = &microsoftGraphOnenoteSection\n\tr, err := a.client.prepareRequest(ctx, localVarPath, localVarHTTPMethod, localVarPostBody, localVarHeaderParams, localVarQueryParams, localVarFormParams, localVarFormFileName, localVarFileName, localVarFileBytes)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\n\tlocalVarHTTPResponse, err := a.client.callAPI(r)\n\tif err != nil || localVarHTTPResponse == nil {\n\t\treturn localVarHTTPResponse, err\n\t}\n\n\tlocalVarBody, err := _ioutil.ReadAll(localVarHTTPResponse.Body)\n\tlocalVarHTTPResponse.Body.Close()\n\tif err != nil {\n\t\treturn localVarHTTPResponse, err\n\t}\n\n\tif localVarHTTPResponse.StatusCode >= 300 {\n\t\tnewErr := GenericOpenAPIError{\n\t\t\tbody: localVarBody,\n\t\t\terror: localVarHTTPResponse.Status,\n\t\t}\n\t\tif localVarHTTPResponse.StatusCode == 0 {\n\t\t\tvar v OdataError\n\t\t\terr = a.client.decode(&v, localVarBody, localVarHTTPResponse.Header.Get(\"Content-Type\"))\n\t\t\tif err != nil {\n\t\t\t\tnewErr.error = err.Error()\n\t\t\t\treturn localVarHTTPResponse, newErr\n\t\t\t}\n\t\t\tnewErr.model = v\n\t\t\treturn localVarHTTPResponse, newErr\n\t\t}\n\t\treturn localVarHTTPResponse, newErr\n\t}\n\n\treturn localVarHTTPResponse, nil\n}", "func (a *UsersOnenoteNotebooksOnenoteSectionApiService) UsersOnenoteNotebooksSectionsParentSectionGroupUpdateSectionGroups(ctx _context.Context, userId string, notebookId string, onenoteSectionId string, sectionGroupId string, microsoftGraphSectionGroup MicrosoftGraphSectionGroup) (*_nethttp.Response, error) {\n\tvar (\n\t\tlocalVarHTTPMethod = _nethttp.MethodPatch\n\t\tlocalVarPostBody interface{}\n\t\tlocalVarFormFileName string\n\t\tlocalVarFileName string\n\t\tlocalVarFileBytes []byte\n\t)\n\n\t// 
create path and map variables\n\tlocalVarPath := a.client.cfg.BasePath + \"/users({user-id})/onenote/notebooks({notebook-id})/sections({onenoteSection-id})/parentSectionGroup/sectionGroups({sectionGroup-id})\"\n\tlocalVarPath = strings.Replace(localVarPath, \"{\"+\"user-id\"+\"}\", _neturl.QueryEscape(fmt.Sprintf(\"%v\", userId)), -1)\n\tlocalVarPath = strings.Replace(localVarPath, \"{\"+\"notebook-id\"+\"}\", _neturl.QueryEscape(fmt.Sprintf(\"%v\", notebookId)), -1)\n\tlocalVarPath = strings.Replace(localVarPath, \"{\"+\"onenoteSection-id\"+\"}\", _neturl.QueryEscape(fmt.Sprintf(\"%v\", onenoteSectionId)), -1)\n\tlocalVarPath = strings.Replace(localVarPath, \"{\"+\"sectionGroup-id\"+\"}\", _neturl.QueryEscape(fmt.Sprintf(\"%v\", sectionGroupId)), -1)\n\n\tlocalVarHeaderParams := make(map[string]string)\n\tlocalVarQueryParams := _neturl.Values{}\n\tlocalVarFormParams := _neturl.Values{}\n\n\t// to determine the Content-Type header\n\tlocalVarHTTPContentTypes := []string{\"application/json\"}\n\n\t// set Content-Type header\n\tlocalVarHTTPContentType := selectHeaderContentType(localVarHTTPContentTypes)\n\tif localVarHTTPContentType != \"\" {\n\t\tlocalVarHeaderParams[\"Content-Type\"] = localVarHTTPContentType\n\t}\n\n\t// to determine the Accept header\n\tlocalVarHTTPHeaderAccepts := []string{\"application/json\"}\n\n\t// set Accept header\n\tlocalVarHTTPHeaderAccept := selectHeaderAccept(localVarHTTPHeaderAccepts)\n\tif localVarHTTPHeaderAccept != \"\" {\n\t\tlocalVarHeaderParams[\"Accept\"] = localVarHTTPHeaderAccept\n\t}\n\t// body params\n\tlocalVarPostBody = &microsoftGraphSectionGroup\n\tr, err := a.client.prepareRequest(ctx, localVarPath, localVarHTTPMethod, localVarPostBody, localVarHeaderParams, localVarQueryParams, localVarFormParams, localVarFormFileName, localVarFileName, localVarFileBytes)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\n\tlocalVarHTTPResponse, err := a.client.callAPI(r)\n\tif err != nil || localVarHTTPResponse == nil {\n\t\treturn localVarHTTPResponse, err\n\t}\n\n\tlocalVarBody, err := _ioutil.ReadAll(localVarHTTPResponse.Body)\n\tlocalVarHTTPResponse.Body.Close()\n\tif err != nil {\n\t\treturn localVarHTTPResponse, err\n\t}\n\n\tif localVarHTTPResponse.StatusCode >= 300 {\n\t\tnewErr := GenericOpenAPIError{\n\t\t\tbody: localVarBody,\n\t\t\terror: localVarHTTPResponse.Status,\n\t\t}\n\t\tif localVarHTTPResponse.StatusCode == 0 {\n\t\t\tvar v OdataError\n\t\t\terr = a.client.decode(&v, localVarBody, localVarHTTPResponse.Header.Get(\"Content-Type\"))\n\t\t\tif err != nil {\n\t\t\t\tnewErr.error = err.Error()\n\t\t\t\treturn localVarHTTPResponse, newErr\n\t\t\t}\n\t\t\tnewErr.model = v\n\t\t\treturn localVarHTTPResponse, newErr\n\t\t}\n\t\treturn localVarHTTPResponse, newErr\n\t}\n\n\treturn localVarHTTPResponse, nil\n}", "func (a *UsersOnenoteSectionGroupsOnenoteSectionApiService) UsersOnenoteSectionGroupsUpdateSections(ctx _context.Context, userId string, sectionGroupId string, onenoteSectionId string, microsoftGraphOnenoteSection MicrosoftGraphOnenoteSection) (*_nethttp.Response, error) {\n\tvar (\n\t\tlocalVarHTTPMethod = _nethttp.MethodPatch\n\t\tlocalVarPostBody interface{}\n\t\tlocalVarFormFileName string\n\t\tlocalVarFileName string\n\t\tlocalVarFileBytes []byte\n\t)\n\n\t// create path and map variables\n\tlocalVarPath := a.client.cfg.BasePath + \"/users({user-id})/onenote/sectionGroups({sectionGroup-id})/sections({onenoteSection-id})\"\n\tlocalVarPath = strings.Replace(localVarPath, \"{\"+\"user-id\"+\"}\", _neturl.QueryEscape(fmt.Sprintf(\"%v\", 
userId)), -1)\n\tlocalVarPath = strings.Replace(localVarPath, \"{\"+\"sectionGroup-id\"+\"}\", _neturl.QueryEscape(fmt.Sprintf(\"%v\", sectionGroupId)), -1)\n\tlocalVarPath = strings.Replace(localVarPath, \"{\"+\"onenoteSection-id\"+\"}\", _neturl.QueryEscape(fmt.Sprintf(\"%v\", onenoteSectionId)), -1)\n\n\tlocalVarHeaderParams := make(map[string]string)\n\tlocalVarQueryParams := _neturl.Values{}\n\tlocalVarFormParams := _neturl.Values{}\n\n\t// to determine the Content-Type header\n\tlocalVarHTTPContentTypes := []string{\"application/json\"}\n\n\t// set Content-Type header\n\tlocalVarHTTPContentType := selectHeaderContentType(localVarHTTPContentTypes)\n\tif localVarHTTPContentType != \"\" {\n\t\tlocalVarHeaderParams[\"Content-Type\"] = localVarHTTPContentType\n\t}\n\n\t// to determine the Accept header\n\tlocalVarHTTPHeaderAccepts := []string{\"application/json\"}\n\n\t// set Accept header\n\tlocalVarHTTPHeaderAccept := selectHeaderAccept(localVarHTTPHeaderAccepts)\n\tif localVarHTTPHeaderAccept != \"\" {\n\t\tlocalVarHeaderParams[\"Accept\"] = localVarHTTPHeaderAccept\n\t}\n\t// body params\n\tlocalVarPostBody = &microsoftGraphOnenoteSection\n\tr, err := a.client.prepareRequest(ctx, localVarPath, localVarHTTPMethod, localVarPostBody, localVarHeaderParams, localVarQueryParams, localVarFormParams, localVarFormFileName, localVarFileName, localVarFileBytes)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\n\tlocalVarHTTPResponse, err := a.client.callAPI(r)\n\tif err != nil || localVarHTTPResponse == nil {\n\t\treturn localVarHTTPResponse, err\n\t}\n\n\tlocalVarBody, err := _ioutil.ReadAll(localVarHTTPResponse.Body)\n\tlocalVarHTTPResponse.Body.Close()\n\tif err != nil {\n\t\treturn localVarHTTPResponse, err\n\t}\n\n\tif localVarHTTPResponse.StatusCode >= 300 {\n\t\tnewErr := GenericOpenAPIError{\n\t\t\tbody: localVarBody,\n\t\t\terror: localVarHTTPResponse.Status,\n\t\t}\n\t\tif localVarHTTPResponse.StatusCode == 0 {\n\t\t\tvar v OdataError\n\t\t\terr = a.client.decode(&v, localVarBody, localVarHTTPResponse.Header.Get(\"Content-Type\"))\n\t\t\tif err != nil {\n\t\t\t\tnewErr.error = err.Error()\n\t\t\t\treturn localVarHTTPResponse, newErr\n\t\t\t}\n\t\t\tnewErr.model = v\n\t\t\treturn localVarHTTPResponse, newErr\n\t\t}\n\t\treturn localVarHTTPResponse, newErr\n\t}\n\n\treturn localVarHTTPResponse, nil\n}", "func (a *UsersOnenoteNotebooksOnenoteSectionApiService) UsersOnenoteNotebooksSectionsParentSectionGroupUpdateParentSectionGroup(ctx _context.Context, userId string, notebookId string, onenoteSectionId string, microsoftGraphSectionGroup MicrosoftGraphSectionGroup) (*_nethttp.Response, error) {\n\tvar (\n\t\tlocalVarHTTPMethod = _nethttp.MethodPatch\n\t\tlocalVarPostBody interface{}\n\t\tlocalVarFormFileName string\n\t\tlocalVarFileName string\n\t\tlocalVarFileBytes []byte\n\t)\n\n\t// create path and map variables\n\tlocalVarPath := a.client.cfg.BasePath + \"/users({user-id})/onenote/notebooks({notebook-id})/sections({onenoteSection-id})/parentSectionGroup/parentSectionGroup\"\n\tlocalVarPath = strings.Replace(localVarPath, \"{\"+\"user-id\"+\"}\", _neturl.QueryEscape(fmt.Sprintf(\"%v\", userId)), -1)\n\tlocalVarPath = strings.Replace(localVarPath, \"{\"+\"notebook-id\"+\"}\", _neturl.QueryEscape(fmt.Sprintf(\"%v\", notebookId)), -1)\n\tlocalVarPath = strings.Replace(localVarPath, \"{\"+\"onenoteSection-id\"+\"}\", _neturl.QueryEscape(fmt.Sprintf(\"%v\", onenoteSectionId)), -1)\n\n\tlocalVarHeaderParams := make(map[string]string)\n\tlocalVarQueryParams := 
_neturl.Values{}\n\tlocalVarFormParams := _neturl.Values{}\n\n\t// to determine the Content-Type header\n\tlocalVarHTTPContentTypes := []string{\"application/json\"}\n\n\t// set Content-Type header\n\tlocalVarHTTPContentType := selectHeaderContentType(localVarHTTPContentTypes)\n\tif localVarHTTPContentType != \"\" {\n\t\tlocalVarHeaderParams[\"Content-Type\"] = localVarHTTPContentType\n\t}\n\n\t// to determine the Accept header\n\tlocalVarHTTPHeaderAccepts := []string{\"application/json\"}\n\n\t// set Accept header\n\tlocalVarHTTPHeaderAccept := selectHeaderAccept(localVarHTTPHeaderAccepts)\n\tif localVarHTTPHeaderAccept != \"\" {\n\t\tlocalVarHeaderParams[\"Accept\"] = localVarHTTPHeaderAccept\n\t}\n\t// body params\n\tlocalVarPostBody = &microsoftGraphSectionGroup\n\tr, err := a.client.prepareRequest(ctx, localVarPath, localVarHTTPMethod, localVarPostBody, localVarHeaderParams, localVarQueryParams, localVarFormParams, localVarFormFileName, localVarFileName, localVarFileBytes)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\n\tlocalVarHTTPResponse, err := a.client.callAPI(r)\n\tif err != nil || localVarHTTPResponse == nil {\n\t\treturn localVarHTTPResponse, err\n\t}\n\n\tlocalVarBody, err := _ioutil.ReadAll(localVarHTTPResponse.Body)\n\tlocalVarHTTPResponse.Body.Close()\n\tif err != nil {\n\t\treturn localVarHTTPResponse, err\n\t}\n\n\tif localVarHTTPResponse.StatusCode >= 300 {\n\t\tnewErr := GenericOpenAPIError{\n\t\t\tbody: localVarBody,\n\t\t\terror: localVarHTTPResponse.Status,\n\t\t}\n\t\tif localVarHTTPResponse.StatusCode == 0 {\n\t\t\tvar v OdataError\n\t\t\terr = a.client.decode(&v, localVarBody, localVarHTTPResponse.Header.Get(\"Content-Type\"))\n\t\t\tif err != nil {\n\t\t\t\tnewErr.error = err.Error()\n\t\t\t\treturn localVarHTTPResponse, newErr\n\t\t\t}\n\t\t\tnewErr.model = v\n\t\t\treturn localVarHTTPResponse, newErr\n\t\t}\n\t\treturn localVarHTTPResponse, newErr\n\t}\n\n\treturn localVarHTTPResponse, nil\n}", "func (ini INI) SectionUpdate(sectionName string, data map[string]string) {\n\tif _, exist := ini[sectionName]; !exist {\n\t\tini[sectionName] = make(kvMap)\n\t}\n\tfor k, v := range data {\n\t\tini[sectionName][k] = v\n\t}\n}", "func SignSection(s rainslib.MessageSectionWithSig, privateKey interface{}, sig rainslib.Signature, encoder rainslib.SignatureFormatEncoder) bool {\n\tlog.Debug(\"Sign Section\")\n\tif s == nil {\n\t\tlog.Warn(\"section is nil\")\n\t\treturn false\n\t}\n\tif int64(sig.ValidUntil) < time.Now().Unix() {\n\t\tlog.Warn(\"signature is expired\", \"signature\", sig)\n\t\treturn false\n\t}\n\tif !checkStringFields(s) {\n\t\treturn false\n\t}\n\ts.Sort()\n\terr := (&sig).SignData(privateKey, encoder.EncodeSection(s))\n\tif err != nil {\n\t\treturn false\n\t}\n\ts.AddSig(sig)\n\treturn true\n}", "func (a *UsersOnenoteSectionGroupsOnenoteSectionApiService) UsersOnenoteSectionGroupsSectionsParentNotebookUpdateSectionGroups(ctx _context.Context, userId string, sectionGroupId string, onenoteSectionId string, sectionGroupId1 string, microsoftGraphSectionGroup MicrosoftGraphSectionGroup) (*_nethttp.Response, error) {\n\tvar (\n\t\tlocalVarHTTPMethod = _nethttp.MethodPatch\n\t\tlocalVarPostBody interface{}\n\t\tlocalVarFormFileName string\n\t\tlocalVarFileName string\n\t\tlocalVarFileBytes []byte\n\t)\n\n\t// create path and map variables\n\tlocalVarPath := a.client.cfg.BasePath + \"/users({user-id})/onenote/sectionGroups({sectionGroup-id})/sections({onenoteSection-id})/parentNotebook/sectionGroups({sectionGroup-id1})\"\n\tlocalVarPath = 
strings.Replace(localVarPath, \"{\"+\"user-id\"+\"}\", _neturl.QueryEscape(fmt.Sprintf(\"%v\", userId)), -1)\n\tlocalVarPath = strings.Replace(localVarPath, \"{\"+\"sectionGroup-id\"+\"}\", _neturl.QueryEscape(fmt.Sprintf(\"%v\", sectionGroupId)), -1)\n\tlocalVarPath = strings.Replace(localVarPath, \"{\"+\"onenoteSection-id\"+\"}\", _neturl.QueryEscape(fmt.Sprintf(\"%v\", onenoteSectionId)), -1)\n\tlocalVarPath = strings.Replace(localVarPath, \"{\"+\"sectionGroup-id1\"+\"}\", _neturl.QueryEscape(fmt.Sprintf(\"%v\", sectionGroupId1)), -1)\n\n\tlocalVarHeaderParams := make(map[string]string)\n\tlocalVarQueryParams := _neturl.Values{}\n\tlocalVarFormParams := _neturl.Values{}\n\n\t// to determine the Content-Type header\n\tlocalVarHTTPContentTypes := []string{\"application/json\"}\n\n\t// set Content-Type header\n\tlocalVarHTTPContentType := selectHeaderContentType(localVarHTTPContentTypes)\n\tif localVarHTTPContentType != \"\" {\n\t\tlocalVarHeaderParams[\"Content-Type\"] = localVarHTTPContentType\n\t}\n\n\t// to determine the Accept header\n\tlocalVarHTTPHeaderAccepts := []string{\"application/json\"}\n\n\t// set Accept header\n\tlocalVarHTTPHeaderAccept := selectHeaderAccept(localVarHTTPHeaderAccepts)\n\tif localVarHTTPHeaderAccept != \"\" {\n\t\tlocalVarHeaderParams[\"Accept\"] = localVarHTTPHeaderAccept\n\t}\n\t// body params\n\tlocalVarPostBody = &microsoftGraphSectionGroup\n\tr, err := a.client.prepareRequest(ctx, localVarPath, localVarHTTPMethod, localVarPostBody, localVarHeaderParams, localVarQueryParams, localVarFormParams, localVarFormFileName, localVarFileName, localVarFileBytes)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\n\tlocalVarHTTPResponse, err := a.client.callAPI(r)\n\tif err != nil || localVarHTTPResponse == nil {\n\t\treturn localVarHTTPResponse, err\n\t}\n\n\tlocalVarBody, err := _ioutil.ReadAll(localVarHTTPResponse.Body)\n\tlocalVarHTTPResponse.Body.Close()\n\tif err != nil {\n\t\treturn localVarHTTPResponse, err\n\t}\n\n\tif localVarHTTPResponse.StatusCode >= 300 {\n\t\tnewErr := GenericOpenAPIError{\n\t\t\tbody: localVarBody,\n\t\t\terror: localVarHTTPResponse.Status,\n\t\t}\n\t\tif localVarHTTPResponse.StatusCode == 0 {\n\t\t\tvar v OdataError\n\t\t\terr = a.client.decode(&v, localVarBody, localVarHTTPResponse.Header.Get(\"Content-Type\"))\n\t\t\tif err != nil {\n\t\t\t\tnewErr.error = err.Error()\n\t\t\t\treturn localVarHTTPResponse, newErr\n\t\t\t}\n\t\t\tnewErr.model = v\n\t\t\treturn localVarHTTPResponse, newErr\n\t\t}\n\t\treturn localVarHTTPResponse, newErr\n\t}\n\n\treturn localVarHTTPResponse, nil\n}", "func (a *GroupsOnenotePagesOnenoteSectionApiService) GroupsOnenotePagesParentSectionParentSectionGroupUpdateSections(ctx _context.Context, groupId string, onenotePageId string, onenoteSectionId string, microsoftGraphOnenoteSection MicrosoftGraphOnenoteSection) (*_nethttp.Response, error) {\n\tvar (\n\t\tlocalVarHTTPMethod = _nethttp.MethodPatch\n\t\tlocalVarPostBody interface{}\n\t\tlocalVarFormFileName string\n\t\tlocalVarFileName string\n\t\tlocalVarFileBytes []byte\n\t)\n\n\t// create path and map variables\n\tlocalVarPath := a.client.cfg.BasePath + \"/groups({group-id})/onenote/pages({onenotePage-id})/parentSection/parentSectionGroup/sections({onenoteSection-id})\"\n\tlocalVarPath = strings.Replace(localVarPath, \"{\"+\"group-id\"+\"}\", _neturl.QueryEscape(fmt.Sprintf(\"%v\", groupId)), -1)\n\tlocalVarPath = strings.Replace(localVarPath, \"{\"+\"onenotePage-id\"+\"}\", _neturl.QueryEscape(fmt.Sprintf(\"%v\", onenotePageId)), -1)\n\tlocalVarPath = 
strings.Replace(localVarPath, \"{\"+\"onenoteSection-id\"+\"}\", _neturl.QueryEscape(fmt.Sprintf(\"%v\", onenoteSectionId)), -1)\n\n\tlocalVarHeaderParams := make(map[string]string)\n\tlocalVarQueryParams := _neturl.Values{}\n\tlocalVarFormParams := _neturl.Values{}\n\n\t// to determine the Content-Type header\n\tlocalVarHTTPContentTypes := []string{\"application/json\"}\n\n\t// set Content-Type header\n\tlocalVarHTTPContentType := selectHeaderContentType(localVarHTTPContentTypes)\n\tif localVarHTTPContentType != \"\" {\n\t\tlocalVarHeaderParams[\"Content-Type\"] = localVarHTTPContentType\n\t}\n\n\t// to determine the Accept header\n\tlocalVarHTTPHeaderAccepts := []string{\"application/json\"}\n\n\t// set Accept header\n\tlocalVarHTTPHeaderAccept := selectHeaderAccept(localVarHTTPHeaderAccepts)\n\tif localVarHTTPHeaderAccept != \"\" {\n\t\tlocalVarHeaderParams[\"Accept\"] = localVarHTTPHeaderAccept\n\t}\n\t// body params\n\tlocalVarPostBody = &microsoftGraphOnenoteSection\n\tr, err := a.client.prepareRequest(ctx, localVarPath, localVarHTTPMethod, localVarPostBody, localVarHeaderParams, localVarQueryParams, localVarFormParams, localVarFormFileName, localVarFileName, localVarFileBytes)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\n\tlocalVarHTTPResponse, err := a.client.callAPI(r)\n\tif err != nil || localVarHTTPResponse == nil {\n\t\treturn localVarHTTPResponse, err\n\t}\n\n\tlocalVarBody, err := _ioutil.ReadAll(localVarHTTPResponse.Body)\n\tlocalVarHTTPResponse.Body.Close()\n\tif err != nil {\n\t\treturn localVarHTTPResponse, err\n\t}\n\n\tif localVarHTTPResponse.StatusCode >= 300 {\n\t\tnewErr := GenericOpenAPIError{\n\t\t\tbody: localVarBody,\n\t\t\terror: localVarHTTPResponse.Status,\n\t\t}\n\t\tif localVarHTTPResponse.StatusCode == 0 {\n\t\t\tvar v OdataError\n\t\t\terr = a.client.decode(&v, localVarBody, localVarHTTPResponse.Header.Get(\"Content-Type\"))\n\t\t\tif err != nil {\n\t\t\t\tnewErr.error = err.Error()\n\t\t\t\treturn localVarHTTPResponse, newErr\n\t\t\t}\n\t\t\tnewErr.model = v\n\t\t\treturn localVarHTTPResponse, newErr\n\t\t}\n\t\treturn localVarHTTPResponse, newErr\n\t}\n\n\treturn localVarHTTPResponse, nil\n}", "func (oo *OmciCC) PrepareOnuSectionsOfWindow(ctx context.Context,\n\taImageMeID uint16, aAckRequest uint8, aDownloadSectionNo uint8, aSection []byte,\n\tomciMsgsPerWindow *ia.OmciMessages, aIsExtendedOmci bool) (OmciTransferStructure, error) {\n\t//onuswsections uses only low prioirity tids\n\ttid := oo.GetOnuSwSecNextTid()\n\tlogger.Infow(ctx, \"send DlSectionRequest:\", log.Fields{\"device-id\": oo.deviceID,\n\t\t\"SequNo\": strconv.FormatInt(int64(tid), 16),\n\t\t\"InstId\": strconv.FormatInt(int64(aImageMeID), 16), \"omci-ack\": aAckRequest, \"sectionNo\": aDownloadSectionNo, \"sectionData\": aSection})\n\n\tvar omciTxReq OmciTransferStructure\n\tmsgType := omci.DownloadSectionRequestType\n\n\tif aAckRequest > 0 {\n\t\tmsgType = omci.DownloadSectionRequestWithResponseType\n\n\t}\n\tomciLayer := &omci.OMCI{\n\t\tTransactionID: tid,\n\t\tMessageType: msgType,\n\t\t// DeviceIdentifier: omci.BaselineIdent,\t\t// Optional, defaults to Baseline\n\t\t// Length: 0x28,\t\t\t\t\t\t// Optional, defaults to 40 octets\n\t}\n\tif aIsExtendedOmci {\n\t\tomciLayer.DeviceIdentifier = omci.ExtendedIdent\n\t}\n\tlocalSectionData := make([]byte, len(aSection))\n\n\tcopy(localSectionData[:], aSection) // as long as DownloadSectionRequest defines array for SectionData we need to copy into the array\n\trequest := &omci.DownloadSectionRequest{\n\t\tMeBasePacket: 
omci.MeBasePacket{\n\t\t\tEntityClass: me.SoftwareImageClassID,\n\t\t\tEntityInstance: aImageMeID, //inactive image\n\t\t\tExtended: aIsExtendedOmci,\n\t\t},\n\t\tSectionNumber: aDownloadSectionNo,\n\t\tSectionData: localSectionData,\n\t}\n\n\tvar options gopacket.SerializeOptions\n\toptions.FixLengths = true\n\tbuffer := gopacket.NewSerializeBuffer()\n\terr := gopacket.SerializeLayers(buffer, options, omciLayer, request)\n\tif err != nil {\n\t\tlogger.Errorw(ctx, \"Cannot serialize DlSectionRequest\", log.Fields{\"Err\": err,\n\t\t\t\"device-id\": oo.deviceID})\n\t\treturn omciTxReq, err\n\t}\n\toutgoingPacket := buffer.Bytes()\n\n\tomciMsgsPerWindow.Messages = append(omciMsgsPerWindow.Messages, outgoingPacket)\n\n\tif aAckRequest > 0 {\n\t\t// only the last section should have a timeout as an ack is required only for the last section of the window\n\t\tomciTxReq = OmciTransferStructure{\n\t\t\twithFramePrint: true,\n\t\t\tOnuSwWindow: omciMsgsPerWindow,\n\t\t}\n\t\treturn omciTxReq, nil\n\t}\n\n\treturn omciTxReq, nil\n}", "func (e UpsertEventRequest_SectionValidationError) Field() string { return e.field }", "func (e UpsertEventRequestValidationError) Cause() error { return e.cause }", "func (a *UsersOnenoteNotebooksOnenoteSectionApiService) UsersOnenoteNotebooksSectionsParentSectionGroupCreateSections(ctx _context.Context, userId string, notebookId string, onenoteSectionId string, microsoftGraphOnenoteSection MicrosoftGraphOnenoteSection) (MicrosoftGraphOnenoteSection, *_nethttp.Response, error) {\n\tvar (\n\t\tlocalVarHTTPMethod = _nethttp.MethodPost\n\t\tlocalVarPostBody interface{}\n\t\tlocalVarFormFileName string\n\t\tlocalVarFileName string\n\t\tlocalVarFileBytes []byte\n\t\tlocalVarReturnValue MicrosoftGraphOnenoteSection\n\t)\n\n\t// create path and map variables\n\tlocalVarPath := a.client.cfg.BasePath + \"/users({user-id})/onenote/notebooks({notebook-id})/sections({onenoteSection-id})/parentSectionGroup/sections\"\n\tlocalVarPath = strings.Replace(localVarPath, \"{\"+\"user-id\"+\"}\", _neturl.QueryEscape(fmt.Sprintf(\"%v\", userId)), -1)\n\tlocalVarPath = strings.Replace(localVarPath, \"{\"+\"notebook-id\"+\"}\", _neturl.QueryEscape(fmt.Sprintf(\"%v\", notebookId)), -1)\n\tlocalVarPath = strings.Replace(localVarPath, \"{\"+\"onenoteSection-id\"+\"}\", _neturl.QueryEscape(fmt.Sprintf(\"%v\", onenoteSectionId)), -1)\n\n\tlocalVarHeaderParams := make(map[string]string)\n\tlocalVarQueryParams := _neturl.Values{}\n\tlocalVarFormParams := _neturl.Values{}\n\n\t// to determine the Content-Type header\n\tlocalVarHTTPContentTypes := []string{\"application/json\"}\n\n\t// set Content-Type header\n\tlocalVarHTTPContentType := selectHeaderContentType(localVarHTTPContentTypes)\n\tif localVarHTTPContentType != \"\" {\n\t\tlocalVarHeaderParams[\"Content-Type\"] = localVarHTTPContentType\n\t}\n\n\t// to determine the Accept header\n\tlocalVarHTTPHeaderAccepts := []string{\"application/json\"}\n\n\t// set Accept header\n\tlocalVarHTTPHeaderAccept := selectHeaderAccept(localVarHTTPHeaderAccepts)\n\tif localVarHTTPHeaderAccept != \"\" {\n\t\tlocalVarHeaderParams[\"Accept\"] = localVarHTTPHeaderAccept\n\t}\n\t// body params\n\tlocalVarPostBody = &microsoftGraphOnenoteSection\n\tr, err := a.client.prepareRequest(ctx, localVarPath, localVarHTTPMethod, localVarPostBody, localVarHeaderParams, localVarQueryParams, localVarFormParams, localVarFormFileName, localVarFileName, localVarFileBytes)\n\tif err != nil {\n\t\treturn localVarReturnValue, nil, err\n\t}\n\n\tlocalVarHTTPResponse, err := 
a.client.callAPI(r)\n\tif err != nil || localVarHTTPResponse == nil {\n\t\treturn localVarReturnValue, localVarHTTPResponse, err\n\t}\n\n\tlocalVarBody, err := _ioutil.ReadAll(localVarHTTPResponse.Body)\n\tlocalVarHTTPResponse.Body.Close()\n\tif err != nil {\n\t\treturn localVarReturnValue, localVarHTTPResponse, err\n\t}\n\n\tif localVarHTTPResponse.StatusCode >= 300 {\n\t\tnewErr := GenericOpenAPIError{\n\t\t\tbody: localVarBody,\n\t\t\terror: localVarHTTPResponse.Status,\n\t\t}\n\t\tif localVarHTTPResponse.StatusCode == 201 {\n\t\t\tvar v MicrosoftGraphOnenoteSection\n\t\t\terr = a.client.decode(&v, localVarBody, localVarHTTPResponse.Header.Get(\"Content-Type\"))\n\t\t\tif err != nil {\n\t\t\t\tnewErr.error = err.Error()\n\t\t\t\treturn localVarReturnValue, localVarHTTPResponse, newErr\n\t\t\t}\n\t\t\tnewErr.model = v\n\t\t\treturn localVarReturnValue, localVarHTTPResponse, newErr\n\t\t}\n\t\tif localVarHTTPResponse.StatusCode == 0 {\n\t\t\tvar v OdataError\n\t\t\terr = a.client.decode(&v, localVarBody, localVarHTTPResponse.Header.Get(\"Content-Type\"))\n\t\t\tif err != nil {\n\t\t\t\tnewErr.error = err.Error()\n\t\t\t\treturn localVarReturnValue, localVarHTTPResponse, newErr\n\t\t\t}\n\t\t\tnewErr.model = v\n\t\t\treturn localVarReturnValue, localVarHTTPResponse, newErr\n\t\t}\n\t\treturn localVarReturnValue, localVarHTTPResponse, newErr\n\t}\n\n\terr = a.client.decode(&localVarReturnValue, localVarBody, localVarHTTPResponse.Header.Get(\"Content-Type\"))\n\tif err != nil {\n\t\tnewErr := GenericOpenAPIError{\n\t\t\tbody: localVarBody,\n\t\t\terror: err.Error(),\n\t\t}\n\t\treturn localVarReturnValue, localVarHTTPResponse, newErr\n\t}\n\n\treturn localVarReturnValue, localVarHTTPResponse, nil\n}", "func (mc *MessageCard) AddSection(section ...*MessageCardSection) error {\n\tfor _, s := range section {\n\t\t// bail if a completely nil section provided\n\t\tif s == nil {\n\t\t\treturn fmt.Errorf(\"func AddSection: nil MessageCardSection received\")\n\t\t}\n\n\t\t// Perform validation of all MessageCardSection fields in an effort to\n\t\t// avoid adding a MessageCardSection with zero value fields. This is\n\t\t// done to avoid generating an empty sections JSON array since the\n\t\t// Sections slice for the MessageCard type would technically not be at\n\t\t// a zero value state. 
Due to this non-zero value state, the\n\t\t// encoding/json package would end up including the Sections struct\n\t\t// field in the output JSON.\n\t\t// See also https://github.com/golang/go/issues/11939\n\t\tswitch {\n\t\t// If any of these cases trigger, skip over the `default` case\n\t\t// statement and add the section.\n\t\tcase s.Images != nil:\n\t\tcase s.Facts != nil:\n\t\tcase s.HeroImage != nil:\n\t\tcase s.StartGroup:\n\t\tcase s.Markdown:\n\t\tcase s.ActivityText != \"\":\n\t\tcase s.ActivitySubtitle != \"\":\n\t\tcase s.ActivityTitle != \"\":\n\t\tcase s.ActivityImage != \"\":\n\t\tcase s.Text != \"\":\n\t\tcase s.Title != \"\":\n\n\t\tdefault:\n\t\t\treturn fmt.Errorf(\"all fields found to be at zero-value, skipping section\")\n\t\t}\n\n\t\tmc.Sections = append(mc.Sections, s)\n\t}\n\n\treturn nil\n}", "func (a *GroupsOnenoteNotebooksSectionGroupApiService) GroupsOnenoteNotebooksSectionGroupsSectionsUpdateParentSectionGroup(ctx _context.Context, groupId string, notebookId string, sectionGroupId string, onenoteSectionId string, microsoftGraphSectionGroup MicrosoftGraphSectionGroup) (*_nethttp.Response, error) {\n\tvar (\n\t\tlocalVarHTTPMethod = _nethttp.MethodPatch\n\t\tlocalVarPostBody interface{}\n\t\tlocalVarFormFileName string\n\t\tlocalVarFileName string\n\t\tlocalVarFileBytes []byte\n\t)\n\n\t// create path and map variables\n\tlocalVarPath := a.client.cfg.BasePath + \"/groups({group-id})/onenote/notebooks({notebook-id})/sectionGroups({sectionGroup-id})/sections({onenoteSection-id})/parentSectionGroup\"\n\tlocalVarPath = strings.Replace(localVarPath, \"{\"+\"group-id\"+\"}\", _neturl.QueryEscape(fmt.Sprintf(\"%v\", groupId)), -1)\n\tlocalVarPath = strings.Replace(localVarPath, \"{\"+\"notebook-id\"+\"}\", _neturl.QueryEscape(fmt.Sprintf(\"%v\", notebookId)), -1)\n\tlocalVarPath = strings.Replace(localVarPath, \"{\"+\"sectionGroup-id\"+\"}\", _neturl.QueryEscape(fmt.Sprintf(\"%v\", sectionGroupId)), -1)\n\tlocalVarPath = strings.Replace(localVarPath, \"{\"+\"onenoteSection-id\"+\"}\", _neturl.QueryEscape(fmt.Sprintf(\"%v\", onenoteSectionId)), -1)\n\n\tlocalVarHeaderParams := make(map[string]string)\n\tlocalVarQueryParams := _neturl.Values{}\n\tlocalVarFormParams := _neturl.Values{}\n\n\t// to determine the Content-Type header\n\tlocalVarHTTPContentTypes := []string{\"application/json\"}\n\n\t// set Content-Type header\n\tlocalVarHTTPContentType := selectHeaderContentType(localVarHTTPContentTypes)\n\tif localVarHTTPContentType != \"\" {\n\t\tlocalVarHeaderParams[\"Content-Type\"] = localVarHTTPContentType\n\t}\n\n\t// to determine the Accept header\n\tlocalVarHTTPHeaderAccepts := []string{\"application/json\"}\n\n\t// set Accept header\n\tlocalVarHTTPHeaderAccept := selectHeaderAccept(localVarHTTPHeaderAccepts)\n\tif localVarHTTPHeaderAccept != \"\" {\n\t\tlocalVarHeaderParams[\"Accept\"] = localVarHTTPHeaderAccept\n\t}\n\t// body params\n\tlocalVarPostBody = &microsoftGraphSectionGroup\n\tr, err := a.client.prepareRequest(ctx, localVarPath, localVarHTTPMethod, localVarPostBody, localVarHeaderParams, localVarQueryParams, localVarFormParams, localVarFormFileName, localVarFileName, localVarFileBytes)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\n\tlocalVarHTTPResponse, err := a.client.callAPI(r)\n\tif err != nil || localVarHTTPResponse == nil {\n\t\treturn localVarHTTPResponse, err\n\t}\n\n\tlocalVarBody, err := _ioutil.ReadAll(localVarHTTPResponse.Body)\n\tlocalVarHTTPResponse.Body.Close()\n\tif err != nil {\n\t\treturn localVarHTTPResponse, err\n\t}\n\n\tif 
localVarHTTPResponse.StatusCode >= 300 {\n\t\tnewErr := GenericOpenAPIError{\n\t\t\tbody: localVarBody,\n\t\t\terror: localVarHTTPResponse.Status,\n\t\t}\n\t\tif localVarHTTPResponse.StatusCode == 0 {\n\t\t\tvar v OdataError\n\t\t\terr = a.client.decode(&v, localVarBody, localVarHTTPResponse.Header.Get(\"Content-Type\"))\n\t\t\tif err != nil {\n\t\t\t\tnewErr.error = err.Error()\n\t\t\t\treturn localVarHTTPResponse, newErr\n\t\t\t}\n\t\t\tnewErr.model = v\n\t\t\treturn localVarHTTPResponse, newErr\n\t\t}\n\t\treturn localVarHTTPResponse, newErr\n\t}\n\n\treturn localVarHTTPResponse, nil\n}", "func (mc *MessageCard) AddSection(section ...*MessageCardSection) error {\n\tfor _, s := range section {\n\t\tlogger.Printf(\"AddSection: MessageCardSection received: %+v\\n\", s)\n\n\t\t// bail if a completely nil section provided\n\t\tif s == nil {\n\t\t\treturn fmt.Errorf(\"func AddSection: nil MessageCardSection received\")\n\t\t}\n\n\t\t// Perform validation of all MessageCardSection fields in an effort to\n\t\t// avoid adding a MessageCardSection with zero value fields. This is\n\t\t// done to avoid generating an empty sections JSON array since the\n\t\t// Sections slice for the MessageCard type would technically not be at\n\t\t// a zero value state. Due to this non-zero value state, the\n\t\t// encoding/json package would end up including the Sections struct\n\t\t// field in the output JSON.\n\t\t// See also https://github.com/golang/go/issues/11939\n\t\tswitch {\n\t\t// If any of these cases trigger, skip over the `default` case\n\t\t// statement and add the section.\n\t\tcase s.Images != nil:\n\t\tcase s.Facts != nil:\n\t\tcase s.HeroImage != nil:\n\t\tcase s.StartGroup:\n\t\tcase s.Markdown:\n\t\tcase s.ActivityText != \"\":\n\t\tcase s.ActivitySubtitle != \"\":\n\t\tcase s.ActivityTitle != \"\":\n\t\tcase s.ActivityImage != \"\":\n\t\tcase s.Text != \"\":\n\t\tcase s.Title != \"\":\n\n\t\tdefault:\n\t\t\tlogger.Println(\"AddSection: No cases matched, all fields assumed to be at zero-value, skipping section\")\n\t\t\treturn fmt.Errorf(\"all fields found to be at zero-value, skipping section\")\n\t\t}\n\n\t\tlogger.Println(\"AddSection: section contains at least one non-zero value, adding section\")\n\t\tmc.Sections = append(mc.Sections, s)\n\t}\n\n\treturn nil\n}", "func (a *MeOnenoteSectionsNotebookApiService) MeOnenoteSectionsParentNotebookSectionGroupsUpdateSections(ctx _context.Context, onenoteSectionId string, sectionGroupId string, onenoteSectionId1 string, microsoftGraphOnenoteSection MicrosoftGraphOnenoteSection) (*_nethttp.Response, error) {\n\tvar (\n\t\tlocalVarHTTPMethod = _nethttp.MethodPatch\n\t\tlocalVarPostBody interface{}\n\t\tlocalVarFormFileName string\n\t\tlocalVarFileName string\n\t\tlocalVarFileBytes []byte\n\t)\n\n\t// create path and map variables\n\tlocalVarPath := a.client.cfg.BasePath + \"/me/onenote/sections({onenoteSection-id})/parentNotebook/sectionGroups({sectionGroup-id})/sections({onenoteSection-id1})\"\n\tlocalVarPath = strings.Replace(localVarPath, \"{\"+\"onenoteSection-id\"+\"}\", _neturl.QueryEscape(fmt.Sprintf(\"%v\", onenoteSectionId)), -1)\n\tlocalVarPath = strings.Replace(localVarPath, \"{\"+\"sectionGroup-id\"+\"}\", _neturl.QueryEscape(fmt.Sprintf(\"%v\", sectionGroupId)), -1)\n\tlocalVarPath = strings.Replace(localVarPath, \"{\"+\"onenoteSection-id1\"+\"}\", _neturl.QueryEscape(fmt.Sprintf(\"%v\", onenoteSectionId1)), -1)\n\n\tlocalVarHeaderParams := make(map[string]string)\n\tlocalVarQueryParams := _neturl.Values{}\n\tlocalVarFormParams := 
_neturl.Values{}\n\n\t// to determine the Content-Type header\n\tlocalVarHTTPContentTypes := []string{\"application/json\"}\n\n\t// set Content-Type header\n\tlocalVarHTTPContentType := selectHeaderContentType(localVarHTTPContentTypes)\n\tif localVarHTTPContentType != \"\" {\n\t\tlocalVarHeaderParams[\"Content-Type\"] = localVarHTTPContentType\n\t}\n\n\t// to determine the Accept header\n\tlocalVarHTTPHeaderAccepts := []string{\"application/json\"}\n\n\t// set Accept header\n\tlocalVarHTTPHeaderAccept := selectHeaderAccept(localVarHTTPHeaderAccepts)\n\tif localVarHTTPHeaderAccept != \"\" {\n\t\tlocalVarHeaderParams[\"Accept\"] = localVarHTTPHeaderAccept\n\t}\n\t// body params\n\tlocalVarPostBody = &microsoftGraphOnenoteSection\n\tr, err := a.client.prepareRequest(ctx, localVarPath, localVarHTTPMethod, localVarPostBody, localVarHeaderParams, localVarQueryParams, localVarFormParams, localVarFormFileName, localVarFileName, localVarFileBytes)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\n\tlocalVarHTTPResponse, err := a.client.callAPI(r)\n\tif err != nil || localVarHTTPResponse == nil {\n\t\treturn localVarHTTPResponse, err\n\t}\n\n\tlocalVarBody, err := _ioutil.ReadAll(localVarHTTPResponse.Body)\n\tlocalVarHTTPResponse.Body.Close()\n\tif err != nil {\n\t\treturn localVarHTTPResponse, err\n\t}\n\n\tif localVarHTTPResponse.StatusCode >= 300 {\n\t\tnewErr := GenericOpenAPIError{\n\t\t\tbody: localVarBody,\n\t\t\terror: localVarHTTPResponse.Status,\n\t\t}\n\t\tif localVarHTTPResponse.StatusCode == 0 {\n\t\t\tvar v OdataError\n\t\t\terr = a.client.decode(&v, localVarBody, localVarHTTPResponse.Header.Get(\"Content-Type\"))\n\t\t\tif err != nil {\n\t\t\t\tnewErr.error = err.Error()\n\t\t\t\treturn localVarHTTPResponse, newErr\n\t\t\t}\n\t\t\tnewErr.model = v\n\t\t\treturn localVarHTTPResponse, newErr\n\t\t}\n\t\treturn localVarHTTPResponse, newErr\n\t}\n\n\treturn localVarHTTPResponse, nil\n}", "func (a *UsersOnenoteSectionGroupsOnenoteSectionApiService) UsersOnenoteSectionGroupsSectionsPagesUpdateParentSection(ctx _context.Context, userId string, sectionGroupId string, onenoteSectionId string, onenotePageId string, microsoftGraphOnenoteSection MicrosoftGraphOnenoteSection) (*_nethttp.Response, error) {\n\tvar (\n\t\tlocalVarHTTPMethod = _nethttp.MethodPatch\n\t\tlocalVarPostBody interface{}\n\t\tlocalVarFormFileName string\n\t\tlocalVarFileName string\n\t\tlocalVarFileBytes []byte\n\t)\n\n\t// create path and map variables\n\tlocalVarPath := a.client.cfg.BasePath + \"/users({user-id})/onenote/sectionGroups({sectionGroup-id})/sections({onenoteSection-id})/pages({onenotePage-id})/parentSection\"\n\tlocalVarPath = strings.Replace(localVarPath, \"{\"+\"user-id\"+\"}\", _neturl.QueryEscape(fmt.Sprintf(\"%v\", userId)), -1)\n\tlocalVarPath = strings.Replace(localVarPath, \"{\"+\"sectionGroup-id\"+\"}\", _neturl.QueryEscape(fmt.Sprintf(\"%v\", sectionGroupId)), -1)\n\tlocalVarPath = strings.Replace(localVarPath, \"{\"+\"onenoteSection-id\"+\"}\", _neturl.QueryEscape(fmt.Sprintf(\"%v\", onenoteSectionId)), -1)\n\tlocalVarPath = strings.Replace(localVarPath, \"{\"+\"onenotePage-id\"+\"}\", _neturl.QueryEscape(fmt.Sprintf(\"%v\", onenotePageId)), -1)\n\n\tlocalVarHeaderParams := make(map[string]string)\n\tlocalVarQueryParams := _neturl.Values{}\n\tlocalVarFormParams := _neturl.Values{}\n\n\t// to determine the Content-Type header\n\tlocalVarHTTPContentTypes := []string{\"application/json\"}\n\n\t// set Content-Type header\n\tlocalVarHTTPContentType := selectHeaderContentType(localVarHTTPContentTypes)\n\tif 
localVarHTTPContentType != \"\" {\n\t\tlocalVarHeaderParams[\"Content-Type\"] = localVarHTTPContentType\n\t}\n\n\t// to determine the Accept header\n\tlocalVarHTTPHeaderAccepts := []string{\"application/json\"}\n\n\t// set Accept header\n\tlocalVarHTTPHeaderAccept := selectHeaderAccept(localVarHTTPHeaderAccepts)\n\tif localVarHTTPHeaderAccept != \"\" {\n\t\tlocalVarHeaderParams[\"Accept\"] = localVarHTTPHeaderAccept\n\t}\n\t// body params\n\tlocalVarPostBody = &microsoftGraphOnenoteSection\n\tr, err := a.client.prepareRequest(ctx, localVarPath, localVarHTTPMethod, localVarPostBody, localVarHeaderParams, localVarQueryParams, localVarFormParams, localVarFormFileName, localVarFileName, localVarFileBytes)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\n\tlocalVarHTTPResponse, err := a.client.callAPI(r)\n\tif err != nil || localVarHTTPResponse == nil {\n\t\treturn localVarHTTPResponse, err\n\t}\n\n\tlocalVarBody, err := _ioutil.ReadAll(localVarHTTPResponse.Body)\n\tlocalVarHTTPResponse.Body.Close()\n\tif err != nil {\n\t\treturn localVarHTTPResponse, err\n\t}\n\n\tif localVarHTTPResponse.StatusCode >= 300 {\n\t\tnewErr := GenericOpenAPIError{\n\t\t\tbody: localVarBody,\n\t\t\terror: localVarHTTPResponse.Status,\n\t\t}\n\t\tif localVarHTTPResponse.StatusCode == 0 {\n\t\t\tvar v OdataError\n\t\t\terr = a.client.decode(&v, localVarBody, localVarHTTPResponse.Header.Get(\"Content-Type\"))\n\t\t\tif err != nil {\n\t\t\t\tnewErr.error = err.Error()\n\t\t\t\treturn localVarHTTPResponse, newErr\n\t\t\t}\n\t\t\tnewErr.model = v\n\t\t\treturn localVarHTTPResponse, newErr\n\t\t}\n\t\treturn localVarHTTPResponse, newErr\n\t}\n\n\treturn localVarHTTPResponse, nil\n}", "func (a *GroupsOnenotePagesOnenoteSectionApiService) GroupsOnenotePagesParentSectionParentNotebookSectionGroupsUpdateSections(ctx _context.Context, groupId string, onenotePageId string, sectionGroupId string, onenoteSectionId string, microsoftGraphOnenoteSection MicrosoftGraphOnenoteSection) (*_nethttp.Response, error) {\n\tvar (\n\t\tlocalVarHTTPMethod = _nethttp.MethodPatch\n\t\tlocalVarPostBody interface{}\n\t\tlocalVarFormFileName string\n\t\tlocalVarFileName string\n\t\tlocalVarFileBytes []byte\n\t)\n\n\t// create path and map variables\n\tlocalVarPath := a.client.cfg.BasePath + \"/groups({group-id})/onenote/pages({onenotePage-id})/parentSection/parentNotebook/sectionGroups({sectionGroup-id})/sections({onenoteSection-id})\"\n\tlocalVarPath = strings.Replace(localVarPath, \"{\"+\"group-id\"+\"}\", _neturl.QueryEscape(fmt.Sprintf(\"%v\", groupId)), -1)\n\tlocalVarPath = strings.Replace(localVarPath, \"{\"+\"onenotePage-id\"+\"}\", _neturl.QueryEscape(fmt.Sprintf(\"%v\", onenotePageId)), -1)\n\tlocalVarPath = strings.Replace(localVarPath, \"{\"+\"sectionGroup-id\"+\"}\", _neturl.QueryEscape(fmt.Sprintf(\"%v\", sectionGroupId)), -1)\n\tlocalVarPath = strings.Replace(localVarPath, \"{\"+\"onenoteSection-id\"+\"}\", _neturl.QueryEscape(fmt.Sprintf(\"%v\", onenoteSectionId)), -1)\n\n\tlocalVarHeaderParams := make(map[string]string)\n\tlocalVarQueryParams := _neturl.Values{}\n\tlocalVarFormParams := _neturl.Values{}\n\n\t// to determine the Content-Type header\n\tlocalVarHTTPContentTypes := []string{\"application/json\"}\n\n\t// set Content-Type header\n\tlocalVarHTTPContentType := selectHeaderContentType(localVarHTTPContentTypes)\n\tif localVarHTTPContentType != \"\" {\n\t\tlocalVarHeaderParams[\"Content-Type\"] = localVarHTTPContentType\n\t}\n\n\t// to determine the Accept header\n\tlocalVarHTTPHeaderAccepts := 
[]string{\"application/json\"}\n\n\t// set Accept header\n\tlocalVarHTTPHeaderAccept := selectHeaderAccept(localVarHTTPHeaderAccepts)\n\tif localVarHTTPHeaderAccept != \"\" {\n\t\tlocalVarHeaderParams[\"Accept\"] = localVarHTTPHeaderAccept\n\t}\n\t// body params\n\tlocalVarPostBody = &microsoftGraphOnenoteSection\n\tr, err := a.client.prepareRequest(ctx, localVarPath, localVarHTTPMethod, localVarPostBody, localVarHeaderParams, localVarQueryParams, localVarFormParams, localVarFormFileName, localVarFileName, localVarFileBytes)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\n\tlocalVarHTTPResponse, err := a.client.callAPI(r)\n\tif err != nil || localVarHTTPResponse == nil {\n\t\treturn localVarHTTPResponse, err\n\t}\n\n\tlocalVarBody, err := _ioutil.ReadAll(localVarHTTPResponse.Body)\n\tlocalVarHTTPResponse.Body.Close()\n\tif err != nil {\n\t\treturn localVarHTTPResponse, err\n\t}\n\n\tif localVarHTTPResponse.StatusCode >= 300 {\n\t\tnewErr := GenericOpenAPIError{\n\t\t\tbody: localVarBody,\n\t\t\terror: localVarHTTPResponse.Status,\n\t\t}\n\t\tif localVarHTTPResponse.StatusCode == 0 {\n\t\t\tvar v OdataError\n\t\t\terr = a.client.decode(&v, localVarBody, localVarHTTPResponse.Header.Get(\"Content-Type\"))\n\t\t\tif err != nil {\n\t\t\t\tnewErr.error = err.Error()\n\t\t\t\treturn localVarHTTPResponse, newErr\n\t\t\t}\n\t\t\tnewErr.model = v\n\t\t\treturn localVarHTTPResponse, newErr\n\t\t}\n\t\treturn localVarHTTPResponse, newErr\n\t}\n\n\treturn localVarHTTPResponse, nil\n}", "func (a *GroupsOnenotePagesOnenoteSectionApiService) GroupsOnenotePagesParentSectionParentSectionGroupUpdateSectionGroups(ctx _context.Context, groupId string, onenotePageId string, sectionGroupId string, microsoftGraphSectionGroup MicrosoftGraphSectionGroup) (*_nethttp.Response, error) {\n\tvar (\n\t\tlocalVarHTTPMethod = _nethttp.MethodPatch\n\t\tlocalVarPostBody interface{}\n\t\tlocalVarFormFileName string\n\t\tlocalVarFileName string\n\t\tlocalVarFileBytes []byte\n\t)\n\n\t// create path and map variables\n\tlocalVarPath := a.client.cfg.BasePath + \"/groups({group-id})/onenote/pages({onenotePage-id})/parentSection/parentSectionGroup/sectionGroups({sectionGroup-id})\"\n\tlocalVarPath = strings.Replace(localVarPath, \"{\"+\"group-id\"+\"}\", _neturl.QueryEscape(fmt.Sprintf(\"%v\", groupId)), -1)\n\tlocalVarPath = strings.Replace(localVarPath, \"{\"+\"onenotePage-id\"+\"}\", _neturl.QueryEscape(fmt.Sprintf(\"%v\", onenotePageId)), -1)\n\tlocalVarPath = strings.Replace(localVarPath, \"{\"+\"sectionGroup-id\"+\"}\", _neturl.QueryEscape(fmt.Sprintf(\"%v\", sectionGroupId)), -1)\n\n\tlocalVarHeaderParams := make(map[string]string)\n\tlocalVarQueryParams := _neturl.Values{}\n\tlocalVarFormParams := _neturl.Values{}\n\n\t// to determine the Content-Type header\n\tlocalVarHTTPContentTypes := []string{\"application/json\"}\n\n\t// set Content-Type header\n\tlocalVarHTTPContentType := selectHeaderContentType(localVarHTTPContentTypes)\n\tif localVarHTTPContentType != \"\" {\n\t\tlocalVarHeaderParams[\"Content-Type\"] = localVarHTTPContentType\n\t}\n\n\t// to determine the Accept header\n\tlocalVarHTTPHeaderAccepts := []string{\"application/json\"}\n\n\t// set Accept header\n\tlocalVarHTTPHeaderAccept := selectHeaderAccept(localVarHTTPHeaderAccepts)\n\tif localVarHTTPHeaderAccept != \"\" {\n\t\tlocalVarHeaderParams[\"Accept\"] = localVarHTTPHeaderAccept\n\t}\n\t// body params\n\tlocalVarPostBody = &microsoftGraphSectionGroup\n\tr, err := a.client.prepareRequest(ctx, localVarPath, localVarHTTPMethod, localVarPostBody, 
localVarHeaderParams, localVarQueryParams, localVarFormParams, localVarFormFileName, localVarFileName, localVarFileBytes)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\n\tlocalVarHTTPResponse, err := a.client.callAPI(r)\n\tif err != nil || localVarHTTPResponse == nil {\n\t\treturn localVarHTTPResponse, err\n\t}\n\n\tlocalVarBody, err := _ioutil.ReadAll(localVarHTTPResponse.Body)\n\tlocalVarHTTPResponse.Body.Close()\n\tif err != nil {\n\t\treturn localVarHTTPResponse, err\n\t}\n\n\tif localVarHTTPResponse.StatusCode >= 300 {\n\t\tnewErr := GenericOpenAPIError{\n\t\t\tbody: localVarBody,\n\t\t\terror: localVarHTTPResponse.Status,\n\t\t}\n\t\tif localVarHTTPResponse.StatusCode == 0 {\n\t\t\tvar v OdataError\n\t\t\terr = a.client.decode(&v, localVarBody, localVarHTTPResponse.Header.Get(\"Content-Type\"))\n\t\t\tif err != nil {\n\t\t\t\tnewErr.error = err.Error()\n\t\t\t\treturn localVarHTTPResponse, newErr\n\t\t\t}\n\t\t\tnewErr.model = v\n\t\t\treturn localVarHTTPResponse, newErr\n\t\t}\n\t\treturn localVarHTTPResponse, newErr\n\t}\n\n\treturn localVarHTTPResponse, nil\n}", "func (a *UsersOnenoteSectionGroupsOnenoteSectionApiService) UsersOnenoteSectionGroupsSectionsPagesParentNotebookUpdateSections(ctx _context.Context, userId string, sectionGroupId string, onenoteSectionId string, onenotePageId string, onenoteSectionId1 string, microsoftGraphOnenoteSection MicrosoftGraphOnenoteSection) (*_nethttp.Response, error) {\n\tvar (\n\t\tlocalVarHTTPMethod = _nethttp.MethodPatch\n\t\tlocalVarPostBody interface{}\n\t\tlocalVarFormFileName string\n\t\tlocalVarFileName string\n\t\tlocalVarFileBytes []byte\n\t)\n\n\t// create path and map variables\n\tlocalVarPath := a.client.cfg.BasePath + \"/users({user-id})/onenote/sectionGroups({sectionGroup-id})/sections({onenoteSection-id})/pages({onenotePage-id})/parentNotebook/sections({onenoteSection-id1})\"\n\tlocalVarPath = strings.Replace(localVarPath, \"{\"+\"user-id\"+\"}\", _neturl.QueryEscape(fmt.Sprintf(\"%v\", userId)), -1)\n\tlocalVarPath = strings.Replace(localVarPath, \"{\"+\"sectionGroup-id\"+\"}\", _neturl.QueryEscape(fmt.Sprintf(\"%v\", sectionGroupId)), -1)\n\tlocalVarPath = strings.Replace(localVarPath, \"{\"+\"onenoteSection-id\"+\"}\", _neturl.QueryEscape(fmt.Sprintf(\"%v\", onenoteSectionId)), -1)\n\tlocalVarPath = strings.Replace(localVarPath, \"{\"+\"onenotePage-id\"+\"}\", _neturl.QueryEscape(fmt.Sprintf(\"%v\", onenotePageId)), -1)\n\tlocalVarPath = strings.Replace(localVarPath, \"{\"+\"onenoteSection-id1\"+\"}\", _neturl.QueryEscape(fmt.Sprintf(\"%v\", onenoteSectionId1)), -1)\n\n\tlocalVarHeaderParams := make(map[string]string)\n\tlocalVarQueryParams := _neturl.Values{}\n\tlocalVarFormParams := _neturl.Values{}\n\n\t// to determine the Content-Type header\n\tlocalVarHTTPContentTypes := []string{\"application/json\"}\n\n\t// set Content-Type header\n\tlocalVarHTTPContentType := selectHeaderContentType(localVarHTTPContentTypes)\n\tif localVarHTTPContentType != \"\" {\n\t\tlocalVarHeaderParams[\"Content-Type\"] = localVarHTTPContentType\n\t}\n\n\t// to determine the Accept header\n\tlocalVarHTTPHeaderAccepts := []string{\"application/json\"}\n\n\t// set Accept header\n\tlocalVarHTTPHeaderAccept := selectHeaderAccept(localVarHTTPHeaderAccepts)\n\tif localVarHTTPHeaderAccept != \"\" {\n\t\tlocalVarHeaderParams[\"Accept\"] = localVarHTTPHeaderAccept\n\t}\n\t// body params\n\tlocalVarPostBody = &microsoftGraphOnenoteSection\n\tr, err := a.client.prepareRequest(ctx, localVarPath, localVarHTTPMethod, localVarPostBody, localVarHeaderParams, 
localVarQueryParams, localVarFormParams, localVarFormFileName, localVarFileName, localVarFileBytes)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\n\tlocalVarHTTPResponse, err := a.client.callAPI(r)\n\tif err != nil || localVarHTTPResponse == nil {\n\t\treturn localVarHTTPResponse, err\n\t}\n\n\tlocalVarBody, err := _ioutil.ReadAll(localVarHTTPResponse.Body)\n\tlocalVarHTTPResponse.Body.Close()\n\tif err != nil {\n\t\treturn localVarHTTPResponse, err\n\t}\n\n\tif localVarHTTPResponse.StatusCode >= 300 {\n\t\tnewErr := GenericOpenAPIError{\n\t\t\tbody: localVarBody,\n\t\t\terror: localVarHTTPResponse.Status,\n\t\t}\n\t\tif localVarHTTPResponse.StatusCode == 0 {\n\t\t\tvar v OdataError\n\t\t\terr = a.client.decode(&v, localVarBody, localVarHTTPResponse.Header.Get(\"Content-Type\"))\n\t\t\tif err != nil {\n\t\t\t\tnewErr.error = err.Error()\n\t\t\t\treturn localVarHTTPResponse, newErr\n\t\t\t}\n\t\t\tnewErr.model = v\n\t\t\treturn localVarHTTPResponse, newErr\n\t\t}\n\t\treturn localVarHTTPResponse, newErr\n\t}\n\n\treturn localVarHTTPResponse, nil\n}", "func (a *GroupsOnenotePagesOnenoteSectionApiService) GroupsOnenotePagesParentSectionParentSectionGroupParentNotebookUpdateSections(ctx _context.Context, groupId string, onenotePageId string, onenoteSectionId string, microsoftGraphOnenoteSection MicrosoftGraphOnenoteSection) (*_nethttp.Response, error) {\n\tvar (\n\t\tlocalVarHTTPMethod = _nethttp.MethodPatch\n\t\tlocalVarPostBody interface{}\n\t\tlocalVarFormFileName string\n\t\tlocalVarFileName string\n\t\tlocalVarFileBytes []byte\n\t)\n\n\t// create path and map variables\n\tlocalVarPath := a.client.cfg.BasePath + \"/groups({group-id})/onenote/pages({onenotePage-id})/parentSection/parentSectionGroup/parentNotebook/sections({onenoteSection-id})\"\n\tlocalVarPath = strings.Replace(localVarPath, \"{\"+\"group-id\"+\"}\", _neturl.QueryEscape(fmt.Sprintf(\"%v\", groupId)), -1)\n\tlocalVarPath = strings.Replace(localVarPath, \"{\"+\"onenotePage-id\"+\"}\", _neturl.QueryEscape(fmt.Sprintf(\"%v\", onenotePageId)), -1)\n\tlocalVarPath = strings.Replace(localVarPath, \"{\"+\"onenoteSection-id\"+\"}\", _neturl.QueryEscape(fmt.Sprintf(\"%v\", onenoteSectionId)), -1)\n\n\tlocalVarHeaderParams := make(map[string]string)\n\tlocalVarQueryParams := _neturl.Values{}\n\tlocalVarFormParams := _neturl.Values{}\n\n\t// to determine the Content-Type header\n\tlocalVarHTTPContentTypes := []string{\"application/json\"}\n\n\t// set Content-Type header\n\tlocalVarHTTPContentType := selectHeaderContentType(localVarHTTPContentTypes)\n\tif localVarHTTPContentType != \"\" {\n\t\tlocalVarHeaderParams[\"Content-Type\"] = localVarHTTPContentType\n\t}\n\n\t// to determine the Accept header\n\tlocalVarHTTPHeaderAccepts := []string{\"application/json\"}\n\n\t// set Accept header\n\tlocalVarHTTPHeaderAccept := selectHeaderAccept(localVarHTTPHeaderAccepts)\n\tif localVarHTTPHeaderAccept != \"\" {\n\t\tlocalVarHeaderParams[\"Accept\"] = localVarHTTPHeaderAccept\n\t}\n\t// body params\n\tlocalVarPostBody = &microsoftGraphOnenoteSection\n\tr, err := a.client.prepareRequest(ctx, localVarPath, localVarHTTPMethod, localVarPostBody, localVarHeaderParams, localVarQueryParams, localVarFormParams, localVarFormFileName, localVarFileName, localVarFileBytes)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\n\tlocalVarHTTPResponse, err := a.client.callAPI(r)\n\tif err != nil || localVarHTTPResponse == nil {\n\t\treturn localVarHTTPResponse, err\n\t}\n\n\tlocalVarBody, err := 
_ioutil.ReadAll(localVarHTTPResponse.Body)\n\tlocalVarHTTPResponse.Body.Close()\n\tif err != nil {\n\t\treturn localVarHTTPResponse, err\n\t}\n\n\tif localVarHTTPResponse.StatusCode >= 300 {\n\t\tnewErr := GenericOpenAPIError{\n\t\t\tbody: localVarBody,\n\t\t\terror: localVarHTTPResponse.Status,\n\t\t}\n\t\tif localVarHTTPResponse.StatusCode == 0 {\n\t\t\tvar v OdataError\n\t\t\terr = a.client.decode(&v, localVarBody, localVarHTTPResponse.Header.Get(\"Content-Type\"))\n\t\t\tif err != nil {\n\t\t\t\tnewErr.error = err.Error()\n\t\t\t\treturn localVarHTTPResponse, newErr\n\t\t\t}\n\t\t\tnewErr.model = v\n\t\t\treturn localVarHTTPResponse, newErr\n\t\t}\n\t\treturn localVarHTTPResponse, newErr\n\t}\n\n\treturn localVarHTTPResponse, nil\n}", "func (a *UsersOnenoteSectionGroupsOnenoteSectionApiService) UsersOnenoteSectionGroupsCreateSections(ctx _context.Context, userId string, sectionGroupId string, microsoftGraphOnenoteSection MicrosoftGraphOnenoteSection) (MicrosoftGraphOnenoteSection, *_nethttp.Response, error) {\n\tvar (\n\t\tlocalVarHTTPMethod = _nethttp.MethodPost\n\t\tlocalVarPostBody interface{}\n\t\tlocalVarFormFileName string\n\t\tlocalVarFileName string\n\t\tlocalVarFileBytes []byte\n\t\tlocalVarReturnValue MicrosoftGraphOnenoteSection\n\t)\n\n\t// create path and map variables\n\tlocalVarPath := a.client.cfg.BasePath + \"/users({user-id})/onenote/sectionGroups({sectionGroup-id})/sections\"\n\tlocalVarPath = strings.Replace(localVarPath, \"{\"+\"user-id\"+\"}\", _neturl.QueryEscape(fmt.Sprintf(\"%v\", userId)), -1)\n\tlocalVarPath = strings.Replace(localVarPath, \"{\"+\"sectionGroup-id\"+\"}\", _neturl.QueryEscape(fmt.Sprintf(\"%v\", sectionGroupId)), -1)\n\n\tlocalVarHeaderParams := make(map[string]string)\n\tlocalVarQueryParams := _neturl.Values{}\n\tlocalVarFormParams := _neturl.Values{}\n\n\t// to determine the Content-Type header\n\tlocalVarHTTPContentTypes := []string{\"application/json\"}\n\n\t// set Content-Type header\n\tlocalVarHTTPContentType := selectHeaderContentType(localVarHTTPContentTypes)\n\tif localVarHTTPContentType != \"\" {\n\t\tlocalVarHeaderParams[\"Content-Type\"] = localVarHTTPContentType\n\t}\n\n\t// to determine the Accept header\n\tlocalVarHTTPHeaderAccepts := []string{\"application/json\"}\n\n\t// set Accept header\n\tlocalVarHTTPHeaderAccept := selectHeaderAccept(localVarHTTPHeaderAccepts)\n\tif localVarHTTPHeaderAccept != \"\" {\n\t\tlocalVarHeaderParams[\"Accept\"] = localVarHTTPHeaderAccept\n\t}\n\t// body params\n\tlocalVarPostBody = &microsoftGraphOnenoteSection\n\tr, err := a.client.prepareRequest(ctx, localVarPath, localVarHTTPMethod, localVarPostBody, localVarHeaderParams, localVarQueryParams, localVarFormParams, localVarFormFileName, localVarFileName, localVarFileBytes)\n\tif err != nil {\n\t\treturn localVarReturnValue, nil, err\n\t}\n\n\tlocalVarHTTPResponse, err := a.client.callAPI(r)\n\tif err != nil || localVarHTTPResponse == nil {\n\t\treturn localVarReturnValue, localVarHTTPResponse, err\n\t}\n\n\tlocalVarBody, err := _ioutil.ReadAll(localVarHTTPResponse.Body)\n\tlocalVarHTTPResponse.Body.Close()\n\tif err != nil {\n\t\treturn localVarReturnValue, localVarHTTPResponse, err\n\t}\n\n\tif localVarHTTPResponse.StatusCode >= 300 {\n\t\tnewErr := GenericOpenAPIError{\n\t\t\tbody: localVarBody,\n\t\t\terror: localVarHTTPResponse.Status,\n\t\t}\n\t\tif localVarHTTPResponse.StatusCode == 201 {\n\t\t\tvar v MicrosoftGraphOnenoteSection\n\t\t\terr = a.client.decode(&v, localVarBody, localVarHTTPResponse.Header.Get(\"Content-Type\"))\n\t\t\tif err 
!= nil {\n\t\t\t\tnewErr.error = err.Error()\n\t\t\t\treturn localVarReturnValue, localVarHTTPResponse, newErr\n\t\t\t}\n\t\t\tnewErr.model = v\n\t\t\treturn localVarReturnValue, localVarHTTPResponse, newErr\n\t\t}\n\t\tif localVarHTTPResponse.StatusCode == 0 {\n\t\t\tvar v OdataError\n\t\t\terr = a.client.decode(&v, localVarBody, localVarHTTPResponse.Header.Get(\"Content-Type\"))\n\t\t\tif err != nil {\n\t\t\t\tnewErr.error = err.Error()\n\t\t\t\treturn localVarReturnValue, localVarHTTPResponse, newErr\n\t\t\t}\n\t\t\tnewErr.model = v\n\t\t\treturn localVarReturnValue, localVarHTTPResponse, newErr\n\t\t}\n\t\treturn localVarReturnValue, localVarHTTPResponse, newErr\n\t}\n\n\terr = a.client.decode(&localVarReturnValue, localVarBody, localVarHTTPResponse.Header.Get(\"Content-Type\"))\n\tif err != nil {\n\t\tnewErr := GenericOpenAPIError{\n\t\t\tbody: localVarBody,\n\t\t\terror: err.Error(),\n\t\t}\n\t\treturn localVarReturnValue, localVarHTTPResponse, newErr\n\t}\n\n\treturn localVarReturnValue, localVarHTTPResponse, nil\n}", "func (a *GroupsOnenotePagesNotebookApiService) GroupsOnenotePagesParentNotebookSectionsParentSectionGroupUpdateSections(ctx _context.Context, groupId string, onenotePageId string, onenoteSectionId string, onenoteSectionId1 string, microsoftGraphOnenoteSection MicrosoftGraphOnenoteSection) (*_nethttp.Response, error) {\n\tvar (\n\t\tlocalVarHTTPMethod = _nethttp.MethodPatch\n\t\tlocalVarPostBody interface{}\n\t\tlocalVarFormFileName string\n\t\tlocalVarFileName string\n\t\tlocalVarFileBytes []byte\n\t)\n\n\t// create path and map variables\n\tlocalVarPath := a.client.cfg.BasePath + \"/groups({group-id})/onenote/pages({onenotePage-id})/parentNotebook/sections({onenoteSection-id})/parentSectionGroup/sections({onenoteSection-id1})\"\n\tlocalVarPath = strings.Replace(localVarPath, \"{\"+\"group-id\"+\"}\", _neturl.QueryEscape(fmt.Sprintf(\"%v\", groupId)), -1)\n\tlocalVarPath = strings.Replace(localVarPath, \"{\"+\"onenotePage-id\"+\"}\", _neturl.QueryEscape(fmt.Sprintf(\"%v\", onenotePageId)), -1)\n\tlocalVarPath = strings.Replace(localVarPath, \"{\"+\"onenoteSection-id\"+\"}\", _neturl.QueryEscape(fmt.Sprintf(\"%v\", onenoteSectionId)), -1)\n\tlocalVarPath = strings.Replace(localVarPath, \"{\"+\"onenoteSection-id1\"+\"}\", _neturl.QueryEscape(fmt.Sprintf(\"%v\", onenoteSectionId1)), -1)\n\n\tlocalVarHeaderParams := make(map[string]string)\n\tlocalVarQueryParams := _neturl.Values{}\n\tlocalVarFormParams := _neturl.Values{}\n\n\t// to determine the Content-Type header\n\tlocalVarHTTPContentTypes := []string{\"application/json\"}\n\n\t// set Content-Type header\n\tlocalVarHTTPContentType := selectHeaderContentType(localVarHTTPContentTypes)\n\tif localVarHTTPContentType != \"\" {\n\t\tlocalVarHeaderParams[\"Content-Type\"] = localVarHTTPContentType\n\t}\n\n\t// to determine the Accept header\n\tlocalVarHTTPHeaderAccepts := []string{\"application/json\"}\n\n\t// set Accept header\n\tlocalVarHTTPHeaderAccept := selectHeaderAccept(localVarHTTPHeaderAccepts)\n\tif localVarHTTPHeaderAccept != \"\" {\n\t\tlocalVarHeaderParams[\"Accept\"] = localVarHTTPHeaderAccept\n\t}\n\t// body params\n\tlocalVarPostBody = &microsoftGraphOnenoteSection\n\tr, err := a.client.prepareRequest(ctx, localVarPath, localVarHTTPMethod, localVarPostBody, localVarHeaderParams, localVarQueryParams, localVarFormParams, localVarFormFileName, localVarFileName, localVarFileBytes)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\n\tlocalVarHTTPResponse, err := a.client.callAPI(r)\n\tif err != nil || 
localVarHTTPResponse == nil {\n\t\treturn localVarHTTPResponse, err\n\t}\n\n\tlocalVarBody, err := _ioutil.ReadAll(localVarHTTPResponse.Body)\n\tlocalVarHTTPResponse.Body.Close()\n\tif err != nil {\n\t\treturn localVarHTTPResponse, err\n\t}\n\n\tif localVarHTTPResponse.StatusCode >= 300 {\n\t\tnewErr := GenericOpenAPIError{\n\t\t\tbody: localVarBody,\n\t\t\terror: localVarHTTPResponse.Status,\n\t\t}\n\t\tif localVarHTTPResponse.StatusCode == 0 {\n\t\t\tvar v OdataError\n\t\t\terr = a.client.decode(&v, localVarBody, localVarHTTPResponse.Header.Get(\"Content-Type\"))\n\t\t\tif err != nil {\n\t\t\t\tnewErr.error = err.Error()\n\t\t\t\treturn localVarHTTPResponse, newErr\n\t\t\t}\n\t\t\tnewErr.model = v\n\t\t\treturn localVarHTTPResponse, newErr\n\t\t}\n\t\treturn localVarHTTPResponse, newErr\n\t}\n\n\treturn localVarHTTPResponse, nil\n}", "func (a *UsersOnenoteSectionGroupsOnenoteSectionApiService) UsersOnenoteSectionGroupsSectionsParentNotebookCreateSections(ctx _context.Context, userId string, sectionGroupId string, onenoteSectionId string, microsoftGraphOnenoteSection MicrosoftGraphOnenoteSection) (MicrosoftGraphOnenoteSection, *_nethttp.Response, error) {\n\tvar (\n\t\tlocalVarHTTPMethod = _nethttp.MethodPost\n\t\tlocalVarPostBody interface{}\n\t\tlocalVarFormFileName string\n\t\tlocalVarFileName string\n\t\tlocalVarFileBytes []byte\n\t\tlocalVarReturnValue MicrosoftGraphOnenoteSection\n\t)\n\n\t// create path and map variables\n\tlocalVarPath := a.client.cfg.BasePath + \"/users({user-id})/onenote/sectionGroups({sectionGroup-id})/sections({onenoteSection-id})/parentNotebook/sections\"\n\tlocalVarPath = strings.Replace(localVarPath, \"{\"+\"user-id\"+\"}\", _neturl.QueryEscape(fmt.Sprintf(\"%v\", userId)), -1)\n\tlocalVarPath = strings.Replace(localVarPath, \"{\"+\"sectionGroup-id\"+\"}\", _neturl.QueryEscape(fmt.Sprintf(\"%v\", sectionGroupId)), -1)\n\tlocalVarPath = strings.Replace(localVarPath, \"{\"+\"onenoteSection-id\"+\"}\", _neturl.QueryEscape(fmt.Sprintf(\"%v\", onenoteSectionId)), -1)\n\n\tlocalVarHeaderParams := make(map[string]string)\n\tlocalVarQueryParams := _neturl.Values{}\n\tlocalVarFormParams := _neturl.Values{}\n\n\t// to determine the Content-Type header\n\tlocalVarHTTPContentTypes := []string{\"application/json\"}\n\n\t// set Content-Type header\n\tlocalVarHTTPContentType := selectHeaderContentType(localVarHTTPContentTypes)\n\tif localVarHTTPContentType != \"\" {\n\t\tlocalVarHeaderParams[\"Content-Type\"] = localVarHTTPContentType\n\t}\n\n\t// to determine the Accept header\n\tlocalVarHTTPHeaderAccepts := []string{\"application/json\"}\n\n\t// set Accept header\n\tlocalVarHTTPHeaderAccept := selectHeaderAccept(localVarHTTPHeaderAccepts)\n\tif localVarHTTPHeaderAccept != \"\" {\n\t\tlocalVarHeaderParams[\"Accept\"] = localVarHTTPHeaderAccept\n\t}\n\t// body params\n\tlocalVarPostBody = &microsoftGraphOnenoteSection\n\tr, err := a.client.prepareRequest(ctx, localVarPath, localVarHTTPMethod, localVarPostBody, localVarHeaderParams, localVarQueryParams, localVarFormParams, localVarFormFileName, localVarFileName, localVarFileBytes)\n\tif err != nil {\n\t\treturn localVarReturnValue, nil, err\n\t}\n\n\tlocalVarHTTPResponse, err := a.client.callAPI(r)\n\tif err != nil || localVarHTTPResponse == nil {\n\t\treturn localVarReturnValue, localVarHTTPResponse, err\n\t}\n\n\tlocalVarBody, err := _ioutil.ReadAll(localVarHTTPResponse.Body)\n\tlocalVarHTTPResponse.Body.Close()\n\tif err != nil {\n\t\treturn localVarReturnValue, localVarHTTPResponse, err\n\t}\n\n\tif 
localVarHTTPResponse.StatusCode >= 300 {\n\t\tnewErr := GenericOpenAPIError{\n\t\t\tbody: localVarBody,\n\t\t\terror: localVarHTTPResponse.Status,\n\t\t}\n\t\tif localVarHTTPResponse.StatusCode == 201 {\n\t\t\tvar v MicrosoftGraphOnenoteSection\n\t\t\terr = a.client.decode(&v, localVarBody, localVarHTTPResponse.Header.Get(\"Content-Type\"))\n\t\t\tif err != nil {\n\t\t\t\tnewErr.error = err.Error()\n\t\t\t\treturn localVarReturnValue, localVarHTTPResponse, newErr\n\t\t\t}\n\t\t\tnewErr.model = v\n\t\t\treturn localVarReturnValue, localVarHTTPResponse, newErr\n\t\t}\n\t\tif localVarHTTPResponse.StatusCode == 0 {\n\t\t\tvar v OdataError\n\t\t\terr = a.client.decode(&v, localVarBody, localVarHTTPResponse.Header.Get(\"Content-Type\"))\n\t\t\tif err != nil {\n\t\t\t\tnewErr.error = err.Error()\n\t\t\t\treturn localVarReturnValue, localVarHTTPResponse, newErr\n\t\t\t}\n\t\t\tnewErr.model = v\n\t\t\treturn localVarReturnValue, localVarHTTPResponse, newErr\n\t\t}\n\t\treturn localVarReturnValue, localVarHTTPResponse, newErr\n\t}\n\n\terr = a.client.decode(&localVarReturnValue, localVarBody, localVarHTTPResponse.Header.Get(\"Content-Type\"))\n\tif err != nil {\n\t\tnewErr := GenericOpenAPIError{\n\t\t\tbody: localVarBody,\n\t\t\terror: err.Error(),\n\t\t}\n\t\treturn localVarReturnValue, localVarHTTPResponse, newErr\n\t}\n\n\treturn localVarReturnValue, localVarHTTPResponse, nil\n}", "func (a *UsersOnenoteNotebooksOnenoteSectionApiService) UsersOnenoteNotebooksUpdateSections(ctx _context.Context, userId string, notebookId string, onenoteSectionId string, microsoftGraphOnenoteSection MicrosoftGraphOnenoteSection) (*_nethttp.Response, error) {\n\tvar (\n\t\tlocalVarHTTPMethod = _nethttp.MethodPatch\n\t\tlocalVarPostBody interface{}\n\t\tlocalVarFormFileName string\n\t\tlocalVarFileName string\n\t\tlocalVarFileBytes []byte\n\t)\n\n\t// create path and map variables\n\tlocalVarPath := a.client.cfg.BasePath + \"/users({user-id})/onenote/notebooks({notebook-id})/sections({onenoteSection-id})\"\n\tlocalVarPath = strings.Replace(localVarPath, \"{\"+\"user-id\"+\"}\", _neturl.QueryEscape(fmt.Sprintf(\"%v\", userId)), -1)\n\tlocalVarPath = strings.Replace(localVarPath, \"{\"+\"notebook-id\"+\"}\", _neturl.QueryEscape(fmt.Sprintf(\"%v\", notebookId)), -1)\n\tlocalVarPath = strings.Replace(localVarPath, \"{\"+\"onenoteSection-id\"+\"}\", _neturl.QueryEscape(fmt.Sprintf(\"%v\", onenoteSectionId)), -1)\n\n\tlocalVarHeaderParams := make(map[string]string)\n\tlocalVarQueryParams := _neturl.Values{}\n\tlocalVarFormParams := _neturl.Values{}\n\n\t// to determine the Content-Type header\n\tlocalVarHTTPContentTypes := []string{\"application/json\"}\n\n\t// set Content-Type header\n\tlocalVarHTTPContentType := selectHeaderContentType(localVarHTTPContentTypes)\n\tif localVarHTTPContentType != \"\" {\n\t\tlocalVarHeaderParams[\"Content-Type\"] = localVarHTTPContentType\n\t}\n\n\t// to determine the Accept header\n\tlocalVarHTTPHeaderAccepts := []string{\"application/json\"}\n\n\t// set Accept header\n\tlocalVarHTTPHeaderAccept := selectHeaderAccept(localVarHTTPHeaderAccepts)\n\tif localVarHTTPHeaderAccept != \"\" {\n\t\tlocalVarHeaderParams[\"Accept\"] = localVarHTTPHeaderAccept\n\t}\n\t// body params\n\tlocalVarPostBody = &microsoftGraphOnenoteSection\n\tr, err := a.client.prepareRequest(ctx, localVarPath, localVarHTTPMethod, localVarPostBody, localVarHeaderParams, localVarQueryParams, localVarFormParams, localVarFormFileName, localVarFileName, localVarFileBytes)\n\tif err != nil {\n\t\treturn nil, 
err\n\t}\n\n\tlocalVarHTTPResponse, err := a.client.callAPI(r)\n\tif err != nil || localVarHTTPResponse == nil {\n\t\treturn localVarHTTPResponse, err\n\t}\n\n\tlocalVarBody, err := _ioutil.ReadAll(localVarHTTPResponse.Body)\n\tlocalVarHTTPResponse.Body.Close()\n\tif err != nil {\n\t\treturn localVarHTTPResponse, err\n\t}\n\n\tif localVarHTTPResponse.StatusCode >= 300 {\n\t\tnewErr := GenericOpenAPIError{\n\t\t\tbody: localVarBody,\n\t\t\terror: localVarHTTPResponse.Status,\n\t\t}\n\t\tif localVarHTTPResponse.StatusCode == 0 {\n\t\t\tvar v OdataError\n\t\t\terr = a.client.decode(&v, localVarBody, localVarHTTPResponse.Header.Get(\"Content-Type\"))\n\t\t\tif err != nil {\n\t\t\t\tnewErr.error = err.Error()\n\t\t\t\treturn localVarHTTPResponse, newErr\n\t\t\t}\n\t\t\tnewErr.model = v\n\t\t\treturn localVarHTTPResponse, newErr\n\t\t}\n\t\treturn localVarHTTPResponse, newErr\n\t}\n\n\treturn localVarHTTPResponse, nil\n}", "func (o *ExpenseApplicationsIndexResponseExpenseApplications) HasSectionId() bool {\n\tif o != nil && o.SectionId.IsSet() {\n\t\treturn true\n\t}\n\n\treturn false\n}", "func (mr *MockAdminSectionRepoMockRecorder) UpdateSection(arg0, arg1 interface{}) *gomock.Call {\n\tmr.mock.ctrl.T.Helper()\n\treturn mr.mock.ctrl.RecordCallWithMethodType(mr.mock, \"UpdateSection\", reflect.TypeOf((*MockAdminSectionRepo)(nil).UpdateSection), arg0, arg1)\n}", "func (a *UsersOnenoteSectionGroupsOnenoteSectionApiService) UsersOnenoteSectionGroupsSectionsPagesParentNotebookUpdateSectionGroups(ctx _context.Context, userId string, sectionGroupId string, onenoteSectionId string, onenotePageId string, sectionGroupId1 string, microsoftGraphSectionGroup MicrosoftGraphSectionGroup) (*_nethttp.Response, error) {\n\tvar (\n\t\tlocalVarHTTPMethod = _nethttp.MethodPatch\n\t\tlocalVarPostBody interface{}\n\t\tlocalVarFormFileName string\n\t\tlocalVarFileName string\n\t\tlocalVarFileBytes []byte\n\t)\n\n\t// create path and map variables\n\tlocalVarPath := a.client.cfg.BasePath + \"/users({user-id})/onenote/sectionGroups({sectionGroup-id})/sections({onenoteSection-id})/pages({onenotePage-id})/parentNotebook/sectionGroups({sectionGroup-id1})\"\n\tlocalVarPath = strings.Replace(localVarPath, \"{\"+\"user-id\"+\"}\", _neturl.QueryEscape(fmt.Sprintf(\"%v\", userId)), -1)\n\tlocalVarPath = strings.Replace(localVarPath, \"{\"+\"sectionGroup-id\"+\"}\", _neturl.QueryEscape(fmt.Sprintf(\"%v\", sectionGroupId)), -1)\n\tlocalVarPath = strings.Replace(localVarPath, \"{\"+\"onenoteSection-id\"+\"}\", _neturl.QueryEscape(fmt.Sprintf(\"%v\", onenoteSectionId)), -1)\n\tlocalVarPath = strings.Replace(localVarPath, \"{\"+\"onenotePage-id\"+\"}\", _neturl.QueryEscape(fmt.Sprintf(\"%v\", onenotePageId)), -1)\n\tlocalVarPath = strings.Replace(localVarPath, \"{\"+\"sectionGroup-id1\"+\"}\", _neturl.QueryEscape(fmt.Sprintf(\"%v\", sectionGroupId1)), -1)\n\n\tlocalVarHeaderParams := make(map[string]string)\n\tlocalVarQueryParams := _neturl.Values{}\n\tlocalVarFormParams := _neturl.Values{}\n\n\t// to determine the Content-Type header\n\tlocalVarHTTPContentTypes := []string{\"application/json\"}\n\n\t// set Content-Type header\n\tlocalVarHTTPContentType := selectHeaderContentType(localVarHTTPContentTypes)\n\tif localVarHTTPContentType != \"\" {\n\t\tlocalVarHeaderParams[\"Content-Type\"] = localVarHTTPContentType\n\t}\n\n\t// to determine the Accept header\n\tlocalVarHTTPHeaderAccepts := []string{\"application/json\"}\n\n\t// set Accept header\n\tlocalVarHTTPHeaderAccept := selectHeaderAccept(localVarHTTPHeaderAccepts)\n\tif 
localVarHTTPHeaderAccept != \"\" {\n\t\tlocalVarHeaderParams[\"Accept\"] = localVarHTTPHeaderAccept\n\t}\n\t// body params\n\tlocalVarPostBody = &microsoftGraphSectionGroup\n\tr, err := a.client.prepareRequest(ctx, localVarPath, localVarHTTPMethod, localVarPostBody, localVarHeaderParams, localVarQueryParams, localVarFormParams, localVarFormFileName, localVarFileName, localVarFileBytes)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\n\tlocalVarHTTPResponse, err := a.client.callAPI(r)\n\tif err != nil || localVarHTTPResponse == nil {\n\t\treturn localVarHTTPResponse, err\n\t}\n\n\tlocalVarBody, err := _ioutil.ReadAll(localVarHTTPResponse.Body)\n\tlocalVarHTTPResponse.Body.Close()\n\tif err != nil {\n\t\treturn localVarHTTPResponse, err\n\t}\n\n\tif localVarHTTPResponse.StatusCode >= 300 {\n\t\tnewErr := GenericOpenAPIError{\n\t\t\tbody: localVarBody,\n\t\t\terror: localVarHTTPResponse.Status,\n\t\t}\n\t\tif localVarHTTPResponse.StatusCode == 0 {\n\t\t\tvar v OdataError\n\t\t\terr = a.client.decode(&v, localVarBody, localVarHTTPResponse.Header.Get(\"Content-Type\"))\n\t\t\tif err != nil {\n\t\t\t\tnewErr.error = err.Error()\n\t\t\t\treturn localVarHTTPResponse, newErr\n\t\t\t}\n\t\t\tnewErr.model = v\n\t\t\treturn localVarHTTPResponse, newErr\n\t\t}\n\t\treturn localVarHTTPResponse, newErr\n\t}\n\n\treturn localVarHTTPResponse, nil\n}", "func (e UpsertEventRequest_OptionValidationError) Cause() error { return e.cause }", "func (e GetEventByIDResponse_SectionValidationError) Reason() string { return e.reason }", "func (o *ExpenseApplicationsIndexResponseExpenseApplications) SetSectionId(v int32) {\n\to.SectionId.Set(&v)\n}", "func (a *UsersOnenoteNotebooksOnenoteSectionApiService) UsersOnenoteNotebooksSectionsPagesUpdateParentSection(ctx _context.Context, userId string, notebookId string, onenoteSectionId string, onenotePageId string, microsoftGraphOnenoteSection MicrosoftGraphOnenoteSection) (*_nethttp.Response, error) {\n\tvar (\n\t\tlocalVarHTTPMethod = _nethttp.MethodPatch\n\t\tlocalVarPostBody interface{}\n\t\tlocalVarFormFileName string\n\t\tlocalVarFileName string\n\t\tlocalVarFileBytes []byte\n\t)\n\n\t// create path and map variables\n\tlocalVarPath := a.client.cfg.BasePath + \"/users({user-id})/onenote/notebooks({notebook-id})/sections({onenoteSection-id})/pages({onenotePage-id})/parentSection\"\n\tlocalVarPath = strings.Replace(localVarPath, \"{\"+\"user-id\"+\"}\", _neturl.QueryEscape(fmt.Sprintf(\"%v\", userId)), -1)\n\tlocalVarPath = strings.Replace(localVarPath, \"{\"+\"notebook-id\"+\"}\", _neturl.QueryEscape(fmt.Sprintf(\"%v\", notebookId)), -1)\n\tlocalVarPath = strings.Replace(localVarPath, \"{\"+\"onenoteSection-id\"+\"}\", _neturl.QueryEscape(fmt.Sprintf(\"%v\", onenoteSectionId)), -1)\n\tlocalVarPath = strings.Replace(localVarPath, \"{\"+\"onenotePage-id\"+\"}\", _neturl.QueryEscape(fmt.Sprintf(\"%v\", onenotePageId)), -1)\n\n\tlocalVarHeaderParams := make(map[string]string)\n\tlocalVarQueryParams := _neturl.Values{}\n\tlocalVarFormParams := _neturl.Values{}\n\n\t// to determine the Content-Type header\n\tlocalVarHTTPContentTypes := []string{\"application/json\"}\n\n\t// set Content-Type header\n\tlocalVarHTTPContentType := selectHeaderContentType(localVarHTTPContentTypes)\n\tif localVarHTTPContentType != \"\" {\n\t\tlocalVarHeaderParams[\"Content-Type\"] = localVarHTTPContentType\n\t}\n\n\t// to determine the Accept header\n\tlocalVarHTTPHeaderAccepts := []string{\"application/json\"}\n\n\t// set Accept header\n\tlocalVarHTTPHeaderAccept := 
selectHeaderAccept(localVarHTTPHeaderAccepts)\n\tif localVarHTTPHeaderAccept != \"\" {\n\t\tlocalVarHeaderParams[\"Accept\"] = localVarHTTPHeaderAccept\n\t}\n\t// body params\n\tlocalVarPostBody = &microsoftGraphOnenoteSection\n\tr, err := a.client.prepareRequest(ctx, localVarPath, localVarHTTPMethod, localVarPostBody, localVarHeaderParams, localVarQueryParams, localVarFormParams, localVarFormFileName, localVarFileName, localVarFileBytes)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\n\tlocalVarHTTPResponse, err := a.client.callAPI(r)\n\tif err != nil || localVarHTTPResponse == nil {\n\t\treturn localVarHTTPResponse, err\n\t}\n\n\tlocalVarBody, err := _ioutil.ReadAll(localVarHTTPResponse.Body)\n\tlocalVarHTTPResponse.Body.Close()\n\tif err != nil {\n\t\treturn localVarHTTPResponse, err\n\t}\n\n\tif localVarHTTPResponse.StatusCode >= 300 {\n\t\tnewErr := GenericOpenAPIError{\n\t\t\tbody: localVarBody,\n\t\t\terror: localVarHTTPResponse.Status,\n\t\t}\n\t\tif localVarHTTPResponse.StatusCode == 0 {\n\t\t\tvar v OdataError\n\t\t\terr = a.client.decode(&v, localVarBody, localVarHTTPResponse.Header.Get(\"Content-Type\"))\n\t\t\tif err != nil {\n\t\t\t\tnewErr.error = err.Error()\n\t\t\t\treturn localVarHTTPResponse, newErr\n\t\t\t}\n\t\t\tnewErr.model = v\n\t\t\treturn localVarHTTPResponse, newErr\n\t\t}\n\t\treturn localVarHTTPResponse, newErr\n\t}\n\n\treturn localVarHTTPResponse, nil\n}", "func (a *GroupsOnenotePagesOnenoteSectionApiService) GroupsOnenotePagesParentSectionParentNotebookUpdateSections(ctx _context.Context, groupId string, onenotePageId string, onenoteSectionId string, microsoftGraphOnenoteSection MicrosoftGraphOnenoteSection) (*_nethttp.Response, error) {\n\tvar (\n\t\tlocalVarHTTPMethod = _nethttp.MethodPatch\n\t\tlocalVarPostBody interface{}\n\t\tlocalVarFormFileName string\n\t\tlocalVarFileName string\n\t\tlocalVarFileBytes []byte\n\t)\n\n\t// create path and map variables\n\tlocalVarPath := a.client.cfg.BasePath + \"/groups({group-id})/onenote/pages({onenotePage-id})/parentSection/parentNotebook/sections({onenoteSection-id})\"\n\tlocalVarPath = strings.Replace(localVarPath, \"{\"+\"group-id\"+\"}\", _neturl.QueryEscape(fmt.Sprintf(\"%v\", groupId)), -1)\n\tlocalVarPath = strings.Replace(localVarPath, \"{\"+\"onenotePage-id\"+\"}\", _neturl.QueryEscape(fmt.Sprintf(\"%v\", onenotePageId)), -1)\n\tlocalVarPath = strings.Replace(localVarPath, \"{\"+\"onenoteSection-id\"+\"}\", _neturl.QueryEscape(fmt.Sprintf(\"%v\", onenoteSectionId)), -1)\n\n\tlocalVarHeaderParams := make(map[string]string)\n\tlocalVarQueryParams := _neturl.Values{}\n\tlocalVarFormParams := _neturl.Values{}\n\n\t// to determine the Content-Type header\n\tlocalVarHTTPContentTypes := []string{\"application/json\"}\n\n\t// set Content-Type header\n\tlocalVarHTTPContentType := selectHeaderContentType(localVarHTTPContentTypes)\n\tif localVarHTTPContentType != \"\" {\n\t\tlocalVarHeaderParams[\"Content-Type\"] = localVarHTTPContentType\n\t}\n\n\t// to determine the Accept header\n\tlocalVarHTTPHeaderAccepts := []string{\"application/json\"}\n\n\t// set Accept header\n\tlocalVarHTTPHeaderAccept := selectHeaderAccept(localVarHTTPHeaderAccepts)\n\tif localVarHTTPHeaderAccept != \"\" {\n\t\tlocalVarHeaderParams[\"Accept\"] = localVarHTTPHeaderAccept\n\t}\n\t// body params\n\tlocalVarPostBody = &microsoftGraphOnenoteSection\n\tr, err := a.client.prepareRequest(ctx, localVarPath, localVarHTTPMethod, localVarPostBody, localVarHeaderParams, localVarQueryParams, localVarFormParams, localVarFormFileName, localVarFileName, 
localVarFileBytes)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\n\tlocalVarHTTPResponse, err := a.client.callAPI(r)\n\tif err != nil || localVarHTTPResponse == nil {\n\t\treturn localVarHTTPResponse, err\n\t}\n\n\tlocalVarBody, err := _ioutil.ReadAll(localVarHTTPResponse.Body)\n\tlocalVarHTTPResponse.Body.Close()\n\tif err != nil {\n\t\treturn localVarHTTPResponse, err\n\t}\n\n\tif localVarHTTPResponse.StatusCode >= 300 {\n\t\tnewErr := GenericOpenAPIError{\n\t\t\tbody: localVarBody,\n\t\t\terror: localVarHTTPResponse.Status,\n\t\t}\n\t\tif localVarHTTPResponse.StatusCode == 0 {\n\t\t\tvar v OdataError\n\t\t\terr = a.client.decode(&v, localVarBody, localVarHTTPResponse.Header.Get(\"Content-Type\"))\n\t\t\tif err != nil {\n\t\t\t\tnewErr.error = err.Error()\n\t\t\t\treturn localVarHTTPResponse, newErr\n\t\t\t}\n\t\t\tnewErr.model = v\n\t\t\treturn localVarHTTPResponse, newErr\n\t\t}\n\t\treturn localVarHTTPResponse, newErr\n\t}\n\n\treturn localVarHTTPResponse, nil\n}", "func (a *SitesOnenotePagesNotebookApiService) SitesOnenotePagesParentNotebookSectionsParentSectionGroupUpdateSections(ctx _context.Context, siteId string, onenotePageId string, onenoteSectionId string, onenoteSectionId1 string, microsoftGraphOnenoteSection MicrosoftGraphOnenoteSection) (*_nethttp.Response, error) {\n\tvar (\n\t\tlocalVarHTTPMethod = _nethttp.MethodPatch\n\t\tlocalVarPostBody interface{}\n\t\tlocalVarFormFileName string\n\t\tlocalVarFileName string\n\t\tlocalVarFileBytes []byte\n\t)\n\n\t// create path and map variables\n\tlocalVarPath := a.client.cfg.BasePath + \"/sites({site-id})/onenote/pages({onenotePage-id})/parentNotebook/sections({onenoteSection-id})/parentSectionGroup/sections({onenoteSection-id1})\"\n\tlocalVarPath = strings.Replace(localVarPath, \"{\"+\"site-id\"+\"}\", _neturl.QueryEscape(fmt.Sprintf(\"%v\", siteId)), -1)\n\tlocalVarPath = strings.Replace(localVarPath, \"{\"+\"onenotePage-id\"+\"}\", _neturl.QueryEscape(fmt.Sprintf(\"%v\", onenotePageId)), -1)\n\tlocalVarPath = strings.Replace(localVarPath, \"{\"+\"onenoteSection-id\"+\"}\", _neturl.QueryEscape(fmt.Sprintf(\"%v\", onenoteSectionId)), -1)\n\tlocalVarPath = strings.Replace(localVarPath, \"{\"+\"onenoteSection-id1\"+\"}\", _neturl.QueryEscape(fmt.Sprintf(\"%v\", onenoteSectionId1)), -1)\n\n\tlocalVarHeaderParams := make(map[string]string)\n\tlocalVarQueryParams := _neturl.Values{}\n\tlocalVarFormParams := _neturl.Values{}\n\n\t// to determine the Content-Type header\n\tlocalVarHTTPContentTypes := []string{\"application/json\"}\n\n\t// set Content-Type header\n\tlocalVarHTTPContentType := selectHeaderContentType(localVarHTTPContentTypes)\n\tif localVarHTTPContentType != \"\" {\n\t\tlocalVarHeaderParams[\"Content-Type\"] = localVarHTTPContentType\n\t}\n\n\t// to determine the Accept header\n\tlocalVarHTTPHeaderAccepts := []string{\"application/json\"}\n\n\t// set Accept header\n\tlocalVarHTTPHeaderAccept := selectHeaderAccept(localVarHTTPHeaderAccepts)\n\tif localVarHTTPHeaderAccept != \"\" {\n\t\tlocalVarHeaderParams[\"Accept\"] = localVarHTTPHeaderAccept\n\t}\n\t// body params\n\tlocalVarPostBody = &microsoftGraphOnenoteSection\n\tr, err := a.client.prepareRequest(ctx, localVarPath, localVarHTTPMethod, localVarPostBody, localVarHeaderParams, localVarQueryParams, localVarFormParams, localVarFormFileName, localVarFileName, localVarFileBytes)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\n\tlocalVarHTTPResponse, err := a.client.callAPI(r)\n\tif err != nil || localVarHTTPResponse == nil {\n\t\treturn localVarHTTPResponse, 
err\n\t}\n\n\tlocalVarBody, err := _ioutil.ReadAll(localVarHTTPResponse.Body)\n\tlocalVarHTTPResponse.Body.Close()\n\tif err != nil {\n\t\treturn localVarHTTPResponse, err\n\t}\n\n\tif localVarHTTPResponse.StatusCode >= 300 {\n\t\tnewErr := GenericOpenAPIError{\n\t\t\tbody: localVarBody,\n\t\t\terror: localVarHTTPResponse.Status,\n\t\t}\n\t\tif localVarHTTPResponse.StatusCode == 0 {\n\t\t\tvar v OdataError\n\t\t\terr = a.client.decode(&v, localVarBody, localVarHTTPResponse.Header.Get(\"Content-Type\"))\n\t\t\tif err != nil {\n\t\t\t\tnewErr.error = err.Error()\n\t\t\t\treturn localVarHTTPResponse, newErr\n\t\t\t}\n\t\t\tnewErr.model = v\n\t\t\treturn localVarHTTPResponse, newErr\n\t\t}\n\t\treturn localVarHTTPResponse, newErr\n\t}\n\n\treturn localVarHTTPResponse, nil\n}", "func (e GetEventByIDResponse_SectionValidationError) Key() bool { return e.key }", "func (a *GroupsOnenotePagesOnenoteSectionApiService) GroupsOnenotePagesParentSectionParentNotebookSectionGroupsUpdateSectionGroups(ctx _context.Context, groupId string, onenotePageId string, sectionGroupId string, sectionGroupId1 string, microsoftGraphSectionGroup MicrosoftGraphSectionGroup) (*_nethttp.Response, error) {\n\tvar (\n\t\tlocalVarHTTPMethod = _nethttp.MethodPatch\n\t\tlocalVarPostBody interface{}\n\t\tlocalVarFormFileName string\n\t\tlocalVarFileName string\n\t\tlocalVarFileBytes []byte\n\t)\n\n\t// create path and map variables\n\tlocalVarPath := a.client.cfg.BasePath + \"/groups({group-id})/onenote/pages({onenotePage-id})/parentSection/parentNotebook/sectionGroups({sectionGroup-id})/sectionGroups({sectionGroup-id1})\"\n\tlocalVarPath = strings.Replace(localVarPath, \"{\"+\"group-id\"+\"}\", _neturl.QueryEscape(fmt.Sprintf(\"%v\", groupId)), -1)\n\tlocalVarPath = strings.Replace(localVarPath, \"{\"+\"onenotePage-id\"+\"}\", _neturl.QueryEscape(fmt.Sprintf(\"%v\", onenotePageId)), -1)\n\tlocalVarPath = strings.Replace(localVarPath, \"{\"+\"sectionGroup-id\"+\"}\", _neturl.QueryEscape(fmt.Sprintf(\"%v\", sectionGroupId)), -1)\n\tlocalVarPath = strings.Replace(localVarPath, \"{\"+\"sectionGroup-id1\"+\"}\", _neturl.QueryEscape(fmt.Sprintf(\"%v\", sectionGroupId1)), -1)\n\n\tlocalVarHeaderParams := make(map[string]string)\n\tlocalVarQueryParams := _neturl.Values{}\n\tlocalVarFormParams := _neturl.Values{}\n\n\t// to determine the Content-Type header\n\tlocalVarHTTPContentTypes := []string{\"application/json\"}\n\n\t// set Content-Type header\n\tlocalVarHTTPContentType := selectHeaderContentType(localVarHTTPContentTypes)\n\tif localVarHTTPContentType != \"\" {\n\t\tlocalVarHeaderParams[\"Content-Type\"] = localVarHTTPContentType\n\t}\n\n\t// to determine the Accept header\n\tlocalVarHTTPHeaderAccepts := []string{\"application/json\"}\n\n\t// set Accept header\n\tlocalVarHTTPHeaderAccept := selectHeaderAccept(localVarHTTPHeaderAccepts)\n\tif localVarHTTPHeaderAccept != \"\" {\n\t\tlocalVarHeaderParams[\"Accept\"] = localVarHTTPHeaderAccept\n\t}\n\t// body params\n\tlocalVarPostBody = &microsoftGraphSectionGroup\n\tr, err := a.client.prepareRequest(ctx, localVarPath, localVarHTTPMethod, localVarPostBody, localVarHeaderParams, localVarQueryParams, localVarFormParams, localVarFormFileName, localVarFileName, localVarFileBytes)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\n\tlocalVarHTTPResponse, err := a.client.callAPI(r)\n\tif err != nil || localVarHTTPResponse == nil {\n\t\treturn localVarHTTPResponse, err\n\t}\n\n\tlocalVarBody, err := _ioutil.ReadAll(localVarHTTPResponse.Body)\n\tlocalVarHTTPResponse.Body.Close()\n\tif err != 
nil {\n\t\treturn localVarHTTPResponse, err\n\t}\n\n\tif localVarHTTPResponse.StatusCode >= 300 {\n\t\tnewErr := GenericOpenAPIError{\n\t\t\tbody: localVarBody,\n\t\t\terror: localVarHTTPResponse.Status,\n\t\t}\n\t\tif localVarHTTPResponse.StatusCode == 0 {\n\t\t\tvar v OdataError\n\t\t\terr = a.client.decode(&v, localVarBody, localVarHTTPResponse.Header.Get(\"Content-Type\"))\n\t\t\tif err != nil {\n\t\t\t\tnewErr.error = err.Error()\n\t\t\t\treturn localVarHTTPResponse, newErr\n\t\t\t}\n\t\t\tnewErr.model = v\n\t\t\treturn localVarHTTPResponse, newErr\n\t\t}\n\t\treturn localVarHTTPResponse, newErr\n\t}\n\n\treturn localVarHTTPResponse, nil\n}", "func (a *GroupsOnenotePagesNotebookApiService) GroupsOnenotePagesParentNotebookSectionsParentSectionGroupUpdateSectionGroups(ctx _context.Context, groupId string, onenotePageId string, onenoteSectionId string, sectionGroupId string, microsoftGraphSectionGroup MicrosoftGraphSectionGroup) (*_nethttp.Response, error) {\n\tvar (\n\t\tlocalVarHTTPMethod = _nethttp.MethodPatch\n\t\tlocalVarPostBody interface{}\n\t\tlocalVarFormFileName string\n\t\tlocalVarFileName string\n\t\tlocalVarFileBytes []byte\n\t)\n\n\t// create path and map variables\n\tlocalVarPath := a.client.cfg.BasePath + \"/groups({group-id})/onenote/pages({onenotePage-id})/parentNotebook/sections({onenoteSection-id})/parentSectionGroup/sectionGroups({sectionGroup-id})\"\n\tlocalVarPath = strings.Replace(localVarPath, \"{\"+\"group-id\"+\"}\", _neturl.QueryEscape(fmt.Sprintf(\"%v\", groupId)), -1)\n\tlocalVarPath = strings.Replace(localVarPath, \"{\"+\"onenotePage-id\"+\"}\", _neturl.QueryEscape(fmt.Sprintf(\"%v\", onenotePageId)), -1)\n\tlocalVarPath = strings.Replace(localVarPath, \"{\"+\"onenoteSection-id\"+\"}\", _neturl.QueryEscape(fmt.Sprintf(\"%v\", onenoteSectionId)), -1)\n\tlocalVarPath = strings.Replace(localVarPath, \"{\"+\"sectionGroup-id\"+\"}\", _neturl.QueryEscape(fmt.Sprintf(\"%v\", sectionGroupId)), -1)\n\n\tlocalVarHeaderParams := make(map[string]string)\n\tlocalVarQueryParams := _neturl.Values{}\n\tlocalVarFormParams := _neturl.Values{}\n\n\t// to determine the Content-Type header\n\tlocalVarHTTPContentTypes := []string{\"application/json\"}\n\n\t// set Content-Type header\n\tlocalVarHTTPContentType := selectHeaderContentType(localVarHTTPContentTypes)\n\tif localVarHTTPContentType != \"\" {\n\t\tlocalVarHeaderParams[\"Content-Type\"] = localVarHTTPContentType\n\t}\n\n\t// to determine the Accept header\n\tlocalVarHTTPHeaderAccepts := []string{\"application/json\"}\n\n\t// set Accept header\n\tlocalVarHTTPHeaderAccept := selectHeaderAccept(localVarHTTPHeaderAccepts)\n\tif localVarHTTPHeaderAccept != \"\" {\n\t\tlocalVarHeaderParams[\"Accept\"] = localVarHTTPHeaderAccept\n\t}\n\t// body params\n\tlocalVarPostBody = &microsoftGraphSectionGroup\n\tr, err := a.client.prepareRequest(ctx, localVarPath, localVarHTTPMethod, localVarPostBody, localVarHeaderParams, localVarQueryParams, localVarFormParams, localVarFormFileName, localVarFileName, localVarFileBytes)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\n\tlocalVarHTTPResponse, err := a.client.callAPI(r)\n\tif err != nil || localVarHTTPResponse == nil {\n\t\treturn localVarHTTPResponse, err\n\t}\n\n\tlocalVarBody, err := _ioutil.ReadAll(localVarHTTPResponse.Body)\n\tlocalVarHTTPResponse.Body.Close()\n\tif err != nil {\n\t\treturn localVarHTTPResponse, err\n\t}\n\n\tif localVarHTTPResponse.StatusCode >= 300 {\n\t\tnewErr := GenericOpenAPIError{\n\t\t\tbody: localVarBody,\n\t\t\terror: 
localVarHTTPResponse.Status,\n\t\t}\n\t\tif localVarHTTPResponse.StatusCode == 0 {\n\t\t\tvar v OdataError\n\t\t\terr = a.client.decode(&v, localVarBody, localVarHTTPResponse.Header.Get(\"Content-Type\"))\n\t\t\tif err != nil {\n\t\t\t\tnewErr.error = err.Error()\n\t\t\t\treturn localVarHTTPResponse, newErr\n\t\t\t}\n\t\t\tnewErr.model = v\n\t\t\treturn localVarHTTPResponse, newErr\n\t\t}\n\t\treturn localVarHTTPResponse, newErr\n\t}\n\n\treturn localVarHTTPResponse, nil\n}", "func (a *GroupsOnenotePagesOnenoteSectionApiService) GroupsOnenotePagesParentSectionUpdateParentSectionGroup(ctx _context.Context, groupId string, onenotePageId string, microsoftGraphSectionGroup MicrosoftGraphSectionGroup) (*_nethttp.Response, error) {\n\tvar (\n\t\tlocalVarHTTPMethod = _nethttp.MethodPatch\n\t\tlocalVarPostBody interface{}\n\t\tlocalVarFormFileName string\n\t\tlocalVarFileName string\n\t\tlocalVarFileBytes []byte\n\t)\n\n\t// create path and map variables\n\tlocalVarPath := a.client.cfg.BasePath + \"/groups({group-id})/onenote/pages({onenotePage-id})/parentSection/parentSectionGroup\"\n\tlocalVarPath = strings.Replace(localVarPath, \"{\"+\"group-id\"+\"}\", _neturl.QueryEscape(fmt.Sprintf(\"%v\", groupId)), -1)\n\tlocalVarPath = strings.Replace(localVarPath, \"{\"+\"onenotePage-id\"+\"}\", _neturl.QueryEscape(fmt.Sprintf(\"%v\", onenotePageId)), -1)\n\n\tlocalVarHeaderParams := make(map[string]string)\n\tlocalVarQueryParams := _neturl.Values{}\n\tlocalVarFormParams := _neturl.Values{}\n\n\t// to determine the Content-Type header\n\tlocalVarHTTPContentTypes := []string{\"application/json\"}\n\n\t// set Content-Type header\n\tlocalVarHTTPContentType := selectHeaderContentType(localVarHTTPContentTypes)\n\tif localVarHTTPContentType != \"\" {\n\t\tlocalVarHeaderParams[\"Content-Type\"] = localVarHTTPContentType\n\t}\n\n\t// to determine the Accept header\n\tlocalVarHTTPHeaderAccepts := []string{\"application/json\"}\n\n\t// set Accept header\n\tlocalVarHTTPHeaderAccept := selectHeaderAccept(localVarHTTPHeaderAccepts)\n\tif localVarHTTPHeaderAccept != \"\" {\n\t\tlocalVarHeaderParams[\"Accept\"] = localVarHTTPHeaderAccept\n\t}\n\t// body params\n\tlocalVarPostBody = &microsoftGraphSectionGroup\n\tr, err := a.client.prepareRequest(ctx, localVarPath, localVarHTTPMethod, localVarPostBody, localVarHeaderParams, localVarQueryParams, localVarFormParams, localVarFormFileName, localVarFileName, localVarFileBytes)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\n\tlocalVarHTTPResponse, err := a.client.callAPI(r)\n\tif err != nil || localVarHTTPResponse == nil {\n\t\treturn localVarHTTPResponse, err\n\t}\n\n\tlocalVarBody, err := _ioutil.ReadAll(localVarHTTPResponse.Body)\n\tlocalVarHTTPResponse.Body.Close()\n\tif err != nil {\n\t\treturn localVarHTTPResponse, err\n\t}\n\n\tif localVarHTTPResponse.StatusCode >= 300 {\n\t\tnewErr := GenericOpenAPIError{\n\t\t\tbody: localVarBody,\n\t\t\terror: localVarHTTPResponse.Status,\n\t\t}\n\t\tif localVarHTTPResponse.StatusCode == 0 {\n\t\t\tvar v OdataError\n\t\t\terr = a.client.decode(&v, localVarBody, localVarHTTPResponse.Header.Get(\"Content-Type\"))\n\t\t\tif err != nil {\n\t\t\t\tnewErr.error = err.Error()\n\t\t\t\treturn localVarHTTPResponse, newErr\n\t\t\t}\n\t\t\tnewErr.model = v\n\t\t\treturn localVarHTTPResponse, newErr\n\t\t}\n\t\treturn localVarHTTPResponse, newErr\n\t}\n\n\treturn localVarHTTPResponse, nil\n}", "func (a *GroupsOnenotePagesNotebookApiService) GroupsOnenotePagesParentNotebookSectionGroupsSectionsUpdateParentSectionGroup(ctx _context.Context, 
groupId string, onenotePageId string, sectionGroupId string, onenoteSectionId string, microsoftGraphSectionGroup MicrosoftGraphSectionGroup) (*_nethttp.Response, error) {\n\tvar (\n\t\tlocalVarHTTPMethod = _nethttp.MethodPatch\n\t\tlocalVarPostBody interface{}\n\t\tlocalVarFormFileName string\n\t\tlocalVarFileName string\n\t\tlocalVarFileBytes []byte\n\t)\n\n\t// create path and map variables\n\tlocalVarPath := a.client.cfg.BasePath + \"/groups({group-id})/onenote/pages({onenotePage-id})/parentNotebook/sectionGroups({sectionGroup-id})/sections({onenoteSection-id})/parentSectionGroup\"\n\tlocalVarPath = strings.Replace(localVarPath, \"{\"+\"group-id\"+\"}\", _neturl.QueryEscape(fmt.Sprintf(\"%v\", groupId)), -1)\n\tlocalVarPath = strings.Replace(localVarPath, \"{\"+\"onenotePage-id\"+\"}\", _neturl.QueryEscape(fmt.Sprintf(\"%v\", onenotePageId)), -1)\n\tlocalVarPath = strings.Replace(localVarPath, \"{\"+\"sectionGroup-id\"+\"}\", _neturl.QueryEscape(fmt.Sprintf(\"%v\", sectionGroupId)), -1)\n\tlocalVarPath = strings.Replace(localVarPath, \"{\"+\"onenoteSection-id\"+\"}\", _neturl.QueryEscape(fmt.Sprintf(\"%v\", onenoteSectionId)), -1)\n\n\tlocalVarHeaderParams := make(map[string]string)\n\tlocalVarQueryParams := _neturl.Values{}\n\tlocalVarFormParams := _neturl.Values{}\n\n\t// to determine the Content-Type header\n\tlocalVarHTTPContentTypes := []string{\"application/json\"}\n\n\t// set Content-Type header\n\tlocalVarHTTPContentType := selectHeaderContentType(localVarHTTPContentTypes)\n\tif localVarHTTPContentType != \"\" {\n\t\tlocalVarHeaderParams[\"Content-Type\"] = localVarHTTPContentType\n\t}\n\n\t// to determine the Accept header\n\tlocalVarHTTPHeaderAccepts := []string{\"application/json\"}\n\n\t// set Accept header\n\tlocalVarHTTPHeaderAccept := selectHeaderAccept(localVarHTTPHeaderAccepts)\n\tif localVarHTTPHeaderAccept != \"\" {\n\t\tlocalVarHeaderParams[\"Accept\"] = localVarHTTPHeaderAccept\n\t}\n\t// body params\n\tlocalVarPostBody = &microsoftGraphSectionGroup\n\tr, err := a.client.prepareRequest(ctx, localVarPath, localVarHTTPMethod, localVarPostBody, localVarHeaderParams, localVarQueryParams, localVarFormParams, localVarFormFileName, localVarFileName, localVarFileBytes)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\n\tlocalVarHTTPResponse, err := a.client.callAPI(r)\n\tif err != nil || localVarHTTPResponse == nil {\n\t\treturn localVarHTTPResponse, err\n\t}\n\n\tlocalVarBody, err := _ioutil.ReadAll(localVarHTTPResponse.Body)\n\tlocalVarHTTPResponse.Body.Close()\n\tif err != nil {\n\t\treturn localVarHTTPResponse, err\n\t}\n\n\tif localVarHTTPResponse.StatusCode >= 300 {\n\t\tnewErr := GenericOpenAPIError{\n\t\t\tbody: localVarBody,\n\t\t\terror: localVarHTTPResponse.Status,\n\t\t}\n\t\tif localVarHTTPResponse.StatusCode == 0 {\n\t\t\tvar v OdataError\n\t\t\terr = a.client.decode(&v, localVarBody, localVarHTTPResponse.Header.Get(\"Content-Type\"))\n\t\t\tif err != nil {\n\t\t\t\tnewErr.error = err.Error()\n\t\t\t\treturn localVarHTTPResponse, newErr\n\t\t\t}\n\t\t\tnewErr.model = v\n\t\t\treturn localVarHTTPResponse, newErr\n\t\t}\n\t\treturn localVarHTTPResponse, newErr\n\t}\n\n\treturn localVarHTTPResponse, nil\n}", "func (a *UsersOnenoteSectionGroupsOnenoteSectionApiService) UsersOnenoteSectionGroupsSectionsUpdateParentNotebook(ctx _context.Context, userId string, sectionGroupId string, onenoteSectionId string, microsoftGraphNotebook MicrosoftGraphNotebook) (*_nethttp.Response, error) {\n\tvar (\n\t\tlocalVarHTTPMethod = _nethttp.MethodPatch\n\t\tlocalVarPostBody 
interface{}\n\t\tlocalVarFormFileName string\n\t\tlocalVarFileName string\n\t\tlocalVarFileBytes []byte\n\t)\n\n\t// create path and map variables\n\tlocalVarPath := a.client.cfg.BasePath + \"/users({user-id})/onenote/sectionGroups({sectionGroup-id})/sections({onenoteSection-id})/parentNotebook\"\n\tlocalVarPath = strings.Replace(localVarPath, \"{\"+\"user-id\"+\"}\", _neturl.QueryEscape(fmt.Sprintf(\"%v\", userId)), -1)\n\tlocalVarPath = strings.Replace(localVarPath, \"{\"+\"sectionGroup-id\"+\"}\", _neturl.QueryEscape(fmt.Sprintf(\"%v\", sectionGroupId)), -1)\n\tlocalVarPath = strings.Replace(localVarPath, \"{\"+\"onenoteSection-id\"+\"}\", _neturl.QueryEscape(fmt.Sprintf(\"%v\", onenoteSectionId)), -1)\n\n\tlocalVarHeaderParams := make(map[string]string)\n\tlocalVarQueryParams := _neturl.Values{}\n\tlocalVarFormParams := _neturl.Values{}\n\n\t// to determine the Content-Type header\n\tlocalVarHTTPContentTypes := []string{\"application/json\"}\n\n\t// set Content-Type header\n\tlocalVarHTTPContentType := selectHeaderContentType(localVarHTTPContentTypes)\n\tif localVarHTTPContentType != \"\" {\n\t\tlocalVarHeaderParams[\"Content-Type\"] = localVarHTTPContentType\n\t}\n\n\t// to determine the Accept header\n\tlocalVarHTTPHeaderAccepts := []string{\"application/json\"}\n\n\t// set Accept header\n\tlocalVarHTTPHeaderAccept := selectHeaderAccept(localVarHTTPHeaderAccepts)\n\tif localVarHTTPHeaderAccept != \"\" {\n\t\tlocalVarHeaderParams[\"Accept\"] = localVarHTTPHeaderAccept\n\t}\n\t// body params\n\tlocalVarPostBody = &microsoftGraphNotebook\n\tr, err := a.client.prepareRequest(ctx, localVarPath, localVarHTTPMethod, localVarPostBody, localVarHeaderParams, localVarQueryParams, localVarFormParams, localVarFormFileName, localVarFileName, localVarFileBytes)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\n\tlocalVarHTTPResponse, err := a.client.callAPI(r)\n\tif err != nil || localVarHTTPResponse == nil {\n\t\treturn localVarHTTPResponse, err\n\t}\n\n\tlocalVarBody, err := _ioutil.ReadAll(localVarHTTPResponse.Body)\n\tlocalVarHTTPResponse.Body.Close()\n\tif err != nil {\n\t\treturn localVarHTTPResponse, err\n\t}\n\n\tif localVarHTTPResponse.StatusCode >= 300 {\n\t\tnewErr := GenericOpenAPIError{\n\t\t\tbody: localVarBody,\n\t\t\terror: localVarHTTPResponse.Status,\n\t\t}\n\t\tif localVarHTTPResponse.StatusCode == 0 {\n\t\t\tvar v OdataError\n\t\t\terr = a.client.decode(&v, localVarBody, localVarHTTPResponse.Header.Get(\"Content-Type\"))\n\t\t\tif err != nil {\n\t\t\t\tnewErr.error = err.Error()\n\t\t\t\treturn localVarHTTPResponse, newErr\n\t\t\t}\n\t\t\tnewErr.model = v\n\t\t\treturn localVarHTTPResponse, newErr\n\t\t}\n\t\treturn localVarHTTPResponse, newErr\n\t}\n\n\treturn localVarHTTPResponse, nil\n}", "func (e UpsertEventResponseValidationError) Cause() error { return e.cause }", "func (in *Section) DeepCopy() *Section {\n\tif in == nil {\n\t\treturn nil\n\t}\n\tout := new(Section)\n\tin.DeepCopyInto(out)\n\treturn out\n}", "func (s *EvaluationFormItem) SetSection(v *EvaluationFormSection) *EvaluationFormItem {\n\ts.Section = v\n\treturn s\n}", "func (a *GroupsOnenoteNotebooksSectionGroupApiService) GroupsOnenoteNotebooksSectionGroupsUpdateParentSectionGroup(ctx _context.Context, groupId string, notebookId string, sectionGroupId string, microsoftGraphSectionGroup MicrosoftGraphSectionGroup) (*_nethttp.Response, error) {\n\tvar (\n\t\tlocalVarHTTPMethod = _nethttp.MethodPatch\n\t\tlocalVarPostBody interface{}\n\t\tlocalVarFormFileName string\n\t\tlocalVarFileName 
string\n\t\tlocalVarFileBytes []byte\n\t)\n\n\t// create path and map variables\n\tlocalVarPath := a.client.cfg.BasePath + \"/groups({group-id})/onenote/notebooks({notebook-id})/sectionGroups({sectionGroup-id})/parentSectionGroup\"\n\tlocalVarPath = strings.Replace(localVarPath, \"{\"+\"group-id\"+\"}\", _neturl.QueryEscape(fmt.Sprintf(\"%v\", groupId)), -1)\n\tlocalVarPath = strings.Replace(localVarPath, \"{\"+\"notebook-id\"+\"}\", _neturl.QueryEscape(fmt.Sprintf(\"%v\", notebookId)), -1)\n\tlocalVarPath = strings.Replace(localVarPath, \"{\"+\"sectionGroup-id\"+\"}\", _neturl.QueryEscape(fmt.Sprintf(\"%v\", sectionGroupId)), -1)\n\n\tlocalVarHeaderParams := make(map[string]string)\n\tlocalVarQueryParams := _neturl.Values{}\n\tlocalVarFormParams := _neturl.Values{}\n\n\t// to determine the Content-Type header\n\tlocalVarHTTPContentTypes := []string{\"application/json\"}\n\n\t// set Content-Type header\n\tlocalVarHTTPContentType := selectHeaderContentType(localVarHTTPContentTypes)\n\tif localVarHTTPContentType != \"\" {\n\t\tlocalVarHeaderParams[\"Content-Type\"] = localVarHTTPContentType\n\t}\n\n\t// to determine the Accept header\n\tlocalVarHTTPHeaderAccepts := []string{\"application/json\"}\n\n\t// set Accept header\n\tlocalVarHTTPHeaderAccept := selectHeaderAccept(localVarHTTPHeaderAccepts)\n\tif localVarHTTPHeaderAccept != \"\" {\n\t\tlocalVarHeaderParams[\"Accept\"] = localVarHTTPHeaderAccept\n\t}\n\t// body params\n\tlocalVarPostBody = &microsoftGraphSectionGroup\n\tr, err := a.client.prepareRequest(ctx, localVarPath, localVarHTTPMethod, localVarPostBody, localVarHeaderParams, localVarQueryParams, localVarFormParams, localVarFormFileName, localVarFileName, localVarFileBytes)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\n\tlocalVarHTTPResponse, err := a.client.callAPI(r)\n\tif err != nil || localVarHTTPResponse == nil {\n\t\treturn localVarHTTPResponse, err\n\t}\n\n\tlocalVarBody, err := _ioutil.ReadAll(localVarHTTPResponse.Body)\n\tlocalVarHTTPResponse.Body.Close()\n\tif err != nil {\n\t\treturn localVarHTTPResponse, err\n\t}\n\n\tif localVarHTTPResponse.StatusCode >= 300 {\n\t\tnewErr := GenericOpenAPIError{\n\t\t\tbody: localVarBody,\n\t\t\terror: localVarHTTPResponse.Status,\n\t\t}\n\t\tif localVarHTTPResponse.StatusCode == 0 {\n\t\t\tvar v OdataError\n\t\t\terr = a.client.decode(&v, localVarBody, localVarHTTPResponse.Header.Get(\"Content-Type\"))\n\t\t\tif err != nil {\n\t\t\t\tnewErr.error = err.Error()\n\t\t\t\treturn localVarHTTPResponse, newErr\n\t\t\t}\n\t\t\tnewErr.model = v\n\t\t\treturn localVarHTTPResponse, newErr\n\t\t}\n\t\treturn localVarHTTPResponse, newErr\n\t}\n\n\treturn localVarHTTPResponse, nil\n}", "func (a *GroupsOnenotePagesOnenoteSectionApiService) GroupsOnenotePagesParentSectionParentSectionGroupUpdateParentSectionGroup(ctx _context.Context, groupId string, onenotePageId string, microsoftGraphSectionGroup MicrosoftGraphSectionGroup) (*_nethttp.Response, error) {\n\tvar (\n\t\tlocalVarHTTPMethod = _nethttp.MethodPatch\n\t\tlocalVarPostBody interface{}\n\t\tlocalVarFormFileName string\n\t\tlocalVarFileName string\n\t\tlocalVarFileBytes []byte\n\t)\n\n\t// create path and map variables\n\tlocalVarPath := a.client.cfg.BasePath + \"/groups({group-id})/onenote/pages({onenotePage-id})/parentSection/parentSectionGroup/parentSectionGroup\"\n\tlocalVarPath = strings.Replace(localVarPath, \"{\"+\"group-id\"+\"}\", _neturl.QueryEscape(fmt.Sprintf(\"%v\", groupId)), -1)\n\tlocalVarPath = strings.Replace(localVarPath, \"{\"+\"onenotePage-id\"+\"}\", 
_neturl.QueryEscape(fmt.Sprintf(\"%v\", onenotePageId)), -1)\n\n\tlocalVarHeaderParams := make(map[string]string)\n\tlocalVarQueryParams := _neturl.Values{}\n\tlocalVarFormParams := _neturl.Values{}\n\n\t// to determine the Content-Type header\n\tlocalVarHTTPContentTypes := []string{\"application/json\"}\n\n\t// set Content-Type header\n\tlocalVarHTTPContentType := selectHeaderContentType(localVarHTTPContentTypes)\n\tif localVarHTTPContentType != \"\" {\n\t\tlocalVarHeaderParams[\"Content-Type\"] = localVarHTTPContentType\n\t}\n\n\t// to determine the Accept header\n\tlocalVarHTTPHeaderAccepts := []string{\"application/json\"}\n\n\t// set Accept header\n\tlocalVarHTTPHeaderAccept := selectHeaderAccept(localVarHTTPHeaderAccepts)\n\tif localVarHTTPHeaderAccept != \"\" {\n\t\tlocalVarHeaderParams[\"Accept\"] = localVarHTTPHeaderAccept\n\t}\n\t// body params\n\tlocalVarPostBody = &microsoftGraphSectionGroup\n\tr, err := a.client.prepareRequest(ctx, localVarPath, localVarHTTPMethod, localVarPostBody, localVarHeaderParams, localVarQueryParams, localVarFormParams, localVarFormFileName, localVarFileName, localVarFileBytes)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\n\tlocalVarHTTPResponse, err := a.client.callAPI(r)\n\tif err != nil || localVarHTTPResponse == nil {\n\t\treturn localVarHTTPResponse, err\n\t}\n\n\tlocalVarBody, err := _ioutil.ReadAll(localVarHTTPResponse.Body)\n\tlocalVarHTTPResponse.Body.Close()\n\tif err != nil {\n\t\treturn localVarHTTPResponse, err\n\t}\n\n\tif localVarHTTPResponse.StatusCode >= 300 {\n\t\tnewErr := GenericOpenAPIError{\n\t\t\tbody: localVarBody,\n\t\t\terror: localVarHTTPResponse.Status,\n\t\t}\n\t\tif localVarHTTPResponse.StatusCode == 0 {\n\t\t\tvar v OdataError\n\t\t\terr = a.client.decode(&v, localVarBody, localVarHTTPResponse.Header.Get(\"Content-Type\"))\n\t\t\tif err != nil {\n\t\t\t\tnewErr.error = err.Error()\n\t\t\t\treturn localVarHTTPResponse, newErr\n\t\t\t}\n\t\t\tnewErr.model = v\n\t\t\treturn localVarHTTPResponse, newErr\n\t\t}\n\t\treturn localVarHTTPResponse, newErr\n\t}\n\n\treturn localVarHTTPResponse, nil\n}", "func (a *GroupsOnenoteNotebooksSectionGroupApiService) GroupsOnenoteNotebooksSectionGroupsUpdateSections(ctx _context.Context, groupId string, notebookId string, sectionGroupId string, onenoteSectionId string, microsoftGraphOnenoteSection MicrosoftGraphOnenoteSection) (*_nethttp.Response, error) {\n\tvar (\n\t\tlocalVarHTTPMethod = _nethttp.MethodPatch\n\t\tlocalVarPostBody interface{}\n\t\tlocalVarFormFileName string\n\t\tlocalVarFileName string\n\t\tlocalVarFileBytes []byte\n\t)\n\n\t// create path and map variables\n\tlocalVarPath := a.client.cfg.BasePath + \"/groups({group-id})/onenote/notebooks({notebook-id})/sectionGroups({sectionGroup-id})/sections({onenoteSection-id})\"\n\tlocalVarPath = strings.Replace(localVarPath, \"{\"+\"group-id\"+\"}\", _neturl.QueryEscape(fmt.Sprintf(\"%v\", groupId)), -1)\n\tlocalVarPath = strings.Replace(localVarPath, \"{\"+\"notebook-id\"+\"}\", _neturl.QueryEscape(fmt.Sprintf(\"%v\", notebookId)), -1)\n\tlocalVarPath = strings.Replace(localVarPath, \"{\"+\"sectionGroup-id\"+\"}\", _neturl.QueryEscape(fmt.Sprintf(\"%v\", sectionGroupId)), -1)\n\tlocalVarPath = strings.Replace(localVarPath, \"{\"+\"onenoteSection-id\"+\"}\", _neturl.QueryEscape(fmt.Sprintf(\"%v\", onenoteSectionId)), -1)\n\n\tlocalVarHeaderParams := make(map[string]string)\n\tlocalVarQueryParams := _neturl.Values{}\n\tlocalVarFormParams := _neturl.Values{}\n\n\t// to determine the Content-Type header\n\tlocalVarHTTPContentTypes 
:= []string{\"application/json\"}\n\n\t// set Content-Type header\n\tlocalVarHTTPContentType := selectHeaderContentType(localVarHTTPContentTypes)\n\tif localVarHTTPContentType != \"\" {\n\t\tlocalVarHeaderParams[\"Content-Type\"] = localVarHTTPContentType\n\t}\n\n\t// to determine the Accept header\n\tlocalVarHTTPHeaderAccepts := []string{\"application/json\"}\n\n\t// set Accept header\n\tlocalVarHTTPHeaderAccept := selectHeaderAccept(localVarHTTPHeaderAccepts)\n\tif localVarHTTPHeaderAccept != \"\" {\n\t\tlocalVarHeaderParams[\"Accept\"] = localVarHTTPHeaderAccept\n\t}\n\t// body params\n\tlocalVarPostBody = &microsoftGraphOnenoteSection\n\tr, err := a.client.prepareRequest(ctx, localVarPath, localVarHTTPMethod, localVarPostBody, localVarHeaderParams, localVarQueryParams, localVarFormParams, localVarFormFileName, localVarFileName, localVarFileBytes)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\n\tlocalVarHTTPResponse, err := a.client.callAPI(r)\n\tif err != nil || localVarHTTPResponse == nil {\n\t\treturn localVarHTTPResponse, err\n\t}\n\n\tlocalVarBody, err := _ioutil.ReadAll(localVarHTTPResponse.Body)\n\tlocalVarHTTPResponse.Body.Close()\n\tif err != nil {\n\t\treturn localVarHTTPResponse, err\n\t}\n\n\tif localVarHTTPResponse.StatusCode >= 300 {\n\t\tnewErr := GenericOpenAPIError{\n\t\t\tbody: localVarBody,\n\t\t\terror: localVarHTTPResponse.Status,\n\t\t}\n\t\tif localVarHTTPResponse.StatusCode == 0 {\n\t\t\tvar v OdataError\n\t\t\terr = a.client.decode(&v, localVarBody, localVarHTTPResponse.Header.Get(\"Content-Type\"))\n\t\t\tif err != nil {\n\t\t\t\tnewErr.error = err.Error()\n\t\t\t\treturn localVarHTTPResponse, newErr\n\t\t\t}\n\t\t\tnewErr.model = v\n\t\t\treturn localVarHTTPResponse, newErr\n\t\t}\n\t\treturn localVarHTTPResponse, newErr\n\t}\n\n\treturn localVarHTTPResponse, nil\n}", "func (a *MeOnenoteSectionsNotebookApiService) MeOnenoteSectionsParentNotebookSectionGroupsUpdateParentSectionGroup(ctx _context.Context, onenoteSectionId string, sectionGroupId string, microsoftGraphSectionGroup MicrosoftGraphSectionGroup) (*_nethttp.Response, error) {\n\tvar (\n\t\tlocalVarHTTPMethod = _nethttp.MethodPatch\n\t\tlocalVarPostBody interface{}\n\t\tlocalVarFormFileName string\n\t\tlocalVarFileName string\n\t\tlocalVarFileBytes []byte\n\t)\n\n\t// create path and map variables\n\tlocalVarPath := a.client.cfg.BasePath + \"/me/onenote/sections({onenoteSection-id})/parentNotebook/sectionGroups({sectionGroup-id})/parentSectionGroup\"\n\tlocalVarPath = strings.Replace(localVarPath, \"{\"+\"onenoteSection-id\"+\"}\", _neturl.QueryEscape(fmt.Sprintf(\"%v\", onenoteSectionId)), -1)\n\tlocalVarPath = strings.Replace(localVarPath, \"{\"+\"sectionGroup-id\"+\"}\", _neturl.QueryEscape(fmt.Sprintf(\"%v\", sectionGroupId)), -1)\n\n\tlocalVarHeaderParams := make(map[string]string)\n\tlocalVarQueryParams := _neturl.Values{}\n\tlocalVarFormParams := _neturl.Values{}\n\n\t// to determine the Content-Type header\n\tlocalVarHTTPContentTypes := []string{\"application/json\"}\n\n\t// set Content-Type header\n\tlocalVarHTTPContentType := selectHeaderContentType(localVarHTTPContentTypes)\n\tif localVarHTTPContentType != \"\" {\n\t\tlocalVarHeaderParams[\"Content-Type\"] = localVarHTTPContentType\n\t}\n\n\t// to determine the Accept header\n\tlocalVarHTTPHeaderAccepts := []string{\"application/json\"}\n\n\t// set Accept header\n\tlocalVarHTTPHeaderAccept := selectHeaderAccept(localVarHTTPHeaderAccepts)\n\tif localVarHTTPHeaderAccept != \"\" {\n\t\tlocalVarHeaderParams[\"Accept\"] = 
localVarHTTPHeaderAccept\n\t}\n\t// body params\n\tlocalVarPostBody = &microsoftGraphSectionGroup\n\tr, err := a.client.prepareRequest(ctx, localVarPath, localVarHTTPMethod, localVarPostBody, localVarHeaderParams, localVarQueryParams, localVarFormParams, localVarFormFileName, localVarFileName, localVarFileBytes)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\n\tlocalVarHTTPResponse, err := a.client.callAPI(r)\n\tif err != nil || localVarHTTPResponse == nil {\n\t\treturn localVarHTTPResponse, err\n\t}\n\n\tlocalVarBody, err := _ioutil.ReadAll(localVarHTTPResponse.Body)\n\tlocalVarHTTPResponse.Body.Close()\n\tif err != nil {\n\t\treturn localVarHTTPResponse, err\n\t}\n\n\tif localVarHTTPResponse.StatusCode >= 300 {\n\t\tnewErr := GenericOpenAPIError{\n\t\t\tbody: localVarBody,\n\t\t\terror: localVarHTTPResponse.Status,\n\t\t}\n\t\tif localVarHTTPResponse.StatusCode == 0 {\n\t\t\tvar v OdataError\n\t\t\terr = a.client.decode(&v, localVarBody, localVarHTTPResponse.Header.Get(\"Content-Type\"))\n\t\t\tif err != nil {\n\t\t\t\tnewErr.error = err.Error()\n\t\t\t\treturn localVarHTTPResponse, newErr\n\t\t\t}\n\t\t\tnewErr.model = v\n\t\t\treturn localVarHTTPResponse, newErr\n\t\t}\n\t\treturn localVarHTTPResponse, newErr\n\t}\n\n\treturn localVarHTTPResponse, nil\n}", "func (a *UsersOnenoteNotebooksOnenoteSectionApiService) UsersOnenoteNotebooksSectionsParentSectionGroupCreateSectionGroups(ctx _context.Context, userId string, notebookId string, onenoteSectionId string, microsoftGraphSectionGroup MicrosoftGraphSectionGroup) (MicrosoftGraphSectionGroup, *_nethttp.Response, error) {\n\tvar (\n\t\tlocalVarHTTPMethod = _nethttp.MethodPost\n\t\tlocalVarPostBody interface{}\n\t\tlocalVarFormFileName string\n\t\tlocalVarFileName string\n\t\tlocalVarFileBytes []byte\n\t\tlocalVarReturnValue MicrosoftGraphSectionGroup\n\t)\n\n\t// create path and map variables\n\tlocalVarPath := a.client.cfg.BasePath + \"/users({user-id})/onenote/notebooks({notebook-id})/sections({onenoteSection-id})/parentSectionGroup/sectionGroups\"\n\tlocalVarPath = strings.Replace(localVarPath, \"{\"+\"user-id\"+\"}\", _neturl.QueryEscape(fmt.Sprintf(\"%v\", userId)), -1)\n\tlocalVarPath = strings.Replace(localVarPath, \"{\"+\"notebook-id\"+\"}\", _neturl.QueryEscape(fmt.Sprintf(\"%v\", notebookId)), -1)\n\tlocalVarPath = strings.Replace(localVarPath, \"{\"+\"onenoteSection-id\"+\"}\", _neturl.QueryEscape(fmt.Sprintf(\"%v\", onenoteSectionId)), -1)\n\n\tlocalVarHeaderParams := make(map[string]string)\n\tlocalVarQueryParams := _neturl.Values{}\n\tlocalVarFormParams := _neturl.Values{}\n\n\t// to determine the Content-Type header\n\tlocalVarHTTPContentTypes := []string{\"application/json\"}\n\n\t// set Content-Type header\n\tlocalVarHTTPContentType := selectHeaderContentType(localVarHTTPContentTypes)\n\tif localVarHTTPContentType != \"\" {\n\t\tlocalVarHeaderParams[\"Content-Type\"] = localVarHTTPContentType\n\t}\n\n\t// to determine the Accept header\n\tlocalVarHTTPHeaderAccepts := []string{\"application/json\"}\n\n\t// set Accept header\n\tlocalVarHTTPHeaderAccept := selectHeaderAccept(localVarHTTPHeaderAccepts)\n\tif localVarHTTPHeaderAccept != \"\" {\n\t\tlocalVarHeaderParams[\"Accept\"] = localVarHTTPHeaderAccept\n\t}\n\t// body params\n\tlocalVarPostBody = &microsoftGraphSectionGroup\n\tr, err := a.client.prepareRequest(ctx, localVarPath, localVarHTTPMethod, localVarPostBody, localVarHeaderParams, localVarQueryParams, localVarFormParams, localVarFormFileName, localVarFileName, localVarFileBytes)\n\tif err != nil {\n\t\treturn 
localVarReturnValue, nil, err\n\t}\n\n\tlocalVarHTTPResponse, err := a.client.callAPI(r)\n\tif err != nil || localVarHTTPResponse == nil {\n\t\treturn localVarReturnValue, localVarHTTPResponse, err\n\t}\n\n\tlocalVarBody, err := _ioutil.ReadAll(localVarHTTPResponse.Body)\n\tlocalVarHTTPResponse.Body.Close()\n\tif err != nil {\n\t\treturn localVarReturnValue, localVarHTTPResponse, err\n\t}\n\n\tif localVarHTTPResponse.StatusCode >= 300 {\n\t\tnewErr := GenericOpenAPIError{\n\t\t\tbody: localVarBody,\n\t\t\terror: localVarHTTPResponse.Status,\n\t\t}\n\t\tif localVarHTTPResponse.StatusCode == 201 {\n\t\t\tvar v MicrosoftGraphSectionGroup\n\t\t\terr = a.client.decode(&v, localVarBody, localVarHTTPResponse.Header.Get(\"Content-Type\"))\n\t\t\tif err != nil {\n\t\t\t\tnewErr.error = err.Error()\n\t\t\t\treturn localVarReturnValue, localVarHTTPResponse, newErr\n\t\t\t}\n\t\t\tnewErr.model = v\n\t\t\treturn localVarReturnValue, localVarHTTPResponse, newErr\n\t\t}\n\t\tif localVarHTTPResponse.StatusCode == 0 {\n\t\t\tvar v OdataError\n\t\t\terr = a.client.decode(&v, localVarBody, localVarHTTPResponse.Header.Get(\"Content-Type\"))\n\t\t\tif err != nil {\n\t\t\t\tnewErr.error = err.Error()\n\t\t\t\treturn localVarReturnValue, localVarHTTPResponse, newErr\n\t\t\t}\n\t\t\tnewErr.model = v\n\t\t\treturn localVarReturnValue, localVarHTTPResponse, newErr\n\t\t}\n\t\treturn localVarReturnValue, localVarHTTPResponse, newErr\n\t}\n\n\terr = a.client.decode(&localVarReturnValue, localVarBody, localVarHTTPResponse.Header.Get(\"Content-Type\"))\n\tif err != nil {\n\t\tnewErr := GenericOpenAPIError{\n\t\t\tbody: localVarBody,\n\t\t\terror: err.Error(),\n\t\t}\n\t\treturn localVarReturnValue, localVarHTTPResponse, newErr\n\t}\n\n\treturn localVarReturnValue, localVarHTTPResponse, nil\n}", "func (a *GroupsOnenotePagesOnenoteSectionApiService) GroupsOnenotePagesParentSectionParentSectionGroupParentNotebookUpdateSectionGroups(ctx _context.Context, groupId string, onenotePageId string, sectionGroupId string, microsoftGraphSectionGroup MicrosoftGraphSectionGroup) (*_nethttp.Response, error) {\n\tvar (\n\t\tlocalVarHTTPMethod = _nethttp.MethodPatch\n\t\tlocalVarPostBody interface{}\n\t\tlocalVarFormFileName string\n\t\tlocalVarFileName string\n\t\tlocalVarFileBytes []byte\n\t)\n\n\t// create path and map variables\n\tlocalVarPath := a.client.cfg.BasePath + \"/groups({group-id})/onenote/pages({onenotePage-id})/parentSection/parentSectionGroup/parentNotebook/sectionGroups({sectionGroup-id})\"\n\tlocalVarPath = strings.Replace(localVarPath, \"{\"+\"group-id\"+\"}\", _neturl.QueryEscape(fmt.Sprintf(\"%v\", groupId)), -1)\n\tlocalVarPath = strings.Replace(localVarPath, \"{\"+\"onenotePage-id\"+\"}\", _neturl.QueryEscape(fmt.Sprintf(\"%v\", onenotePageId)), -1)\n\tlocalVarPath = strings.Replace(localVarPath, \"{\"+\"sectionGroup-id\"+\"}\", _neturl.QueryEscape(fmt.Sprintf(\"%v\", sectionGroupId)), -1)\n\n\tlocalVarHeaderParams := make(map[string]string)\n\tlocalVarQueryParams := _neturl.Values{}\n\tlocalVarFormParams := _neturl.Values{}\n\n\t// to determine the Content-Type header\n\tlocalVarHTTPContentTypes := []string{\"application/json\"}\n\n\t// set Content-Type header\n\tlocalVarHTTPContentType := selectHeaderContentType(localVarHTTPContentTypes)\n\tif localVarHTTPContentType != \"\" {\n\t\tlocalVarHeaderParams[\"Content-Type\"] = localVarHTTPContentType\n\t}\n\n\t// to determine the Accept header\n\tlocalVarHTTPHeaderAccepts := []string{\"application/json\"}\n\n\t// set Accept header\n\tlocalVarHTTPHeaderAccept := 
selectHeaderAccept(localVarHTTPHeaderAccepts)\n\tif localVarHTTPHeaderAccept != \"\" {\n\t\tlocalVarHeaderParams[\"Accept\"] = localVarHTTPHeaderAccept\n\t}\n\t// body params\n\tlocalVarPostBody = &microsoftGraphSectionGroup\n\tr, err := a.client.prepareRequest(ctx, localVarPath, localVarHTTPMethod, localVarPostBody, localVarHeaderParams, localVarQueryParams, localVarFormParams, localVarFormFileName, localVarFileName, localVarFileBytes)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\n\tlocalVarHTTPResponse, err := a.client.callAPI(r)\n\tif err != nil || localVarHTTPResponse == nil {\n\t\treturn localVarHTTPResponse, err\n\t}\n\n\tlocalVarBody, err := _ioutil.ReadAll(localVarHTTPResponse.Body)\n\tlocalVarHTTPResponse.Body.Close()\n\tif err != nil {\n\t\treturn localVarHTTPResponse, err\n\t}\n\n\tif localVarHTTPResponse.StatusCode >= 300 {\n\t\tnewErr := GenericOpenAPIError{\n\t\t\tbody: localVarBody,\n\t\t\terror: localVarHTTPResponse.Status,\n\t\t}\n\t\tif localVarHTTPResponse.StatusCode == 0 {\n\t\t\tvar v OdataError\n\t\t\terr = a.client.decode(&v, localVarBody, localVarHTTPResponse.Header.Get(\"Content-Type\"))\n\t\t\tif err != nil {\n\t\t\t\tnewErr.error = err.Error()\n\t\t\t\treturn localVarHTTPResponse, newErr\n\t\t\t}\n\t\t\tnewErr.model = v\n\t\t\treturn localVarHTTPResponse, newErr\n\t\t}\n\t\treturn localVarHTTPResponse, newErr\n\t}\n\n\treturn localVarHTTPResponse, nil\n}", "func (a *MeOnenoteSectionsNotebookApiService) MeOnenoteSectionsParentNotebookUpdateSections(ctx _context.Context, onenoteSectionId string, onenoteSectionId1 string, microsoftGraphOnenoteSection MicrosoftGraphOnenoteSection) (*_nethttp.Response, error) {\n\tvar (\n\t\tlocalVarHTTPMethod = _nethttp.MethodPatch\n\t\tlocalVarPostBody interface{}\n\t\tlocalVarFormFileName string\n\t\tlocalVarFileName string\n\t\tlocalVarFileBytes []byte\n\t)\n\n\t// create path and map variables\n\tlocalVarPath := a.client.cfg.BasePath + \"/me/onenote/sections({onenoteSection-id})/parentNotebook/sections({onenoteSection-id1})\"\n\tlocalVarPath = strings.Replace(localVarPath, \"{\"+\"onenoteSection-id\"+\"}\", _neturl.QueryEscape(fmt.Sprintf(\"%v\", onenoteSectionId)), -1)\n\tlocalVarPath = strings.Replace(localVarPath, \"{\"+\"onenoteSection-id1\"+\"}\", _neturl.QueryEscape(fmt.Sprintf(\"%v\", onenoteSectionId1)), -1)\n\n\tlocalVarHeaderParams := make(map[string]string)\n\tlocalVarQueryParams := _neturl.Values{}\n\tlocalVarFormParams := _neturl.Values{}\n\n\t// to determine the Content-Type header\n\tlocalVarHTTPContentTypes := []string{\"application/json\"}\n\n\t// set Content-Type header\n\tlocalVarHTTPContentType := selectHeaderContentType(localVarHTTPContentTypes)\n\tif localVarHTTPContentType != \"\" {\n\t\tlocalVarHeaderParams[\"Content-Type\"] = localVarHTTPContentType\n\t}\n\n\t// to determine the Accept header\n\tlocalVarHTTPHeaderAccepts := []string{\"application/json\"}\n\n\t// set Accept header\n\tlocalVarHTTPHeaderAccept := selectHeaderAccept(localVarHTTPHeaderAccepts)\n\tif localVarHTTPHeaderAccept != \"\" {\n\t\tlocalVarHeaderParams[\"Accept\"] = localVarHTTPHeaderAccept\n\t}\n\t// body params\n\tlocalVarPostBody = &microsoftGraphOnenoteSection\n\tr, err := a.client.prepareRequest(ctx, localVarPath, localVarHTTPMethod, localVarPostBody, localVarHeaderParams, localVarQueryParams, localVarFormParams, localVarFormFileName, localVarFileName, localVarFileBytes)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\n\tlocalVarHTTPResponse, err := a.client.callAPI(r)\n\tif err != nil || localVarHTTPResponse == nil {\n\t\treturn 
localVarHTTPResponse, err\n\t}\n\n\tlocalVarBody, err := _ioutil.ReadAll(localVarHTTPResponse.Body)\n\tlocalVarHTTPResponse.Body.Close()\n\tif err != nil {\n\t\treturn localVarHTTPResponse, err\n\t}\n\n\tif localVarHTTPResponse.StatusCode >= 300 {\n\t\tnewErr := GenericOpenAPIError{\n\t\t\tbody: localVarBody,\n\t\t\terror: localVarHTTPResponse.Status,\n\t\t}\n\t\tif localVarHTTPResponse.StatusCode == 0 {\n\t\t\tvar v OdataError\n\t\t\terr = a.client.decode(&v, localVarBody, localVarHTTPResponse.Header.Get(\"Content-Type\"))\n\t\t\tif err != nil {\n\t\t\t\tnewErr.error = err.Error()\n\t\t\t\treturn localVarHTTPResponse, newErr\n\t\t\t}\n\t\t\tnewErr.model = v\n\t\t\treturn localVarHTTPResponse, newErr\n\t\t}\n\t\treturn localVarHTTPResponse, newErr\n\t}\n\n\treturn localVarHTTPResponse, nil\n}", "func (o *ExpenseApplicationsIndexResponseExpenseApplications) GetSectionId() int32 {\n\tif o == nil || o.SectionId.Get() == nil {\n\t\tvar ret int32\n\t\treturn ret\n\t}\n\treturn *o.SectionId.Get()\n}", "func (a *MeOnenoteSectionsNotebookApiService) MeOnenoteSectionsParentNotebookUpdateSectionGroups(ctx _context.Context, onenoteSectionId string, sectionGroupId string, microsoftGraphSectionGroup MicrosoftGraphSectionGroup) (*_nethttp.Response, error) {\n\tvar (\n\t\tlocalVarHTTPMethod = _nethttp.MethodPatch\n\t\tlocalVarPostBody interface{}\n\t\tlocalVarFormFileName string\n\t\tlocalVarFileName string\n\t\tlocalVarFileBytes []byte\n\t)\n\n\t// create path and map variables\n\tlocalVarPath := a.client.cfg.BasePath + \"/me/onenote/sections({onenoteSection-id})/parentNotebook/sectionGroups({sectionGroup-id})\"\n\tlocalVarPath = strings.Replace(localVarPath, \"{\"+\"onenoteSection-id\"+\"}\", _neturl.QueryEscape(fmt.Sprintf(\"%v\", onenoteSectionId)), -1)\n\tlocalVarPath = strings.Replace(localVarPath, \"{\"+\"sectionGroup-id\"+\"}\", _neturl.QueryEscape(fmt.Sprintf(\"%v\", sectionGroupId)), -1)\n\n\tlocalVarHeaderParams := make(map[string]string)\n\tlocalVarQueryParams := _neturl.Values{}\n\tlocalVarFormParams := _neturl.Values{}\n\n\t// to determine the Content-Type header\n\tlocalVarHTTPContentTypes := []string{\"application/json\"}\n\n\t// set Content-Type header\n\tlocalVarHTTPContentType := selectHeaderContentType(localVarHTTPContentTypes)\n\tif localVarHTTPContentType != \"\" {\n\t\tlocalVarHeaderParams[\"Content-Type\"] = localVarHTTPContentType\n\t}\n\n\t// to determine the Accept header\n\tlocalVarHTTPHeaderAccepts := []string{\"application/json\"}\n\n\t// set Accept header\n\tlocalVarHTTPHeaderAccept := selectHeaderAccept(localVarHTTPHeaderAccepts)\n\tif localVarHTTPHeaderAccept != \"\" {\n\t\tlocalVarHeaderParams[\"Accept\"] = localVarHTTPHeaderAccept\n\t}\n\t// body params\n\tlocalVarPostBody = &microsoftGraphSectionGroup\n\tr, err := a.client.prepareRequest(ctx, localVarPath, localVarHTTPMethod, localVarPostBody, localVarHeaderParams, localVarQueryParams, localVarFormParams, localVarFormFileName, localVarFileName, localVarFileBytes)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\n\tlocalVarHTTPResponse, err := a.client.callAPI(r)\n\tif err != nil || localVarHTTPResponse == nil {\n\t\treturn localVarHTTPResponse, err\n\t}\n\n\tlocalVarBody, err := _ioutil.ReadAll(localVarHTTPResponse.Body)\n\tlocalVarHTTPResponse.Body.Close()\n\tif err != nil {\n\t\treturn localVarHTTPResponse, err\n\t}\n\n\tif localVarHTTPResponse.StatusCode >= 300 {\n\t\tnewErr := GenericOpenAPIError{\n\t\t\tbody: localVarBody,\n\t\t\terror: localVarHTTPResponse.Status,\n\t\t}\n\t\tif localVarHTTPResponse.StatusCode == 0 
{\n\t\t\tvar v OdataError\n\t\t\terr = a.client.decode(&v, localVarBody, localVarHTTPResponse.Header.Get(\"Content-Type\"))\n\t\t\tif err != nil {\n\t\t\t\tnewErr.error = err.Error()\n\t\t\t\treturn localVarHTTPResponse, newErr\n\t\t\t}\n\t\t\tnewErr.model = v\n\t\t\treturn localVarHTTPResponse, newErr\n\t\t}\n\t\treturn localVarHTTPResponse, newErr\n\t}\n\n\treturn localVarHTTPResponse, nil\n}", "func (a *MeOnenoteSectionsNotebookApiService) MeOnenoteSectionsParentNotebookSectionGroupsUpdateSectionGroups(ctx _context.Context, onenoteSectionId string, sectionGroupId string, sectionGroupId1 string, microsoftGraphSectionGroup MicrosoftGraphSectionGroup) (*_nethttp.Response, error) {\n\tvar (\n\t\tlocalVarHTTPMethod = _nethttp.MethodPatch\n\t\tlocalVarPostBody interface{}\n\t\tlocalVarFormFileName string\n\t\tlocalVarFileName string\n\t\tlocalVarFileBytes []byte\n\t)\n\n\t// create path and map variables\n\tlocalVarPath := a.client.cfg.BasePath + \"/me/onenote/sections({onenoteSection-id})/parentNotebook/sectionGroups({sectionGroup-id})/sectionGroups({sectionGroup-id1})\"\n\tlocalVarPath = strings.Replace(localVarPath, \"{\"+\"onenoteSection-id\"+\"}\", _neturl.QueryEscape(fmt.Sprintf(\"%v\", onenoteSectionId)), -1)\n\tlocalVarPath = strings.Replace(localVarPath, \"{\"+\"sectionGroup-id\"+\"}\", _neturl.QueryEscape(fmt.Sprintf(\"%v\", sectionGroupId)), -1)\n\tlocalVarPath = strings.Replace(localVarPath, \"{\"+\"sectionGroup-id1\"+\"}\", _neturl.QueryEscape(fmt.Sprintf(\"%v\", sectionGroupId1)), -1)\n\n\tlocalVarHeaderParams := make(map[string]string)\n\tlocalVarQueryParams := _neturl.Values{}\n\tlocalVarFormParams := _neturl.Values{}\n\n\t// to determine the Content-Type header\n\tlocalVarHTTPContentTypes := []string{\"application/json\"}\n\n\t// set Content-Type header\n\tlocalVarHTTPContentType := selectHeaderContentType(localVarHTTPContentTypes)\n\tif localVarHTTPContentType != \"\" {\n\t\tlocalVarHeaderParams[\"Content-Type\"] = localVarHTTPContentType\n\t}\n\n\t// to determine the Accept header\n\tlocalVarHTTPHeaderAccepts := []string{\"application/json\"}\n\n\t// set Accept header\n\tlocalVarHTTPHeaderAccept := selectHeaderAccept(localVarHTTPHeaderAccepts)\n\tif localVarHTTPHeaderAccept != \"\" {\n\t\tlocalVarHeaderParams[\"Accept\"] = localVarHTTPHeaderAccept\n\t}\n\t// body params\n\tlocalVarPostBody = &microsoftGraphSectionGroup\n\tr, err := a.client.prepareRequest(ctx, localVarPath, localVarHTTPMethod, localVarPostBody, localVarHeaderParams, localVarQueryParams, localVarFormParams, localVarFormFileName, localVarFileName, localVarFileBytes)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\n\tlocalVarHTTPResponse, err := a.client.callAPI(r)\n\tif err != nil || localVarHTTPResponse == nil {\n\t\treturn localVarHTTPResponse, err\n\t}\n\n\tlocalVarBody, err := _ioutil.ReadAll(localVarHTTPResponse.Body)\n\tlocalVarHTTPResponse.Body.Close()\n\tif err != nil {\n\t\treturn localVarHTTPResponse, err\n\t}\n\n\tif localVarHTTPResponse.StatusCode >= 300 {\n\t\tnewErr := GenericOpenAPIError{\n\t\t\tbody: localVarBody,\n\t\t\terror: localVarHTTPResponse.Status,\n\t\t}\n\t\tif localVarHTTPResponse.StatusCode == 0 {\n\t\t\tvar v OdataError\n\t\t\terr = a.client.decode(&v, localVarBody, localVarHTTPResponse.Header.Get(\"Content-Type\"))\n\t\t\tif err != nil {\n\t\t\t\tnewErr.error = err.Error()\n\t\t\t\treturn localVarHTTPResponse, newErr\n\t\t\t}\n\t\t\tnewErr.model = v\n\t\t\treturn localVarHTTPResponse, newErr\n\t\t}\n\t\treturn localVarHTTPResponse, newErr\n\t}\n\n\treturn localVarHTTPResponse, 
nil\n}", "func (a *GroupsOnenoteNotebooksSectionGroupApiService) GroupsOnenoteNotebooksSectionGroupsSectionsPagesUpdateParentSection(ctx _context.Context, groupId string, notebookId string, sectionGroupId string, onenoteSectionId string, onenotePageId string, microsoftGraphOnenoteSection MicrosoftGraphOnenoteSection) (*_nethttp.Response, error) {\n\tvar (\n\t\tlocalVarHTTPMethod = _nethttp.MethodPatch\n\t\tlocalVarPostBody interface{}\n\t\tlocalVarFormFileName string\n\t\tlocalVarFileName string\n\t\tlocalVarFileBytes []byte\n\t)\n\n\t// create path and map variables\n\tlocalVarPath := a.client.cfg.BasePath + \"/groups({group-id})/onenote/notebooks({notebook-id})/sectionGroups({sectionGroup-id})/sections({onenoteSection-id})/pages({onenotePage-id})/parentSection\"\n\tlocalVarPath = strings.Replace(localVarPath, \"{\"+\"group-id\"+\"}\", _neturl.QueryEscape(fmt.Sprintf(\"%v\", groupId)), -1)\n\tlocalVarPath = strings.Replace(localVarPath, \"{\"+\"notebook-id\"+\"}\", _neturl.QueryEscape(fmt.Sprintf(\"%v\", notebookId)), -1)\n\tlocalVarPath = strings.Replace(localVarPath, \"{\"+\"sectionGroup-id\"+\"}\", _neturl.QueryEscape(fmt.Sprintf(\"%v\", sectionGroupId)), -1)\n\tlocalVarPath = strings.Replace(localVarPath, \"{\"+\"onenoteSection-id\"+\"}\", _neturl.QueryEscape(fmt.Sprintf(\"%v\", onenoteSectionId)), -1)\n\tlocalVarPath = strings.Replace(localVarPath, \"{\"+\"onenotePage-id\"+\"}\", _neturl.QueryEscape(fmt.Sprintf(\"%v\", onenotePageId)), -1)\n\n\tlocalVarHeaderParams := make(map[string]string)\n\tlocalVarQueryParams := _neturl.Values{}\n\tlocalVarFormParams := _neturl.Values{}\n\n\t// to determine the Content-Type header\n\tlocalVarHTTPContentTypes := []string{\"application/json\"}\n\n\t// set Content-Type header\n\tlocalVarHTTPContentType := selectHeaderContentType(localVarHTTPContentTypes)\n\tif localVarHTTPContentType != \"\" {\n\t\tlocalVarHeaderParams[\"Content-Type\"] = localVarHTTPContentType\n\t}\n\n\t// to determine the Accept header\n\tlocalVarHTTPHeaderAccepts := []string{\"application/json\"}\n\n\t// set Accept header\n\tlocalVarHTTPHeaderAccept := selectHeaderAccept(localVarHTTPHeaderAccepts)\n\tif localVarHTTPHeaderAccept != \"\" {\n\t\tlocalVarHeaderParams[\"Accept\"] = localVarHTTPHeaderAccept\n\t}\n\t// body params\n\tlocalVarPostBody = &microsoftGraphOnenoteSection\n\tr, err := a.client.prepareRequest(ctx, localVarPath, localVarHTTPMethod, localVarPostBody, localVarHeaderParams, localVarQueryParams, localVarFormParams, localVarFormFileName, localVarFileName, localVarFileBytes)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\n\tlocalVarHTTPResponse, err := a.client.callAPI(r)\n\tif err != nil || localVarHTTPResponse == nil {\n\t\treturn localVarHTTPResponse, err\n\t}\n\n\tlocalVarBody, err := _ioutil.ReadAll(localVarHTTPResponse.Body)\n\tlocalVarHTTPResponse.Body.Close()\n\tif err != nil {\n\t\treturn localVarHTTPResponse, err\n\t}\n\n\tif localVarHTTPResponse.StatusCode >= 300 {\n\t\tnewErr := GenericOpenAPIError{\n\t\t\tbody: localVarBody,\n\t\t\terror: localVarHTTPResponse.Status,\n\t\t}\n\t\tif localVarHTTPResponse.StatusCode == 0 {\n\t\t\tvar v OdataError\n\t\t\terr = a.client.decode(&v, localVarBody, localVarHTTPResponse.Header.Get(\"Content-Type\"))\n\t\t\tif err != nil {\n\t\t\t\tnewErr.error = err.Error()\n\t\t\t\treturn localVarHTTPResponse, newErr\n\t\t\t}\n\t\t\tnewErr.model = v\n\t\t\treturn localVarHTTPResponse, newErr\n\t\t}\n\t\treturn localVarHTTPResponse, newErr\n\t}\n\n\treturn localVarHTTPResponse, nil\n}", "func (a 
*GroupsOnenotePagesOnenoteSectionApiService) GroupsOnenotePagesParentSectionParentNotebookUpdateSectionGroups(ctx _context.Context, groupId string, onenotePageId string, sectionGroupId string, microsoftGraphSectionGroup MicrosoftGraphSectionGroup) (*_nethttp.Response, error) {\n\tvar (\n\t\tlocalVarHTTPMethod = _nethttp.MethodPatch\n\t\tlocalVarPostBody interface{}\n\t\tlocalVarFormFileName string\n\t\tlocalVarFileName string\n\t\tlocalVarFileBytes []byte\n\t)\n\n\t// create path and map variables\n\tlocalVarPath := a.client.cfg.BasePath + \"/groups({group-id})/onenote/pages({onenotePage-id})/parentSection/parentNotebook/sectionGroups({sectionGroup-id})\"\n\tlocalVarPath = strings.Replace(localVarPath, \"{\"+\"group-id\"+\"}\", _neturl.QueryEscape(fmt.Sprintf(\"%v\", groupId)), -1)\n\tlocalVarPath = strings.Replace(localVarPath, \"{\"+\"onenotePage-id\"+\"}\", _neturl.QueryEscape(fmt.Sprintf(\"%v\", onenotePageId)), -1)\n\tlocalVarPath = strings.Replace(localVarPath, \"{\"+\"sectionGroup-id\"+\"}\", _neturl.QueryEscape(fmt.Sprintf(\"%v\", sectionGroupId)), -1)\n\n\tlocalVarHeaderParams := make(map[string]string)\n\tlocalVarQueryParams := _neturl.Values{}\n\tlocalVarFormParams := _neturl.Values{}\n\n\t// to determine the Content-Type header\n\tlocalVarHTTPContentTypes := []string{\"application/json\"}\n\n\t// set Content-Type header\n\tlocalVarHTTPContentType := selectHeaderContentType(localVarHTTPContentTypes)\n\tif localVarHTTPContentType != \"\" {\n\t\tlocalVarHeaderParams[\"Content-Type\"] = localVarHTTPContentType\n\t}\n\n\t// to determine the Accept header\n\tlocalVarHTTPHeaderAccepts := []string{\"application/json\"}\n\n\t// set Accept header\n\tlocalVarHTTPHeaderAccept := selectHeaderAccept(localVarHTTPHeaderAccepts)\n\tif localVarHTTPHeaderAccept != \"\" {\n\t\tlocalVarHeaderParams[\"Accept\"] = localVarHTTPHeaderAccept\n\t}\n\t// body params\n\tlocalVarPostBody = &microsoftGraphSectionGroup\n\tr, err := a.client.prepareRequest(ctx, localVarPath, localVarHTTPMethod, localVarPostBody, localVarHeaderParams, localVarQueryParams, localVarFormParams, localVarFormFileName, localVarFileName, localVarFileBytes)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\n\tlocalVarHTTPResponse, err := a.client.callAPI(r)\n\tif err != nil || localVarHTTPResponse == nil {\n\t\treturn localVarHTTPResponse, err\n\t}\n\n\tlocalVarBody, err := _ioutil.ReadAll(localVarHTTPResponse.Body)\n\tlocalVarHTTPResponse.Body.Close()\n\tif err != nil {\n\t\treturn localVarHTTPResponse, err\n\t}\n\n\tif localVarHTTPResponse.StatusCode >= 300 {\n\t\tnewErr := GenericOpenAPIError{\n\t\t\tbody: localVarBody,\n\t\t\terror: localVarHTTPResponse.Status,\n\t\t}\n\t\tif localVarHTTPResponse.StatusCode == 0 {\n\t\t\tvar v OdataError\n\t\t\terr = a.client.decode(&v, localVarBody, localVarHTTPResponse.Header.Get(\"Content-Type\"))\n\t\t\tif err != nil {\n\t\t\t\tnewErr.error = err.Error()\n\t\t\t\treturn localVarHTTPResponse, newErr\n\t\t\t}\n\t\t\tnewErr.model = v\n\t\t\treturn localVarHTTPResponse, newErr\n\t\t}\n\t\treturn localVarHTTPResponse, newErr\n\t}\n\n\treturn localVarHTTPResponse, nil\n}", "func (a *GroupsOnenotePagesNotebookApiService) GroupsOnenotePagesParentNotebookSectionGroupsUpdateSections(ctx _context.Context, groupId string, onenotePageId string, sectionGroupId string, onenoteSectionId string, microsoftGraphOnenoteSection MicrosoftGraphOnenoteSection) (*_nethttp.Response, error) {\n\tvar (\n\t\tlocalVarHTTPMethod = _nethttp.MethodPatch\n\t\tlocalVarPostBody interface{}\n\t\tlocalVarFormFileName 
string\n\t\tlocalVarFileName string\n\t\tlocalVarFileBytes []byte\n\t)\n\n\t// create path and map variables\n\tlocalVarPath := a.client.cfg.BasePath + \"/groups({group-id})/onenote/pages({onenotePage-id})/parentNotebook/sectionGroups({sectionGroup-id})/sections({onenoteSection-id})\"\n\tlocalVarPath = strings.Replace(localVarPath, \"{\"+\"group-id\"+\"}\", _neturl.QueryEscape(fmt.Sprintf(\"%v\", groupId)), -1)\n\tlocalVarPath = strings.Replace(localVarPath, \"{\"+\"onenotePage-id\"+\"}\", _neturl.QueryEscape(fmt.Sprintf(\"%v\", onenotePageId)), -1)\n\tlocalVarPath = strings.Replace(localVarPath, \"{\"+\"sectionGroup-id\"+\"}\", _neturl.QueryEscape(fmt.Sprintf(\"%v\", sectionGroupId)), -1)\n\tlocalVarPath = strings.Replace(localVarPath, \"{\"+\"onenoteSection-id\"+\"}\", _neturl.QueryEscape(fmt.Sprintf(\"%v\", onenoteSectionId)), -1)\n\n\tlocalVarHeaderParams := make(map[string]string)\n\tlocalVarQueryParams := _neturl.Values{}\n\tlocalVarFormParams := _neturl.Values{}\n\n\t// to determine the Content-Type header\n\tlocalVarHTTPContentTypes := []string{\"application/json\"}\n\n\t// set Content-Type header\n\tlocalVarHTTPContentType := selectHeaderContentType(localVarHTTPContentTypes)\n\tif localVarHTTPContentType != \"\" {\n\t\tlocalVarHeaderParams[\"Content-Type\"] = localVarHTTPContentType\n\t}\n\n\t// to determine the Accept header\n\tlocalVarHTTPHeaderAccepts := []string{\"application/json\"}\n\n\t// set Accept header\n\tlocalVarHTTPHeaderAccept := selectHeaderAccept(localVarHTTPHeaderAccepts)\n\tif localVarHTTPHeaderAccept != \"\" {\n\t\tlocalVarHeaderParams[\"Accept\"] = localVarHTTPHeaderAccept\n\t}\n\t// body params\n\tlocalVarPostBody = &microsoftGraphOnenoteSection\n\tr, err := a.client.prepareRequest(ctx, localVarPath, localVarHTTPMethod, localVarPostBody, localVarHeaderParams, localVarQueryParams, localVarFormParams, localVarFormFileName, localVarFileName, localVarFileBytes)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\n\tlocalVarHTTPResponse, err := a.client.callAPI(r)\n\tif err != nil || localVarHTTPResponse == nil {\n\t\treturn localVarHTTPResponse, err\n\t}\n\n\tlocalVarBody, err := _ioutil.ReadAll(localVarHTTPResponse.Body)\n\tlocalVarHTTPResponse.Body.Close()\n\tif err != nil {\n\t\treturn localVarHTTPResponse, err\n\t}\n\n\tif localVarHTTPResponse.StatusCode >= 300 {\n\t\tnewErr := GenericOpenAPIError{\n\t\t\tbody: localVarBody,\n\t\t\terror: localVarHTTPResponse.Status,\n\t\t}\n\t\tif localVarHTTPResponse.StatusCode == 0 {\n\t\t\tvar v OdataError\n\t\t\terr = a.client.decode(&v, localVarBody, localVarHTTPResponse.Header.Get(\"Content-Type\"))\n\t\t\tif err != nil {\n\t\t\t\tnewErr.error = err.Error()\n\t\t\t\treturn localVarHTTPResponse, newErr\n\t\t\t}\n\t\t\tnewErr.model = v\n\t\t\treturn localVarHTTPResponse, newErr\n\t\t}\n\t\treturn localVarHTTPResponse, newErr\n\t}\n\n\treturn localVarHTTPResponse, nil\n}", "func (a *UsersOnenoteNotebooksOnenoteSectionApiService) UsersOnenoteNotebooksSectionsParentSectionGroupUpdateParentNotebook(ctx _context.Context, userId string, notebookId string, onenoteSectionId string, microsoftGraphNotebook MicrosoftGraphNotebook) (*_nethttp.Response, error) {\n\tvar (\n\t\tlocalVarHTTPMethod = _nethttp.MethodPatch\n\t\tlocalVarPostBody interface{}\n\t\tlocalVarFormFileName string\n\t\tlocalVarFileName string\n\t\tlocalVarFileBytes []byte\n\t)\n\n\t// create path and map variables\n\tlocalVarPath := a.client.cfg.BasePath + 
\"/users({user-id})/onenote/notebooks({notebook-id})/sections({onenoteSection-id})/parentSectionGroup/parentNotebook\"\n\tlocalVarPath = strings.Replace(localVarPath, \"{\"+\"user-id\"+\"}\", _neturl.QueryEscape(fmt.Sprintf(\"%v\", userId)), -1)\n\tlocalVarPath = strings.Replace(localVarPath, \"{\"+\"notebook-id\"+\"}\", _neturl.QueryEscape(fmt.Sprintf(\"%v\", notebookId)), -1)\n\tlocalVarPath = strings.Replace(localVarPath, \"{\"+\"onenoteSection-id\"+\"}\", _neturl.QueryEscape(fmt.Sprintf(\"%v\", onenoteSectionId)), -1)\n\n\tlocalVarHeaderParams := make(map[string]string)\n\tlocalVarQueryParams := _neturl.Values{}\n\tlocalVarFormParams := _neturl.Values{}\n\n\t// to determine the Content-Type header\n\tlocalVarHTTPContentTypes := []string{\"application/json\"}\n\n\t// set Content-Type header\n\tlocalVarHTTPContentType := selectHeaderContentType(localVarHTTPContentTypes)\n\tif localVarHTTPContentType != \"\" {\n\t\tlocalVarHeaderParams[\"Content-Type\"] = localVarHTTPContentType\n\t}\n\n\t// to determine the Accept header\n\tlocalVarHTTPHeaderAccepts := []string{\"application/json\"}\n\n\t// set Accept header\n\tlocalVarHTTPHeaderAccept := selectHeaderAccept(localVarHTTPHeaderAccepts)\n\tif localVarHTTPHeaderAccept != \"\" {\n\t\tlocalVarHeaderParams[\"Accept\"] = localVarHTTPHeaderAccept\n\t}\n\t// body params\n\tlocalVarPostBody = &microsoftGraphNotebook\n\tr, err := a.client.prepareRequest(ctx, localVarPath, localVarHTTPMethod, localVarPostBody, localVarHeaderParams, localVarQueryParams, localVarFormParams, localVarFormFileName, localVarFileName, localVarFileBytes)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\n\tlocalVarHTTPResponse, err := a.client.callAPI(r)\n\tif err != nil || localVarHTTPResponse == nil {\n\t\treturn localVarHTTPResponse, err\n\t}\n\n\tlocalVarBody, err := _ioutil.ReadAll(localVarHTTPResponse.Body)\n\tlocalVarHTTPResponse.Body.Close()\n\tif err != nil {\n\t\treturn localVarHTTPResponse, err\n\t}\n\n\tif localVarHTTPResponse.StatusCode >= 300 {\n\t\tnewErr := GenericOpenAPIError{\n\t\t\tbody: localVarBody,\n\t\t\terror: localVarHTTPResponse.Status,\n\t\t}\n\t\tif localVarHTTPResponse.StatusCode == 0 {\n\t\t\tvar v OdataError\n\t\t\terr = a.client.decode(&v, localVarBody, localVarHTTPResponse.Header.Get(\"Content-Type\"))\n\t\t\tif err != nil {\n\t\t\t\tnewErr.error = err.Error()\n\t\t\t\treturn localVarHTTPResponse, newErr\n\t\t\t}\n\t\t\tnewErr.model = v\n\t\t\treturn localVarHTTPResponse, newErr\n\t\t}\n\t\treturn localVarHTTPResponse, newErr\n\t}\n\n\treturn localVarHTTPResponse, nil\n}", "func (a *UsersOnenoteSectionGroupsOnenoteSectionApiService) UsersOnenoteSectionGroupsSectionsParentNotebookCreateSectionGroups(ctx _context.Context, userId string, sectionGroupId string, onenoteSectionId string, microsoftGraphSectionGroup MicrosoftGraphSectionGroup) (MicrosoftGraphSectionGroup, *_nethttp.Response, error) {\n\tvar (\n\t\tlocalVarHTTPMethod = _nethttp.MethodPost\n\t\tlocalVarPostBody interface{}\n\t\tlocalVarFormFileName string\n\t\tlocalVarFileName string\n\t\tlocalVarFileBytes []byte\n\t\tlocalVarReturnValue MicrosoftGraphSectionGroup\n\t)\n\n\t// create path and map variables\n\tlocalVarPath := a.client.cfg.BasePath + \"/users({user-id})/onenote/sectionGroups({sectionGroup-id})/sections({onenoteSection-id})/parentNotebook/sectionGroups\"\n\tlocalVarPath = strings.Replace(localVarPath, \"{\"+\"user-id\"+\"}\", _neturl.QueryEscape(fmt.Sprintf(\"%v\", userId)), -1)\n\tlocalVarPath = strings.Replace(localVarPath, \"{\"+\"sectionGroup-id\"+\"}\", 
_neturl.QueryEscape(fmt.Sprintf(\"%v\", sectionGroupId)), -1)\n\tlocalVarPath = strings.Replace(localVarPath, \"{\"+\"onenoteSection-id\"+\"}\", _neturl.QueryEscape(fmt.Sprintf(\"%v\", onenoteSectionId)), -1)\n\n\tlocalVarHeaderParams := make(map[string]string)\n\tlocalVarQueryParams := _neturl.Values{}\n\tlocalVarFormParams := _neturl.Values{}\n\n\t// to determine the Content-Type header\n\tlocalVarHTTPContentTypes := []string{\"application/json\"}\n\n\t// set Content-Type header\n\tlocalVarHTTPContentType := selectHeaderContentType(localVarHTTPContentTypes)\n\tif localVarHTTPContentType != \"\" {\n\t\tlocalVarHeaderParams[\"Content-Type\"] = localVarHTTPContentType\n\t}\n\n\t// to determine the Accept header\n\tlocalVarHTTPHeaderAccepts := []string{\"application/json\"}\n\n\t// set Accept header\n\tlocalVarHTTPHeaderAccept := selectHeaderAccept(localVarHTTPHeaderAccepts)\n\tif localVarHTTPHeaderAccept != \"\" {\n\t\tlocalVarHeaderParams[\"Accept\"] = localVarHTTPHeaderAccept\n\t}\n\t// body params\n\tlocalVarPostBody = &microsoftGraphSectionGroup\n\tr, err := a.client.prepareRequest(ctx, localVarPath, localVarHTTPMethod, localVarPostBody, localVarHeaderParams, localVarQueryParams, localVarFormParams, localVarFormFileName, localVarFileName, localVarFileBytes)\n\tif err != nil {\n\t\treturn localVarReturnValue, nil, err\n\t}\n\n\tlocalVarHTTPResponse, err := a.client.callAPI(r)\n\tif err != nil || localVarHTTPResponse == nil {\n\t\treturn localVarReturnValue, localVarHTTPResponse, err\n\t}\n\n\tlocalVarBody, err := _ioutil.ReadAll(localVarHTTPResponse.Body)\n\tlocalVarHTTPResponse.Body.Close()\n\tif err != nil {\n\t\treturn localVarReturnValue, localVarHTTPResponse, err\n\t}\n\n\tif localVarHTTPResponse.StatusCode >= 300 {\n\t\tnewErr := GenericOpenAPIError{\n\t\t\tbody: localVarBody,\n\t\t\terror: localVarHTTPResponse.Status,\n\t\t}\n\t\tif localVarHTTPResponse.StatusCode == 201 {\n\t\t\tvar v MicrosoftGraphSectionGroup\n\t\t\terr = a.client.decode(&v, localVarBody, localVarHTTPResponse.Header.Get(\"Content-Type\"))\n\t\t\tif err != nil {\n\t\t\t\tnewErr.error = err.Error()\n\t\t\t\treturn localVarReturnValue, localVarHTTPResponse, newErr\n\t\t\t}\n\t\t\tnewErr.model = v\n\t\t\treturn localVarReturnValue, localVarHTTPResponse, newErr\n\t\t}\n\t\tif localVarHTTPResponse.StatusCode == 0 {\n\t\t\tvar v OdataError\n\t\t\terr = a.client.decode(&v, localVarBody, localVarHTTPResponse.Header.Get(\"Content-Type\"))\n\t\t\tif err != nil {\n\t\t\t\tnewErr.error = err.Error()\n\t\t\t\treturn localVarReturnValue, localVarHTTPResponse, newErr\n\t\t\t}\n\t\t\tnewErr.model = v\n\t\t\treturn localVarReturnValue, localVarHTTPResponse, newErr\n\t\t}\n\t\treturn localVarReturnValue, localVarHTTPResponse, newErr\n\t}\n\n\terr = a.client.decode(&localVarReturnValue, localVarBody, localVarHTTPResponse.Header.Get(\"Content-Type\"))\n\tif err != nil {\n\t\tnewErr := GenericOpenAPIError{\n\t\t\tbody: localVarBody,\n\t\t\terror: err.Error(),\n\t\t}\n\t\treturn localVarReturnValue, localVarHTTPResponse, newErr\n\t}\n\n\treturn localVarReturnValue, localVarHTTPResponse, nil\n}", "func (ss SectionSlice) Validate() error {\n\tif len(ss) == 0 {\n\t\treturn errgo.New(\"SectionSlice is empty\")\n\t}\n\t// @todo try to pick the right strategy between maps and slice depending on the overall size of a full SectionSlice\n\tvar pc = make(utils.StringSlice, ss.TotalFields()) // pc path checker\n\ti := 0\n\tfor _, s := range ss {\n\t\tfor _, g := range s.Groups {\n\t\t\tfor _, f := range g.Fields {\n\t\t\t\tp := 
ScopeKey(Path(s.ID, g.ID, f.ID))\n\t\t\t\tif pc.Include(p) {\n\t\t\t\t\treturn errgo.Newf(\"Duplicate entry for path %s :: %s\", p, ss.ToJson())\n\t\t\t\t}\n\t\t\t\tpc[i] = p\n\t\t\t\ti++\n\t\t\t}\n\t\t}\n\t}\n\treturn nil\n}", "func (a *SitesOnenotePagesNotebookApiService) SitesOnenotePagesParentNotebookSectionGroupsSectionsUpdateParentSectionGroup(ctx _context.Context, siteId string, onenotePageId string, sectionGroupId string, onenoteSectionId string, microsoftGraphSectionGroup MicrosoftGraphSectionGroup) (*_nethttp.Response, error) {\n\tvar (\n\t\tlocalVarHTTPMethod = _nethttp.MethodPatch\n\t\tlocalVarPostBody interface{}\n\t\tlocalVarFormFileName string\n\t\tlocalVarFileName string\n\t\tlocalVarFileBytes []byte\n\t)\n\n\t// create path and map variables\n\tlocalVarPath := a.client.cfg.BasePath + \"/sites({site-id})/onenote/pages({onenotePage-id})/parentNotebook/sectionGroups({sectionGroup-id})/sections({onenoteSection-id})/parentSectionGroup\"\n\tlocalVarPath = strings.Replace(localVarPath, \"{\"+\"site-id\"+\"}\", _neturl.QueryEscape(fmt.Sprintf(\"%v\", siteId)), -1)\n\tlocalVarPath = strings.Replace(localVarPath, \"{\"+\"onenotePage-id\"+\"}\", _neturl.QueryEscape(fmt.Sprintf(\"%v\", onenotePageId)), -1)\n\tlocalVarPath = strings.Replace(localVarPath, \"{\"+\"sectionGroup-id\"+\"}\", _neturl.QueryEscape(fmt.Sprintf(\"%v\", sectionGroupId)), -1)\n\tlocalVarPath = strings.Replace(localVarPath, \"{\"+\"onenoteSection-id\"+\"}\", _neturl.QueryEscape(fmt.Sprintf(\"%v\", onenoteSectionId)), -1)\n\n\tlocalVarHeaderParams := make(map[string]string)\n\tlocalVarQueryParams := _neturl.Values{}\n\tlocalVarFormParams := _neturl.Values{}\n\n\t// to determine the Content-Type header\n\tlocalVarHTTPContentTypes := []string{\"application/json\"}\n\n\t// set Content-Type header\n\tlocalVarHTTPContentType := selectHeaderContentType(localVarHTTPContentTypes)\n\tif localVarHTTPContentType != \"\" {\n\t\tlocalVarHeaderParams[\"Content-Type\"] = localVarHTTPContentType\n\t}\n\n\t// to determine the Accept header\n\tlocalVarHTTPHeaderAccepts := []string{\"application/json\"}\n\n\t// set Accept header\n\tlocalVarHTTPHeaderAccept := selectHeaderAccept(localVarHTTPHeaderAccepts)\n\tif localVarHTTPHeaderAccept != \"\" {\n\t\tlocalVarHeaderParams[\"Accept\"] = localVarHTTPHeaderAccept\n\t}\n\t// body params\n\tlocalVarPostBody = &microsoftGraphSectionGroup\n\tr, err := a.client.prepareRequest(ctx, localVarPath, localVarHTTPMethod, localVarPostBody, localVarHeaderParams, localVarQueryParams, localVarFormParams, localVarFormFileName, localVarFileName, localVarFileBytes)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\n\tlocalVarHTTPResponse, err := a.client.callAPI(r)\n\tif err != nil || localVarHTTPResponse == nil {\n\t\treturn localVarHTTPResponse, err\n\t}\n\n\tlocalVarBody, err := _ioutil.ReadAll(localVarHTTPResponse.Body)\n\tlocalVarHTTPResponse.Body.Close()\n\tif err != nil {\n\t\treturn localVarHTTPResponse, err\n\t}\n\n\tif localVarHTTPResponse.StatusCode >= 300 {\n\t\tnewErr := GenericOpenAPIError{\n\t\t\tbody: localVarBody,\n\t\t\terror: localVarHTTPResponse.Status,\n\t\t}\n\t\tif localVarHTTPResponse.StatusCode == 0 {\n\t\t\tvar v OdataError\n\t\t\terr = a.client.decode(&v, localVarBody, localVarHTTPResponse.Header.Get(\"Content-Type\"))\n\t\t\tif err != nil {\n\t\t\t\tnewErr.error = err.Error()\n\t\t\t\treturn localVarHTTPResponse, newErr\n\t\t\t}\n\t\t\tnewErr.model = v\n\t\t\treturn localVarHTTPResponse, newErr\n\t\t}\n\t\treturn localVarHTTPResponse, newErr\n\t}\n\n\treturn localVarHTTPResponse, 
nil\n}", "func (a *GroupsOnenotePagesNotebookApiService) GroupsOnenotePagesParentNotebookSectionsParentSectionGroupUpdateParentSectionGroup(ctx _context.Context, groupId string, onenotePageId string, onenoteSectionId string, microsoftGraphSectionGroup MicrosoftGraphSectionGroup) (*_nethttp.Response, error) {\n\tvar (\n\t\tlocalVarHTTPMethod = _nethttp.MethodPatch\n\t\tlocalVarPostBody interface{}\n\t\tlocalVarFormFileName string\n\t\tlocalVarFileName string\n\t\tlocalVarFileBytes []byte\n\t)\n\n\t// create path and map variables\n\tlocalVarPath := a.client.cfg.BasePath + \"/groups({group-id})/onenote/pages({onenotePage-id})/parentNotebook/sections({onenoteSection-id})/parentSectionGroup/parentSectionGroup\"\n\tlocalVarPath = strings.Replace(localVarPath, \"{\"+\"group-id\"+\"}\", _neturl.QueryEscape(fmt.Sprintf(\"%v\", groupId)), -1)\n\tlocalVarPath = strings.Replace(localVarPath, \"{\"+\"onenotePage-id\"+\"}\", _neturl.QueryEscape(fmt.Sprintf(\"%v\", onenotePageId)), -1)\n\tlocalVarPath = strings.Replace(localVarPath, \"{\"+\"onenoteSection-id\"+\"}\", _neturl.QueryEscape(fmt.Sprintf(\"%v\", onenoteSectionId)), -1)\n\n\tlocalVarHeaderParams := make(map[string]string)\n\tlocalVarQueryParams := _neturl.Values{}\n\tlocalVarFormParams := _neturl.Values{}\n\n\t// to determine the Content-Type header\n\tlocalVarHTTPContentTypes := []string{\"application/json\"}\n\n\t// set Content-Type header\n\tlocalVarHTTPContentType := selectHeaderContentType(localVarHTTPContentTypes)\n\tif localVarHTTPContentType != \"\" {\n\t\tlocalVarHeaderParams[\"Content-Type\"] = localVarHTTPContentType\n\t}\n\n\t// to determine the Accept header\n\tlocalVarHTTPHeaderAccepts := []string{\"application/json\"}\n\n\t// set Accept header\n\tlocalVarHTTPHeaderAccept := selectHeaderAccept(localVarHTTPHeaderAccepts)\n\tif localVarHTTPHeaderAccept != \"\" {\n\t\tlocalVarHeaderParams[\"Accept\"] = localVarHTTPHeaderAccept\n\t}\n\t// body params\n\tlocalVarPostBody = &microsoftGraphSectionGroup\n\tr, err := a.client.prepareRequest(ctx, localVarPath, localVarHTTPMethod, localVarPostBody, localVarHeaderParams, localVarQueryParams, localVarFormParams, localVarFormFileName, localVarFileName, localVarFileBytes)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\n\tlocalVarHTTPResponse, err := a.client.callAPI(r)\n\tif err != nil || localVarHTTPResponse == nil {\n\t\treturn localVarHTTPResponse, err\n\t}\n\n\tlocalVarBody, err := _ioutil.ReadAll(localVarHTTPResponse.Body)\n\tlocalVarHTTPResponse.Body.Close()\n\tif err != nil {\n\t\treturn localVarHTTPResponse, err\n\t}\n\n\tif localVarHTTPResponse.StatusCode >= 300 {\n\t\tnewErr := GenericOpenAPIError{\n\t\t\tbody: localVarBody,\n\t\t\terror: localVarHTTPResponse.Status,\n\t\t}\n\t\tif localVarHTTPResponse.StatusCode == 0 {\n\t\t\tvar v OdataError\n\t\t\terr = a.client.decode(&v, localVarBody, localVarHTTPResponse.Header.Get(\"Content-Type\"))\n\t\t\tif err != nil {\n\t\t\t\tnewErr.error = err.Error()\n\t\t\t\treturn localVarHTTPResponse, newErr\n\t\t\t}\n\t\t\tnewErr.model = v\n\t\t\treturn localVarHTTPResponse, newErr\n\t\t}\n\t\treturn localVarHTTPResponse, newErr\n\t}\n\n\treturn localVarHTTPResponse, nil\n}", "func (a *GroupsOnenotePagesOnenoteSectionApiService) GroupsOnenotePagesParentSectionParentNotebookSectionGroupsUpdateParentSectionGroup(ctx _context.Context, groupId string, onenotePageId string, sectionGroupId string, microsoftGraphSectionGroup MicrosoftGraphSectionGroup) (*_nethttp.Response, error) {\n\tvar (\n\t\tlocalVarHTTPMethod = 
_nethttp.MethodPatch\n\t\tlocalVarPostBody interface{}\n\t\tlocalVarFormFileName string\n\t\tlocalVarFileName string\n\t\tlocalVarFileBytes []byte\n\t)\n\n\t// create path and map variables\n\tlocalVarPath := a.client.cfg.BasePath + \"/groups({group-id})/onenote/pages({onenotePage-id})/parentSection/parentNotebook/sectionGroups({sectionGroup-id})/parentSectionGroup\"\n\tlocalVarPath = strings.Replace(localVarPath, \"{\"+\"group-id\"+\"}\", _neturl.QueryEscape(fmt.Sprintf(\"%v\", groupId)), -1)\n\tlocalVarPath = strings.Replace(localVarPath, \"{\"+\"onenotePage-id\"+\"}\", _neturl.QueryEscape(fmt.Sprintf(\"%v\", onenotePageId)), -1)\n\tlocalVarPath = strings.Replace(localVarPath, \"{\"+\"sectionGroup-id\"+\"}\", _neturl.QueryEscape(fmt.Sprintf(\"%v\", sectionGroupId)), -1)\n\n\tlocalVarHeaderParams := make(map[string]string)\n\tlocalVarQueryParams := _neturl.Values{}\n\tlocalVarFormParams := _neturl.Values{}\n\n\t// to determine the Content-Type header\n\tlocalVarHTTPContentTypes := []string{\"application/json\"}\n\n\t// set Content-Type header\n\tlocalVarHTTPContentType := selectHeaderContentType(localVarHTTPContentTypes)\n\tif localVarHTTPContentType != \"\" {\n\t\tlocalVarHeaderParams[\"Content-Type\"] = localVarHTTPContentType\n\t}\n\n\t// to determine the Accept header\n\tlocalVarHTTPHeaderAccepts := []string{\"application/json\"}\n\n\t// set Accept header\n\tlocalVarHTTPHeaderAccept := selectHeaderAccept(localVarHTTPHeaderAccepts)\n\tif localVarHTTPHeaderAccept != \"\" {\n\t\tlocalVarHeaderParams[\"Accept\"] = localVarHTTPHeaderAccept\n\t}\n\t// body params\n\tlocalVarPostBody = &microsoftGraphSectionGroup\n\tr, err := a.client.prepareRequest(ctx, localVarPath, localVarHTTPMethod, localVarPostBody, localVarHeaderParams, localVarQueryParams, localVarFormParams, localVarFormFileName, localVarFileName, localVarFileBytes)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\n\tlocalVarHTTPResponse, err := a.client.callAPI(r)\n\tif err != nil || localVarHTTPResponse == nil {\n\t\treturn localVarHTTPResponse, err\n\t}\n\n\tlocalVarBody, err := _ioutil.ReadAll(localVarHTTPResponse.Body)\n\tlocalVarHTTPResponse.Body.Close()\n\tif err != nil {\n\t\treturn localVarHTTPResponse, err\n\t}\n\n\tif localVarHTTPResponse.StatusCode >= 300 {\n\t\tnewErr := GenericOpenAPIError{\n\t\t\tbody: localVarBody,\n\t\t\terror: localVarHTTPResponse.Status,\n\t\t}\n\t\tif localVarHTTPResponse.StatusCode == 0 {\n\t\t\tvar v OdataError\n\t\t\terr = a.client.decode(&v, localVarBody, localVarHTTPResponse.Header.Get(\"Content-Type\"))\n\t\t\tif err != nil {\n\t\t\t\tnewErr.error = err.Error()\n\t\t\t\treturn localVarHTTPResponse, newErr\n\t\t\t}\n\t\t\tnewErr.model = v\n\t\t\treturn localVarHTTPResponse, newErr\n\t\t}\n\t\treturn localVarHTTPResponse, newErr\n\t}\n\n\treturn localVarHTTPResponse, nil\n}", "func (f *EvaluationReportFormFiller) violationsSection(violations models.ReportViolations, additionalKPIData AdditionalKPIData) error {\n\tf.subsectionHeading(fmt.Sprintf(\"Violations observed (%d)\", len(violations)))\n\n\tkpis := map[string]bool{}\n\tfor _, reportViolation := range violations {\n\t\tviolation := reportViolation.Violation\n\t\tif violation.IsKpi {\n\t\t\t// Save all the KPI fields that we'll need to display after the violations\n\t\t\tif violation.AdditionalDataElem == \"observedPickupSpreadDates\" {\n\t\t\t\tkpis[\"ObservedPickupSpreadStartDate\"] = true\n\t\t\t\tkpis[\"ObservedPickupSpreadEndDate\"] = true\n\t\t\t} else {\n\t\t\t\telementName := 
violation.AdditionalDataElem\n\t\t\t\tkpis[strings.ToUpper(elementName[0:1])+elementName[1:]] = true\n\t\t\t}\n\t\t}\n\t\tf.violation(violation)\n\t\tf.addVerticalSpace(pxToMM(16.0))\n\t}\n\n\tif len(kpis) > 0 {\n\t\tallKPIs := []string{}\n\t\tfor kpi, present := range kpis {\n\t\t\tif present {\n\t\t\t\tallKPIs = append(allKPIs, kpi)\n\t\t\t}\n\t\t}\n\t\terr := f.subsection(\"Additional data for KPIs\", allKPIs, KPIFieldLabels, additionalKPIData)\n\t\tif err != nil {\n\t\t\treturn err\n\t\t}\n\t}\n\treturn f.pdf.Error()\n}", "func (a *GroupsOnenotePagesNotebookApiService) GroupsOnenotePagesParentNotebookSectionsUpdateParentSectionGroup(ctx _context.Context, groupId string, onenotePageId string, onenoteSectionId string, microsoftGraphSectionGroup MicrosoftGraphSectionGroup) (*_nethttp.Response, error) {\n\tvar (\n\t\tlocalVarHTTPMethod = _nethttp.MethodPatch\n\t\tlocalVarPostBody interface{}\n\t\tlocalVarFormFileName string\n\t\tlocalVarFileName string\n\t\tlocalVarFileBytes []byte\n\t)\n\n\t// create path and map variables\n\tlocalVarPath := a.client.cfg.BasePath + \"/groups({group-id})/onenote/pages({onenotePage-id})/parentNotebook/sections({onenoteSection-id})/parentSectionGroup\"\n\tlocalVarPath = strings.Replace(localVarPath, \"{\"+\"group-id\"+\"}\", _neturl.QueryEscape(fmt.Sprintf(\"%v\", groupId)), -1)\n\tlocalVarPath = strings.Replace(localVarPath, \"{\"+\"onenotePage-id\"+\"}\", _neturl.QueryEscape(fmt.Sprintf(\"%v\", onenotePageId)), -1)\n\tlocalVarPath = strings.Replace(localVarPath, \"{\"+\"onenoteSection-id\"+\"}\", _neturl.QueryEscape(fmt.Sprintf(\"%v\", onenoteSectionId)), -1)\n\n\tlocalVarHeaderParams := make(map[string]string)\n\tlocalVarQueryParams := _neturl.Values{}\n\tlocalVarFormParams := _neturl.Values{}\n\n\t// to determine the Content-Type header\n\tlocalVarHTTPContentTypes := []string{\"application/json\"}\n\n\t// set Content-Type header\n\tlocalVarHTTPContentType := selectHeaderContentType(localVarHTTPContentTypes)\n\tif localVarHTTPContentType != \"\" {\n\t\tlocalVarHeaderParams[\"Content-Type\"] = localVarHTTPContentType\n\t}\n\n\t// to determine the Accept header\n\tlocalVarHTTPHeaderAccepts := []string{\"application/json\"}\n\n\t// set Accept header\n\tlocalVarHTTPHeaderAccept := selectHeaderAccept(localVarHTTPHeaderAccepts)\n\tif localVarHTTPHeaderAccept != \"\" {\n\t\tlocalVarHeaderParams[\"Accept\"] = localVarHTTPHeaderAccept\n\t}\n\t// body params\n\tlocalVarPostBody = &microsoftGraphSectionGroup\n\tr, err := a.client.prepareRequest(ctx, localVarPath, localVarHTTPMethod, localVarPostBody, localVarHeaderParams, localVarQueryParams, localVarFormParams, localVarFormFileName, localVarFileName, localVarFileBytes)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\n\tlocalVarHTTPResponse, err := a.client.callAPI(r)\n\tif err != nil || localVarHTTPResponse == nil {\n\t\treturn localVarHTTPResponse, err\n\t}\n\n\tlocalVarBody, err := _ioutil.ReadAll(localVarHTTPResponse.Body)\n\tlocalVarHTTPResponse.Body.Close()\n\tif err != nil {\n\t\treturn localVarHTTPResponse, err\n\t}\n\n\tif localVarHTTPResponse.StatusCode >= 300 {\n\t\tnewErr := GenericOpenAPIError{\n\t\t\tbody: localVarBody,\n\t\t\terror: localVarHTTPResponse.Status,\n\t\t}\n\t\tif localVarHTTPResponse.StatusCode == 0 {\n\t\t\tvar v OdataError\n\t\t\terr = a.client.decode(&v, localVarBody, localVarHTTPResponse.Header.Get(\"Content-Type\"))\n\t\t\tif err != nil {\n\t\t\t\tnewErr.error = err.Error()\n\t\t\t\treturn localVarHTTPResponse, newErr\n\t\t\t}\n\t\t\tnewErr.model = v\n\t\t\treturn 
localVarHTTPResponse, newErr\n\t\t}\n\t\treturn localVarHTTPResponse, newErr\n\t}\n\n\treturn localVarHTTPResponse, nil\n}", "func TestBadSectionDef(t *testing.T) {\n\tinput := \"[section\\n\"\n\tbr := bufio.NewReader(strings.NewReader(input))\n\tp := newParser(br)\n\t_, err := p.NextValue()\n\tif err.(*ParseError).Code() != ErrInvalidSection {\n\t\tt.Fatalf(\"expected err=ErrInvalidSection actual=%s\", err)\n\t}\n}", "func (m *UpsertEventResponse) Validate() error {\n\tif m == nil {\n\t\treturn nil\n\t}\n\n\t// no validation rules for Id\n\n\treturn nil\n}", "func (o *BoardsSectionsPosition) Upsert(ctx context.Context, exec boil.ContextExecutor, updateColumns, insertColumns boil.Columns) error {\n\tif o == nil {\n\t\treturn errors.New(\"rdb: no boards_sections_positions provided for upsert\")\n\t}\n\n\tif err := o.doBeforeUpsertHooks(ctx, exec); err != nil {\n\t\treturn err\n\t}\n\n\tnzDefaults := queries.NonZeroDefaultSet(boardsSectionsPositionColumnsWithDefault, o)\n\tnzUniques := queries.NonZeroDefaultSet(mySQLBoardsSectionsPositionUniqueColumns, o)\n\n\tif len(nzUniques) == 0 {\n\t\treturn errors.New(\"cannot upsert with a table that cannot conflict on a unique column\")\n\t}\n\n\t// Build cache key in-line uglily - mysql vs psql problems\n\tbuf := strmangle.GetBuffer()\n\tbuf.WriteString(strconv.Itoa(updateColumns.Kind))\n\tfor _, c := range updateColumns.Cols {\n\t\tbuf.WriteString(c)\n\t}\n\tbuf.WriteByte('.')\n\tbuf.WriteString(strconv.Itoa(insertColumns.Kind))\n\tfor _, c := range insertColumns.Cols {\n\t\tbuf.WriteString(c)\n\t}\n\tbuf.WriteByte('.')\n\tfor _, c := range nzDefaults {\n\t\tbuf.WriteString(c)\n\t}\n\tbuf.WriteByte('.')\n\tfor _, c := range nzUniques {\n\t\tbuf.WriteString(c)\n\t}\n\tkey := buf.String()\n\tstrmangle.PutBuffer(buf)\n\n\tboardsSectionsPositionUpsertCacheMut.RLock()\n\tcache, cached := boardsSectionsPositionUpsertCache[key]\n\tboardsSectionsPositionUpsertCacheMut.RUnlock()\n\n\tvar err error\n\n\tif !cached {\n\t\tinsert, ret := insertColumns.InsertColumnSet(\n\t\t\tboardsSectionsPositionAllColumns,\n\t\t\tboardsSectionsPositionColumnsWithDefault,\n\t\t\tboardsSectionsPositionColumnsWithoutDefault,\n\t\t\tnzDefaults,\n\t\t)\n\t\tupdate := updateColumns.UpdateColumnSet(\n\t\t\tboardsSectionsPositionAllColumns,\n\t\t\tboardsSectionsPositionPrimaryKeyColumns,\n\t\t)\n\n\t\tif len(update) == 0 {\n\t\t\treturn errors.New(\"rdb: unable to upsert boards_sections_positions, could not build update column list\")\n\t\t}\n\n\t\tret = strmangle.SetComplement(ret, nzUniques)\n\t\tcache.query = buildUpsertQueryMySQL(dialect, \"boards_sections_positions\", update, insert)\n\t\tcache.retQuery = fmt.Sprintf(\n\t\t\t\"SELECT %s FROM `boards_sections_positions` WHERE %s\",\n\t\t\tstrings.Join(strmangle.IdentQuoteSlice(dialect.LQ, dialect.RQ, ret), \",\"),\n\t\t\tstrmangle.WhereClause(\"`\", \"`\", 0, nzUniques),\n\t\t)\n\n\t\tcache.valueMapping, err = queries.BindMapping(boardsSectionsPositionType, boardsSectionsPositionMapping, insert)\n\t\tif err != nil {\n\t\t\treturn err\n\t\t}\n\t\tif len(ret) != 0 {\n\t\t\tcache.retMapping, err = queries.BindMapping(boardsSectionsPositionType, boardsSectionsPositionMapping, ret)\n\t\t\tif err != nil {\n\t\t\t\treturn err\n\t\t\t}\n\t\t}\n\t}\n\n\tvalue := reflect.Indirect(reflect.ValueOf(o))\n\tvals := queries.ValuesFromMapping(value, cache.valueMapping)\n\tvar returns []interface{}\n\tif len(cache.retMapping) != 0 {\n\t\treturns = queries.PtrsFromMapping(value, cache.retMapping)\n\t}\n\n\tif boil.DebugMode 
{\n\t\tfmt.Fprintln(boil.DebugWriter, cache.query)\n\t\tfmt.Fprintln(boil.DebugWriter, vals)\n\t}\n\n\tresult, err := exec.ExecContext(ctx, cache.query, vals...)\n\n\tif err != nil {\n\t\treturn errors.Wrap(err, \"rdb: unable to upsert for boards_sections_positions\")\n\t}\n\n\tvar lastID int64\n\tvar uniqueMap []uint64\n\tvar nzUniqueCols []interface{}\n\n\tif len(cache.retMapping) == 0 {\n\t\tgoto CacheNoHooks\n\t}\n\n\tlastID, err = result.LastInsertId()\n\tif err != nil {\n\t\treturn ErrSyncFail\n\t}\n\n\to.ID = uint(lastID)\n\tif lastID != 0 && len(cache.retMapping) == 1 && cache.retMapping[0] == boardsSectionsPositionMapping[\"id\"] {\n\t\tgoto CacheNoHooks\n\t}\n\n\tuniqueMap, err = queries.BindMapping(boardsSectionsPositionType, boardsSectionsPositionMapping, nzUniques)\n\tif err != nil {\n\t\treturn errors.Wrap(err, \"rdb: unable to retrieve unique values for boards_sections_positions\")\n\t}\n\tnzUniqueCols = queries.ValuesFromMapping(reflect.Indirect(reflect.ValueOf(o)), uniqueMap)\n\n\tif boil.DebugMode {\n\t\tfmt.Fprintln(boil.DebugWriter, cache.retQuery)\n\t\tfmt.Fprintln(boil.DebugWriter, nzUniqueCols...)\n\t}\n\n\terr = exec.QueryRowContext(ctx, cache.retQuery, nzUniqueCols...).Scan(returns...)\n\tif err != nil {\n\t\treturn errors.Wrap(err, \"rdb: unable to populate default values for boards_sections_positions\")\n\t}\n\nCacheNoHooks:\n\tif !cached {\n\t\tboardsSectionsPositionUpsertCacheMut.Lock()\n\t\tboardsSectionsPositionUpsertCache[key] = cache\n\t\tboardsSectionsPositionUpsertCacheMut.Unlock()\n\t}\n\n\treturn o.doAfterUpsertHooks(ctx, exec)\n}", "func (a *GroupsOnenotePagesOnenoteSectionApiService) GroupsOnenotePagesParentSectionParentNotebookSectionGroupsCreateSections(ctx _context.Context, groupId string, onenotePageId string, sectionGroupId string, microsoftGraphOnenoteSection MicrosoftGraphOnenoteSection) (MicrosoftGraphOnenoteSection, *_nethttp.Response, error) {\n\tvar (\n\t\tlocalVarHTTPMethod = _nethttp.MethodPost\n\t\tlocalVarPostBody interface{}\n\t\tlocalVarFormFileName string\n\t\tlocalVarFileName string\n\t\tlocalVarFileBytes []byte\n\t\tlocalVarReturnValue MicrosoftGraphOnenoteSection\n\t)\n\n\t// create path and map variables\n\tlocalVarPath := a.client.cfg.BasePath + \"/groups({group-id})/onenote/pages({onenotePage-id})/parentSection/parentNotebook/sectionGroups({sectionGroup-id})/sections\"\n\tlocalVarPath = strings.Replace(localVarPath, \"{\"+\"group-id\"+\"}\", _neturl.QueryEscape(fmt.Sprintf(\"%v\", groupId)), -1)\n\tlocalVarPath = strings.Replace(localVarPath, \"{\"+\"onenotePage-id\"+\"}\", _neturl.QueryEscape(fmt.Sprintf(\"%v\", onenotePageId)), -1)\n\tlocalVarPath = strings.Replace(localVarPath, \"{\"+\"sectionGroup-id\"+\"}\", _neturl.QueryEscape(fmt.Sprintf(\"%v\", sectionGroupId)), -1)\n\n\tlocalVarHeaderParams := make(map[string]string)\n\tlocalVarQueryParams := _neturl.Values{}\n\tlocalVarFormParams := _neturl.Values{}\n\n\t// to determine the Content-Type header\n\tlocalVarHTTPContentTypes := []string{\"application/json\"}\n\n\t// set Content-Type header\n\tlocalVarHTTPContentType := selectHeaderContentType(localVarHTTPContentTypes)\n\tif localVarHTTPContentType != \"\" {\n\t\tlocalVarHeaderParams[\"Content-Type\"] = localVarHTTPContentType\n\t}\n\n\t// to determine the Accept header\n\tlocalVarHTTPHeaderAccepts := []string{\"application/json\"}\n\n\t// set Accept header\n\tlocalVarHTTPHeaderAccept := selectHeaderAccept(localVarHTTPHeaderAccepts)\n\tif localVarHTTPHeaderAccept != \"\" {\n\t\tlocalVarHeaderParams[\"Accept\"] = 
localVarHTTPHeaderAccept\n\t}\n\t// body params\n\tlocalVarPostBody = &microsoftGraphOnenoteSection\n\tr, err := a.client.prepareRequest(ctx, localVarPath, localVarHTTPMethod, localVarPostBody, localVarHeaderParams, localVarQueryParams, localVarFormParams, localVarFormFileName, localVarFileName, localVarFileBytes)\n\tif err != nil {\n\t\treturn localVarReturnValue, nil, err\n\t}\n\n\tlocalVarHTTPResponse, err := a.client.callAPI(r)\n\tif err != nil || localVarHTTPResponse == nil {\n\t\treturn localVarReturnValue, localVarHTTPResponse, err\n\t}\n\n\tlocalVarBody, err := _ioutil.ReadAll(localVarHTTPResponse.Body)\n\tlocalVarHTTPResponse.Body.Close()\n\tif err != nil {\n\t\treturn localVarReturnValue, localVarHTTPResponse, err\n\t}\n\n\tif localVarHTTPResponse.StatusCode >= 300 {\n\t\tnewErr := GenericOpenAPIError{\n\t\t\tbody: localVarBody,\n\t\t\terror: localVarHTTPResponse.Status,\n\t\t}\n\t\tif localVarHTTPResponse.StatusCode == 201 {\n\t\t\tvar v MicrosoftGraphOnenoteSection\n\t\t\terr = a.client.decode(&v, localVarBody, localVarHTTPResponse.Header.Get(\"Content-Type\"))\n\t\t\tif err != nil {\n\t\t\t\tnewErr.error = err.Error()\n\t\t\t\treturn localVarReturnValue, localVarHTTPResponse, newErr\n\t\t\t}\n\t\t\tnewErr.model = v\n\t\t\treturn localVarReturnValue, localVarHTTPResponse, newErr\n\t\t}\n\t\tif localVarHTTPResponse.StatusCode == 0 {\n\t\t\tvar v OdataError\n\t\t\terr = a.client.decode(&v, localVarBody, localVarHTTPResponse.Header.Get(\"Content-Type\"))\n\t\t\tif err != nil {\n\t\t\t\tnewErr.error = err.Error()\n\t\t\t\treturn localVarReturnValue, localVarHTTPResponse, newErr\n\t\t\t}\n\t\t\tnewErr.model = v\n\t\t\treturn localVarReturnValue, localVarHTTPResponse, newErr\n\t\t}\n\t\treturn localVarReturnValue, localVarHTTPResponse, newErr\n\t}\n\n\terr = a.client.decode(&localVarReturnValue, localVarBody, localVarHTTPResponse.Header.Get(\"Content-Type\"))\n\tif err != nil {\n\t\tnewErr := GenericOpenAPIError{\n\t\t\tbody: localVarBody,\n\t\t\terror: err.Error(),\n\t\t}\n\t\treturn localVarReturnValue, localVarHTTPResponse, newErr\n\t}\n\n\treturn localVarReturnValue, localVarHTTPResponse, nil\n}", "func (m *SectionGroup) SetSections(value []OnenoteSectionable)() {\n m.sections = value\n}", "func (e *Entry) Section() string {\n\tconst slash = \"/\"\n\tsize := len(e.Request)\n\t//\n\tif size == 0 {\n\t\treturn slash\n\t}\n\n\tindexSlash := 0\n\tfor i := 0; i < size; i++ {\n\t\tif e.Request[i:i+1] == slash {\n\t\t\tindexSlash++\n\t\t\t//it meets the second slash, it can return the section\n\t\t\tif indexSlash == 2 {\n\t\t\t\treturn e.Request[:i]\n\t\t\t}\n\t\t}\n\t}\n\treturn e.Request\n}", "func (m *UpsertEventRequest_Question) Validate() error {\n\tif m == nil {\n\t\treturn nil\n\t}\n\n\tif utf8.RuneCountInString(m.GetContent()) < 1 {\n\t\treturn UpsertEventRequest_QuestionValidationError{\n\t\t\tfield: \"Content\",\n\t\t\treason: \"value length must be at least 1 runes\",\n\t\t}\n\t}\n\n\t// no validation rules for Position\n\n\t// no validation rules for Type\n\n\t// no validation rules for IsRequired\n\n\t// no validation rules for LimitedChoice\n\n\tfor idx, item := range m.GetOptions() {\n\t\t_, _ = idx, item\n\n\t\tif v, ok := interface{}(item).(interface{ Validate() error }); ok {\n\t\t\tif err := v.Validate(); err != nil {\n\t\t\t\treturn UpsertEventRequest_QuestionValidationError{\n\t\t\t\t\tfield: fmt.Sprintf(\"Options[%v]\", idx),\n\t\t\t\t\treason: \"embedded message failed validation\",\n\t\t\t\t\tcause: err,\n\t\t\t\t}\n\t\t\t}\n\t\t}\n\n\t}\n\n\treturn nil\n}", 
"func (mr *MockVirtualMeshCertificateSigningRequestClientMockRecorder) UpsertVirtualMeshCertificateSigningRequest(ctx, obj interface{}, transitionFuncs ...interface{}) *gomock.Call {\n\tmr.mock.ctrl.T.Helper()\n\tvarargs := append([]interface{}{ctx, obj}, transitionFuncs...)\n\treturn mr.mock.ctrl.RecordCallWithMethodType(mr.mock, \"UpsertVirtualMeshCertificateSigningRequest\", reflect.TypeOf((*MockVirtualMeshCertificateSigningRequestClient)(nil).UpsertVirtualMeshCertificateSigningRequest), varargs...)\n}", "func (m *CourseSection) Validate(formats strfmt.Registry) error {\n\treturn nil\n}", "func (mr *MockVirtualMeshCertificateSigningRequestWriterMockRecorder) UpsertVirtualMeshCertificateSigningRequest(ctx, obj interface{}, transitionFuncs ...interface{}) *gomock.Call {\n\tmr.mock.ctrl.T.Helper()\n\tvarargs := append([]interface{}{ctx, obj}, transitionFuncs...)\n\treturn mr.mock.ctrl.RecordCallWithMethodType(mr.mock, \"UpsertVirtualMeshCertificateSigningRequest\", reflect.TypeOf((*MockVirtualMeshCertificateSigningRequestWriter)(nil).UpsertVirtualMeshCertificateSigningRequest), varargs...)\n}", "func (a *GroupsOnenotePagesOnenoteSectionApiService) GroupsOnenotePagesParentSectionParentSectionGroupCreateSections(ctx _context.Context, groupId string, onenotePageId string, microsoftGraphOnenoteSection MicrosoftGraphOnenoteSection) (MicrosoftGraphOnenoteSection, *_nethttp.Response, error) {\n\tvar (\n\t\tlocalVarHTTPMethod = _nethttp.MethodPost\n\t\tlocalVarPostBody interface{}\n\t\tlocalVarFormFileName string\n\t\tlocalVarFileName string\n\t\tlocalVarFileBytes []byte\n\t\tlocalVarReturnValue MicrosoftGraphOnenoteSection\n\t)\n\n\t// create path and map variables\n\tlocalVarPath := a.client.cfg.BasePath + \"/groups({group-id})/onenote/pages({onenotePage-id})/parentSection/parentSectionGroup/sections\"\n\tlocalVarPath = strings.Replace(localVarPath, \"{\"+\"group-id\"+\"}\", _neturl.QueryEscape(fmt.Sprintf(\"%v\", groupId)), -1)\n\tlocalVarPath = strings.Replace(localVarPath, \"{\"+\"onenotePage-id\"+\"}\", _neturl.QueryEscape(fmt.Sprintf(\"%v\", onenotePageId)), -1)\n\n\tlocalVarHeaderParams := make(map[string]string)\n\tlocalVarQueryParams := _neturl.Values{}\n\tlocalVarFormParams := _neturl.Values{}\n\n\t// to determine the Content-Type header\n\tlocalVarHTTPContentTypes := []string{\"application/json\"}\n\n\t// set Content-Type header\n\tlocalVarHTTPContentType := selectHeaderContentType(localVarHTTPContentTypes)\n\tif localVarHTTPContentType != \"\" {\n\t\tlocalVarHeaderParams[\"Content-Type\"] = localVarHTTPContentType\n\t}\n\n\t// to determine the Accept header\n\tlocalVarHTTPHeaderAccepts := []string{\"application/json\"}\n\n\t// set Accept header\n\tlocalVarHTTPHeaderAccept := selectHeaderAccept(localVarHTTPHeaderAccepts)\n\tif localVarHTTPHeaderAccept != \"\" {\n\t\tlocalVarHeaderParams[\"Accept\"] = localVarHTTPHeaderAccept\n\t}\n\t// body params\n\tlocalVarPostBody = &microsoftGraphOnenoteSection\n\tr, err := a.client.prepareRequest(ctx, localVarPath, localVarHTTPMethod, localVarPostBody, localVarHeaderParams, localVarQueryParams, localVarFormParams, localVarFormFileName, localVarFileName, localVarFileBytes)\n\tif err != nil {\n\t\treturn localVarReturnValue, nil, err\n\t}\n\n\tlocalVarHTTPResponse, err := a.client.callAPI(r)\n\tif err != nil || localVarHTTPResponse == nil {\n\t\treturn localVarReturnValue, localVarHTTPResponse, err\n\t}\n\n\tlocalVarBody, err := _ioutil.ReadAll(localVarHTTPResponse.Body)\n\tlocalVarHTTPResponse.Body.Close()\n\tif err != nil {\n\t\treturn 
localVarReturnValue, localVarHTTPResponse, err\n\t}\n\n\tif localVarHTTPResponse.StatusCode >= 300 {\n\t\tnewErr := GenericOpenAPIError{\n\t\t\tbody: localVarBody,\n\t\t\terror: localVarHTTPResponse.Status,\n\t\t}\n\t\tif localVarHTTPResponse.StatusCode == 201 {\n\t\t\tvar v MicrosoftGraphOnenoteSection\n\t\t\terr = a.client.decode(&v, localVarBody, localVarHTTPResponse.Header.Get(\"Content-Type\"))\n\t\t\tif err != nil {\n\t\t\t\tnewErr.error = err.Error()\n\t\t\t\treturn localVarReturnValue, localVarHTTPResponse, newErr\n\t\t\t}\n\t\t\tnewErr.model = v\n\t\t\treturn localVarReturnValue, localVarHTTPResponse, newErr\n\t\t}\n\t\tif localVarHTTPResponse.StatusCode == 0 {\n\t\t\tvar v OdataError\n\t\t\terr = a.client.decode(&v, localVarBody, localVarHTTPResponse.Header.Get(\"Content-Type\"))\n\t\t\tif err != nil {\n\t\t\t\tnewErr.error = err.Error()\n\t\t\t\treturn localVarReturnValue, localVarHTTPResponse, newErr\n\t\t\t}\n\t\t\tnewErr.model = v\n\t\t\treturn localVarReturnValue, localVarHTTPResponse, newErr\n\t\t}\n\t\treturn localVarReturnValue, localVarHTTPResponse, newErr\n\t}\n\n\terr = a.client.decode(&localVarReturnValue, localVarBody, localVarHTTPResponse.Header.Get(\"Content-Type\"))\n\tif err != nil {\n\t\tnewErr := GenericOpenAPIError{\n\t\t\tbody: localVarBody,\n\t\t\terror: err.Error(),\n\t\t}\n\t\treturn localVarReturnValue, localVarHTTPResponse, newErr\n\t}\n\n\treturn localVarReturnValue, localVarHTTPResponse, nil\n}", "func (a *UsersOnenoteSectionGroupsOnenoteSectionApiService) UsersOnenoteSectionGroupsSectionsPagesParentNotebookCreateSections(ctx _context.Context, userId string, sectionGroupId string, onenoteSectionId string, onenotePageId string, microsoftGraphOnenoteSection MicrosoftGraphOnenoteSection) (MicrosoftGraphOnenoteSection, *_nethttp.Response, error) {\n\tvar (\n\t\tlocalVarHTTPMethod = _nethttp.MethodPost\n\t\tlocalVarPostBody interface{}\n\t\tlocalVarFormFileName string\n\t\tlocalVarFileName string\n\t\tlocalVarFileBytes []byte\n\t\tlocalVarReturnValue MicrosoftGraphOnenoteSection\n\t)\n\n\t// create path and map variables\n\tlocalVarPath := a.client.cfg.BasePath + \"/users({user-id})/onenote/sectionGroups({sectionGroup-id})/sections({onenoteSection-id})/pages({onenotePage-id})/parentNotebook/sections\"\n\tlocalVarPath = strings.Replace(localVarPath, \"{\"+\"user-id\"+\"}\", _neturl.QueryEscape(fmt.Sprintf(\"%v\", userId)), -1)\n\tlocalVarPath = strings.Replace(localVarPath, \"{\"+\"sectionGroup-id\"+\"}\", _neturl.QueryEscape(fmt.Sprintf(\"%v\", sectionGroupId)), -1)\n\tlocalVarPath = strings.Replace(localVarPath, \"{\"+\"onenoteSection-id\"+\"}\", _neturl.QueryEscape(fmt.Sprintf(\"%v\", onenoteSectionId)), -1)\n\tlocalVarPath = strings.Replace(localVarPath, \"{\"+\"onenotePage-id\"+\"}\", _neturl.QueryEscape(fmt.Sprintf(\"%v\", onenotePageId)), -1)\n\n\tlocalVarHeaderParams := make(map[string]string)\n\tlocalVarQueryParams := _neturl.Values{}\n\tlocalVarFormParams := _neturl.Values{}\n\n\t// to determine the Content-Type header\n\tlocalVarHTTPContentTypes := []string{\"application/json\"}\n\n\t// set Content-Type header\n\tlocalVarHTTPContentType := selectHeaderContentType(localVarHTTPContentTypes)\n\tif localVarHTTPContentType != \"\" {\n\t\tlocalVarHeaderParams[\"Content-Type\"] = localVarHTTPContentType\n\t}\n\n\t// to determine the Accept header\n\tlocalVarHTTPHeaderAccepts := []string{\"application/json\"}\n\n\t// set Accept header\n\tlocalVarHTTPHeaderAccept := selectHeaderAccept(localVarHTTPHeaderAccepts)\n\tif localVarHTTPHeaderAccept != \"\" 
{\n\t\tlocalVarHeaderParams[\"Accept\"] = localVarHTTPHeaderAccept\n\t}\n\t// body params\n\tlocalVarPostBody = &microsoftGraphOnenoteSection\n\tr, err := a.client.prepareRequest(ctx, localVarPath, localVarHTTPMethod, localVarPostBody, localVarHeaderParams, localVarQueryParams, localVarFormParams, localVarFormFileName, localVarFileName, localVarFileBytes)\n\tif err != nil {\n\t\treturn localVarReturnValue, nil, err\n\t}\n\n\tlocalVarHTTPResponse, err := a.client.callAPI(r)\n\tif err != nil || localVarHTTPResponse == nil {\n\t\treturn localVarReturnValue, localVarHTTPResponse, err\n\t}\n\n\tlocalVarBody, err := _ioutil.ReadAll(localVarHTTPResponse.Body)\n\tlocalVarHTTPResponse.Body.Close()\n\tif err != nil {\n\t\treturn localVarReturnValue, localVarHTTPResponse, err\n\t}\n\n\tif localVarHTTPResponse.StatusCode >= 300 {\n\t\tnewErr := GenericOpenAPIError{\n\t\t\tbody: localVarBody,\n\t\t\terror: localVarHTTPResponse.Status,\n\t\t}\n\t\tif localVarHTTPResponse.StatusCode == 201 {\n\t\t\tvar v MicrosoftGraphOnenoteSection\n\t\t\terr = a.client.decode(&v, localVarBody, localVarHTTPResponse.Header.Get(\"Content-Type\"))\n\t\t\tif err != nil {\n\t\t\t\tnewErr.error = err.Error()\n\t\t\t\treturn localVarReturnValue, localVarHTTPResponse, newErr\n\t\t\t}\n\t\t\tnewErr.model = v\n\t\t\treturn localVarReturnValue, localVarHTTPResponse, newErr\n\t\t}\n\t\tif localVarHTTPResponse.StatusCode == 0 {\n\t\t\tvar v OdataError\n\t\t\terr = a.client.decode(&v, localVarBody, localVarHTTPResponse.Header.Get(\"Content-Type\"))\n\t\t\tif err != nil {\n\t\t\t\tnewErr.error = err.Error()\n\t\t\t\treturn localVarReturnValue, localVarHTTPResponse, newErr\n\t\t\t}\n\t\t\tnewErr.model = v\n\t\t\treturn localVarReturnValue, localVarHTTPResponse, newErr\n\t\t}\n\t\treturn localVarReturnValue, localVarHTTPResponse, newErr\n\t}\n\n\terr = a.client.decode(&localVarReturnValue, localVarBody, localVarHTTPResponse.Header.Get(\"Content-Type\"))\n\tif err != nil {\n\t\tnewErr := GenericOpenAPIError{\n\t\t\tbody: localVarBody,\n\t\t\terror: err.Error(),\n\t\t}\n\t\treturn localVarReturnValue, localVarHTTPResponse, newErr\n\t}\n\n\treturn localVarReturnValue, localVarHTTPResponse, nil\n}", "func (a *GroupsOnenotePagesNotebookApiService) GroupsOnenotePagesParentNotebookSectionGroupsUpdateSectionGroups(ctx _context.Context, groupId string, onenotePageId string, sectionGroupId string, sectionGroupId1 string, microsoftGraphSectionGroup MicrosoftGraphSectionGroup) (*_nethttp.Response, error) {\n\tvar (\n\t\tlocalVarHTTPMethod = _nethttp.MethodPatch\n\t\tlocalVarPostBody interface{}\n\t\tlocalVarFormFileName string\n\t\tlocalVarFileName string\n\t\tlocalVarFileBytes []byte\n\t)\n\n\t// create path and map variables\n\tlocalVarPath := a.client.cfg.BasePath + \"/groups({group-id})/onenote/pages({onenotePage-id})/parentNotebook/sectionGroups({sectionGroup-id})/sectionGroups({sectionGroup-id1})\"\n\tlocalVarPath = strings.Replace(localVarPath, \"{\"+\"group-id\"+\"}\", _neturl.QueryEscape(fmt.Sprintf(\"%v\", groupId)), -1)\n\tlocalVarPath = strings.Replace(localVarPath, \"{\"+\"onenotePage-id\"+\"}\", _neturl.QueryEscape(fmt.Sprintf(\"%v\", onenotePageId)), -1)\n\tlocalVarPath = strings.Replace(localVarPath, \"{\"+\"sectionGroup-id\"+\"}\", _neturl.QueryEscape(fmt.Sprintf(\"%v\", sectionGroupId)), -1)\n\tlocalVarPath = strings.Replace(localVarPath, \"{\"+\"sectionGroup-id1\"+\"}\", _neturl.QueryEscape(fmt.Sprintf(\"%v\", sectionGroupId1)), -1)\n\n\tlocalVarHeaderParams := make(map[string]string)\n\tlocalVarQueryParams := 
_neturl.Values{}\n\tlocalVarFormParams := _neturl.Values{}\n\n\t// to determine the Content-Type header\n\tlocalVarHTTPContentTypes := []string{\"application/json\"}\n\n\t// set Content-Type header\n\tlocalVarHTTPContentType := selectHeaderContentType(localVarHTTPContentTypes)\n\tif localVarHTTPContentType != \"\" {\n\t\tlocalVarHeaderParams[\"Content-Type\"] = localVarHTTPContentType\n\t}\n\n\t// to determine the Accept header\n\tlocalVarHTTPHeaderAccepts := []string{\"application/json\"}\n\n\t// set Accept header\n\tlocalVarHTTPHeaderAccept := selectHeaderAccept(localVarHTTPHeaderAccepts)\n\tif localVarHTTPHeaderAccept != \"\" {\n\t\tlocalVarHeaderParams[\"Accept\"] = localVarHTTPHeaderAccept\n\t}\n\t// body params\n\tlocalVarPostBody = &microsoftGraphSectionGroup\n\tr, err := a.client.prepareRequest(ctx, localVarPath, localVarHTTPMethod, localVarPostBody, localVarHeaderParams, localVarQueryParams, localVarFormParams, localVarFormFileName, localVarFileName, localVarFileBytes)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\n\tlocalVarHTTPResponse, err := a.client.callAPI(r)\n\tif err != nil || localVarHTTPResponse == nil {\n\t\treturn localVarHTTPResponse, err\n\t}\n\n\tlocalVarBody, err := _ioutil.ReadAll(localVarHTTPResponse.Body)\n\tlocalVarHTTPResponse.Body.Close()\n\tif err != nil {\n\t\treturn localVarHTTPResponse, err\n\t}\n\n\tif localVarHTTPResponse.StatusCode >= 300 {\n\t\tnewErr := GenericOpenAPIError{\n\t\t\tbody: localVarBody,\n\t\t\terror: localVarHTTPResponse.Status,\n\t\t}\n\t\tif localVarHTTPResponse.StatusCode == 0 {\n\t\t\tvar v OdataError\n\t\t\terr = a.client.decode(&v, localVarBody, localVarHTTPResponse.Header.Get(\"Content-Type\"))\n\t\t\tif err != nil {\n\t\t\t\tnewErr.error = err.Error()\n\t\t\t\treturn localVarHTTPResponse, newErr\n\t\t\t}\n\t\t\tnewErr.model = v\n\t\t\treturn localVarHTTPResponse, newErr\n\t\t}\n\t\treturn localVarHTTPResponse, newErr\n\t}\n\n\treturn localVarHTTPResponse, nil\n}", "func (service *EntriesService) Upsert(spaceID, contentTypeID string, e *Entry) error {\n\tbytesArray, err := json.Marshal(e)\n\tif err != nil {\n\t\treturn err\n\t}\n\n\tvar path string\n\tvar method string\n\n\tif e.Sys != nil && e.Sys.ID != \"\" {\n\t\tpath = fmt.Sprintf(\"/spaces/%s/entries/%s\", spaceID, e.Sys.ID)\n\t\tmethod = \"PUT\"\n\t} else {\n\t\tpath = fmt.Sprintf(\"/spaces/%s/entries\", spaceID)\n\t\tmethod = \"POST\"\n\t}\n\n\treq, err := service.c.newRequest(method, path, nil, bytes.NewReader(bytesArray))\n\tif err != nil {\n\t\treturn err\n\t}\n\n\treq.Header.Set(\"X-Contentful-Content-Type\", contentTypeID)\n\treq.Header.Set(\"X-Contentful-Version\", strconv.Itoa(e.GetVersion()))\n\n\treturn service.c.do(req, e)\n}", "func (s *BaseAspidaListener) EnterSections(ctx *SectionsContext) {}", "func (a *SitesOnenotePagesNotebookApiService) SitesOnenotePagesParentNotebookSectionsUpdateParentSectionGroup(ctx _context.Context, siteId string, onenotePageId string, onenoteSectionId string, microsoftGraphSectionGroup MicrosoftGraphSectionGroup) (*_nethttp.Response, error) {\n\tvar (\n\t\tlocalVarHTTPMethod = _nethttp.MethodPatch\n\t\tlocalVarPostBody interface{}\n\t\tlocalVarFormFileName string\n\t\tlocalVarFileName string\n\t\tlocalVarFileBytes []byte\n\t)\n\n\t// create path and map variables\n\tlocalVarPath := a.client.cfg.BasePath + \"/sites({site-id})/onenote/pages({onenotePage-id})/parentNotebook/sections({onenoteSection-id})/parentSectionGroup\"\n\tlocalVarPath = strings.Replace(localVarPath, \"{\"+\"site-id\"+\"}\", _neturl.QueryEscape(fmt.Sprintf(\"%v\", 
siteId)), -1)\n\tlocalVarPath = strings.Replace(localVarPath, \"{\"+\"onenotePage-id\"+\"}\", _neturl.QueryEscape(fmt.Sprintf(\"%v\", onenotePageId)), -1)\n\tlocalVarPath = strings.Replace(localVarPath, \"{\"+\"onenoteSection-id\"+\"}\", _neturl.QueryEscape(fmt.Sprintf(\"%v\", onenoteSectionId)), -1)\n\n\tlocalVarHeaderParams := make(map[string]string)\n\tlocalVarQueryParams := _neturl.Values{}\n\tlocalVarFormParams := _neturl.Values{}\n\n\t// to determine the Content-Type header\n\tlocalVarHTTPContentTypes := []string{\"application/json\"}\n\n\t// set Content-Type header\n\tlocalVarHTTPContentType := selectHeaderContentType(localVarHTTPContentTypes)\n\tif localVarHTTPContentType != \"\" {\n\t\tlocalVarHeaderParams[\"Content-Type\"] = localVarHTTPContentType\n\t}\n\n\t// to determine the Accept header\n\tlocalVarHTTPHeaderAccepts := []string{\"application/json\"}\n\n\t// set Accept header\n\tlocalVarHTTPHeaderAccept := selectHeaderAccept(localVarHTTPHeaderAccepts)\n\tif localVarHTTPHeaderAccept != \"\" {\n\t\tlocalVarHeaderParams[\"Accept\"] = localVarHTTPHeaderAccept\n\t}\n\t// body params\n\tlocalVarPostBody = &microsoftGraphSectionGroup\n\tr, err := a.client.prepareRequest(ctx, localVarPath, localVarHTTPMethod, localVarPostBody, localVarHeaderParams, localVarQueryParams, localVarFormParams, localVarFormFileName, localVarFileName, localVarFileBytes)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\n\tlocalVarHTTPResponse, err := a.client.callAPI(r)\n\tif err != nil || localVarHTTPResponse == nil {\n\t\treturn localVarHTTPResponse, err\n\t}\n\n\tlocalVarBody, err := _ioutil.ReadAll(localVarHTTPResponse.Body)\n\tlocalVarHTTPResponse.Body.Close()\n\tif err != nil {\n\t\treturn localVarHTTPResponse, err\n\t}\n\n\tif localVarHTTPResponse.StatusCode >= 300 {\n\t\tnewErr := GenericOpenAPIError{\n\t\t\tbody: localVarBody,\n\t\t\terror: localVarHTTPResponse.Status,\n\t\t}\n\t\tif localVarHTTPResponse.StatusCode == 0 {\n\t\t\tvar v OdataError\n\t\t\terr = a.client.decode(&v, localVarBody, localVarHTTPResponse.Header.Get(\"Content-Type\"))\n\t\t\tif err != nil {\n\t\t\t\tnewErr.error = err.Error()\n\t\t\t\treturn localVarHTTPResponse, newErr\n\t\t\t}\n\t\t\tnewErr.model = v\n\t\t\treturn localVarHTTPResponse, newErr\n\t\t}\n\t\treturn localVarHTTPResponse, newErr\n\t}\n\n\treturn localVarHTTPResponse, nil\n}", "func (a *GroupsOnenoteNotebooksSectionGroupApiService) GroupsOnenoteNotebooksSectionGroupsUpdateSectionGroups(ctx _context.Context, groupId string, notebookId string, sectionGroupId string, sectionGroupId1 string, microsoftGraphSectionGroup MicrosoftGraphSectionGroup) (*_nethttp.Response, error) {\n\tvar (\n\t\tlocalVarHTTPMethod = _nethttp.MethodPatch\n\t\tlocalVarPostBody interface{}\n\t\tlocalVarFormFileName string\n\t\tlocalVarFileName string\n\t\tlocalVarFileBytes []byte\n\t)\n\n\t// create path and map variables\n\tlocalVarPath := a.client.cfg.BasePath + \"/groups({group-id})/onenote/notebooks({notebook-id})/sectionGroups({sectionGroup-id})/sectionGroups({sectionGroup-id1})\"\n\tlocalVarPath = strings.Replace(localVarPath, \"{\"+\"group-id\"+\"}\", _neturl.QueryEscape(fmt.Sprintf(\"%v\", groupId)), -1)\n\tlocalVarPath = strings.Replace(localVarPath, \"{\"+\"notebook-id\"+\"}\", _neturl.QueryEscape(fmt.Sprintf(\"%v\", notebookId)), -1)\n\tlocalVarPath = strings.Replace(localVarPath, \"{\"+\"sectionGroup-id\"+\"}\", _neturl.QueryEscape(fmt.Sprintf(\"%v\", sectionGroupId)), -1)\n\tlocalVarPath = strings.Replace(localVarPath, \"{\"+\"sectionGroup-id1\"+\"}\", 
_neturl.QueryEscape(fmt.Sprintf(\"%v\", sectionGroupId1)), -1)\n\n\tlocalVarHeaderParams := make(map[string]string)\n\tlocalVarQueryParams := _neturl.Values{}\n\tlocalVarFormParams := _neturl.Values{}\n\n\t// to determine the Content-Type header\n\tlocalVarHTTPContentTypes := []string{\"application/json\"}\n\n\t// set Content-Type header\n\tlocalVarHTTPContentType := selectHeaderContentType(localVarHTTPContentTypes)\n\tif localVarHTTPContentType != \"\" {\n\t\tlocalVarHeaderParams[\"Content-Type\"] = localVarHTTPContentType\n\t}\n\n\t// to determine the Accept header\n\tlocalVarHTTPHeaderAccepts := []string{\"application/json\"}\n\n\t// set Accept header\n\tlocalVarHTTPHeaderAccept := selectHeaderAccept(localVarHTTPHeaderAccepts)\n\tif localVarHTTPHeaderAccept != \"\" {\n\t\tlocalVarHeaderParams[\"Accept\"] = localVarHTTPHeaderAccept\n\t}\n\t// body params\n\tlocalVarPostBody = &microsoftGraphSectionGroup\n\tr, err := a.client.prepareRequest(ctx, localVarPath, localVarHTTPMethod, localVarPostBody, localVarHeaderParams, localVarQueryParams, localVarFormParams, localVarFormFileName, localVarFileName, localVarFileBytes)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\n\tlocalVarHTTPResponse, err := a.client.callAPI(r)\n\tif err != nil || localVarHTTPResponse == nil {\n\t\treturn localVarHTTPResponse, err\n\t}\n\n\tlocalVarBody, err := _ioutil.ReadAll(localVarHTTPResponse.Body)\n\tlocalVarHTTPResponse.Body.Close()\n\tif err != nil {\n\t\treturn localVarHTTPResponse, err\n\t}\n\n\tif localVarHTTPResponse.StatusCode >= 300 {\n\t\tnewErr := GenericOpenAPIError{\n\t\t\tbody: localVarBody,\n\t\t\terror: localVarHTTPResponse.Status,\n\t\t}\n\t\tif localVarHTTPResponse.StatusCode == 0 {\n\t\t\tvar v OdataError\n\t\t\terr = a.client.decode(&v, localVarBody, localVarHTTPResponse.Header.Get(\"Content-Type\"))\n\t\t\tif err != nil {\n\t\t\t\tnewErr.error = err.Error()\n\t\t\t\treturn localVarHTTPResponse, newErr\n\t\t\t}\n\t\t\tnewErr.model = v\n\t\t\treturn localVarHTTPResponse, newErr\n\t\t}\n\t\treturn localVarHTTPResponse, newErr\n\t}\n\n\treturn localVarHTTPResponse, nil\n}", "func (a *SitesOnenotePagesNotebookApiService) SitesOnenotePagesParentNotebookSectionsParentSectionGroupUpdateSectionGroups(ctx _context.Context, siteId string, onenotePageId string, onenoteSectionId string, sectionGroupId string, microsoftGraphSectionGroup MicrosoftGraphSectionGroup) (*_nethttp.Response, error) {\n\tvar (\n\t\tlocalVarHTTPMethod = _nethttp.MethodPatch\n\t\tlocalVarPostBody interface{}\n\t\tlocalVarFormFileName string\n\t\tlocalVarFileName string\n\t\tlocalVarFileBytes []byte\n\t)\n\n\t// create path and map variables\n\tlocalVarPath := a.client.cfg.BasePath + \"/sites({site-id})/onenote/pages({onenotePage-id})/parentNotebook/sections({onenoteSection-id})/parentSectionGroup/sectionGroups({sectionGroup-id})\"\n\tlocalVarPath = strings.Replace(localVarPath, \"{\"+\"site-id\"+\"}\", _neturl.QueryEscape(fmt.Sprintf(\"%v\", siteId)), -1)\n\tlocalVarPath = strings.Replace(localVarPath, \"{\"+\"onenotePage-id\"+\"}\", _neturl.QueryEscape(fmt.Sprintf(\"%v\", onenotePageId)), -1)\n\tlocalVarPath = strings.Replace(localVarPath, \"{\"+\"onenoteSection-id\"+\"}\", _neturl.QueryEscape(fmt.Sprintf(\"%v\", onenoteSectionId)), -1)\n\tlocalVarPath = strings.Replace(localVarPath, \"{\"+\"sectionGroup-id\"+\"}\", _neturl.QueryEscape(fmt.Sprintf(\"%v\", sectionGroupId)), -1)\n\n\tlocalVarHeaderParams := make(map[string]string)\n\tlocalVarQueryParams := _neturl.Values{}\n\tlocalVarFormParams := _neturl.Values{}\n\n\t// to determine 
the Content-Type header\n\tlocalVarHTTPContentTypes := []string{\"application/json\"}\n\n\t// set Content-Type header\n\tlocalVarHTTPContentType := selectHeaderContentType(localVarHTTPContentTypes)\n\tif localVarHTTPContentType != \"\" {\n\t\tlocalVarHeaderParams[\"Content-Type\"] = localVarHTTPContentType\n\t}\n\n\t// to determine the Accept header\n\tlocalVarHTTPHeaderAccepts := []string{\"application/json\"}\n\n\t// set Accept header\n\tlocalVarHTTPHeaderAccept := selectHeaderAccept(localVarHTTPHeaderAccepts)\n\tif localVarHTTPHeaderAccept != \"\" {\n\t\tlocalVarHeaderParams[\"Accept\"] = localVarHTTPHeaderAccept\n\t}\n\t// body params\n\tlocalVarPostBody = &microsoftGraphSectionGroup\n\tr, err := a.client.prepareRequest(ctx, localVarPath, localVarHTTPMethod, localVarPostBody, localVarHeaderParams, localVarQueryParams, localVarFormParams, localVarFormFileName, localVarFileName, localVarFileBytes)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\n\tlocalVarHTTPResponse, err := a.client.callAPI(r)\n\tif err != nil || localVarHTTPResponse == nil {\n\t\treturn localVarHTTPResponse, err\n\t}\n\n\tlocalVarBody, err := _ioutil.ReadAll(localVarHTTPResponse.Body)\n\tlocalVarHTTPResponse.Body.Close()\n\tif err != nil {\n\t\treturn localVarHTTPResponse, err\n\t}\n\n\tif localVarHTTPResponse.StatusCode >= 300 {\n\t\tnewErr := GenericOpenAPIError{\n\t\t\tbody: localVarBody,\n\t\t\terror: localVarHTTPResponse.Status,\n\t\t}\n\t\tif localVarHTTPResponse.StatusCode == 0 {\n\t\t\tvar v OdataError\n\t\t\terr = a.client.decode(&v, localVarBody, localVarHTTPResponse.Header.Get(\"Content-Type\"))\n\t\t\tif err != nil {\n\t\t\t\tnewErr.error = err.Error()\n\t\t\t\treturn localVarHTTPResponse, newErr\n\t\t\t}\n\t\t\tnewErr.model = v\n\t\t\treturn localVarHTTPResponse, newErr\n\t\t}\n\t\treturn localVarHTTPResponse, newErr\n\t}\n\n\treturn localVarHTTPResponse, nil\n}", "func (o *ExpenseApplicationsIndexResponseExpenseApplications) UnsetSectionId() {\n\to.SectionId.Unset()\n}", "func (a *UsersOnenoteNotebooksOnenoteSectionApiService) UsersOnenoteNotebooksSectionsUpdateParentNotebook(ctx _context.Context, userId string, notebookId string, onenoteSectionId string, microsoftGraphNotebook MicrosoftGraphNotebook) (*_nethttp.Response, error) {\n\tvar (\n\t\tlocalVarHTTPMethod = _nethttp.MethodPatch\n\t\tlocalVarPostBody interface{}\n\t\tlocalVarFormFileName string\n\t\tlocalVarFileName string\n\t\tlocalVarFileBytes []byte\n\t)\n\n\t// create path and map variables\n\tlocalVarPath := a.client.cfg.BasePath + \"/users({user-id})/onenote/notebooks({notebook-id})/sections({onenoteSection-id})/parentNotebook\"\n\tlocalVarPath = strings.Replace(localVarPath, \"{\"+\"user-id\"+\"}\", _neturl.QueryEscape(fmt.Sprintf(\"%v\", userId)), -1)\n\tlocalVarPath = strings.Replace(localVarPath, \"{\"+\"notebook-id\"+\"}\", _neturl.QueryEscape(fmt.Sprintf(\"%v\", notebookId)), -1)\n\tlocalVarPath = strings.Replace(localVarPath, \"{\"+\"onenoteSection-id\"+\"}\", _neturl.QueryEscape(fmt.Sprintf(\"%v\", onenoteSectionId)), -1)\n\n\tlocalVarHeaderParams := make(map[string]string)\n\tlocalVarQueryParams := _neturl.Values{}\n\tlocalVarFormParams := _neturl.Values{}\n\n\t// to determine the Content-Type header\n\tlocalVarHTTPContentTypes := []string{\"application/json\"}\n\n\t// set Content-Type header\n\tlocalVarHTTPContentType := selectHeaderContentType(localVarHTTPContentTypes)\n\tif localVarHTTPContentType != \"\" {\n\t\tlocalVarHeaderParams[\"Content-Type\"] = localVarHTTPContentType\n\t}\n\n\t// to determine the Accept 
header\n\tlocalVarHTTPHeaderAccepts := []string{\"application/json\"}\n\n\t// set Accept header\n\tlocalVarHTTPHeaderAccept := selectHeaderAccept(localVarHTTPHeaderAccepts)\n\tif localVarHTTPHeaderAccept != \"\" {\n\t\tlocalVarHeaderParams[\"Accept\"] = localVarHTTPHeaderAccept\n\t}\n\t// body params\n\tlocalVarPostBody = &microsoftGraphNotebook\n\tr, err := a.client.prepareRequest(ctx, localVarPath, localVarHTTPMethod, localVarPostBody, localVarHeaderParams, localVarQueryParams, localVarFormParams, localVarFormFileName, localVarFileName, localVarFileBytes)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\n\tlocalVarHTTPResponse, err := a.client.callAPI(r)\n\tif err != nil || localVarHTTPResponse == nil {\n\t\treturn localVarHTTPResponse, err\n\t}\n\n\tlocalVarBody, err := _ioutil.ReadAll(localVarHTTPResponse.Body)\n\tlocalVarHTTPResponse.Body.Close()\n\tif err != nil {\n\t\treturn localVarHTTPResponse, err\n\t}\n\n\tif localVarHTTPResponse.StatusCode >= 300 {\n\t\tnewErr := GenericOpenAPIError{\n\t\t\tbody: localVarBody,\n\t\t\terror: localVarHTTPResponse.Status,\n\t\t}\n\t\tif localVarHTTPResponse.StatusCode == 0 {\n\t\t\tvar v OdataError\n\t\t\terr = a.client.decode(&v, localVarBody, localVarHTTPResponse.Header.Get(\"Content-Type\"))\n\t\t\tif err != nil {\n\t\t\t\tnewErr.error = err.Error()\n\t\t\t\treturn localVarHTTPResponse, newErr\n\t\t\t}\n\t\t\tnewErr.model = v\n\t\t\treturn localVarHTTPResponse, newErr\n\t\t}\n\t\treturn localVarHTTPResponse, newErr\n\t}\n\n\treturn localVarHTTPResponse, nil\n}", "func (a *GroupsOnenotePagesNotebookApiService) GroupsOnenotePagesParentNotebookSectionGroupsUpdateParentSectionGroup(ctx _context.Context, groupId string, onenotePageId string, sectionGroupId string, microsoftGraphSectionGroup MicrosoftGraphSectionGroup) (*_nethttp.Response, error) {\n\tvar (\n\t\tlocalVarHTTPMethod = _nethttp.MethodPatch\n\t\tlocalVarPostBody interface{}\n\t\tlocalVarFormFileName string\n\t\tlocalVarFileName string\n\t\tlocalVarFileBytes []byte\n\t)\n\n\t// create path and map variables\n\tlocalVarPath := a.client.cfg.BasePath + \"/groups({group-id})/onenote/pages({onenotePage-id})/parentNotebook/sectionGroups({sectionGroup-id})/parentSectionGroup\"\n\tlocalVarPath = strings.Replace(localVarPath, \"{\"+\"group-id\"+\"}\", _neturl.QueryEscape(fmt.Sprintf(\"%v\", groupId)), -1)\n\tlocalVarPath = strings.Replace(localVarPath, \"{\"+\"onenotePage-id\"+\"}\", _neturl.QueryEscape(fmt.Sprintf(\"%v\", onenotePageId)), -1)\n\tlocalVarPath = strings.Replace(localVarPath, \"{\"+\"sectionGroup-id\"+\"}\", _neturl.QueryEscape(fmt.Sprintf(\"%v\", sectionGroupId)), -1)\n\n\tlocalVarHeaderParams := make(map[string]string)\n\tlocalVarQueryParams := _neturl.Values{}\n\tlocalVarFormParams := _neturl.Values{}\n\n\t// to determine the Content-Type header\n\tlocalVarHTTPContentTypes := []string{\"application/json\"}\n\n\t// set Content-Type header\n\tlocalVarHTTPContentType := selectHeaderContentType(localVarHTTPContentTypes)\n\tif localVarHTTPContentType != \"\" {\n\t\tlocalVarHeaderParams[\"Content-Type\"] = localVarHTTPContentType\n\t}\n\n\t// to determine the Accept header\n\tlocalVarHTTPHeaderAccepts := []string{\"application/json\"}\n\n\t// set Accept header\n\tlocalVarHTTPHeaderAccept := selectHeaderAccept(localVarHTTPHeaderAccepts)\n\tif localVarHTTPHeaderAccept != \"\" {\n\t\tlocalVarHeaderParams[\"Accept\"] = localVarHTTPHeaderAccept\n\t}\n\t// body params\n\tlocalVarPostBody = &microsoftGraphSectionGroup\n\tr, err := a.client.prepareRequest(ctx, localVarPath, localVarHTTPMethod, 
localVarPostBody, localVarHeaderParams, localVarQueryParams, localVarFormParams, localVarFormFileName, localVarFileName, localVarFileBytes)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\n\tlocalVarHTTPResponse, err := a.client.callAPI(r)\n\tif err != nil || localVarHTTPResponse == nil {\n\t\treturn localVarHTTPResponse, err\n\t}\n\n\tlocalVarBody, err := _ioutil.ReadAll(localVarHTTPResponse.Body)\n\tlocalVarHTTPResponse.Body.Close()\n\tif err != nil {\n\t\treturn localVarHTTPResponse, err\n\t}\n\n\tif localVarHTTPResponse.StatusCode >= 300 {\n\t\tnewErr := GenericOpenAPIError{\n\t\t\tbody: localVarBody,\n\t\t\terror: localVarHTTPResponse.Status,\n\t\t}\n\t\tif localVarHTTPResponse.StatusCode == 0 {\n\t\t\tvar v OdataError\n\t\t\terr = a.client.decode(&v, localVarBody, localVarHTTPResponse.Header.Get(\"Content-Type\"))\n\t\t\tif err != nil {\n\t\t\t\tnewErr.error = err.Error()\n\t\t\t\treturn localVarHTTPResponse, newErr\n\t\t\t}\n\t\t\tnewErr.model = v\n\t\t\treturn localVarHTTPResponse, newErr\n\t\t}\n\t\treturn localVarHTTPResponse, newErr\n\t}\n\n\treturn localVarHTTPResponse, nil\n}", "func (a *SitesOnenotePagesNotebookApiService) SitesOnenotePagesParentNotebookSectionGroupsUpdateSections(ctx _context.Context, siteId string, onenotePageId string, sectionGroupId string, onenoteSectionId string, microsoftGraphOnenoteSection MicrosoftGraphOnenoteSection) (*_nethttp.Response, error) {\n\tvar (\n\t\tlocalVarHTTPMethod = _nethttp.MethodPatch\n\t\tlocalVarPostBody interface{}\n\t\tlocalVarFormFileName string\n\t\tlocalVarFileName string\n\t\tlocalVarFileBytes []byte\n\t)\n\n\t// create path and map variables\n\tlocalVarPath := a.client.cfg.BasePath + \"/sites({site-id})/onenote/pages({onenotePage-id})/parentNotebook/sectionGroups({sectionGroup-id})/sections({onenoteSection-id})\"\n\tlocalVarPath = strings.Replace(localVarPath, \"{\"+\"site-id\"+\"}\", _neturl.QueryEscape(fmt.Sprintf(\"%v\", siteId)), -1)\n\tlocalVarPath = strings.Replace(localVarPath, \"{\"+\"onenotePage-id\"+\"}\", _neturl.QueryEscape(fmt.Sprintf(\"%v\", onenotePageId)), -1)\n\tlocalVarPath = strings.Replace(localVarPath, \"{\"+\"sectionGroup-id\"+\"}\", _neturl.QueryEscape(fmt.Sprintf(\"%v\", sectionGroupId)), -1)\n\tlocalVarPath = strings.Replace(localVarPath, \"{\"+\"onenoteSection-id\"+\"}\", _neturl.QueryEscape(fmt.Sprintf(\"%v\", onenoteSectionId)), -1)\n\n\tlocalVarHeaderParams := make(map[string]string)\n\tlocalVarQueryParams := _neturl.Values{}\n\tlocalVarFormParams := _neturl.Values{}\n\n\t// to determine the Content-Type header\n\tlocalVarHTTPContentTypes := []string{\"application/json\"}\n\n\t// set Content-Type header\n\tlocalVarHTTPContentType := selectHeaderContentType(localVarHTTPContentTypes)\n\tif localVarHTTPContentType != \"\" {\n\t\tlocalVarHeaderParams[\"Content-Type\"] = localVarHTTPContentType\n\t}\n\n\t// to determine the Accept header\n\tlocalVarHTTPHeaderAccepts := []string{\"application/json\"}\n\n\t// set Accept header\n\tlocalVarHTTPHeaderAccept := selectHeaderAccept(localVarHTTPHeaderAccepts)\n\tif localVarHTTPHeaderAccept != \"\" {\n\t\tlocalVarHeaderParams[\"Accept\"] = localVarHTTPHeaderAccept\n\t}\n\t// body params\n\tlocalVarPostBody = &microsoftGraphOnenoteSection\n\tr, err := a.client.prepareRequest(ctx, localVarPath, localVarHTTPMethod, localVarPostBody, localVarHeaderParams, localVarQueryParams, localVarFormParams, localVarFormFileName, localVarFileName, localVarFileBytes)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\n\tlocalVarHTTPResponse, err := a.client.callAPI(r)\n\tif err != 
nil || localVarHTTPResponse == nil {\n\t\treturn localVarHTTPResponse, err\n\t}\n\n\tlocalVarBody, err := _ioutil.ReadAll(localVarHTTPResponse.Body)\n\tlocalVarHTTPResponse.Body.Close()\n\tif err != nil {\n\t\treturn localVarHTTPResponse, err\n\t}\n\n\tif localVarHTTPResponse.StatusCode >= 300 {\n\t\tnewErr := GenericOpenAPIError{\n\t\t\tbody: localVarBody,\n\t\t\terror: localVarHTTPResponse.Status,\n\t\t}\n\t\tif localVarHTTPResponse.StatusCode == 0 {\n\t\t\tvar v OdataError\n\t\t\terr = a.client.decode(&v, localVarBody, localVarHTTPResponse.Header.Get(\"Content-Type\"))\n\t\t\tif err != nil {\n\t\t\t\tnewErr.error = err.Error()\n\t\t\t\treturn localVarHTTPResponse, newErr\n\t\t\t}\n\t\t\tnewErr.model = v\n\t\t\treturn localVarHTTPResponse, newErr\n\t\t}\n\t\treturn localVarHTTPResponse, newErr\n\t}\n\n\treturn localVarHTTPResponse, nil\n}", "func (e UpsertEventRequestValidationError) Key() bool { return e.key }" ]
[ "0.69682544", "0.6319706", "0.5667014", "0.5641905", "0.5418189", "0.5379082", "0.5306654", "0.52363414", "0.52132237", "0.51903903", "0.5180299", "0.5160214", "0.51544005", "0.5071639", "0.50441396", "0.5016419", "0.49991447", "0.49895084", "0.49400336", "0.49148527", "0.4876348", "0.48287317", "0.48172304", "0.48083016", "0.47943166", "0.47884202", "0.4784413", "0.47688323", "0.47551462", "0.475124", "0.47330114", "0.47167572", "0.46949065", "0.46835175", "0.46480292", "0.4647574", "0.46340564", "0.4614457", "0.45876104", "0.458581", "0.45851803", "0.45757148", "0.45680577", "0.45618212", "0.4559347", "0.45552567", "0.45529166", "0.4551706", "0.45332775", "0.453174", "0.4531389", "0.45312327", "0.45295218", "0.45283666", "0.45164177", "0.45149562", "0.44851455", "0.44843623", "0.44702634", "0.44661003", "0.4459086", "0.4456161", "0.44459584", "0.44456726", "0.4444478", "0.44305322", "0.4429833", "0.44238243", "0.44194022", "0.44164002", "0.43958756", "0.43899885", "0.43867525", "0.43744722", "0.43644506", "0.43597126", "0.43486407", "0.43392372", "0.43376222", "0.43375748", "0.43228257", "0.42880997", "0.42873743", "0.42814276", "0.4269437", "0.42656913", "0.4256075", "0.42552957", "0.42500755", "0.4243621", "0.42431512", "0.42430285", "0.42318746", "0.42298365", "0.42284736", "0.42216268", "0.42167765", "0.4211447", "0.42081812", "0.42043164" ]
0.80464846
0
Field function returns field value.
func (e UpsertEventRequest_SectionValidationError) Field() string { return e.field }
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "func GetFieldValue(v interface{}, field string) (r string) {\n\tvar immutable reflect.Value\n\timmutable = GetReflectValue(v)\n\tval := immutable.FieldByName(field)\n\tswitch val.Kind() {\n\tcase reflect.Int64, reflect.Int32, reflect.Int:\n\t\tr = fmt.Sprintf(\"%d\", val.Int())\n\tcase reflect.Float64, reflect.Float32:\n\t\tr = fmt.Sprintf(\"%.2f\", val.Float())\n\tdefault:\n\t\t// process time\n\t\tvi := val.Interface()\n\t\tif vc, ok := vi.(time.Time); ok {\n\t\t\tr = FormatTime(vc)\n\t\t\tbreak\n\t\t}\n\t\tr = fmt.Sprintf(\"%v\", val)\n\t}\n\treturn\n}", "func (f *field) Val() interface{} {\n\treturn f.v\n}", "func (f Fields) ValueForField(fieldName string) string {\n\treturn f.ValueForFieldOfType(fieldName, \"\")\n}", "func (v *ClassValue) field(s *scope, name string) Value {\n\tfield, ok := v.Fields[name]\n\tif !ok {\n\t\tpanic(fmt.Errorf(\"ClassValue %v did not contain field %v\", v.Type().Name(), name))\n\t}\n\treturn field\n}", "func (f *Field) Value() interface{} {\n\treturn f.value.Interface()\n}", "func (f *Fieldx) Value() interface{} {\n\treturn f.value.Interface()\n}", "func (i Item) GetField(name string) interface{} {\n\treturn getField(name, i.Payload)\n}", "func FieldValue(field *InputField) string {\n\treturn field.value\n}", "func (e RanparameterValueValidationError) Field() string { return e.field }", "func (i I)Field(r,c int, value string)string{\n return value\n}", "func (s *StructField) Field(name string) (*StructField, error) {\n\treturn Field(s.Value(), name)\n}", "func (entry *Entry) Field(name string) (value string, err error) {\n\tvalue, ok := entry.fields[name]\n\tif !ok {\n\t\terr = fmt.Errorf(\"field '%v' does not found in record %+v\", name, *entry)\n\t}\n\treturn\n}", "func (m *NodeMutation) Field(name string) (ent.Value, bool) {\n\tswitch name {\n\tcase node.FieldValue:\n\t\treturn m.Value()\n\t}\n\treturn nil, false\n}", "func (u *User) GetFieldValue(field *mapping.StructField) (interface{}, error) {\n\tswitch field.Index[0] {\n\tcase 0: // ID\n\t\treturn u.ID, nil\n\tcase 1: // Name\n\t\treturn u.Name, nil\n\tcase 2: // CreatedAt\n\t\treturn u.CreatedAt, nil\n\tcase 3: // CreatedAtIso\n\t\treturn u.CreatedAtIso, nil\n\tcase 5: // MotherID\n\t\treturn u.MotherID, nil\n\tcase 7: // FatherID\n\t\treturn u.FatherID, nil\n\t}\n\treturn nil, errors.Wrapf(mapping.ErrInvalidModelField, \"provided invalid field: '%s' for given model: User'\", field.Name())\n}", "func (m *NumberTokenMutation) Field(name string) (ent.Value, bool) {\n\tswitch name {\n\tcase numbertoken.FieldValue:\n\t\treturn m.Value()\n\t}\n\treturn nil, false\n}", "func (f *Field) Field(name string) *Field {\n\tfield, ok := f.FieldOk(name)\n\tif !ok {\n\t\tpanic(\"field not found\")\n\t}\n\n\treturn field\n}", "func (e GetInstanceRequestValidationError) Field() string { return e.field }", "func (e RanparameterItemValidationError) Field() string { return e.field }", "func (e ApplicationPubSubValidationError) Field() string { return e.field }", "func (res Result) GetField(fields ...string) interface{} {\n\tif len(fields) == 0 {\n\t\treturn res\n\t}\n\n\treturn res.get(fields)\n}", "func (t *Type) Field(i int) *Field", "func (m *CarRepairrecordMutation) Field(name string) (ent.Value, bool) {\n\tswitch name {\n\tcase carrepairrecord.FieldDatetime:\n\t\treturn m.Datetime()\n\tcase carrepairrecord.FieldRepairdetail:\n\t\treturn m.Repairdetail()\n\tcase carrepairrecord.FieldRepaircost:\n\t\treturn m.Repaircost()\n\tcase carrepairrecord.FieldCarmaintenance:\n\t\treturn m.Carmaintenance()\n\t}\n\treturn 
nil, false\n}", "func (b *box) getFieldValue(x, y int) int {\n\treturn b.values[x+y*3]\n}", "func (e GetEventByIDRequestValidationError) Field() string { return e.field }", "func (msg *Message) Field(fieldName string) *Field {\n\treturn msg.fieldByName[fieldName]\n}", "func (e GetRequestValidationError) Field() string { return e.field }", "func (e GetRequestValidationError) Field() string { return e.field }", "func (e GetInstanceResponseValidationError) Field() string { return e.field }", "func (e BitStringValidationError) Field() string { return e.field }", "func (e GetResponseValidationError) Field() string { return e.field }", "func (e GetApplicationPubSubRequestValidationError) Field() string { return e.field }", "func (e ResultValidationError) Field() string { return e.field }", "func (e GetEventByIDResponseValidationError) Field() string { return e.field }", "func (e RanparameterDefItemValidationError) Field() string { return e.field }", "func (e ArfcnValidationError) Field() string { return e.field }", "func (p *Pet) GetFieldValue(field *mapping.StructField) (interface{}, error) {\n\tswitch field.Index[0] {\n\tcase 0: // ID\n\t\treturn p.ID, nil\n\tcase 1: // Name\n\t\treturn p.Name, nil\n\tcase 3: // OwnerID\n\t\treturn p.OwnerID, nil\n\t}\n\treturn nil, errors.Wrapf(mapping.ErrInvalidModelField, \"provided invalid field: '%s' for given model: Pet'\", field.Name())\n}", "func (e RanparameterIdValidationError) Field() string { return e.field }", "func (e RetrieveResponseValidationError) Field() string { return e.field }", "func (m *RepairinvoiceMutation) Field(name string) (ent.Value, bool) {\n\tswitch name {\n\tcase repairinvoice.FieldBequipment:\n\t\treturn m.Bequipment()\n\tcase repairinvoice.FieldEmtell:\n\t\treturn m.Emtell()\n\tcase repairinvoice.FieldNum:\n\t\treturn m.Num()\n\t}\n\treturn nil, false\n}", "func (m *CleaningroomMutation) Field(name string) (ent.Value, bool) {\n\tswitch name {\n\tcase cleaningroom.FieldNote:\n\t\treturn m.Note()\n\tcase cleaningroom.FieldDateandstarttime:\n\t\treturn m.Dateandstarttime()\n\tcase cleaningroom.FieldPhonenumber:\n\t\treturn m.Phonenumber()\n\tcase cleaningroom.FieldNumofem:\n\t\treturn m.Numofem()\n\t}\n\treturn nil, false\n}", "func Field(name, from, reference string) (string, error) {\n\treturn makeRequest(\"field\", name, from, reference)\n}", "func (e GetMovableObjectRequestValidationError) Field() string { return e.field }", "func (e ResolveResponseValidationError) Field() string { return e.field }", "func (e PublishResponseValidationError) Field() string { return e.field }", "func (e GetMessageRequestValidationError) Field() string { return e.field }", "func (e GetMessageResponseValidationError) Field() string { return e.field }", "func (m *RepairinvoiceMutation) Field(name string) (ent.Value, bool) {\n\tswitch name {\n\tcase repairinvoice.FieldSymptomid:\n\t\treturn m.Symptomid()\n\tcase repairinvoice.FieldDeviceid:\n\t\treturn m.Deviceid()\n\tcase repairinvoice.FieldUserid:\n\t\treturn m.Userid()\n\tcase repairinvoice.FieldStatusrepairid:\n\t\treturn m.Statusrepairid()\n\t}\n\treturn nil, false\n}", "func (e SimpleRequestValidationError) Field() string { return e.field }", "func (e CacheValidationError) Field() string { return e.field }", "func (e PciValidationError) Field() string { return e.field }", "func (e ChannelPayRequestValidationError) Field() string { return e.field }", "func (e GetMovableObjectResponseValidationError) Field() string { return e.field }", "func (e RetrieveRequestValidationError) Field() string { 
return e.field }", "func (m *ExchangeMutation) Field(name string) (ent.Value, bool) {\n\tswitch name {\n\tcase exchange.FieldCode:\n\t\treturn m.Code()\n\tcase exchange.FieldName:\n\t\treturn m.Name()\n\t}\n\treturn nil, false\n}", "func (e PublishRequestValidationError) Field() string { return e.field }", "func (m *PetruleMutation) Field(name string) (ent.Value, bool) {\n\tswitch name {\n\tcase petrule.FieldPetrule:\n\t\treturn m.Petrule()\n\t}\n\treturn nil, false\n}", "func (e GitopsCFValidationError) Field() string { return e.field }", "func (e SimpleResponseValidationError) Field() string { return e.field }", "func (e ChannelPayResponseValidationError) Field() string { return e.field }", "func (f *Field) Get(l *Location) (string, error) {\n\tif l.Comp == -1 {\n\t\treturn string(f.Value), nil\n\t}\n\tcomp, err := f.Component(l.Comp)\n\tif err != nil {\n\t\treturn \"\", err\n\t}\n\treturn comp.Get(l)\n}", "func (m *RepairingMutation) Field(name string) (ent.Value, bool) {\n\tswitch name {\n\tcase repairing.FieldRepairpart:\n\t\treturn m.Repairpart()\n\t}\n\treturn nil, false\n}", "func (e RanfunctionNameValidationError) Field() string { return e.field }", "func (m *BillMutation) Field(name string) (ent.Value, bool) {\n\tswitch name {\n\tcase bill.FieldPrice:\n\t\treturn m.Price()\n\tcase bill.FieldTime:\n\t\treturn m.Time()\n\t}\n\treturn nil, false\n}", "func (m *EventRSVPMutation) Field(name string) (ent.Value, bool) {\n\treturn nil, false\n}", "func Field(v interface{}, name string) (*Fieldx, bool) {\n\treturn New(v).Field(name)\n}", "func (e GetStreamRequestValidationError) Field() string { return e.field }", "func (e RdsValidationError) Field() string { return e.field }", "func (f *TagField) Value() string {\n\treturn f.value\n}", "func (m *LeaseMutation) Field(name string) (ent.Value, bool) {\n\tswitch name {\n\tcase lease.FieldAddedtime:\n\t\treturn m.Addedtime()\n\tcase lease.FieldTenant:\n\t\treturn m.Tenant()\n\tcase lease.FieldNumbtenant:\n\t\treturn m.Numbtenant()\n\tcase lease.FieldIdtenant:\n\t\treturn m.Idtenant()\n\tcase lease.FieldAgetenant:\n\t\treturn m.Agetenant()\n\t}\n\treturn nil, false\n}", "func (e RetrieveCurrentRequestValidationError) Field() string { return e.field }", "func (fn AdapterFunc) Field(fieldpath []string) (string, bool) {\n\treturn fn(fieldpath)\n}", "func (e EarfcnValidationError) Field() string { return e.field }", "func (e Response_DataValidationError) Field() string { return e.field }", "func (e ScopedRdsValidationError) Field() string { return e.field }", "func (e ResolveRequestValidationError) Field() string { return e.field }", "func (e PaymentInputValidationError) Field() string { return e.field }", "func (m *PatientrecordMutation) Field(name string) (ent.Value, bool) {\n\tswitch name {\n\tcase patientrecord.FieldName:\n\t\treturn m.Name()\n\t}\n\treturn nil, false\n}", "func (e BatchGetResponseValidationError) Field() string { return e.field }", "func (i *Item) GetValue(field string) string {\n\tif i == nil || len(i.Fields) == 0 {\n\t\treturn \"\"\n\t}\n\n\tsectionFilter := false\n\tsectionLabel := \"\"\n\tfieldLabel := field\n\tif strings.Contains(field, \".\") {\n\t\tparts := strings.Split(field, \".\")\n\n\t\t// Test to make sure the . 
isn't the last character\n\t\tif len(parts) == 2 {\n\t\t\tsectionFilter = true\n\t\t\tsectionLabel = parts[0]\n\t\t\tfieldLabel = parts[1]\n\t\t}\n\t}\n\n\tfor _, f := range i.Fields {\n\t\tif sectionFilter {\n\t\t\tif f.Section != nil {\n\t\t\t\tif sectionLabel != i.SectionLabelForID(f.Section.ID) {\n\t\t\t\t\tcontinue\n\t\t\t\t}\n\t\t\t}\n\t\t}\n\n\t\tif fieldLabel == f.Label {\n\t\t\treturn f.Value\n\t\t}\n\t}\n\n\treturn \"\"\n}", "func (m *RoomInfoMutation) Field(name string) (ent.Value, bool) {\n\tswitch name {\n\tcase roominfo.FieldInfo:\n\t\treturn m.Info()\n\t}\n\treturn nil, false\n}", "func (m *TokenMutation) Field(name string) (ent.Value, bool) {\n\tswitch name {\n\tcase token.FieldCreatedAt:\n\t\treturn m.CreatedAt()\n\tcase token.FieldUpdatedAt:\n\t\treturn m.UpdatedAt()\n\tcase token.FieldValue:\n\t\treturn m.Value()\n\t}\n\treturn nil, false\n}", "func (m *ResourceMutation) Field(name string) (ent.Value, bool) {\n\tswitch name {\n\tcase resource.FieldName:\n\t\treturn m.Name()\n\tcase resource.FieldType:\n\t\treturn m.GetType()\n\t}\n\treturn nil, false\n}", "func (e MovableObjectValidationError) Field() string { return e.field }", "func (m *CardMutation) Field(name string) (ent.Value, bool) {\n\tswitch name {\n\tcase card.FieldCreateTime:\n\t\treturn m.CreateTime()\n\tcase card.FieldUpdateTime:\n\t\treturn m.UpdateTime()\n\tcase card.FieldSuit:\n\t\treturn m.Suit()\n\tcase card.FieldValue:\n\t\treturn m.Value()\n\t}\n\treturn nil, false\n}", "func (m *EventMutation) Field(name string) (ent.Value, bool) {\n\tswitch name {\n\tcase event.FieldName:\n\t\treturn m.Name()\n\t}\n\treturn nil, false\n}", "func (m *BillMutation) Field(name string) (ent.Value, bool) {\n\tswitch name {\n\tcase bill.FieldQuantity:\n\t\treturn m.Quantity()\n\tcase bill.FieldAddedTime:\n\t\treturn m.AddedTime()\n\t}\n\treturn nil, false\n}", "func (m *StreetMutation) Field(name string) (ent.Value, bool) {\n\tswitch name {\n\tcase street.FieldName:\n\t\treturn m.Name()\n\t}\n\treturn nil, false\n}", "func (m *LengthtimeMutation) Field(name string) (ent.Value, bool) {\n\tswitch name {\n\tcase lengthtime.FieldLengthtime:\n\t\treturn m.Lengthtime()\n\t}\n\treturn nil, false\n}", "func (e AssessmentResultValidationError) Field() string { return e.field }", "func (s UserSet) FieldGet(field models.FieldName) *models.FieldInfo {\n\tres := s.Collection().Call(\"FieldGet\", field)\n\tresTyped, _ := res.(*models.FieldInfo)\n\treturn resTyped\n}", "func (e GetUserResponseValidationError) Field() string { return e.field }", "func (m *PatientrightsMutation) Field(name string) (ent.Value, bool) {\n\tswitch name {\n\tcase patientrights.FieldPermissionDate:\n\t\treturn m.PermissionDate()\n\t}\n\treturn nil, false\n}", "func (e GetStreamResponseValidationError) Field() string { return e.field }", "func (m *EquipmentrentalMutation) Field(name string) (ent.Value, bool) {\n\tswitch name {\n\tcase equipmentrental.FieldRENTALAMOUNT:\n\t\treturn m.RENTALAMOUNT()\n\tcase equipmentrental.FieldRENTALDATE:\n\t\treturn m.RENTALDATE()\n\tcase equipmentrental.FieldRETURNDATE:\n\t\treturn m.RETURNDATE()\n\t}\n\treturn nil, false\n}", "func (f *FieldHandler) Value(initZero bool) reflect.Value {\n\treturn f.field.reflectValueGetter(f.expr.ptr, initZero)\n}", "func (m *PurposeMutation) Field(name string) (ent.Value, bool) {\n\tswitch name {\n\tcase purpose.FieldObjective:\n\t\treturn m.Objective()\n\t}\n\treturn nil, false\n}", "func (e ApplicationPubSubsValidationError) Field() string { return e.field }", "func (f Unstructured) 
Field(field string) Fragment {\n\tif f.fields != nil {\n\t\treturn f.fields[field]\n\t}\n\treturn nil\n}", "func (e BodyResponseValidationError) Field() string { return e.field }", "func (m *CarMutation) Field(name string) (ent.Value, bool) {\n\tswitch name {\n\tcase car.FieldCreateTime:\n\t\treturn m.CreateTime()\n\tcase car.FieldUpdateTime:\n\t\treturn m.UpdateTime()\n\tcase car.FieldModel:\n\t\treturn m.Model()\n\tcase car.FieldRegisteredAt:\n\t\treturn m.RegisteredAt()\n\t}\n\treturn nil, false\n}", "func (m *CardMutation) Field(name string) (ent.Value, bool) {\n\tswitch name {\n\tcase card.FieldNumber:\n\t\treturn m.Number()\n\tcase card.FieldName:\n\t\treturn m.Name()\n\tcase card.FieldOwnerID:\n\t\treturn m.OwnerID()\n\t}\n\treturn nil, false\n}" ]
[ "0.71079886", "0.705458", "0.70306563", "0.70252305", "0.6945119", "0.69039124", "0.689789", "0.68854237", "0.68611896", "0.68137765", "0.6811531", "0.67632294", "0.6716657", "0.67018616", "0.66822076", "0.6671346", "0.66659707", "0.6661343", "0.66608155", "0.6660421", "0.665608", "0.6647752", "0.66360617", "0.6625801", "0.6617159", "0.66153616", "0.66153616", "0.661111", "0.6608895", "0.66083837", "0.6604208", "0.66008335", "0.65927887", "0.6587402", "0.65803015", "0.65671533", "0.6567071", "0.6564914", "0.65632343", "0.65630984", "0.654184", "0.6536053", "0.6530546", "0.6530526", "0.6528864", "0.65260595", "0.65179527", "0.6516745", "0.6516154", "0.6510159", "0.6510078", "0.65042776", "0.6501439", "0.6499975", "0.64988506", "0.649665", "0.6496221", "0.64947623", "0.649354", "0.6489089", "0.6488793", "0.64882225", "0.64859617", "0.6483642", "0.6479889", "0.64790434", "0.6472379", "0.6465228", "0.6459204", "0.6457627", "0.6452723", "0.64507645", "0.64495903", "0.64487314", "0.6448028", "0.64479464", "0.64474", "0.64456683", "0.64455897", "0.6444573", "0.64437336", "0.6443306", "0.6441888", "0.6441613", "0.6441039", "0.6439085", "0.6438874", "0.6434375", "0.64315784", "0.6430702", "0.6429934", "0.64209116", "0.6417538", "0.64174324", "0.6417134", "0.6411201", "0.64086837", "0.6406251", "0.6405251", "0.6404929", "0.64009386" ]
0.0
-1
Reason function returns reason value.
func (e UpsertEventRequest_SectionValidationError) Reason() string { return e.reason }
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "func GetReason(from Getter, t string) string {\n\tif c := Get(from, t); c != nil {\n\t\treturn c.Reason\n\t}\n\treturn \"\"\n}", "func GetReason(message report.IMessage) int32 {\n\tswitch message.MessageType() {\n\tcase \"+RSP\", \"+BSP\":\n\t\treturn getLocationReason(message)\n\tcase \"+EVT\", \"+BVT\":\n\t\treturn getEventCode(message)\n\n\t}\n\treturn int32(6)\n}", "func GetReason(message report.IMessage) int32 {\n\tswitch message.MessageType() {\n\tcase \"+RSP\", \"+BSP\":\n\t\treturn getLocationReason(message)\n\tcase \"+EVT\", \"+BVT\":\n\t\treturn getEventCode(message)\n\n\t}\n\treturn int32(6)\n}", "func (b *Base) GetReason() string {\n\treturn b.Reason\n}", "func (o ValidationOutput) Reason() pulumi.StringPtrOutput {\n\treturn o.ApplyT(func(v Validation) *string { return v.Reason }).(pulumi.StringPtrOutput)\n}", "func (s *Subscription) GetReason() string {\n\tif s == nil || s.Reason == nil {\n\t\treturn \"\"\n\t}\n\treturn *s.Reason\n}", "func GetReason(message report.IMessage) int32 {\n\tidt, found := message.GetValue(fields.DeviceType)\n\tif !found {\n\t\treturn 6 //periodical\n\t}\n\n\tdeviceType, valid := idt.(byte)\n\tif !valid {\n\t\treturn 6 //periodical\n\t}\n\n\tswitch deviceType {\n\tcase devicetypes.GV320:\n\t\treturn gv300.GetReason(message)\n\n\tcase devicetypes.GV55, devicetypes.GV55N:\n\t\treturn gv55.GetReason(message)\n\n\tcase devicetypes.GV55Lite, devicetypes.GV55NLite:\n\t\treturn gv55.GetReasonLite(message)\n\n\tcase devicetypes.GV75, devicetypes.GV75W:\n\t\treturn gv75.GetReason(message)\n\n\tcase devicetypes.GV55W:\n\t\treturn gv55w.GetReason(message)\n\n\tcase devicetypes.GV600W:\n\t\treturn gv600.GetReason(message)\n\tcase devicetypes.GV300W:\n\t\treturn gv300w.GetReason(message)\n\tdefault:\n\t\treturn gv55.GetReason(message)\n\t}\n}", "func (e MessageDValidationError) Reason() string { return e.reason }", "func (o LienOutput) Reason() pulumi.StringOutput {\n\treturn o.ApplyT(func(v *Lien) pulumi.StringOutput { return v.Reason }).(pulumi.StringOutput)\n}", "func (e BitStringValidationError) Reason() string { return e.reason }", "func (o JobConditionOutput) Reason() pulumi.StringPtrOutput {\n\treturn o.ApplyT(func(v JobCondition) *string { return v.Reason }).(pulumi.StringPtrOutput)\n}", "func Reason(v string) predicate.ProfileUKM {\n\treturn predicate.ProfileUKM(func(s *sql.Selector) {\n\t\ts.Where(sql.EQ(s.C(FieldReason), v))\n\t})\n}", "func (e MessageFValidationError) Reason() string { return e.reason }", "func (o ValidationPatchOutput) Reason() pulumi.StringPtrOutput {\n\treturn o.ApplyT(func(v ValidationPatch) *string { return v.Reason }).(pulumi.StringPtrOutput)\n}", "func (e ActiveHealthCheckValidationError) Reason() string { return e.reason }", "func (o *SecurityProblemEvent) GetReason() string {\n\tif o == nil || o.Reason == nil {\n\t\tvar ret string\n\t\treturn ret\n\t}\n\treturn *o.Reason\n}", "func (e EutracgiValidationError) Reason() string { return e.reason }", "func (resp *Response) Reason() string {\n\treturn resp.Status\n}", "func (n *Notification) GetReason() string {\n\tif n == nil || n.Reason == nil {\n\t\treturn \"\"\n\t}\n\treturn *n.Reason\n}", "func (s *SessionTrackerV1) GetReason() string {\n\treturn s.Spec.Reason\n}", "func (e MessageEValidationError) Reason() string { return e.reason }", "func (e RequirementRuleValidationError) Reason() string { return e.reason }", "func Reason(err error) string {\n\tif err == nil {\n\t\treturn \"\"\n\t}\n\tif reasoner, ok := err.(Reasoner); ok {\n\t\treturn reasoner.Reason()\n\t}\n\treturn 
\"\"\n}", "func (o MachineInstanceStatusConditionsOutput) Reason() pulumi.StringOutput {\n\treturn o.ApplyT(func(v MachineInstanceStatusConditions) string { return v.Reason }).(pulumi.StringOutput)\n}", "func (e NrtValidationError) Reason() string { return e.reason }", "func (o BuildStatusOutput) Reason() pulumi.StringPtrOutput {\n\treturn o.ApplyT(func(v BuildStatus) *string { return v.Reason }).(pulumi.StringPtrOutput)\n}", "func (e GetMessageResponseValidationError) Reason() string { return e.reason }", "func (o ApplicationStatusConditionsOutput) Reason() pulumi.StringOutput {\n\treturn o.ApplyT(func(v ApplicationStatusConditions) string { return v.Reason }).(pulumi.StringOutput)\n}", "func (e PassiveHealthCheckValidationError) Reason() string { return e.reason }", "func (e CardValidationError) Reason() string { return e.reason }", "func (e StatsdValidationError) Reason() string { return e.reason }", "func (e PciValidationError) Reason() string { return e.reason }", "func (o ApplicationStatusWorkflowStepsOutput) Reason() pulumi.StringPtrOutput {\n\treturn o.ApplyT(func(v ApplicationStatusWorkflowSteps) *string { return v.Reason }).(pulumi.StringPtrOutput)\n}", "func (o *AccessRequestData) GetReason() string {\n\tif o == nil || o.Reason == nil {\n\t\tvar ret string\n\t\treturn ret\n\t}\n\treturn *o.Reason\n}", "func (e LanguageValidationError) Reason() string { return e.reason }", "func (e CreditValidationError) Reason() string { return e.reason }", "func (e PaymentValidationError) Reason() string { return e.reason }", "func (e ResponseValidationError) Reason() string { return e.reason }", "func (e RdsValidationError) Reason() string { return e.reason }", "func (e CardHolderValidationError) Reason() string { return e.reason }", "func (e ActionValidationError) Reason() string { return e.reason }", "func (e SimpleResponseValidationError) Reason() string { return e.reason }", "func (e StatusResponseValidationError) Reason() string { return e.reason }", "func (o *V0037Node) GetReason() string {\n\tif o == nil || o.Reason == nil {\n\t\tvar ret string\n\t\treturn ret\n\t}\n\treturn *o.Reason\n}", "func (e ChannelPayRequestValidationError) Reason() string { return e.reason }", "func (e ChannelPayResponseValidationError) Reason() string { return e.reason }", "func (e RicControlMessagePriorityValidationError) Reason() string { return e.reason }", "func (e MaxPciValidationError) Reason() string { return e.reason }", "func (e LivenessResponseValidationError) Reason() string { return e.reason }", "func (e MaxPlmnValidationError) Reason() string { return e.reason }", "func (e SimpleRequestValidationError) Reason() string { return e.reason }", "func (e MessageCValidationError) Reason() string { return e.reason }", "func (se *StatusError) Reason() string {\n\treturn se.message\n}", "func (o *DeploymentsCondition) GetReason() string {\n\tif o == nil || o.Reason == nil {\n\t\tvar ret string\n\t\treturn ret\n\t}\n\treturn *o.Reason\n}", "func (e SkillValidationError) Reason() string { return e.reason }", "func (e GetDisscusRespValidationError) Reason() string { return e.reason }", "func (o BuildStatusPtrOutput) Reason() pulumi.StringPtrOutput {\n\treturn o.ApplyT(func(v *BuildStatus) *string {\n\t\tif v == nil {\n\t\t\treturn nil\n\t\t}\n\t\treturn v.Reason\n\t}).(pulumi.StringPtrOutput)\n}", "func (c *ContainerStatusResolver) Reason() *string {\n\treturn c.reason\n}", "func (e EarfcnValidationError) Reason() string { return e.reason }", "func (e CalculateComplianceRequestValidationError) Reason() string 
{ return e.reason }", "func (_this *CrashReportBody) Reason() *string {\n\tvar ret *string\n\tvalue := _this.Value_JS.Get(\"reason\")\n\tif value.Type() != js.TypeNull && value.Type() != js.TypeUndefined {\n\t\t__tmp := (value).String()\n\t\tret = &__tmp\n\t}\n\treturn ret\n}", "func (e HealthCheck_PayloadValidationError) Reason() string { return e.reason }", "func (e RetrieveMyCardsResponseValidationError) Reason() string { return e.reason }", "func (e CommonResponseValidationError) Reason() string { return e.reason }", "func (e GetMessageRequestValidationError) Reason() string { return e.reason }", "func (o StorageClusterStatusConditionsOutput) Reason() pulumi.StringPtrOutput {\n\treturn o.ApplyT(func(v StorageClusterStatusConditions) *string { return v.Reason }).(pulumi.StringPtrOutput)\n}", "func (e StateMachineResponseValidationError) Reason() string { return e.reason }", "func (e ArfcnValidationError) Reason() string { return e.reason }", "func (e NetworkPolicyValidationError) Reason() string { return e.reason }", "func (o *DataPlaneClusterUpdateStatusRequestConditions) GetReason() string {\n\tif o == nil || o.Reason == nil {\n\t\tvar ret string\n\t\treturn ret\n\t}\n\treturn *o.Reason\n}", "func (e MetricValidationError) Reason() string { return e.reason }", "func (o BuildRunStatusOutput) Reason() pulumi.StringPtrOutput {\n\treturn o.ApplyT(func(v BuildRunStatus) *string { return v.Reason }).(pulumi.StringPtrOutput)\n}", "func (e RecoverableError) Reason() string {\n\treturn e.reason\n}", "func (e MaxofMessageProtocolTestsValidationError) Reason() string { return e.reason }", "func (e ChannelNotifyResponseValidationError) Reason() string { return e.reason }", "func (e ResultValidationError) Reason() string { return e.reason }", "func (e TestSpecificationValidationError) Reason() string { return e.reason }", "func (e NonRecoverableError) Reason() string {\n\treturn e.reason\n}", "func (o JobStatusErrorOutput) Reason() pulumi.StringPtrOutput {\n\treturn o.ApplyT(func(v JobStatusError) *string { return v.Reason }).(pulumi.StringPtrOutput)\n}", "func (a Acknowledgement) Reason() error {\n\tswitch {\n\tcase a.State == ACK:\n\t\treturn nil\n\tcase a.State == NACK:\n\t\treturn errors.New(string(a.Message))\n\tdefault:\n\t\treturn errors.New(\"unknown acknowledgement status\")\n\t}\n}", "func (e UpdateMessageResponseValidationError) Reason() string { return e.reason }", "func (e WordValidationError) Reason() string { return e.reason }", "func (e GetDisscusReqValidationError) Reason() string { return e.reason }", "func (e CreatMessageResponseValidationError) Reason() string { return e.reason }", "func (o HorizontalPodAutoscalerConditionPatchOutput) Reason() pulumi.StringPtrOutput {\n\treturn o.ApplyT(func(v HorizontalPodAutoscalerConditionPatch) *string { return v.Reason }).(pulumi.StringPtrOutput)\n}", "func (o HorizontalPodAutoscalerConditionPatchOutput) Reason() pulumi.StringPtrOutput {\n\treturn o.ApplyT(func(v HorizontalPodAutoscalerConditionPatch) *string { return v.Reason }).(pulumi.StringPtrOutput)\n}", "func (e MetricImplementationValidationError) Reason() string { return e.reason }", "func (e CiliumCFValidationError) Reason() string { return e.reason }", "func (e FilterStateRuleValidationError) Reason() string { return e.reason }", "func (e CreateDisscusRespValidationError) Reason() string { return e.reason }", "func (e HealthCheckValidationError) Reason() string { return e.reason }", "func (e HealthCheckValidationError) Reason() string { return e.reason }", "func (e 
TwoOneofsValidationError) Reason() string { return e.reason }", "func (e AdminValidationError) Reason() string { return e.reason }", "func (o HorizontalPodAutoscalerConditionOutput) Reason() pulumi.StringPtrOutput {\n\treturn o.ApplyT(func(v HorizontalPodAutoscalerCondition) *string { return v.Reason }).(pulumi.StringPtrOutput)\n}", "func (o HorizontalPodAutoscalerConditionOutput) Reason() pulumi.StringPtrOutput {\n\treturn o.ApplyT(func(v HorizontalPodAutoscalerCondition) *string { return v.Reason }).(pulumi.StringPtrOutput)\n}", "func (e LivenessRequestValidationError) Reason() string { return e.reason }", "func (r *ReportStoryRequest) GetReason() (value ReportReasonClass) {\n\tif r == nil {\n\t\treturn\n\t}\n\treturn r.Reason\n}", "func (e AssessmentResultValidationError) Reason() string { return e.reason }", "func (e L7NetworkPolicyRuleValidationError) Reason() string { return e.reason }", "func (e NrarfcnValidationError) Reason() string { return e.reason }" ]
[ "0.78512263", "0.7759013", "0.7759013", "0.758723", "0.74332446", "0.74091107", "0.740494", "0.73673135", "0.73432285", "0.7330937", "0.7329657", "0.73138005", "0.72980094", "0.7293151", "0.72837216", "0.7275913", "0.7252345", "0.7230593", "0.72234565", "0.7222608", "0.7196587", "0.7186926", "0.7177811", "0.71720684", "0.71702856", "0.7168882", "0.7168033", "0.71623784", "0.7160162", "0.7157901", "0.7156796", "0.71499187", "0.71483266", "0.71435404", "0.7138927", "0.7134093", "0.7131485", "0.71212435", "0.7113703", "0.71134007", "0.7110416", "0.71102226", "0.71073544", "0.71044487", "0.7097571", "0.709562", "0.70931906", "0.7092116", "0.7085098", "0.70789874", "0.7077606", "0.707535", "0.7071573", "0.706842", "0.7067343", "0.70658314", "0.7065663", "0.70604813", "0.70554", "0.70413375", "0.7038985", "0.7036392", "0.70291436", "0.70268923", "0.7026706", "0.70261866", "0.7018986", "0.7011388", "0.70111495", "0.7009085", "0.7005406", "0.70025146", "0.7000965", "0.69991565", "0.6995616", "0.6992607", "0.6992276", "0.69910586", "0.6989737", "0.69873315", "0.6984515", "0.6983248", "0.6979003", "0.6976954", "0.69759", "0.69759", "0.6974406", "0.69741553", "0.6972589", "0.69723344", "0.69695055", "0.69695055", "0.69690573", "0.69686645", "0.69659555", "0.69659555", "0.69656986", "0.69630307", "0.69612694", "0.69515", "0.69511986" ]
0.0
-1
Cause function returns cause value.
func (e UpsertEventRequest_SectionValidationError) Cause() error { return e.cause }
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "func Cause(err error) error {\n\tswitch err.(type) {\n\tcase Causable:\n\t\treturn err.(Causable).Cause()\n\t}\n\treturn nil\n}", "func (e errWithCause) Cause() error {\n\treturn e.cause\n}", "func Cause(e error) error {\n\tswitch e := e.(type) {\n\tcase *wrap:\n\t\treturn e.Cause()\n\tcase UserError:\n\t\treturn e.Cause()\n\tdefault:\n\t\treturn e\n\t}\n}", "func (e *Error) Cause() error {\n\treturn e.Unwrap()\n}", "func (e *wrap) Cause() error {\n\treturn e.cause\n}", "func Cause(err error) error {\n\treturn errors.Cause(err)\n}", "func Cause(err error) error {\n\treturn errors.Cause(err)\n}", "func Cause(err error) error {\n\treturn errors.Cause(err)\n}", "func Cause(err error) error {\n\treturn errors.Cause(err)\n}", "func Cause(err error) error {\n\treturn errors.Cause(err)\n}", "func Cause(err error) error {\n\treturn errors.Cause(err)\n}", "func Cause(err error) error {\n\tif err == nil {\n\t\treturn nil\n\t}\n\tif e, ok := err.(iCause); ok {\n\t\treturn e.Cause()\n\t}\n\tif e, ok := err.(iNext); ok {\n\t\treturn Cause(e.Next())\n\t}\n\tif e, ok := err.(iUnwrap); ok {\n\t\treturn Cause(e.Unwrap())\n\t}\n\treturn err\n}", "func (e *Error) Cause() error {\n\treturn e.err\n}", "func (e *errorT) Cause() error {\n\treturn e.err\n}", "func (s *Error) Cause() error {\n\treturn s.underlying\n}", "func (e *Error) Cause() error {\n\treturn e.Err\n}", "func (ec Error) Cause() error {\n\treturn ec.error\n}", "func Cause(err error) error {\n\tif err, ok := err.(*wrappedError); ok {\n\t\treturn err.Cause()\n\t}\n\treturn err\n}", "func (e *Err) Cause() error {\n\treturn e.cause\n}", "func Cause(err error) error {\n\ttype causer interface {\n\t\tCause() error\n\t}\n\n\tfor err != nil {\n\t\te, ok := err.(causer)\n\t\tif !ok {\n\t\t\treturn err\n\t\t}\n\t\terr = e.Cause()\n\t}\n\treturn nil\n}", "func (e Error) Cause() error {\n\treturn e.cause\n}", "func (e *RunError) Cause() error {\n\tif e.Inner != nil {\n\t\treturn e.Inner\n\t}\n\treturn e\n}", "func (e *wrappedError) Cause() error {\n\tif e.previous == nil {\n\t\treturn e\n\t}\n\tswitch err := e.previous.(type) {\n\tcase *wrappedError:\n\t\treturn err.Cause()\n\tdefault:\n\t\treturn err\n\t}\n}", "func Cause(err error) error {\n\tvar (\n\t\tcauser Causer\n\t\tok bool\n\t)\n\tfor err != nil {\n\t\tcauser, ok = err.(Causer)\n\t\tif !ok {\n\t\t\tbreak\n\t\t}\n\t\terr = causer.Cause()\n\t}\n\treturn err\n}", "func (e *OpError) Cause() error {\n\treturn e.Err\n}", "func (err *gooseError) Cause() error {\n\treturn err.cause\n}", "func (e *detailedError) Cause() error {\n\treturn e.cause\n}", "func (err *ExitError) Cause() error {\n\treturn err.Err\n}", "func (ce *ClientError) Cause() error {\n\treturn ce.err\n}", "func Cause(err error) error {\n\tif w, ok := err.(*Wrapped); ok {\n\t\t// if root level error\n\t\tif len(w.Errors) > 0 {\n\t\t\treturn w.Errors[0]\n\t\t}\n\t\t// already extracted error\n\t\treturn w\n\t}\n\treturn err\n}", "func Cause(err error) (error, bool) { // nolint: golint, staticcheck, stylecheck\n\terrWithContext, ok := err.(ContextError)\n\tif !ok {\n\t\treturn nil, false\n\t}\n\treturn errWithContext.Cause(), true\n}", "func Cause(err error) error {\n\tfor err != nil {\n\t\te, ok := err.(causer)\n\t\tif !ok {\n\t\t\tbreak\n\t\t}\n\t\terr = e.Cause()\n\t}\n\treturn err\n}", "func Cause(err error) error {\n\tfor err != nil {\n\t\tcause, ok := err.(causer)\n\t\tif !ok {\n\t\t\tbreak\n\t\t}\n\t\terr = cause.Cause()\n\t}\n\treturn err\n}", "func (e UnencodableValue) Cause() error {\n\treturn e.Err\n}", "func Cause(err error) 
error {\n\ttype wrapper interface {\n\t\tUnwrap() error\n\t}\n\tfor err != nil {\n\t\tcause, ok := err.(wrapper)\n\t\tif !ok {\n\t\t\tbreak\n\t\t}\n\t\terr = cause.Unwrap()\n\t}\n\treturn err\n}", "func (w *pipeError) Cause() error { return errors.Cause(w.error) }", "func (e *opaqueWrapper) Cause() error { return e.cause }", "func (e *opaqueWrapper) Cause() error { return e.cause }", "func (e ResolveRequestValidationError) Cause() error { return e.cause }", "func Cause(err error) error {\n\tfor {\n\t\tuerr := Unwrap(err)\n\t\tif uerr == nil {\n\t\t\treturn err\n\t\t}\n\t\terr = uerr\n\t}\n}", "func Cause(err error) error {\n\tfor {\n\t\tif e, ok := err.(errorCause); ok {\n\t\t\tif cause := e.Cause(); cause != nil {\n\t\t\t\terr = cause\n\t\t\t\tcontinue\n\t\t\t}\n\t\t}\n\t\treturn err\n\t}\n}", "func (e InternalUpstreamTransportValidationError) Cause() error { return e.cause }", "func (e EutracgiValidationError) Cause() error { return e.cause }", "func (w *withCode) Cause() error { return w.cause }", "func (e UpsertEventRequestValidationError) Cause() error { return e.cause }", "func (e PciValidationError) Cause() error { return e.cause }", "func (e NoOneofsValidationError) Cause() error { return e.cause }", "func (e SimpleRequestValidationError) Cause() error { return e.cause }", "func Cause(err error) error {\n\tfor err != nil {\n\t\tmrpErr, ok := err.(Error)\n\t\tif !ok {\n\t\t\tbreak\n\t\t}\n\t\terr = gErrors.Cause(mrpErr.originalError)\n\t}\n\treturn err\n}", "func (e *withDomain) Cause() error { return e.cause }", "func (e LoggingValidationError) Cause() error { return e.cause }", "func (e CiliumCFValidationError) Cause() error { return e.cause }", "func (e AssessmentResultValidationError) Cause() error { return e.cause }", "func (e LoggingCFValidationError) Cause() error { return e.cause }", "func Cause(err error) error {\n\tfor err != nil {\n\t\tunwraped := errors.Unwrap(err)\n\t\tif unwraped == nil {\n\t\t\treturn err\n\t\t}\n\t\terr = unwraped\n\t}\n\treturn err\n}", "func (e NrtValidationError) Cause() error { return e.cause }", "func (e ResolveResponseValidationError) Cause() error { return e.cause }", "func (e StateChangeValidationError) Cause() error { return e.cause }", "func (e SXGValidationError) Cause() error { return e.cause }", "func (e EutracellIdentityValidationError) Cause() error { return e.cause }", "func (e WorkflowComponentValidationError) Cause() error { return e.cause }", "func (e MessageFValidationError) Cause() error { return e.cause }", "func (e EarfcnValidationError) Cause() error { return e.cause }", "func (e ActiveHealthCheckValidationError) Cause() error { return e.cause }", "func Cause(e interface{}) ECode {\n\tif e == nil {\n\t\treturn &ecode{code: 0}\n\t}\n\tif str, ok := e.(string); ok {\n\t\treturn &ecode{code: 500, message: str}\n\t}\n\terr, ok := e.(error)\n\tif !ok {\n\t\treturn &ecode{code: 500, message: reflect.TypeOf(e).Name()}\n\t}\n\tec, ok := errors.Cause(err).(ECode)\n\tif ok {\n\t\treturn ec\n\t}\n\treturn &ecode{code: 500, message: err.Error()}\n}", "func (e UpstreamValidationError) Cause() error { return e.cause }", "func (e UpstreamValidationError) Cause() error { return e.cause }", "func (e TransactionValidationError) Cause() error { return e.cause }", "func (e MessageCValidationError) Cause() error { return e.cause }", "func WithCause(err, cause error) error {\n\treturn errWithCause{\n\t\terror: err,\n\t\tcause: cause,\n\t}\n}", "func (e ActionValidationError) Cause() error { return e.cause }", "func (e 
AssessEvidenceRequestValidationError) Cause() error { return e.cause }", "func (e Upstream_TimeoutValidationError) Cause() error { return e.cause }", "func (e BootstrapValidationError) Cause() error { return e.cause }", "func (e TwoValidOneofsValidationError) Cause() error { return e.cause }", "func (e RdsValidationError) Cause() error { return e.cause }", "func (e MaxPciValidationError) Cause() error { return e.cause }", "func (e AdminValidationError) Cause() error { return e.cause }", "func (e RequirementRuleValidationError) Cause() error { return e.cause }", "func (e ResultValidationError) Cause() error { return e.cause }", "func (e InternalUpstreamTransport_MetadataValueSourceValidationError) Cause() error { return e.cause }", "func (e MaintemplateComponentValidationError) Cause() error { return e.cause }", "func (e RedactedValidationError) Cause() error { return e.cause }", "func (e CreatMessageRequestValidationError) Cause() error { return e.cause }", "func (e NrcgiValidationError) Cause() error { return e.cause }", "func (e UpsertEventResponseValidationError) Cause() error { return e.cause }", "func (e NrarfcnValidationError) Cause() error { return e.cause }", "func (e TwoOneofsValidationError) Cause() error { return e.cause }", "func (e PassiveHealthCheckValidationError) Cause() error { return e.cause }", "func (e MessageEValidationError) Cause() error { return e.cause }", "func (e GetEventByIDRequestValidationError) Cause() error { return e.cause }", "func (e ArfcnValidationError) Cause() error { return e.cause }", "func (e TenantValidationError) Cause() error { return e.cause }", "func (e GetRequestValidationError) Cause() error { return e.cause }", "func (e GetRequestValidationError) Cause() error { return e.cause }", "func (e StateValidationError) Cause() error { return e.cause }", "func (e MinioComponentValidationError) Cause() error { return e.cause }", "func (e LatencyFaultValidationError) Cause() error { return e.cause }", "func (e GetDisscusReqValidationError) Cause() error { return e.cause }", "func (e UpdateTodoRequestValidationError) Cause() error { return e.cause }", "func (e ManifestProjectCFValidationError) Cause() error { return e.cause }" ]
[ "0.8261931", "0.79593104", "0.7896341", "0.7866004", "0.77969515", "0.7710267", "0.7710267", "0.7710267", "0.7710267", "0.7710267", "0.7710267", "0.7710245", "0.76848143", "0.7658625", "0.76571184", "0.7650075", "0.76476574", "0.7625474", "0.7623792", "0.7621357", "0.7582015", "0.74775916", "0.74656785", "0.7424877", "0.7423645", "0.7384076", "0.73215586", "0.7306271", "0.7286286", "0.72688353", "0.7258698", "0.7210708", "0.7192562", "0.7107885", "0.7104621", "0.7038758", "0.701369", "0.701369", "0.69629866", "0.6927608", "0.692207", "0.69208515", "0.68938124", "0.6858123", "0.684976", "0.6846449", "0.6830235", "0.6825922", "0.68016034", "0.6800864", "0.6791525", "0.6778742", "0.67324674", "0.673176", "0.67316306", "0.6729585", "0.67155087", "0.6714904", "0.67148", "0.66955864", "0.668878", "0.66879916", "0.66822165", "0.66821957", "0.66791916", "0.6673011", "0.6673011", "0.6668595", "0.66512465", "0.66507614", "0.66484874", "0.6636346", "0.6633876", "0.66313785", "0.66304046", "0.6622965", "0.66204447", "0.6618046", "0.6617173", "0.66125673", "0.66055393", "0.6603956", "0.66004616", "0.6600119", "0.6587435", "0.6580937", "0.6578089", "0.6569218", "0.656675", "0.65664583", "0.6565433", "0.6560722", "0.65606016", "0.6553194", "0.6553194", "0.65503496", "0.6549731", "0.6546909", "0.6544467", "0.65359867", "0.6531173" ]
0.0
-1
Key function returns key value.
func (e UpsertEventRequest_SectionValidationError) Key() bool { return e.key }
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "func (m *KeyValue) GetKey()(*string) {\n return m.key\n}", "func (f binaryEqualsFunc) key() Key {\n\treturn f.k\n}", "func (m *KeyUint) Key() driver.Value { return driver.Value(m.ID) }", "func (m *OMap) Key(n int) string {\n\treturn m.keys[n]\n}", "func (t *Type) Key() *Type", "func (f nullFunc) key() Key {\n\treturn f.k\n}", "func (v Variable) Key() string {\n\treturn (string)(v)\n}", "func (i GinJwtSignAlgorithm) Key() string {\n\tif val, ok := _GinJwtSignAlgorithmValueToKeyMap[i]; ok {\n\t\t// found\n\t\treturn val\n\t} else {\n\t\t// not found\n\t\treturn \"\"\n\t}\n}", "func (g *Generator) GetKey(K string) interface{} {\n\treturn g.data[K]\n}", "func (m *SearchBucket) GetKey()(*string) {\n return m.key\n}", "func (f *Filter) getKey(key string) string {\n\tif f.HashKeys {\n\t\th := sha1.New()\n\t\ts := h.Sum([]byte(key))\n\t\treturn fmt.Sprintf(\"%x\", s)\n\t}\n\treturn key\n}", "func getKey(ing *extensions.Ingress, t *testing.T) string {\n\tkey, err := keyFunc(ing)\n\tif err != nil {\n\t\tt.Fatalf(\"Unexpected error getting key for Ingress %v: %v\", ing.Name, err)\n\t}\n\treturn key\n}", "func (f *field) Key() string {\n\treturn f.k\n}", "func (i GinBindType) Key() string {\n\tif val, ok := _GinBindTypeValueToKeyMap[i]; ok {\n\t\t// found\n\t\treturn val\n\t} else {\n\t\t// not found\n\t\treturn \"\"\n\t}\n}", "func (c Node) GetKey() string {\n\treturn c.key\n}", "func (m *RegistryKeyState) GetKey()(*string) {\n return m.key\n}", "func (akv StringKeyValue) Key() string {\n\treturn akv.orig.Key\n}", "func (a AddItem) Key() string { return string(a) }", "func (area *MineArea) GetKey() string {\n\treturn GetKey(area.X, area.Y)\n}", "func (d *Disk) getKey(p *DiskParams) []byte {\n\treturn []byte(time_util.TimeToName(time.Unix(p.ExicutionTime, 0), fmt.Sprintf(\"%x\", d.hasher.Sum(nil))))\n}", "func (e *OrderedMapElement[K, V]) Key() K {\n\treturn e.element.key\n}", "func getKey(cluster *clusteroperator.Cluster, t *testing.T) string {\n\tif key, err := controller.KeyFunc(cluster); err != nil {\n\t\tt.Errorf(\"Unexpected error getting key for Cluster %v: %v\", cluster.Name, err)\n\t\treturn \"\"\n\t} else {\n\t\treturn key\n\t}\n}", "func cacheKeyFunc(obj interface{}) (string, error) {\n\tkey := obj.(*cacheEntry).key\n\treturn key, nil\n}", "func (node *Node) Key() interface{} {\n\treturn fmt.Sprintf(\"%v\", node.contents)\n}", "func (s *Mem) Key(key interface{}) string {\n\treturn fmt.Sprintf(\"%v-%v\", s.prefix, key)\n}", "func (vrfs *VRFShare) GetKey() datastore.Key {\n\treturn datastore.ToKey(fmt.Sprintf(\"%v\", vrfs.Round))\n}", "func stringKeyFunc(obj interface{}) (string, error) {\n\tkey := obj.(*nodeidentity.Info).InstanceID\n\treturn key, nil\n}", "func (e Enum) GetKey(value any) string {\n\tfor k, v := range e {\n\t\tif reflect.DeepEqual(v, value) {\n\t\t\treturn k\n\t\t}\n\t}\n\treturn \"\"\n}", "func (m *Map) Key() Type { return m.key }", "func getKey(w http.ResponseWriter, ps httprouter.Params) (string, bool){\n\treturn ps.ByName(\"id\"), true\n}", "func (v *Value) GetKey() *string {\n\tret := C.zj_GetKey(v.V)\n\tif ret == nil {\n\t\treturn nil\n\t}\n\tretStr := C.GoString(ret)\n\treturn &retStr\n}", "func (f *Factor) Key() string { return f.ID }", "func (c *KeyValueChanger) Key() (string, error) {\n\tif c.err != nil {\n\t\treturn \"\", c.err\n\t}\n\treturn c.node.content.key().(string), nil\n}", "func (a DataNodeKV) Key() string {\n\treturn a.K\n}", "func GetKey(allkeys [][]byte, loc Where) []byte {\n\tif loc == Left {\n\t\treturn allkeys[0]\n\t}\n\tif loc == Right 
{\n\t\treturn allkeys[len(allkeys)-1]\n\t}\n\t// select a random index between 1 and allkeys-2\n\t// nolint:gosec\n\tidx := rand.Int()%(len(allkeys)-2) + 1\n\treturn allkeys[idx]\n}", "func KeyFunc(name, namespace string) string {\n\tif len(namespace) == 0 {\n\t\treturn name\n\t}\n\treturn namespace + \"/\" + name\n}", "func (it *Iterator) Key() string { return it.n.k }", "func (s *session) getKey() string {\n\treturn s.uuid\n}", "func (o SchedulingNodeAffinityOutput) Key() pulumi.StringPtrOutput {\n\treturn o.ApplyT(func(v SchedulingNodeAffinity) *string { return v.Key }).(pulumi.StringPtrOutput)\n}", "func (i SNSProtocol) Key() string {\n\tif val, ok := _SNSProtocolValueToKeyMap[i]; ok {\n\t\t// found\n\t\treturn val\n\t} else {\n\t\t// not found\n\t\treturn \"\"\n\t}\n}", "func (it *Iterator) Key() interface{} { return it.n.k }", "func getkey(key ...interface{}) interface{} {\n\tif len(key) > 0 {\n\t\treturn key[0]\n\t}\n\n\treturn nil\n}", "func (i SNSSubscribeAttribute) Key() string {\n\tif val, ok := _SNSSubscribeAttributeValueToKeyMap[i]; ok {\n\t\t// found\n\t\treturn val\n\t} else {\n\t\t// not found\n\t\treturn \"\"\n\t}\n}", "func (it *iterator) Key() []byte {\n\tif len(it.keys) > 0 {\n\t\treturn []byte(it.keys[0])\n\t}\n\treturn nil\n}", "func (this *DefaultHandler) GetKey(xesRedis redo.XesRedisBase) (ret string) {\n\tdefer func() {\n\t\tif xesRedis.GetCtx() == nil {\n\t\t\treturn\n\t\t}\n\t\tbench := xesRedis.GetCtx().Value(\"IS_BENCHMARK\")\n\t\tif cast.ToString(bench) == \"1\" {\n\t\t\tret = \"benchmark_\" + ret\n\t\t}\n\t}()\n\n\tkeyInfo := this.getKeyInfo(xesRedis)\n\tkey := cast.ToString(keyInfo[\"key\"])\n\tif key == \"\" {\n\t\tret = xesRedis.GetKeyName()\n\t\treturn\n\t}\n\tret = fmt.Sprintf(key, (xesRedis.GetKeyParams())...)\n\treturn\n}", "func (st *MemStorage) GetKey(gun, role string) (algorithm string, public []byte, err error) {\n\t// no need for lock. 
It's ok to return nil if an update\n\t// wasn't observed\n\tg, ok := st.keys[gun]\n\tif !ok {\n\t\treturn \"\", nil, &ErrNoKey{gun: gun}\n\t}\n\tk, ok := g[role]\n\tif !ok {\n\t\treturn \"\", nil, &ErrNoKey{gun: gun}\n\t}\n\n\treturn k.algorithm, k.public, nil\n}", "func (e *EntrySet) Get(key string) string {\n return e.keys[key]\n}", "func (v *V) Key() string {\n\treturn v.key\n}", "func (it *Iter) Key() byte { return it.top().key }", "func (s Stash) Key() string {\n\tvals := utils.MapValues(s.payload)\n\tif len(vals) < 1 {\n\t\treturn \"\"\n\t}\n\n\treturn fmt.Sprintf(\"$%s\", vals[0])\n}", "func (i SNSPlatformApplicationAttribute) Key() string {\n\tif val, ok := _SNSPlatformApplicationAttributeValueToKeyMap[i]; ok {\n\t\t// found\n\t\treturn val\n\t} else {\n\t\t// not found\n\t\treturn \"\"\n\t}\n}", "func (o Operator) Key() string {\n\treturn fmt.Sprintf(\"operator.%s\", o.Aid)\n}", "func (i *StringIterator) Key() Object {\n\treturn &Int{Value: int64(i.i - 1)}\n}", "func (mci *XMCacheIterator) Key() []byte {\n\tif mci.err != nil || mci.dir == dirReleased {\n\t\treturn nil\n\t}\n\tswitch mci.index {\n\tcase 0, 1:\n\t\treturn mci.iters[mci.index].Key()\n\tcase 2:\n\t\tif mci.mc.isPenetrate {\n\t\t\treturn mci.mIter.Key()\n\t\t}\n\t\treturn nil\n\t}\n\treturn nil\n}", "func (s *Arena) getKey(offset uint32, size uint16) []byte {\n\treturn s.data[offset : offset+uint32(size)]\n}", "func (o ReservationAffinityOutput) Key() pulumi.StringPtrOutput {\n\treturn o.ApplyT(func(v ReservationAffinity) *string { return v.Key }).(pulumi.StringPtrOutput)\n}", "func (f DefaultField) Key() string {\n\treturn f.K\n}", "func Key(v string) predicate.Blob {\n\treturn predicate.Blob(\n\t\tfunc(s *sql.Selector) {\n\t\t\ts.Where(sql.EQ(s.C(FieldKey), v))\n\t\t},\n\t)\n}", "func (m Match) Key() string {\n\treturn fmt.Sprintf(\"match:%s\", m.ID())\n}", "func (d *Activity) KeyVal() string {\n\treturn d.ExteralID\n}", "func (key twofishKey) Key() []byte {\n\treturn key[:]\n}", "func getKey(data string) string {\n\tsign := md5.Sum([]byte(data))\n\tsignStr := fmt.Sprintf(\"%x\", sign)\n\treturn signStr[:7]\n}", "func (l *LangPackStringPluralized) GetKey() (value string) {\n\tif l == nil {\n\t\treturn\n\t}\n\treturn l.Key\n}", "func (t Task) Key() string {\n\treturn fmt.Sprintf(\"%s:%s\", t.Name, t.ID)\n}", "func (k Keys) RangeKey() interface{} { return k[1] }", "func (d *DStarLite) keyFor(s *dStarLiteNode) key {\n\t/*\n\t procedure CalculateKey(s)\n\t {01”} return [min(g(s), rhs(s)) + h(s_start, s) + k_m; min(g(s), rhs(s))];\n\t*/\n\tk := key{1: math.Min(s.g, s.rhs)}\n\tk[0] = k[1] + d.heuristic(d.s.Node, s.Node) + d.keyModifier\n\treturn k\n}", "func (stateID StateID) Key() string {\n\treturn string(stateID.LastAppHash)\n}", "func (m *Metric) GetKey() string {\n\tif m == nil || m.Key == nil {\n\t\treturn \"\"\n\t}\n\treturn *m.Key\n}", "func (u User) Key() interface{} {\n\treturn u.ID\n}", "func (b *BitSet) Key() string {\n\tif b == nil {\n\t\treturn \"\"\n\t} else {\n\t\treturn string(b.Bits.Bytes())\n\t}\n}", "func (e EnumByte) Key() EnumByteKey {\n return EnumByteKey(e)\n}", "func (n *lnode) key() []byte {\n\tbuf := (*[maxAllocSize]byte)(unsafe.Pointer(n))\n\treturn buf[n.pos : n.pos+n.ksize]\n}", "func (p *pv) key() pvKey {\n\treturn newPVKey(p.Cluster, p.Name)\n}", "func (i *MapIterator) Key() Object {\n\tk := i.k[i.i-1]\n\treturn &String{Value: k}\n}", "func (k *KVItem) Key() (interface{}, error) {\n\tvar cKey unsafe.Pointer\n\tvar keySize C.uint64_t\n\tvar keyType C.tiledb_datatype_t\n\tret := 
C.tiledb_kv_item_get_key(k.context.tiledbContext, k.tiledbKVItem, &cKey, &keyType, &keySize)\n\n\tif ret != C.TILEDB_OK {\n\t\treturn nil, fmt.Errorf(\"Error getting key for KVItem: %s\", k.context.LastError())\n\t}\n\n\tswitch Datatype(keyType) {\n\tcase TILEDB_INT8:\n\t\t// If the key size is greater than the size of a single value in bytes it is an array\n\t\telements := int(keySize) / C.sizeof_int8_t\n\t\tif elements > 1 {\n\t\t\ttmpslice := (*[1 << 30]C.int8_t)(unsafe.Pointer(cKey))[:elements:elements]\n\t\t\tretSlice := make([]int8, elements)\n\t\t\tfor i, s := range tmpslice {\n\t\t\t\tretSlice[i] = int8(s)\n\t\t\t}\n\t\t\treturn retSlice, nil\n\t\t} else {\n\t\t\treturn int8(*(*C.int8_t)(cKey)), nil\n\t\t}\n\tcase TILEDB_INT16:\n\t\t// If the key size is greater than the size of a single value in bytes it is an array\n\t\telements := int(keySize) / C.sizeof_int16_t\n\t\tif elements > 1 {\n\t\t\ttmpslice := (*[1 << 30]C.int16_t)(unsafe.Pointer(cKey))[:elements:elements]\n\t\t\tretSlice := make([]int16, elements)\n\t\t\tfor i, s := range tmpslice {\n\t\t\t\tretSlice[i] = int16(s)\n\t\t\t}\n\t\t\treturn retSlice, nil\n\t\t} else {\n\t\t\treturn int16(*(*C.int16_t)(cKey)), nil\n\t\t}\n\tcase TILEDB_INT32:\n\t\t// If the key size is greater than the size of a single value in bytes it is an array\n\t\telements := int(keySize) / C.sizeof_int32_t\n\t\tif elements > 1 {\n\t\t\ttmpslice := (*[1 << 30]C.int32_t)(unsafe.Pointer(cKey))[:elements:elements]\n\t\t\tretSlice := make([]int32, elements)\n\t\t\tfor i, s := range tmpslice {\n\t\t\t\tretSlice[i] = int32(s)\n\t\t\t}\n\t\t\treturn retSlice, nil\n\t\t} else {\n\t\t\treturn int32(*(*C.int32_t)(cKey)), nil\n\t\t}\n\tcase TILEDB_INT64:\n\t\t// If the key size is greater than the size of a single value in bytes it is an array\n\t\telements := int(keySize) / C.sizeof_int64_t\n\t\tif elements > 1 {\n\t\t\ttmpslice := (*[1 << 30]C.int64_t)(unsafe.Pointer(cKey))[:elements:elements]\n\t\t\tretSlice := make([]int64, elements)\n\t\t\tfor i, s := range tmpslice {\n\t\t\t\tretSlice[i] = int64(s)\n\t\t\t}\n\t\t\treturn retSlice, nil\n\t\t} else {\n\t\t\treturn int64(*(*C.int64_t)(cKey)), nil\n\t\t}\n\tcase TILEDB_UINT8:\n\t\t// If the key size is greater than the size of a single value in bytes it is an array\n\t\telements := int(keySize) / C.sizeof_uint8_t\n\t\tif elements > 1 {\n\t\t\ttmpslice := (*[1 << 30]C.uint8_t)(unsafe.Pointer(cKey))[:elements:elements]\n\t\t\tretSlice := make([]uint8, elements)\n\t\t\tfor i, s := range tmpslice {\n\t\t\t\tretSlice[i] = uint8(s)\n\t\t\t}\n\t\t\treturn retSlice, nil\n\t\t} else {\n\t\t\treturn int8(*(*C.uint8_t)(cKey)), nil\n\t\t}\n\tcase TILEDB_UINT16:\n\t\t// If the key size is greater than the size of a single value in bytes it is an array\n\t\telements := int(keySize) / C.sizeof_uint16_t\n\t\tif elements > 1 {\n\t\t\ttmpslice := (*[1 << 30]C.uint16_t)(unsafe.Pointer(cKey))[:elements:elements]\n\t\t\tretSlice := make([]uint16, elements)\n\t\t\tfor i, s := range tmpslice {\n\t\t\t\tretSlice[i] = uint16(s)\n\t\t\t}\n\t\t\treturn retSlice, nil\n\t\t} else {\n\t\t\treturn int16(*(*C.uint16_t)(cKey)), nil\n\t\t}\n\tcase TILEDB_UINT32:\n\t\t// If the key size is greater than the size of a single value in bytes it is an array\n\t\telements := int(keySize) / C.sizeof_uint32_t\n\t\tif elements > 1 {\n\t\t\ttmpslice := (*[1 << 30]C.uint32_t)(unsafe.Pointer(cKey))[:elements:elements]\n\t\t\tretSlice := make([]uint32, elements)\n\t\t\tfor i, s := range tmpslice {\n\t\t\t\tretSlice[i] = uint32(s)\n\t\t\t}\n\t\t\treturn 
retSlice, nil\n\t\t} else {\n\t\t\treturn int32(*(*C.uint32_t)(cKey)), nil\n\t\t}\n\tcase TILEDB_UINT64:\n\t\t// If the key size is greater than the size of a single value in bytes it is an array\n\t\telements := int(keySize) / C.sizeof_uint64_t\n\t\tif elements > 1 {\n\t\t\ttmpslice := (*[1 << 30]C.uint64_t)(unsafe.Pointer(cKey))[:elements:elements]\n\t\t\tretSlice := make([]uint64, elements)\n\t\t\tfor i, s := range tmpslice {\n\t\t\t\tretSlice[i] = uint64(s)\n\t\t\t}\n\t\t\treturn retSlice, nil\n\t\t} else {\n\t\t\treturn int64(*(*C.uint64_t)(cKey)), nil\n\t\t}\n\tcase TILEDB_FLOAT32:\n\t\t// If the key size is greater than the size of a single value in bytes it is an array\n\t\telements := int(keySize) / C.sizeof_float\n\t\tif elements > 1 {\n\t\t\ttmpslice := (*[1 << 30]C.float)(unsafe.Pointer(cKey))[:elements:elements]\n\t\t\tretSlice := make([]float32, elements)\n\t\t\tfor i, s := range tmpslice {\n\t\t\t\tretSlice[i] = float32(s)\n\t\t\t}\n\t\t\treturn retSlice, nil\n\t\t} else {\n\t\t\treturn float32(*(*C.float)(cKey)), nil\n\t\t}\n\tcase TILEDB_FLOAT64:\n\t\t// If the key size is greater than the size of a single value in bytes it is an array\n\t\telements := int(keySize) / C.sizeof_double\n\t\tif elements > 1 {\n\t\t\ttmpslice := (*[1 << 30]C.double)(unsafe.Pointer(cKey))[:elements:elements]\n\t\t\tretSlice := make([]float64, elements)\n\t\t\tfor i, s := range tmpslice {\n\t\t\t\tretSlice[i] = float64(s)\n\t\t\t}\n\t\t\treturn retSlice, nil\n\t\t} else {\n\t\t\treturn float64(*(*C.double)(cKey)), nil\n\t\t}\n\tcase TILEDB_CHAR:\n\t\telements := int(keySize) / C.sizeof_char\n\t\treturn C.GoStringN((*C.char)(cKey), C.int(elements)), nil\n\n\tdefault:\n\t\treturn nil, fmt.Errorf(\"Unsupported tiledb key type: %v\", keyType)\n\t}\n\n\treturn nil, fmt.Errorf(\"Error getting key for KVItem\")\n}", "func (u Users) Key(luid *windows.LUID) (int64, error) {\r\n\tif luid == nil {\r\n\t\treturn 0, errors.New(\"got empty LUID pointer\")\r\n\t}\r\n\tkey := int64(int64(luid.HighPart<<32) + int64(luid.LowPart))\r\n\treturn key, nil\r\n}", "func (a *Anime) Key() string {\n\treturn fmt.Sprintf(\"anime:%d\", a.ID)\n}", "func (m MapEntry) Key() interface{} {\n\treturn m.key\n}", "func (f KeyMakerFunc) KeyFor(r *http.Request) string {\n\treturn f(r)\n}", "func (t *TimeSeries) GetKey() string {\n\treturn t.key\n}", "func (m *Map) Get(key string) string {\n\tif m.IsEmpty() {\n\t\treturn \"\"\n\t}\n\thash := m.hash([]byte(key))\n\tn := node{hash: hash, key: key}\n\titer := floor(&m.nodes.Tree, &n)\n\tif iter == m.nodes.End() {\n\t\titer = m.nodes.Begin()\n\t}\n\treturn iter.Node().Key.(*node).key\n}", "func (t *ScheduledTask) Key() string {\n\treturn fmt.Sprintf(taskKeyFormat, keyPrefixScheduled, t.ID, t.score)\n}", "func (it *iterator) Key() []byte {\n\treturn it.current.key\n}", "func (eln *EmptyLeafNode) GetKey() []byte {\n\treturn nil\n}", "func (h dataUsageHash) Key() string {\n\treturn string(h)\n}", "func (c *Container) Key() string {\n\tc.Lock()\n\tdefer c.Unlock()\n\treturn c.ID\n}", "func (c Repository) GetKey(key string) string {\n\tval, err := c.Client.Get(key).Result()\n\n\tif err != nil {\n\t\tlog.Println(err)\n\t}\n\n\treturn val\n}", "func (f Base) Key() string {\n\treturn f.key\n}", "func (o StudioComponentScriptParameterKeyValueOutput) Key() pulumi.StringPtrOutput {\n\treturn o.ApplyT(func(v StudioComponentScriptParameterKeyValue) *string { return v.Key }).(pulumi.StringPtrOutput)\n}", "func (o *ResourceDefinitionFilter) GetKey() string {\n\tif o == nil {\n\t\tvar ret 
string\n\t\treturn ret\n\t}\n\n\treturn o.Key\n}", "func (it *KeyAccess_Iterator) Key() interface{} {\n\treturn it.node.key\n}", "func (b Bucket) Key() interface{} {\n\treturn b[\"key\"]\n}", "func (m *Map) Get(key string) string {\n\tif m.IsEmpty() {\n\t\treturn \"\"\n\t}\n\n\thash := int(m.hash([]byte(key)))\n\n\t// Binary search for appropriate replica.\n\tidx := sort.Search(len(m.keys), func(i int) bool { return m.keys[i] >= hash })\n\n\t// Means we have cycled back to the first replica.\n\tif idx == len(m.keys) {\n\t\tidx = 0\n\t}\n\n\treturn m.hashMap[m.keys[idx]]\n}", "func (c *Counter) GetKey() string {\n\treturn c.key\n}", "func Key(id string, fallback string) Reference {\n\treturn key{id, fallback}\n}", "func (a *PositionalAttribute) Key() string {\n\treturn AttrPositionalIndex + strconv.Itoa(a.Index)\n}", "func (n *Node) Key() interface{} {\n\treturn n.key\n}", "func (e Timing) Key() string {\n\treturn e.Name\n}", "func Key(key string) query.Extractor {\n\treturn &keyExtractor{key}\n}", "func (i *Iterator) Key() []byte {\n\treturn i.iterator.Item().KeyCopy(nil)\n}", "func (m *Metric) Key() string {\n\treturn fmt.Sprintf(\"<%s%d%s>\", m.Name, m.Timestamp, m.Tags)\n}" ]
[ "0.7397974", "0.703695", "0.7026126", "0.69730234", "0.69701165", "0.69472975", "0.682121", "0.67752403", "0.6702173", "0.6691155", "0.66223186", "0.6602185", "0.66009104", "0.65937275", "0.65673846", "0.6555592", "0.65304273", "0.6521155", "0.6511681", "0.65062934", "0.64982766", "0.64867014", "0.6477575", "0.6462233", "0.6456774", "0.6456152", "0.6448241", "0.6435275", "0.6423325", "0.6412427", "0.64096636", "0.6403262", "0.6395327", "0.63929945", "0.6382585", "0.6378694", "0.63715774", "0.63671046", "0.635377", "0.63430053", "0.63418114", "0.6339266", "0.63258415", "0.6319039", "0.630293", "0.6300368", "0.6298253", "0.6296133", "0.6295445", "0.6281786", "0.6279424", "0.6277453", "0.6277033", "0.62735796", "0.6269087", "0.6262938", "0.62600297", "0.6259835", "0.6242855", "0.62427336", "0.6239893", "0.6226979", "0.62228185", "0.6216291", "0.62118614", "0.6209014", "0.62075627", "0.619765", "0.6197426", "0.61971486", "0.6196739", "0.6192416", "0.6191223", "0.6183839", "0.6179522", "0.6177141", "0.6172575", "0.61719537", "0.6170614", "0.6162783", "0.61570954", "0.6154456", "0.6152929", "0.615149", "0.61509156", "0.61395836", "0.6138672", "0.61365676", "0.613636", "0.61338246", "0.6133771", "0.6129422", "0.61284614", "0.612092", "0.6119081", "0.61121005", "0.611087", "0.6106958", "0.6106701", "0.61020154", "0.6100722" ]
0.0
-1
Validate checks the field values on UpsertEventRequest_Question with the rules defined in the proto definition for this message. If any rules are violated, an error is returned.
func (m *UpsertEventRequest_Question) Validate() error {
	if m == nil {
		return nil
	}

	if utf8.RuneCountInString(m.GetContent()) < 1 {
		return UpsertEventRequest_QuestionValidationError{
			field:  "Content",
			reason: "value length must be at least 1 runes",
		}
	}

	// no validation rules for Position

	// no validation rules for Type

	// no validation rules for IsRequired

	// no validation rules for LimitedChoice

	for idx, item := range m.GetOptions() {
		_, _ = idx, item

		if v, ok := interface{}(item).(interface{ Validate() error }); ok {
			if err := v.Validate(); err != nil {
				return UpsertEventRequest_QuestionValidationError{
					field:  fmt.Sprintf("Options[%v]", idx),
					reason: "embedded message failed validation",
					cause:  err,
				}
			}
		}
	}

	return nil
}
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "func (e UpsertEventRequest_QuestionValidationError) Cause() error { return e.cause }", "func (m *UpsertEventRequest) Validate() error {\n\tif m == nil {\n\t\treturn nil\n\t}\n\n\t// no validation rules for Id\n\n\tif utf8.RuneCountInString(m.GetName()) < 1 {\n\t\treturn UpsertEventRequestValidationError{\n\t\t\tfield: \"Name\",\n\t\t\treason: \"value length must be at least 1 runes\",\n\t\t}\n\t}\n\n\tif utf8.RuneCountInString(m.GetParticipants()) < 4 {\n\t\treturn UpsertEventRequestValidationError{\n\t\t\tfield: \"Participants\",\n\t\t\treason: \"value length must be at least 4 runes\",\n\t\t}\n\t}\n\n\tfor idx, item := range m.GetSections() {\n\t\t_, _ = idx, item\n\n\t\tif v, ok := interface{}(item).(interface{ Validate() error }); ok {\n\t\t\tif err := v.Validate(); err != nil {\n\t\t\t\treturn UpsertEventRequestValidationError{\n\t\t\t\t\tfield: fmt.Sprintf(\"Sections[%v]\", idx),\n\t\t\t\t\treason: \"embedded message failed validation\",\n\t\t\t\t\tcause: err,\n\t\t\t\t}\n\t\t\t}\n\t\t}\n\n\t}\n\n\treturn nil\n}", "func (e UpsertEventRequest_QuestionValidationError) Reason() string { return e.reason }", "func (m *UpsertEventRequest_Section) Validate() error {\n\tif m == nil {\n\t\treturn nil\n\t}\n\n\tif utf8.RuneCountInString(m.GetName()) < 1 {\n\t\treturn UpsertEventRequest_SectionValidationError{\n\t\t\tfield: \"Name\",\n\t\t\treason: \"value length must be at least 1 runes\",\n\t\t}\n\t}\n\n\tif utf8.RuneCountInString(m.GetDescription()) < 1 {\n\t\treturn UpsertEventRequest_SectionValidationError{\n\t\t\tfield: \"Description\",\n\t\t\treason: \"value length must be at least 1 runes\",\n\t\t}\n\t}\n\n\t// no validation rules for Position\n\n\tfor idx, item := range m.GetQuestions() {\n\t\t_, _ = idx, item\n\n\t\tif v, ok := interface{}(item).(interface{ Validate() error }); ok {\n\t\t\tif err := v.Validate(); err != nil {\n\t\t\t\treturn UpsertEventRequest_SectionValidationError{\n\t\t\t\t\tfield: fmt.Sprintf(\"Questions[%v]\", idx),\n\t\t\t\t\treason: \"embedded message failed validation\",\n\t\t\t\t\tcause: err,\n\t\t\t\t}\n\t\t\t}\n\t\t}\n\n\t}\n\n\treturn nil\n}", "func (e UpsertEventRequestValidationError) Cause() error { return e.cause }", "func (m *GetEventByIDResponse_Question) Validate() error {\n\tif m == nil {\n\t\treturn nil\n\t}\n\n\t// no validation rules for Id\n\n\t// no validation rules for Content\n\n\t// no validation rules for Position\n\n\t// no validation rules for Type\n\n\t// no validation rules for IsRequired\n\n\t// no validation rules for LimitedChoice\n\n\tfor idx, item := range m.GetOptions() {\n\t\t_, _ = idx, item\n\n\t\tif v, ok := interface{}(item).(interface{ Validate() error }); ok {\n\t\t\tif err := v.Validate(); err != nil {\n\t\t\t\treturn GetEventByIDResponse_QuestionValidationError{\n\t\t\t\t\tfield: fmt.Sprintf(\"Options[%v]\", idx),\n\t\t\t\t\treason: \"embedded message failed validation\",\n\t\t\t\t\tcause: err,\n\t\t\t\t}\n\t\t\t}\n\t\t}\n\n\t}\n\n\treturn nil\n}", "func (e GetEventByIDResponse_QuestionValidationError) Cause() error { return e.cause }", "func (e UpsertEventRequestValidationError) Reason() string { return e.reason }", "func QuestionCreateView(req helios.Request) {\n\tuser, ok := req.GetContextData(auth.UserContextKey).(auth.User)\n\tif !ok {\n\t\treq.SendJSON(helios.ErrInternalServerError.GetMessage(), helios.ErrInternalServerError.GetStatusCode())\n\t\treturn\n\t}\n\n\tvar eventSlug string = req.GetURLParam(\"eventSlug\")\n\tvar questionData QuestionData\n\tvar question Question\n\tvar err helios.Error\n\terr = 
req.DeserializeRequestData(&questionData)\n\tif err != nil {\n\t\treq.SendJSON(err.GetMessage(), err.GetStatusCode())\n\t\treturn\n\t}\n\n\terr = DeserializeQuestion(questionData, &question)\n\tif err != nil {\n\t\treq.SendJSON(err.GetMessage(), err.GetStatusCode())\n\t\treturn\n\t}\n\n\tquestion.ID = 0\n\terr = UpsertQuestion(user, eventSlug, &question)\n\tif err != nil {\n\t\treq.SendJSON(err.GetMessage(), err.GetStatusCode())\n\t\treturn\n\t}\n\n\treq.SendJSON(SerializeQuestion(question), http.StatusCreated)\n}", "func (e UpsertEventResponseValidationError) Cause() error { return e.cause }", "func (m *UpsertEventResponse) Validate() error {\n\tif m == nil {\n\t\treturn nil\n\t}\n\n\t// no validation rules for Id\n\n\treturn nil\n}", "func (m *UpsertEventRequest_Option) Validate() error {\n\tif m == nil {\n\t\treturn nil\n\t}\n\n\tif utf8.RuneCountInString(m.GetContent()) < 1 {\n\t\treturn UpsertEventRequest_OptionValidationError{\n\t\t\tfield: \"Content\",\n\t\t\treason: \"value length must be at least 1 runes\",\n\t\t}\n\t}\n\n\treturn nil\n}", "func (e UpsertEventResponseValidationError) Reason() string { return e.reason }", "func (e UpsertEventRequest_OptionValidationError) Cause() error { return e.cause }", "func (e UpsertEventRequest_QuestionValidationError) Key() bool { return e.key }", "func (e GetEventByIDResponse_QuestionValidationError) Reason() string { return e.reason }", "func (e UpsertEventRequest_QuestionValidationError) Field() string { return e.field }", "func (s *TestBase) UpsertRequestCancelState(updatedInfo *p.WorkflowExecutionInfo, updatedStats *p.ExecutionStats, updatedVersionHistories *p.VersionHistories,\n\tcondition int64, upsertCancelInfos []*p.RequestCancelInfo) error {\n\treturn s.UpdateWorkflowExecutionWithRangeID(updatedInfo, updatedStats, updatedVersionHistories, nil, nil,\n\t\ts.ShardInfo.RangeID, condition, nil, nil, nil,\n\t\tnil, nil, nil, nil, upsertCancelInfos, nil,\n\t\tnil, nil, nil, \"\")\n}", "func (in *Question) DeepCopy() *Question {\n\tif in == nil {\n\t\treturn nil\n\t}\n\tout := new(Question)\n\tin.DeepCopyInto(out)\n\treturn out\n}", "func (fu *FlowUpdate) AddQuestions(q ...*Question) *FlowUpdate {\n\tids := make([]uuid.UUID, len(q))\n\tfor i := range q {\n\t\tids[i] = q[i].ID\n\t}\n\treturn fu.AddQuestionIDs(ids...)\n}", "func (payload *PutEventPayload) Validate() (err error) {\n\tif payload.Etype == \"\" {\n\t\terr = goa.MergeErrors(err, goa.MissingAttributeError(`raw`, \"etype\"))\n\t}\n\tif payload.Action == \"\" {\n\t\terr = goa.MergeErrors(err, goa.MissingAttributeError(`raw`, \"action\"))\n\t}\n\tif payload.From == \"\" {\n\t\terr = goa.MergeErrors(err, goa.MissingAttributeError(`raw`, \"from\"))\n\t}\n\treturn\n}", "func (payload *putEventPayload) Validate() (err error) {\n\tif payload.Etype == nil {\n\t\terr = goa.MergeErrors(err, goa.MissingAttributeError(`raw`, \"etype\"))\n\t}\n\tif payload.Action == nil {\n\t\terr = goa.MergeErrors(err, goa.MissingAttributeError(`raw`, \"action\"))\n\t}\n\tif payload.From == nil {\n\t\terr = goa.MergeErrors(err, goa.MissingAttributeError(`raw`, \"from\"))\n\t}\n\treturn\n}", "func (s *EvaluationFormItem) SetQuestion(v *EvaluationFormQuestion) *EvaluationFormItem {\n\ts.Question = v\n\treturn s\n}", "func (fuo *FlowUpdateOne) AddQuestions(q ...*Question) *FlowUpdateOne {\n\tids := make([]uuid.UUID, len(q))\n\tfor i := range q {\n\t\tids[i] = q[i].ID\n\t}\n\treturn fuo.AddQuestionIDs(ids...)\n}", "func (e UpsertEventRequest_OptionValidationError) Reason() string { return e.reason }", "func (e 
*EvaluationHandler) EditQuestion(c *gin.Context) {\n\t// Form Data\n\tvar req RequestEditQuestion\n\t// Validation\n\terr := c.ShouldBind(&req)\n\tif err != nil {\n\t\t//a.Middleware.CheckValidate(err, c)\n\t\tvar errValidation []response.Error\n\t\tif reflect.TypeOf(err).String() != \"validator.ValidationErrors\" {\n\t\t\terror := response.Error{\"\", err.Error()}\n\t\t\terrValidation = append(errValidation, error)\n\t\t\tresponse.RespondErrorJSON(c.Writer, errValidation)\n\t\t\treturn\n\t\t}\n\t\tfor _, fieldErr := range err.(validator.ValidationErrors) {\n\t\t\te := fieldErr.Translate(e.Validator.Translation)\n\n\t\t\terror := response.Error{fieldErr.Field(), e}\n\t\t\terrValidation = append(errValidation, error)\n\t\t}\n\t\tresponse.RespondErrorJSON(c.Writer, errValidation)\n\t\treturn\n\t}\n\tid := c.Params.ByName(\"id\")\n\n\ti, err := strconv.Atoi(id)\n\te.EvaluationUsecase.EditQuestion(i, req.Question, req.Choices, req.Answer)\n\n\t// Response\n\tmsg := \"Pertanyaan ini telah diupdate\"\n\tres := struct{}{}\n\tresponse.RespondSuccessJSON(c.Writer, res, msg)\n}", "func (ut *eventPayload) Validate() (err error) {\n\tif ut.SportID == nil {\n\t\terr = goa.MergeErrors(err, goa.MissingAttributeError(`request`, \"sportId\"))\n\t}\n\tif ut.EventID == nil {\n\t\terr = goa.MergeErrors(err, goa.MissingAttributeError(`request`, \"eventId\"))\n\t}\n\tif ut.Name == nil {\n\t\terr = goa.MergeErrors(err, goa.MissingAttributeError(`request`, \"name\"))\n\t}\n\tif ut.SubTitle == nil {\n\t\terr = goa.MergeErrors(err, goa.MissingAttributeError(`request`, \"subTitle\"))\n\t}\n\tif ut.StartDtTm == nil {\n\t\terr = goa.MergeErrors(err, goa.MissingAttributeError(`request`, \"startDtTm\"))\n\t}\n\tif ut.EndDtTm == nil {\n\t\terr = goa.MergeErrors(err, goa.MissingAttributeError(`request`, \"endDtTm\"))\n\t}\n\tif ut.LocationID == nil {\n\t\terr = goa.MergeErrors(err, goa.MissingAttributeError(`request`, \"locationId\"))\n\t}\n\tif ut.TeamAdvanceMethod == nil {\n\t\terr = goa.MergeErrors(err, goa.MissingAttributeError(`request`, \"teamAdvanceMethod\"))\n\t}\n\treturn\n}", "func Question(name string, qtype uint16) MsgOpt {\n\treturn func(m *dns.Msg) { m.SetQuestion(name, qtype) }\n}", "func (e UpsertEventRequest_SectionValidationError) Cause() error { return e.cause }", "func (fu *FlowUpdate) AddQuestionIDs(ids ...uuid.UUID) *FlowUpdate {\n\tfu.mutation.AddQuestionIDs(ids...)\n\treturn fu\n}", "func (m *RequestEmailUpdateRequest) Validate(formats strfmt.Registry) error {\n\tvar res []error\n\n\tif err := m.validateEmail(formats); err != nil {\n\t\tres = append(res, err)\n\t}\n\n\tif err := m.validateID(formats); err != nil {\n\t\tres = append(res, err)\n\t}\n\n\tif len(res) > 0 {\n\t\treturn errors.CompositeValidationError(res...)\n\t}\n\treturn nil\n}", "func (e *EvaluationHandler) AddQuestion(c *gin.Context) {\n\tgroupName := c.Params.ByName(\"name\")\n\t// Form Data\n\tvar req RequestAddQuestion\n\t// Validation\n\terr := c.ShouldBind(&req)\n\tif err != nil {\n\t\t//a.Middleware.CheckValidate(err, c)\n\t\tvar errValidation []response.Error\n\t\tif reflect.TypeOf(err).String() != \"validator.ValidationErrors\" {\n\t\t\terror := response.Error{\"\", err.Error()}\n\t\t\terrValidation = append(errValidation, error)\n\t\t\tresponse.RespondErrorJSON(c.Writer, errValidation)\n\t\t\treturn\n\t\t}\n\t\tfor _, fieldErr := range err.(validator.ValidationErrors) {\n\t\t\te := fieldErr.Translate(e.Validator.Translation)\n\n\t\t\terror := response.Error{fieldErr.Field(), e}\n\t\t\terrValidation = append(errValidation, 
error)\n\t\t}\n\t\tresponse.RespondErrorJSON(c.Writer, errValidation)\n\t\treturn\n\t}\n\n\tif req.QuestionType == \"prepost\" {\n\t\tgroupName = \"prepost\"\n\t}\n\n\te.EvaluationUsecase.AddQuestion(req.Question, groupName, req.Choices, req.Answer)\n\n\t// Response\n\tmsg := \"Pertanyaan baru telah ditambahkan\"\n\tres := struct{}{}\n\tresponse.RespondSuccessJSON(c.Writer, res, msg)\n}", "func (fuo *FlowUpdateOne) AddQuestionIDs(ids ...uuid.UUID) *FlowUpdateOne {\n\tfuo.mutation.AddQuestionIDs(ids...)\n\treturn fuo\n}", "func (t *Trace) Question(qs []dns.Question) {\n\tif log.V(1) {\n\t\tt.lprintf(1, questionsToString(qs))\n\t}\n}", "func (ac *AnswerCreate) SetQuestion(q *Question) *AnswerCreate {\n\treturn ac.SetQuestionID(q.ID)\n}", "func (e SetApplicationPubSubRequestValidationError) Cause() error { return e.cause }", "func (q *Quest) Validate(tx *pop.Connection) (*validate.Errors, error) {\n\treturn validate.Validate(\n\t\t&validators.UUIDIsPresent{Field: q.UserID, Name: \"UserID\"},\n\t\t&validators.StringIsPresent{Field: q.Name, Name: \"Name\"},\n\t\t&validators.StringIsPresent{Field: q.Description, Name: \"Description\"},\n\t\t&validators.UUIDIsPresent{Field: q.CampaignID, Name: \"CampaignID\"},\n\t\t&validators.IntIsPresent{Field: q.SortOrder, Name: \"SortOrder\"},\n\t), nil\n}", "func (fu *FlowUpdate) RemoveQuestions(q ...*Question) *FlowUpdate {\n\tids := make([]uuid.UUID, len(q))\n\tfor i := range q {\n\t\tids[i] = q[i].ID\n\t}\n\treturn fu.RemoveQuestionIDs(ids...)\n}", "func (payload *postEventPayload) Validate() (err error) {\n\tif payload.Etype == nil {\n\t\terr = goa.MergeErrors(err, goa.MissingAttributeError(`raw`, \"etype\"))\n\t}\n\tif payload.Action == nil {\n\t\terr = goa.MergeErrors(err, goa.MissingAttributeError(`raw`, \"action\"))\n\t}\n\tif payload.From == nil {\n\t\terr = goa.MergeErrors(err, goa.MissingAttributeError(`raw`, \"from\"))\n\t}\n\treturn\n}", "func (payload *PostEventPayload) Validate() (err error) {\n\tif payload.Etype == \"\" {\n\t\terr = goa.MergeErrors(err, goa.MissingAttributeError(`raw`, \"etype\"))\n\t}\n\tif payload.Action == \"\" {\n\t\terr = goa.MergeErrors(err, goa.MissingAttributeError(`raw`, \"action\"))\n\t}\n\tif payload.From == \"\" {\n\t\terr = goa.MergeErrors(err, goa.MissingAttributeError(`raw`, \"from\"))\n\t}\n\treturn\n}", "func (mw authMiddleware) UpdateQuestion(ctx context.Context, m *models.Question) (*models.Question, error) {\n\trole, _, err := getRoleAndID(ctx, nil)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\tif *role != \"Admin\" {\n\t\treturn nil, errAuth\n\t}\n\treturn mw.next.UpdateQuestion(ctx, m)\n}", "func (fuo *FlowUpdateOne) RemoveQuestions(q ...*Question) *FlowUpdateOne {\n\tids := make([]uuid.UUID, len(q))\n\tfor i := range q {\n\t\tids[i] = q[i].ID\n\t}\n\treturn fuo.RemoveQuestionIDs(ids...)\n}", "func NewPutQuestionBadRequest() *PutQuestionBadRequest {\n\n\treturn &PutQuestionBadRequest{}\n}", "func (ut *EventPayload) Validate() (err error) {\n\tif ut.SportID == \"\" {\n\t\terr = goa.MergeErrors(err, goa.MissingAttributeError(`type`, \"sportId\"))\n\t}\n\tif ut.EventID == \"\" {\n\t\terr = goa.MergeErrors(err, goa.MissingAttributeError(`type`, \"eventId\"))\n\t}\n\tif ut.Name == \"\" {\n\t\terr = goa.MergeErrors(err, goa.MissingAttributeError(`type`, \"name\"))\n\t}\n\tif ut.SubTitle == \"\" {\n\t\terr = goa.MergeErrors(err, goa.MissingAttributeError(`type`, \"subTitle\"))\n\t}\n\tif ut.StartDtTm == \"\" {\n\t\terr = goa.MergeErrors(err, goa.MissingAttributeError(`type`, \"startDtTm\"))\n\t}\n\tif 
ut.EndDtTm == \"\" {\n\t\terr = goa.MergeErrors(err, goa.MissingAttributeError(`type`, \"endDtTm\"))\n\t}\n\tif ut.LocationID == \"\" {\n\t\terr = goa.MergeErrors(err, goa.MissingAttributeError(`type`, \"locationId\"))\n\t}\n\tif ut.TeamAdvanceMethod == \"\" {\n\t\terr = goa.MergeErrors(err, goa.MissingAttributeError(`type`, \"teamAdvanceMethod\"))\n\t}\n\treturn\n}", "func QuestionListView(req helios.Request) {\n\tuser, ok := req.GetContextData(auth.UserContextKey).(auth.User)\n\tif !ok {\n\t\treq.SendJSON(helios.ErrInternalServerError.GetMessage(), helios.ErrInternalServerError.GetStatusCode())\n\t\treturn\n\t}\n\n\tvar eventSlug string = req.GetURLParam(\"eventSlug\")\n\tvar questions []Question\n\tvar err helios.Error\n\tquestions, err = GetAllQuestionOfUserAndEvent(user, eventSlug)\n\tif err != nil {\n\t\treq.SendJSON(err.GetMessage(), err.GetStatusCode())\n\t\treturn\n\t}\n\n\tserializedQuestions := make([]QuestionData, 0)\n\tfor i, question := range questions {\n\t\tserializedQuestion := SerializeQuestion(question)\n\t\tserializedQuestion.Number = uint(i + 1)\n\t\tserializedQuestions = append(serializedQuestions, serializedQuestion)\n\t}\n\treq.SendJSON(serializedQuestions, http.StatusOK)\n}", "func QuestionPacket(domain string) Packet {\n\treturn Packet{\n\t\tHeader: Header{QDCount: 2},\n\t\tQuestions: []Question{\n\t\t\t{\n\t\t\t\tDomain: domain,\n\t\t\t\tType: AAAA,\n\t\t\t\tClass: IN,\n\t\t\t\tUnicast: false,\n\t\t\t},\n\t\t\t{\n\t\t\t\tDomain: domain,\n\t\t\t\tType: A,\n\t\t\t\tClass: IN,\n\t\t\t\tUnicast: false,\n\t\t\t},\n\t\t},\n\t}\n}", "func QuestionPacket(domain string) Packet {\n\treturn Packet{\n\t\tHeader: Header{QDCount: 2},\n\t\tQuestions: []Question{\n\t\t\t{\n\t\t\t\tDomain: domain,\n\t\t\t\tType: AAAA,\n\t\t\t\tClass: IN,\n\t\t\t\tUnicast: false,\n\t\t\t},\n\t\t\t{\n\t\t\t\tDomain: domain,\n\t\t\t\tType: A,\n\t\t\t\tClass: IN,\n\t\t\t\tUnicast: false,\n\t\t\t},\n\t\t},\n\t}\n}", "func (e UpdateMeetingV1RequestValidationError) Cause() error { return e.cause }", "func (m *FreeIpaUpscaleV1Request) Validate(formats strfmt.Registry) error {\n\tvar res []error\n\n\tif err := m.validateEnvironmentCrn(formats); err != nil {\n\t\tres = append(res, err)\n\t}\n\n\tif err := m.validateTargetAvailabilityType(formats); err != nil {\n\t\tres = append(res, err)\n\t}\n\n\tif len(res) > 0 {\n\t\treturn errors.CompositeValidationError(res...)\n\t}\n\treturn nil\n}", "func (e UpdateMeetingV1ResponseValidationError) Cause() error { return e.cause }", "func (me *XsdGoPkgHasElem_QuestionIdentifiersequenceGetFileUploadURLRequestschema_QuestionIdentifier_XsdtString_) Walk() (err error) {\n\tif fn := WalkHandlers.XsdGoPkgHasElem_QuestionIdentifiersequenceGetFileUploadURLRequestschema_QuestionIdentifier_XsdtString_; me != nil {\n\t\tif fn != nil {\n\t\t\tif err = fn(me, true); xsdt.OnWalkError(&err, &WalkErrors, WalkContinueOnError, WalkOnError) {\n\t\t\t\treturn\n\t\t\t}\n\t\t}\n\t\tif fn != nil {\n\t\t\tif err = fn(me, false); xsdt.OnWalkError(&err, &WalkErrors, WalkContinueOnError, WalkOnError) {\n\t\t\t\treturn\n\t\t\t}\n\t\t}\n\t}\n\treturn\n}", "func (m *UpdateMeetingV1Response) Validate() error {\n\tif m == nil {\n\t\treturn nil\n\t}\n\n\treturn nil\n}", "func (v *Value) Question() string { return v.msg.Question[0].Name }", "func CreateModifyGlobalQuestionRequest() (request *ModifyGlobalQuestionRequest) {\n\trequest = &ModifyGlobalQuestionRequest{\n\t\tRpcRequest: &requests.RpcRequest{},\n\t}\n\trequest.InitWithApiInfo(\"OutboundBot\", \"2019-12-26\", \"ModifyGlobalQuestion\", \"\", 
\"\")\n\trequest.Method = requests.POST\n\treturn\n}", "func (req *CreateItemRequest) ToUpsertItemSpec() *spec.UpsertItemSpec {\n\tvar upsertItemSpec spec.UpsertItemSpec\n\tupsertItemSpec.Name = req.Name\n\tupsertItemSpec.Description = req.Description\n\tupsertItemSpec.Tags = req.Tags\n\n\treturn &upsertItemSpec\n}", "func (me *XsdGoPkgHasElem_QuestionsequenceCreateHITRequestschema_Question_XsdtString_) Walk() (err error) {\n\tif fn := WalkHandlers.XsdGoPkgHasElem_QuestionsequenceCreateHITRequestschema_Question_XsdtString_; me != nil {\n\t\tif fn != nil {\n\t\t\tif err = fn(me, true); xsdt.OnWalkError(&err, &WalkErrors, WalkContinueOnError, WalkOnError) {\n\t\t\t\treturn\n\t\t\t}\n\t\t}\n\t\tif fn != nil {\n\t\t\tif err = fn(me, false); xsdt.OnWalkError(&err, &WalkErrors, WalkContinueOnError, WalkOnError) {\n\t\t\t\treturn\n\t\t\t}\n\t\t}\n\t}\n\treturn\n}", "func (ut *updateUserPayload) Validate() (err error) {\n\tif ut.Name == nil {\n\t\terr = goa.MergeErrors(err, goa.MissingAttributeError(`request`, \"name\"))\n\t}\n\tif ut.Email == nil {\n\t\terr = goa.MergeErrors(err, goa.MissingAttributeError(`request`, \"email\"))\n\t}\n\tif ut.Bio == nil {\n\t\terr = goa.MergeErrors(err, goa.MissingAttributeError(`request`, \"bio\"))\n\t}\n\tif ut.Email != nil {\n\t\tif err2 := goa.ValidateFormat(goa.FormatEmail, *ut.Email); err2 != nil {\n\t\t\terr = goa.MergeErrors(err, goa.InvalidFormatError(`request.email`, *ut.Email, goa.FormatEmail, err2))\n\t\t}\n\t}\n\tif ut.Name != nil {\n\t\tif ok := goa.ValidatePattern(`\\S`, *ut.Name); !ok {\n\t\t\terr = goa.MergeErrors(err, goa.InvalidPatternError(`request.name`, *ut.Name, `\\S`))\n\t\t}\n\t}\n\tif ut.Name != nil {\n\t\tif utf8.RuneCountInString(*ut.Name) > 256 {\n\t\t\terr = goa.MergeErrors(err, goa.InvalidLengthError(`request.name`, *ut.Name, utf8.RuneCountInString(*ut.Name), 256, false))\n\t\t}\n\t}\n\treturn\n}", "func (m *TemplatedEmailSendRequest) Validate(formats strfmt.Registry) error {\n\treturn nil\n}", "func ValidateUpdateRequest(message *taskspb.UpdateRequest) (err error) {\n\tif message.Task == nil {\n\t\terr = goa.MergeErrors(err, goa.MissingFieldError(\"task\", \"message\"))\n\t}\n\tif message.Task != nil {\n\t\tif err2 := ValidateStoredTask(message.Task); err2 != nil {\n\t\t\terr = goa.MergeErrors(err, err2)\n\t\t}\n\t}\n\treturn\n}", "func InputEvent(event cloudevents.Event) EventRecordOption {\n\tencodedEvent, err := json.Marshal(event)\n\tif err != nil {\n\t\treturn func(pod *corev1.Pod, client *testlib.Client) error {\n\t\t\treturn err\n\t\t}\n\t}\n\treturn envOption(\"INPUT_EVENT\", string(encodedEvent))\n}", "func QuestionDetailView(req helios.Request) {\n\tuser, ok := req.GetContextData(auth.UserContextKey).(auth.User)\n\tif !ok {\n\t\treq.SendJSON(helios.ErrInternalServerError.GetMessage(), helios.ErrInternalServerError.GetStatusCode())\n\t\treturn\n\t}\n\tvar eventSlug string = req.GetURLParam(\"eventSlug\")\n\tquestionNumber, errParseQuestionNumber := req.GetURLParamUint(\"questionNumber\")\n\tif errParseQuestionNumber != nil {\n\t\treq.SendJSON(errQuestionNotFound.GetMessage(), errQuestionNotFound.GetStatusCode())\n\t\treturn\n\t}\n\n\tvar question *Question\n\tvar err helios.Error\n\tquestion, err = GetQuestionOfEventAndUser(user, eventSlug, questionNumber)\n\tif err != nil {\n\t\treq.SendJSON(err.GetMessage(), err.GetStatusCode())\n\t\treturn\n\t}\n\tvar serializedQuestion QuestionData = SerializeQuestion(*question)\n\treq.SendJSON(serializedQuestion, http.StatusOK)\n}", "func QuestionDeleteView(req helios.Request) 
{\n\tuser, ok := req.GetContextData(auth.UserContextKey).(auth.User)\n\tif !ok {\n\t\treq.SendJSON(helios.ErrInternalServerError.GetMessage(), helios.ErrInternalServerError.GetStatusCode())\n\t\treturn\n\t}\n\n\tvar eventSlug string = req.GetURLParam(\"eventSlug\")\n\tquestionNumber, errParseQuestionNumber := req.GetURLParamUint(\"questionNumber\")\n\tif errParseQuestionNumber != nil {\n\t\treq.SendJSON(errQuestionNotFound.GetMessage(), errQuestionNotFound.GetStatusCode())\n\t\treturn\n\t}\n\n\tvar question *Question\n\tvar err helios.Error\n\tquestion, err = DeleteQuestion(user, eventSlug, questionNumber)\n\tif err != nil {\n\t\treq.SendJSON(err.GetMessage(), err.GetStatusCode())\n\t\treturn\n\t}\n\tvar serializedQuestion QuestionData = SerializeQuestion(*question)\n\treq.SendJSON(serializedQuestion, http.StatusOK)\n}", "func (e UpsertEventRequest_SectionValidationError) Reason() string { return e.reason }", "func (m *UpdateMeetingV1Request) Validate() error {\n\tif m == nil {\n\t\treturn nil\n\t}\n\n\tif v, ok := interface{}(m.GetMeeting()).(interface{ Validate() error }); ok {\n\t\tif err := v.Validate(); err != nil {\n\t\t\treturn UpdateMeetingV1RequestValidationError{\n\t\t\t\tfield: \"Meeting\",\n\t\t\t\treason: \"embedded message failed validation\",\n\t\t\t\tcause: err,\n\t\t\t}\n\t\t}\n\t}\n\n\treturn nil\n}", "func (v UpdateReputationProfileActionRequest) Validate() error {\n\treturn validation.Errors{\n\t\t\"ConfigID\": validation.Validate(v.ConfigID, validation.Required),\n\t\t\"Version\": validation.Validate(v.Version, validation.Required),\n\t\t\"PolicyID\": validation.Validate(v.PolicyID, validation.Required),\n\t\t\"ReputationProfileID\": validation.Validate(v.ReputationProfileID, validation.Required),\n\t}.Filter()\n}", "func (c *questionController) Update(context *gin.Context) {\n\tvar questionUpdateDTO dto.QuestionUpdateDTO\n\terrDTO := context.ShouldBind(&questionUpdateDTO)\n\tif errDTO != nil {\n\t\tres := helper.BuildErrorResponse(\"Failed to process request\", errDTO.Error(), helper.EmptyObj{})\n\t\tcontext.JSON(http.StatusBadRequest, res)\n\t\treturn\n\t}\n\n\tauthHeader := context.GetHeader(\"Authorization\")\n\ttoken, errToken := c.jwtService.ValidateToken(authHeader)\n\tif errToken != nil {\n\t\tpanic(errToken.Error())\n\t}\n\tclaims := token.Claims.(jwt.MapClaims)\n\tuserID := fmt.Sprintf(\"%v\", claims[\"user_id\"])\n\tif c.questionService.IsAllowedToEdit(userID, questionUpdateDTO.ID) {\n\t\tid, errID := strconv.ParseUint(userID, 10, 64)\n\t\tif errID == nil {\n\t\t\tquestionUpdateDTO.UserID = id\n\t\t}\n\t\tresult := c.questionService.Update(questionUpdateDTO)\n\t\tresponse := helper.BuildResponse(true, \"OK\", result)\n\t\tcontext.JSON(http.StatusOK, response)\n\t} else {\n\t\tresponse := helper.BuildErrorResponse(\"You dont have the permission\", \"You are not the author of the question\", helper.EmptyObj{})\n\t\tcontext.JSON(http.StatusForbidden, response)\n\t}\n}", "func ConvertUserEventRequestToUsageEvent(req CreateUserEventRequest) (*usageeventsv1.UsageEventOneOf, error) {\n\tswitch req.Event {\n\tcase bannerClickEvent:\n\t\treturn &usageeventsv1.UsageEventOneOf{Event: &usageeventsv1.UsageEventOneOf_UiBannerClick{\n\t\t\t\tUiBannerClick: &usageeventsv1.UIBannerClickEvent{\n\t\t\t\t\tAlert: req.Alert,\n\t\t\t\t},\n\t\t\t}},\n\t\t\tnil\n\n\tcase addFirstResourceClickEvent:\n\t\treturn &usageeventsv1.UsageEventOneOf{Event: &usageeventsv1.UsageEventOneOf_UiOnboardAddFirstResourceClick{\n\t\t\t\tUiOnboardAddFirstResourceClick: 
&usageeventsv1.UIOnboardAddFirstResourceClickEvent{},\n\t\t\t}},\n\t\t\tnil\n\n\tcase addFirstResourceLaterClickEvent:\n\t\treturn &usageeventsv1.UsageEventOneOf{Event: &usageeventsv1.UsageEventOneOf_UiOnboardAddFirstResourceLaterClick{\n\t\t\t\tUiOnboardAddFirstResourceLaterClick: &usageeventsv1.UIOnboardAddFirstResourceLaterClickEvent{},\n\t\t\t}},\n\t\t\tnil\n\n\tcase uiIntegrationEnrollStartEvent,\n\t\tuiIntegrationEnrollCompleteEvent:\n\n\t\tvar event IntegrationEnrollEventData\n\t\tif err := json.Unmarshal([]byte(*req.EventData), &event); err != nil {\n\t\t\treturn nil, trace.BadParameter(\"eventData is invalid: %v\", err)\n\t\t}\n\n\t\tkindEnum, ok := usageeventsv1.IntegrationEnrollKind_value[event.Kind]\n\t\tif !ok {\n\t\t\treturn nil, trace.BadParameter(\"invalid integration enroll kind %s\", event.Kind)\n\t\t}\n\n\t\tswitch req.Event {\n\t\tcase uiIntegrationEnrollStartEvent:\n\t\t\treturn &usageeventsv1.UsageEventOneOf{Event: &usageeventsv1.UsageEventOneOf_UiIntegrationEnrollStartEvent{\n\t\t\t\tUiIntegrationEnrollStartEvent: &usageeventsv1.UIIntegrationEnrollStartEvent{\n\t\t\t\t\tMetadata: &usageeventsv1.IntegrationEnrollMetadata{\n\t\t\t\t\t\tId: event.ID,\n\t\t\t\t\t\tKind: usageeventsv1.IntegrationEnrollKind(kindEnum),\n\t\t\t\t\t},\n\t\t\t\t},\n\t\t\t}}, nil\n\t\tcase uiIntegrationEnrollCompleteEvent:\n\t\t\treturn &usageeventsv1.UsageEventOneOf{Event: &usageeventsv1.UsageEventOneOf_UiIntegrationEnrollCompleteEvent{\n\t\t\t\tUiIntegrationEnrollCompleteEvent: &usageeventsv1.UIIntegrationEnrollCompleteEvent{\n\t\t\t\t\tMetadata: &usageeventsv1.IntegrationEnrollMetadata{\n\t\t\t\t\t\tId: event.ID,\n\t\t\t\t\t\tKind: usageeventsv1.IntegrationEnrollKind(kindEnum),\n\t\t\t\t\t},\n\t\t\t\t},\n\t\t\t}}, nil\n\t\t}\n\n\tcase uiDiscoverStartedEvent,\n\t\tuiDiscoverResourceSelectionEvent,\n\t\tuiDiscoverIntegrationAWSOIDCConnectEvent,\n\t\tuiDiscoverDatabaseRDSEnrollEvent,\n\t\tuiDiscoverDeployServiceEvent,\n\t\tuiDiscoverDatabaseRegisterEvent,\n\t\tuiDiscoverDatabaseConfigureMTLSEvent,\n\t\tuiDiscoverDatabaseConfigureIAMPolicyEvent,\n\t\tuiDiscoverDesktopActiveDirectoryToolsInstallEvent,\n\t\tuiDiscoverDesktopActiveDirectoryConfigureEvent,\n\t\tuiDiscoverAutoDiscoveredResourcesEvent,\n\t\tuiDiscoverPrincipalsConfigureEvent,\n\t\tuiDiscoverTestConnectionEvent,\n\t\tuiDiscoverCompletedEvent:\n\n\t\tvar discoverEvent DiscoverEventData\n\t\tif err := json.Unmarshal([]byte(*req.EventData), &discoverEvent); err != nil {\n\t\t\treturn nil, trace.BadParameter(\"eventData is invalid: %v\", err)\n\t\t}\n\n\t\tevent, err := discoverEvent.ToUsageEvent(req.Event)\n\t\tif err != nil {\n\t\t\treturn nil, trace.BadParameter(\"failed to convert eventData: %v\", err)\n\t\t}\n\t\treturn event, nil\n\n\tcase createNewRoleClickEvent:\n\t\treturn &usageeventsv1.UsageEventOneOf{Event: &usageeventsv1.UsageEventOneOf_UiCreateNewRoleClick{\n\t\t\t\tUiCreateNewRoleClick: &usageeventsv1.UICreateNewRoleClickEvent{},\n\t\t\t}},\n\t\t\tnil\n\n\tcase createNewRoleSaveClickEvent:\n\t\treturn &usageeventsv1.UsageEventOneOf{Event: &usageeventsv1.UsageEventOneOf_UiCreateNewRoleSaveClick{\n\t\t\t\tUiCreateNewRoleSaveClick: &usageeventsv1.UICreateNewRoleSaveClickEvent{},\n\t\t\t}},\n\t\t\tnil\n\n\tcase createNewRoleCancelClickEvent:\n\t\treturn &usageeventsv1.UsageEventOneOf{Event: &usageeventsv1.UsageEventOneOf_UiCreateNewRoleCancelClick{\n\t\t\t\tUiCreateNewRoleCancelClick: &usageeventsv1.UICreateNewRoleCancelClickEvent{},\n\t\t\t}},\n\t\t\tnil\n\n\tcase createNewRoleViewDocumentationClickEvent:\n\t\treturn 
&usageeventsv1.UsageEventOneOf{Event: &usageeventsv1.UsageEventOneOf_UiCreateNewRoleViewDocumentationClick{\n\t\t\t\tUiCreateNewRoleViewDocumentationClick: &usageeventsv1.UICreateNewRoleViewDocumentationClickEvent{},\n\t\t\t}},\n\t\t\tnil\n\n\tcase uiCallToActionClickEvent:\n\t\tvar cta int32\n\t\tif err := json.Unmarshal([]byte(*req.EventData), &cta); err != nil {\n\t\t\treturn nil, trace.BadParameter(\"eventData is invalid: %v\", err)\n\t\t}\n\n\t\treturn &usageeventsv1.UsageEventOneOf{Event: &usageeventsv1.UsageEventOneOf_UiCallToActionClickEvent{\n\t\t\t\tUiCallToActionClickEvent: &usageeventsv1.UICallToActionClickEvent{\n\t\t\t\t\tCta: usageeventsv1.CTA(cta),\n\t\t\t\t}}},\n\t\t\tnil\n\n\tcase questionnaireSubmitEvent:\n\t\treturn &usageeventsv1.UsageEventOneOf{Event: &usageeventsv1.UsageEventOneOf_UiOnboardQuestionnaireSubmit{\n\t\t\t\tUiOnboardQuestionnaireSubmit: &usageeventsv1.UIOnboardQuestionnaireSubmitEvent{},\n\t\t\t}},\n\t\t\tnil\n\n\tcase featureRecommendationEvent:\n\t\tevent := struct {\n\t\t\tFeature int32 `json:\"feature\"`\n\t\t\tFeatureRecommendationStatus int32 `json:\"featureRecommendationStatus\"`\n\t\t}{}\n\n\t\tif err := json.Unmarshal([]byte(*req.EventData), &event); err != nil {\n\t\t\treturn nil, trace.BadParameter(\"eventData is invalid: %v\", err)\n\t\t}\n\n\t\treturn &usageeventsv1.UsageEventOneOf{Event: &usageeventsv1.UsageEventOneOf_FeatureRecommendationEvent{\n\t\t\t\tFeatureRecommendationEvent: &usageeventsv1.FeatureRecommendationEvent{\n\t\t\t\t\tFeature: usageeventsv1.Feature(event.Feature),\n\t\t\t\t\tFeatureRecommendationStatus: usageeventsv1.FeatureRecommendationStatus(event.FeatureRecommendationStatus),\n\t\t\t\t},\n\t\t\t}},\n\t\t\tnil\n\t}\n\n\treturn nil, trace.BadParameter(\"invalid event %s\", req.Event)\n}", "func (me *XsdGoPkgHasElems_QuestionIdentifiersequenceGetFileUploadURLRequestschema_QuestionIdentifier_XsdtString_) Walk() (err error) {\n\tif fn := WalkHandlers.XsdGoPkgHasElems_QuestionIdentifiersequenceGetFileUploadURLRequestschema_QuestionIdentifier_XsdtString_; me != nil {\n\t\tif fn != nil {\n\t\t\tif err = fn(me, true); xsdt.OnWalkError(&err, &WalkErrors, WalkContinueOnError, WalkOnError) {\n\t\t\t\treturn\n\t\t\t}\n\t\t}\n\t\tif fn != nil {\n\t\t\tif err = fn(me, false); xsdt.OnWalkError(&err, &WalkErrors, WalkContinueOnError, WalkOnError) {\n\t\t\t\treturn\n\t\t\t}\n\t\t}\n\t}\n\treturn\n}", "func (m *OnlineMeetingsItemRegistrationCustomQuestionsMeetingRegistrationQuestionItemRequestBuilder) Patch(ctx context.Context, body ie233ee762e29b4ba6970aa2a2efce4b7fde11697ca9ea81099d0f8269309c1be.MeetingRegistrationQuestionable, requestConfiguration *OnlineMeetingsItemRegistrationCustomQuestionsMeetingRegistrationQuestionItemRequestBuilderPatchRequestConfiguration)(ie233ee762e29b4ba6970aa2a2efce4b7fde11697ca9ea81099d0f8269309c1be.MeetingRegistrationQuestionable, error) {\n requestInfo, err := m.ToPatchRequestInformation(ctx, body, requestConfiguration);\n if err != nil {\n return nil, err\n }\n errorMapping := i2ae4187f7daee263371cb1c977df639813ab50ffa529013b7437480d1ec0158f.ErrorMappings {\n \"4XX\": i20a3050780ee0b0cde0a884a4f35429a20d60067e3bcda382ec5400079147459.CreateODataErrorFromDiscriminatorValue,\n \"5XX\": i20a3050780ee0b0cde0a884a4f35429a20d60067e3bcda382ec5400079147459.CreateODataErrorFromDiscriminatorValue,\n }\n res, err := m.BaseRequestBuilder.RequestAdapter.Send(ctx, requestInfo, ie233ee762e29b4ba6970aa2a2efce4b7fde11697ca9ea81099d0f8269309c1be.CreateMeetingRegistrationQuestionFromDiscriminatorValue, errorMapping)\n if err 
!= nil {\n return nil, err\n }\n if res == nil {\n return nil, nil\n }\n return res.(ie233ee762e29b4ba6970aa2a2efce4b7fde11697ca9ea81099d0f8269309c1be.MeetingRegistrationQuestionable), nil\n}", "func (m *AppRequest13) Validate(formats strfmt.Registry) error {\n\tvar res []error\n\n\tif err := m.validateDatabaseImageID(formats); err != nil {\n\t\tres = append(res, err)\n\t}\n\n\tif err := m.validateHandle(formats); err != nil {\n\t\tres = append(res, err)\n\t}\n\n\tif len(res) > 0 {\n\t\treturn errors.CompositeValidationError(res...)\n\t}\n\treturn nil\n}", "func (m *TemplateUpdateRequest) Validate(formats strfmt.Registry) error {\n\tvar res []error\n\n\tif err := m.validateEnv(formats); err != nil {\n\t\tres = append(res, err)\n\t}\n\n\tif err := m.validateLabels(formats); err != nil {\n\t\tres = append(res, err)\n\t}\n\n\tif err := m.validateRepository(formats); err != nil {\n\t\tres = append(res, err)\n\t}\n\n\tif err := m.validateVolumes(formats); err != nil {\n\t\tres = append(res, err)\n\t}\n\n\tif len(res) > 0 {\n\t\treturn errors.CompositeValidationError(res...)\n\t}\n\treturn nil\n}", "func (m *ConfigureAssessmentRequest) Validate() error {\n\treturn m.validate(false)\n}", "func (e UpsertEventRequestValidationError) Field() string { return e.field }", "func (e ConfigureAssessmentRequestValidationError) Cause() error { return e.cause }", "func (_Quiz *QuizSession) Question() (string, error) {\n\treturn _Quiz.Contract.Question(&_Quiz.CallOpts)\n}", "func (e StreamEventsRequestValidationError) Cause() error { return e.cause }", "func (e GetEventByIDResponse_QuestionValidationError) Key() bool { return e.key }", "func (m *IoArgoprojWorkflowV1alpha1WorkflowTemplateUpdateRequest) Validate(formats strfmt.Registry) error {\n\tvar res []error\n\n\tif err := m.validateTemplate(formats); err != nil {\n\t\tres = append(res, err)\n\t}\n\n\tif len(res) > 0 {\n\t\treturn errors.CompositeValidationError(res...)\n\t}\n\treturn nil\n}", "func (du *DatumUpdate) SetParticipant(p *Participant) *DatumUpdate {\n\treturn du.SetParticipantID(p.ID)\n}", "func (m C11yVectorBasedQuestion) Validate(formats strfmt.Registry) error {\n\tvar res []error\n\n\tfor i := 0; i < len(m); i++ {\n\t\tif swag.IsZero(m[i]) { // not required\n\t\t\tcontinue\n\t\t}\n\n\t\tif m[i] != nil {\n\t\t\tif err := m[i].Validate(formats); err != nil {\n\t\t\t\tif ve, ok := err.(*errors.Validation); ok {\n\t\t\t\t\treturn ve.ValidateName(strconv.Itoa(i))\n\t\t\t\t} else if ce, ok := err.(*errors.CompositeError); ok {\n\t\t\t\t\treturn ce.ValidateName(strconv.Itoa(i))\n\t\t\t\t}\n\t\t\t\treturn err\n\t\t\t}\n\t\t}\n\n\t}\n\n\tif len(res) > 0 {\n\t\treturn errors.CompositeValidationError(res...)\n\t}\n\treturn nil\n}", "func (fc *FileCreate) SetSurveyQuestion(s *SurveyQuestion) *FileCreate {\n\treturn fc.SetSurveyQuestionID(s.ID)\n}", "func (e *Event) Validate() error {\n\tif e.Message == \"\" || e.MessageOffset == \"\" || e.Time == nil || e.Type == \"\" {\n\t\treturn errs.ErrMissingParameters\n\t}\n\treturn nil\n}", "func (v UpdateTransactionalEndpointRequest) Validate() error {\n\treturn validation.Errors{\n\t\t\"ConfigID\": validation.Validate(v.ConfigID, validation.Required),\n\t\t\"Version\": validation.Validate(v.Version, validation.Required),\n\t\t\"SecurityPolicyID\": validation.Validate(v.SecurityPolicyID, validation.Required),\n\t\t\"OperationID\": validation.Validate(v.OperationID, validation.Required),\n\t\t\"JsonPayload\": validation.Validate(v.JsonPayload, validation.Required),\n\t}.Filter()\n}", "func (e 
CreateExperimentsRequestValidationError) Cause() error { return e.cause }", "func (ec EventContextV03) Validate() ValidationError {\n\terrors := map[string]error{}\n\n\t// type\n\t// Type: String\n\t// Constraints:\n\t// REQUIRED\n\t// MUST be a non-empty string\n\t// SHOULD be prefixed with a reverse-DNS name. The prefixed domain dictates the organization which defines the semantics of this event type.\n\teventType := strings.TrimSpace(ec.Type)\n\tif eventType == \"\" {\n\t\terrors[\"type\"] = fmt.Errorf(\"MUST be a non-empty string\")\n\t}\n\n\t// source\n\t// Type: URI-reference\n\t// Constraints:\n\t// REQUIRED\n\tsource := strings.TrimSpace(ec.Source.String())\n\tif source == \"\" {\n\t\terrors[\"source\"] = fmt.Errorf(\"REQUIRED\")\n\t}\n\n\t// subject\n\t// Type: String\n\t// Constraints:\n\t// OPTIONAL\n\t// MUST be a non-empty string\n\tif ec.Subject != nil {\n\t\tsubject := strings.TrimSpace(*ec.Subject)\n\t\tif subject == \"\" {\n\t\t\terrors[\"subject\"] = fmt.Errorf(\"if present, MUST be a non-empty string\")\n\t\t}\n\t}\n\n\t// id\n\t// Type: String\n\t// Constraints:\n\t// REQUIRED\n\t// MUST be a non-empty string\n\t// MUST be unique within the scope of the producer\n\tid := strings.TrimSpace(ec.ID)\n\tif id == \"\" {\n\t\terrors[\"id\"] = fmt.Errorf(\"MUST be a non-empty string\")\n\n\t\t// no way to test \"MUST be unique within the scope of the producer\"\n\t}\n\n\t// time\n\t// Type: Timestamp\n\t// Constraints:\n\t// OPTIONAL\n\t// If present, MUST adhere to the format specified in RFC 3339\n\t// --> no need to test this, no way to set the time without it being valid.\n\n\t// schemaurl\n\t// Type: URI\n\t// Constraints:\n\t// OPTIONAL\n\t// If present, MUST adhere to the format specified in RFC 3986\n\tif ec.SchemaURL != nil {\n\t\tschemaURL := strings.TrimSpace(ec.SchemaURL.String())\n\t\t// empty string is not RFC 3986 compatible.\n\t\tif schemaURL == \"\" {\n\t\t\terrors[\"schemaurl\"] = fmt.Errorf(\"if present, MUST adhere to the format specified in RFC 3986\")\n\t\t}\n\t}\n\n\t// datacontenttype\n\t// Type: String per RFC 2046\n\t// Constraints:\n\t// OPTIONAL\n\t// If present, MUST adhere to the format specified in RFC 2046\n\tif ec.DataContentType != nil {\n\t\tdataContentType := strings.TrimSpace(*ec.DataContentType)\n\t\tif dataContentType == \"\" {\n\t\t\terrors[\"datacontenttype\"] = fmt.Errorf(\"if present, MUST adhere to the format specified in RFC 2046\")\n\t\t} else {\n\t\t\t_, _, err := mime.ParseMediaType(dataContentType)\n\t\t\tif err != nil {\n\t\t\t\terrors[\"datacontenttype\"] = fmt.Errorf(\"if present, MUST adhere to the format specified in RFC 2046\")\n\t\t\t}\n\t\t}\n\t}\n\n\t// datacontentencoding\n\t// Type: String per RFC 2045 Section 6.1\n\t// Constraints:\n\t// The attribute MUST be set if the data attribute contains string-encoded binary data.\n\t// Otherwise the attribute MUST NOT be set.\n\t// If present, MUST adhere to RFC 2045 Section 6.1\n\tif ec.DataContentEncoding != nil {\n\t\tdataContentEncoding := strings.ToLower(strings.TrimSpace(*ec.DataContentEncoding))\n\t\tif dataContentEncoding != Base64 {\n\t\t\terrors[\"datacontentencoding\"] = fmt.Errorf(\"if present, MUST adhere to RFC 2045 Section 6.1\")\n\t\t}\n\t}\n\n\tif len(errors) > 0 {\n\t\treturn errors\n\t}\n\treturn nil\n}", "func (s *Server) AnswerQuestion(ctx context.Context, req *pb.AnswerQuestionRequest) (resp *pb.AnswerQuestionResponse, err error) {\n\tglog.Debug(\"req:%v\", req)\n\tresp = &pb.AnswerQuestionResponse{\n\t\tStatus: common.GetInitStatus(),\n\t}\n\tdefer 
func() {\n\t\tif err != nil {\n\t\t\tresp.Status.Message = err.Error()\n\t\t\terr = nil\n\t\t}\n\t\tglog.Debug(\"resp:%v\", resp)\n\t}()\n\n\tuserID, _ := strconv.ParseUint(req.GetHeader().GetUserId(), 10, 64)\n\tquestionID, _ := strconv.ParseUint(req.GetQuestionId(), 10, 64)\n\toptionID, _ := strconv.ParseUint(req.GetOptionId(), 10, 64)\n\ttoken := req.GetHeader().GetToken()\n\n\tvar valid bool\n\tif valid, err = util.CheckIDValid(userID, token); !valid {\n\t\tresp.Status.Code = pb.Status_USER_NOTLOGIN\n\t\tglog.Error(\"userid:%d, token:%s check not valid. err:%v\", userID, token, err)\n\t\treturn\n\t}\n\n\t// 1. Check if the question has been answered\n\tkey := fmt.Sprintf(\"%s%s\", common.USER_ACT_PREFIX, strconv.FormatUint(userID, 10))\n\tret, err := db.HGet(key, strconv.FormatUint(questionID, 10))\n\tif ret != \"\" {\n\t\tsqlString := fmt.Sprintf(\"select option_content, is_answer, answer_num from question_option where question_id=%d and option_id=%s\", questionID, ret)\n\t\tvar content string\n\t\tvar isAnswer uint32\n\t\tvar answerNum uint32\n\t\tvar right bool\n\t\ttempRow, _ := db.QueryRow(common.BUDAODB, sqlString)\n\t\terr = tempRow.Scan(&content, &isAnswer, &answerNum)\n\t\tif isAnswer == 1 {\n\t\t\tright = true\n\t\t}\n\t\toptionItem := &pb.OptionItem{\n\t\t\tOptionId: ret,\n\t\t\tQuestionId: strconv.FormatUint(questionID, 10),\n\t\t\tContent: content,\n\t\t\tRight: right,\n\t\t\tChooseCount: answerNum,\n\t\t}\n\t\tresp.Option = optionItem\n\t\tresp.Status.Message = \"question has been answered\"\n\t\tresp.Status.Code = pb.Status_OK\n\t\treturn\n\t}\n\n\t// 2. get the question answer\n\tsqlString := fmt.Sprintf(\"select is_answer from question_option where question_id = %d and option_id = %d\", questionID, optionID)\n\tvar answerFlag int\n\tfirstRow, err := db.QueryRow(common.BUDAODB, sqlString)\n\terr = firstRow.Scan(&answerFlag)\n\tif err != nil {\n\t\tglog.Error(\"query question answer failed. err:%v\", err)\n\t\treturn\n\t}\n\n\t// 3. get the question score\n\tsqlString = fmt.Sprintf(\"select score, vid from question where id = %d\", questionID)\n\tvar score uint32\n\tvar vid uint64\n\tsecondRow, err := db.QueryRow(common.BUDAODB, sqlString)\n\terr = secondRow.Scan(&score, &vid)\n\tif err != nil {\n\t\tglog.Error(\"query question score failed. err:%v\", err)\n\t\treturn\n\t}\n\n\tvar (\n\t\tuserResult int\n\t\tuserScore uint32\n\t)\n\tuserTableName, err := db.GetTableName(\"user_\", userID)\n\ttableNumber := userID >> 54\n\tuserQuestionTN := fmt.Sprintf(\"user_question_%d\", tableNumber)\n\tif answerFlag == 1 {\n\t\t// answer correct\n\t\tsqlString = fmt.Sprintf(\"update %s set right_answer_num = right_answer_num+1, get_score = get_score+%d where uid = %d\", userTableName, score, userID)\n\t\t_, err = db.Exec(common.BUDAODB, sqlString)\n\t\tif err != nil {\n\t\t\tglog.Error(\"update user table failed. err:%v\", err)\n\t\t\treturn\n\t\t}\n\n\t\tsqlString = fmt.Sprintf(\"update question set right_answer_num = right_answer_num+1 where id = %d\", questionID)\n\t\t_, err = db.Exec(common.BUDAODB, sqlString)\n\t\tif err != nil {\n\t\t\tglog.Error(\"update question table failed. err:%v\", err)\n\t\t\treturn\n\t\t}\n\n\t\tuserScore = score\n\t\tuserResult = 1\n\t} else {\n\t\t// answer error\n\t\tsqlString = fmt.Sprintf(\"update %s set wrong_answer_num = wrong_answer_num+1 where uid = %d\", userTableName, userID)\n\t\t_, err = db.Exec(common.BUDAODB, sqlString)\n\t\tif err != nil {\n\t\t\tglog.Error(\"update user table failed. 
err:%v\", err)\n\t\t\treturn\n\t\t}\n\n\t\tsqlString = fmt.Sprintf(\"update question set wrong_answer_num = wrong_answer_num+1 where id = %d\", questionID)\n\t\t_, err = db.Exec(common.BUDAODB, sqlString)\n\t\tif err != nil {\n\t\t\tglog.Error(\"update question table failed. err:%v\", err)\n\t\t\treturn\n\t\t}\n\n\t\tuserScore = 0\n\t\tuserResult = 2\n\t}\n\n\tsqlString = fmt.Sprintf(\"update question_option set answer_num = answer_num+1 where question_id = %d and option_id = %d\", questionID, optionID)\n\t_, err = db.Exec(common.BUDAODB, sqlString)\n\tif err != nil {\n\t\tglog.Error(\"update question_option table failed. err:%v\", err)\n\t\treturn\n\t}\n\n\tsqlString = fmt.Sprintf(\"insert into %s (uid, question_id, result, get_score, option_id) values (%d, %d, %d, %d, %d)\", userQuestionTN, userID, questionID, userResult, userScore, optionID)\n\t_, err = db.Exec(common.BUDAODB, sqlString)\n\tif err != nil {\n\t\tglog.Error(\"answer question insert user_question_ table faild. err:%v\", err)\n\t\treturn\n\t}\n\n\t// update question_dynamic\n\tfield := fmt.Sprintf(\"%s%s\", strconv.FormatUint(questionID, 10), strconv.FormatUint(optionID, 10))\n\t_, err = db.HIncrBy(common.QUESTION_DYNAMIC, field, 1)\n\tif err != nil {\n\t\tglog.Error(\"answer question insert hash question_dynamic faild. err:%v\", err)\n\t}\n\n\t// inster user_act_[uid]\n\t_, err = db.HSet(key, strconv.FormatUint(questionID, 10), strconv.FormatUint(optionID, 10))\n\tif err != nil {\n\t\tglog.Error(\"answer question insert hash user_act_ faild. err:%v\", err)\n\t}\n\n\t// update vid_dynamic\n\t_, err = db.SAdd(common.VID_DYNAMIC, strconv.FormatUint(vid, 10))\n\tif err != nil {\n\t\tglog.Error(\"insert vid_dynamic set failed. err:%v\", err)\n\t\treturn\n\t}\n\n\tresp.Status.Code = pb.Status_OK\n\n\treturn\n}", "func (scsuo *SurveyCellScanUpdateOne) SetSurveyQuestion(s *SurveyQuestion) *SurveyCellScanUpdateOne {\n\treturn scsuo.SetSurveyQuestionID(s.ID)\n}", "func (m *PayoutLocationUpdateRequest) Validate(formats strfmt.Registry) error {\n\tvar res []error\n\n\tif err := m.validateID(formats); err != nil {\n\t\tres = append(res, err)\n\t}\n\n\tif err := m.validateUpdatedPayload(formats); err != nil {\n\t\tres = append(res, err)\n\t}\n\n\tif len(res) > 0 {\n\t\treturn errors.CompositeValidationError(res...)\n\t}\n\treturn nil\n}", "func (m *AddonUpdateRequest) Validate(formats strfmt.Registry) error {\n\tvar res []error\n\n\tif err := m.validateChargeType(formats); err != nil {\n\t\t// prop\n\t\tres = append(res, err)\n\t}\n\n\tif err := m.validatePeriodUnit(formats); err != nil {\n\t\t// prop\n\t\tres = append(res, err)\n\t}\n\n\tif err := m.validateType(formats); err != nil {\n\t\t// prop\n\t\tres = append(res, err)\n\t}\n\n\tif len(res) > 0 {\n\t\treturn errors.CompositeValidationError(res...)\n\t}\n\treturn nil\n}", "func (ut *ownerInputPayload) Validate() (err error) {\n\tif ut.Email != nil {\n\t\tif err2 := goa.ValidateFormat(goa.FormatEmail, *ut.Email); err2 != nil {\n\t\t\terr = goa.MergeErrors(err, goa.InvalidFormatError(`response.email`, *ut.Email, goa.FormatEmail, err2))\n\t\t}\n\t}\n\treturn\n}", "func (scsu *SurveyCellScanUpdate) SetSurveyQuestion(s *SurveyQuestion) *SurveyCellScanUpdate {\n\treturn scsu.SetSurveyQuestionID(s.ID)\n}", "func (m *PrevoteReq) Validate(formats strfmt.Registry) error {\n\tvar res []error\n\n\tif err := m.validateBaseReq(formats); err != nil {\n\t\tres = append(res, err)\n\t}\n\n\tif err := m.validateValidator(formats); err != nil {\n\t\tres = append(res, err)\n\t}\n\n\tif len(res) > 0 
{\n\t\treturn errors.CompositeValidationError(res...)\n\t}\n\treturn nil\n}", "func (e SetRequestValidationError) Cause() error { return e.cause }", "func (e UpsertEventRequestValidationError) Key() bool { return e.key }", "func (v UpdateCustomClientSequenceRequest) Validate() error {\n\treturn edgegriderr.ParseValidationErrors(validation.Errors{\n\t\t\"ConfigID\": validation.Validate(v.ConfigID, validation.Required),\n\t\t\"Version\": validation.Validate(v.Version, validation.Required),\n\t\t\"Sequence\": validation.Validate(v.Sequence, validation.Required),\n\t})\n}", "func (e VulnEvent) Validate() error {\n\tif e.SourceName == \"\" {\n\t\treturn fmt.Errorf(\"must set SourceName in event\")\n\t}\n\tif e.Asset.IPAddress == \"\" && e.Asset.Hostname == \"\" {\n\t\treturn fmt.Errorf(\"must set IPAddress or Hostname in event\")\n\t}\n\treturn nil\n}", "func (ut *inviteUserPayload) Validate() (err error) {\n\tif ut.Email == nil {\n\t\terr = goa.MergeErrors(err, goa.MissingAttributeError(`request`, \"email\"))\n\t}\n\tif ut.Role == nil {\n\t\terr = goa.MergeErrors(err, goa.MissingAttributeError(`request`, \"role\"))\n\t}\n\treturn\n}", "func (_Quiz *QuizCallerSession) Question() (string, error) {\n\treturn _Quiz.Contract.Question(&_Quiz.CallOpts)\n}", "func (a AddEventRequest) Validate() error {\n\tif err := v2.Validate(a); err != nil {\n\t\treturn err\n\t}\n\n\t// BaseReading has the skip(\"-\") validation annotation for BinaryReading and SimpleReading\n\t// Otherwise error will occur as only one of them exists\n\t// Therefore, need to validate the nested BinaryReading and SimpleReading struct here\n\tfor _, r := range a.Event.Readings {\n\t\tif err := r.Validate(); err != nil {\n\t\t\treturn err\n\t\t}\n\t}\n\n\treturn nil\n}", "func (m *LolPlayerLevelUpPlayerLevelUpEvent) Validate(formats strfmt.Registry) error {\n\treturn nil\n}" ]
[ "0.6543093", "0.61197644", "0.6106625", "0.5761736", "0.5517337", "0.5479088", "0.5441649", "0.5390963", "0.5363869", "0.534268", "0.5188957", "0.5169113", "0.5060756", "0.5022828", "0.5009821", "0.49142748", "0.48590037", "0.4809924", "0.4781676", "0.47363362", "0.47306767", "0.47063693", "0.4688728", "0.46838972", "0.46359518", "0.4534647", "0.45328668", "0.44875652", "0.44767073", "0.44756064", "0.44689822", "0.445761", "0.4455884", "0.4451742", "0.44140965", "0.44004387", "0.43841785", "0.43693367", "0.43620408", "0.4357119", "0.4310332", "0.43029672", "0.42973712", "0.4279302", "0.4267894", "0.42657718", "0.42657718", "0.42488", "0.42483938", "0.42412987", "0.4191506", "0.41876176", "0.41791418", "0.4178872", "0.41762617", "0.41753748", "0.4167739", "0.41670173", "0.41528854", "0.41508368", "0.41469777", "0.41409674", "0.41401234", "0.41150698", "0.4113728", "0.41031748", "0.40980464", "0.4092473", "0.40859064", "0.4085823", "0.40827572", "0.4076172", "0.40754408", "0.40686584", "0.40670252", "0.40654013", "0.40634444", "0.40597025", "0.40589988", "0.40545452", "0.405379", "0.40510854", "0.40427786", "0.40405208", "0.40191278", "0.40144563", "0.40033272", "0.4003151", "0.40017158", "0.40009087", "0.39961806", "0.39864334", "0.39844623", "0.39717677", "0.39663813", "0.39663798", "0.39660162", "0.3965006", "0.39601055", "0.39475176" ]
0.7601968
0
Field function returns field value.
func (e UpsertEventRequest_QuestionValidationError) Field() string { return e.field }
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "func GetFieldValue(v interface{}, field string) (r string) {\n\tvar immutable reflect.Value\n\timmutable = GetReflectValue(v)\n\tval := immutable.FieldByName(field)\n\tswitch val.Kind() {\n\tcase reflect.Int64, reflect.Int32, reflect.Int:\n\t\tr = fmt.Sprintf(\"%d\", val.Int())\n\tcase reflect.Float64, reflect.Float32:\n\t\tr = fmt.Sprintf(\"%.2f\", val.Float())\n\tdefault:\n\t\t// process time\n\t\tvi := val.Interface()\n\t\tif vc, ok := vi.(time.Time); ok {\n\t\t\tr = FormatTime(vc)\n\t\t\tbreak\n\t\t}\n\t\tr = fmt.Sprintf(\"%v\", val)\n\t}\n\treturn\n}", "func (f *field) Val() interface{} {\n\treturn f.v\n}", "func (f Fields) ValueForField(fieldName string) string {\n\treturn f.ValueForFieldOfType(fieldName, \"\")\n}", "func (v *ClassValue) field(s *scope, name string) Value {\n\tfield, ok := v.Fields[name]\n\tif !ok {\n\t\tpanic(fmt.Errorf(\"ClassValue %v did not contain field %v\", v.Type().Name(), name))\n\t}\n\treturn field\n}", "func (f *Field) Value() interface{} {\n\treturn f.value.Interface()\n}", "func (f *Fieldx) Value() interface{} {\n\treturn f.value.Interface()\n}", "func (i Item) GetField(name string) interface{} {\n\treturn getField(name, i.Payload)\n}", "func FieldValue(field *InputField) string {\n\treturn field.value\n}", "func (e RanparameterValueValidationError) Field() string { return e.field }", "func (i I)Field(r,c int, value string)string{\n return value\n}", "func (s *StructField) Field(name string) (*StructField, error) {\n\treturn Field(s.Value(), name)\n}", "func (entry *Entry) Field(name string) (value string, err error) {\n\tvalue, ok := entry.fields[name]\n\tif !ok {\n\t\terr = fmt.Errorf(\"field '%v' does not found in record %+v\", name, *entry)\n\t}\n\treturn\n}", "func (m *NodeMutation) Field(name string) (ent.Value, bool) {\n\tswitch name {\n\tcase node.FieldValue:\n\t\treturn m.Value()\n\t}\n\treturn nil, false\n}", "func (u *User) GetFieldValue(field *mapping.StructField) (interface{}, error) {\n\tswitch field.Index[0] {\n\tcase 0: // ID\n\t\treturn u.ID, nil\n\tcase 1: // Name\n\t\treturn u.Name, nil\n\tcase 2: // CreatedAt\n\t\treturn u.CreatedAt, nil\n\tcase 3: // CreatedAtIso\n\t\treturn u.CreatedAtIso, nil\n\tcase 5: // MotherID\n\t\treturn u.MotherID, nil\n\tcase 7: // FatherID\n\t\treturn u.FatherID, nil\n\t}\n\treturn nil, errors.Wrapf(mapping.ErrInvalidModelField, \"provided invalid field: '%s' for given model: User'\", field.Name())\n}", "func (m *NumberTokenMutation) Field(name string) (ent.Value, bool) {\n\tswitch name {\n\tcase numbertoken.FieldValue:\n\t\treturn m.Value()\n\t}\n\treturn nil, false\n}", "func (f *Field) Field(name string) *Field {\n\tfield, ok := f.FieldOk(name)\n\tif !ok {\n\t\tpanic(\"field not found\")\n\t}\n\n\treturn field\n}", "func (e GetInstanceRequestValidationError) Field() string { return e.field }", "func (e RanparameterItemValidationError) Field() string { return e.field }", "func (e ApplicationPubSubValidationError) Field() string { return e.field }", "func (res Result) GetField(fields ...string) interface{} {\n\tif len(fields) == 0 {\n\t\treturn res\n\t}\n\n\treturn res.get(fields)\n}", "func (t *Type) Field(i int) *Field", "func (m *CarRepairrecordMutation) Field(name string) (ent.Value, bool) {\n\tswitch name {\n\tcase carrepairrecord.FieldDatetime:\n\t\treturn m.Datetime()\n\tcase carrepairrecord.FieldRepairdetail:\n\t\treturn m.Repairdetail()\n\tcase carrepairrecord.FieldRepaircost:\n\t\treturn m.Repaircost()\n\tcase carrepairrecord.FieldCarmaintenance:\n\t\treturn m.Carmaintenance()\n\t}\n\treturn 
nil, false\n}", "func (b *box) getFieldValue(x, y int) int {\n\treturn b.values[x+y*3]\n}", "func (e GetEventByIDRequestValidationError) Field() string { return e.field }", "func (msg *Message) Field(fieldName string) *Field {\n\treturn msg.fieldByName[fieldName]\n}", "func (e GetRequestValidationError) Field() string { return e.field }", "func (e GetRequestValidationError) Field() string { return e.field }", "func (e GetInstanceResponseValidationError) Field() string { return e.field }", "func (e BitStringValidationError) Field() string { return e.field }", "func (e GetResponseValidationError) Field() string { return e.field }", "func (e GetApplicationPubSubRequestValidationError) Field() string { return e.field }", "func (e ResultValidationError) Field() string { return e.field }", "func (e GetEventByIDResponseValidationError) Field() string { return e.field }", "func (e RanparameterDefItemValidationError) Field() string { return e.field }", "func (e ArfcnValidationError) Field() string { return e.field }", "func (p *Pet) GetFieldValue(field *mapping.StructField) (interface{}, error) {\n\tswitch field.Index[0] {\n\tcase 0: // ID\n\t\treturn p.ID, nil\n\tcase 1: // Name\n\t\treturn p.Name, nil\n\tcase 3: // OwnerID\n\t\treturn p.OwnerID, nil\n\t}\n\treturn nil, errors.Wrapf(mapping.ErrInvalidModelField, \"provided invalid field: '%s' for given model: Pet'\", field.Name())\n}", "func (e RanparameterIdValidationError) Field() string { return e.field }", "func (e RetrieveResponseValidationError) Field() string { return e.field }", "func (m *RepairinvoiceMutation) Field(name string) (ent.Value, bool) {\n\tswitch name {\n\tcase repairinvoice.FieldBequipment:\n\t\treturn m.Bequipment()\n\tcase repairinvoice.FieldEmtell:\n\t\treturn m.Emtell()\n\tcase repairinvoice.FieldNum:\n\t\treturn m.Num()\n\t}\n\treturn nil, false\n}", "func (m *CleaningroomMutation) Field(name string) (ent.Value, bool) {\n\tswitch name {\n\tcase cleaningroom.FieldNote:\n\t\treturn m.Note()\n\tcase cleaningroom.FieldDateandstarttime:\n\t\treturn m.Dateandstarttime()\n\tcase cleaningroom.FieldPhonenumber:\n\t\treturn m.Phonenumber()\n\tcase cleaningroom.FieldNumofem:\n\t\treturn m.Numofem()\n\t}\n\treturn nil, false\n}", "func Field(name, from, reference string) (string, error) {\n\treturn makeRequest(\"field\", name, from, reference)\n}", "func (e GetMovableObjectRequestValidationError) Field() string { return e.field }", "func (e ResolveResponseValidationError) Field() string { return e.field }", "func (e PublishResponseValidationError) Field() string { return e.field }", "func (e GetMessageRequestValidationError) Field() string { return e.field }", "func (e GetMessageResponseValidationError) Field() string { return e.field }", "func (m *RepairinvoiceMutation) Field(name string) (ent.Value, bool) {\n\tswitch name {\n\tcase repairinvoice.FieldSymptomid:\n\t\treturn m.Symptomid()\n\tcase repairinvoice.FieldDeviceid:\n\t\treturn m.Deviceid()\n\tcase repairinvoice.FieldUserid:\n\t\treturn m.Userid()\n\tcase repairinvoice.FieldStatusrepairid:\n\t\treturn m.Statusrepairid()\n\t}\n\treturn nil, false\n}", "func (e SimpleRequestValidationError) Field() string { return e.field }", "func (e CacheValidationError) Field() string { return e.field }", "func (e PciValidationError) Field() string { return e.field }", "func (e ChannelPayRequestValidationError) Field() string { return e.field }", "func (e GetMovableObjectResponseValidationError) Field() string { return e.field }", "func (e RetrieveRequestValidationError) Field() string { 
return e.field }", "func (m *ExchangeMutation) Field(name string) (ent.Value, bool) {\n\tswitch name {\n\tcase exchange.FieldCode:\n\t\treturn m.Code()\n\tcase exchange.FieldName:\n\t\treturn m.Name()\n\t}\n\treturn nil, false\n}", "func (e PublishRequestValidationError) Field() string { return e.field }", "func (m *PetruleMutation) Field(name string) (ent.Value, bool) {\n\tswitch name {\n\tcase petrule.FieldPetrule:\n\t\treturn m.Petrule()\n\t}\n\treturn nil, false\n}", "func (e GitopsCFValidationError) Field() string { return e.field }", "func (e SimpleResponseValidationError) Field() string { return e.field }", "func (e ChannelPayResponseValidationError) Field() string { return e.field }", "func (f *Field) Get(l *Location) (string, error) {\n\tif l.Comp == -1 {\n\t\treturn string(f.Value), nil\n\t}\n\tcomp, err := f.Component(l.Comp)\n\tif err != nil {\n\t\treturn \"\", err\n\t}\n\treturn comp.Get(l)\n}", "func (m *RepairingMutation) Field(name string) (ent.Value, bool) {\n\tswitch name {\n\tcase repairing.FieldRepairpart:\n\t\treturn m.Repairpart()\n\t}\n\treturn nil, false\n}", "func (e RanfunctionNameValidationError) Field() string { return e.field }", "func (m *BillMutation) Field(name string) (ent.Value, bool) {\n\tswitch name {\n\tcase bill.FieldPrice:\n\t\treturn m.Price()\n\tcase bill.FieldTime:\n\t\treturn m.Time()\n\t}\n\treturn nil, false\n}", "func (m *EventRSVPMutation) Field(name string) (ent.Value, bool) {\n\treturn nil, false\n}", "func Field(v interface{}, name string) (*Fieldx, bool) {\n\treturn New(v).Field(name)\n}", "func (e GetStreamRequestValidationError) Field() string { return e.field }", "func (e RdsValidationError) Field() string { return e.field }", "func (f *TagField) Value() string {\n\treturn f.value\n}", "func (m *LeaseMutation) Field(name string) (ent.Value, bool) {\n\tswitch name {\n\tcase lease.FieldAddedtime:\n\t\treturn m.Addedtime()\n\tcase lease.FieldTenant:\n\t\treturn m.Tenant()\n\tcase lease.FieldNumbtenant:\n\t\treturn m.Numbtenant()\n\tcase lease.FieldIdtenant:\n\t\treturn m.Idtenant()\n\tcase lease.FieldAgetenant:\n\t\treturn m.Agetenant()\n\t}\n\treturn nil, false\n}", "func (e RetrieveCurrentRequestValidationError) Field() string { return e.field }", "func (fn AdapterFunc) Field(fieldpath []string) (string, bool) {\n\treturn fn(fieldpath)\n}", "func (e EarfcnValidationError) Field() string { return e.field }", "func (e Response_DataValidationError) Field() string { return e.field }", "func (e ScopedRdsValidationError) Field() string { return e.field }", "func (e ResolveRequestValidationError) Field() string { return e.field }", "func (e PaymentInputValidationError) Field() string { return e.field }", "func (m *PatientrecordMutation) Field(name string) (ent.Value, bool) {\n\tswitch name {\n\tcase patientrecord.FieldName:\n\t\treturn m.Name()\n\t}\n\treturn nil, false\n}", "func (e BatchGetResponseValidationError) Field() string { return e.field }", "func (i *Item) GetValue(field string) string {\n\tif i == nil || len(i.Fields) == 0 {\n\t\treturn \"\"\n\t}\n\n\tsectionFilter := false\n\tsectionLabel := \"\"\n\tfieldLabel := field\n\tif strings.Contains(field, \".\") {\n\t\tparts := strings.Split(field, \".\")\n\n\t\t// Test to make sure the . 
isn't the last character\n\t\tif len(parts) == 2 {\n\t\t\tsectionFilter = true\n\t\t\tsectionLabel = parts[0]\n\t\t\tfieldLabel = parts[1]\n\t\t}\n\t}\n\n\tfor _, f := range i.Fields {\n\t\tif sectionFilter {\n\t\t\tif f.Section != nil {\n\t\t\t\tif sectionLabel != i.SectionLabelForID(f.Section.ID) {\n\t\t\t\t\tcontinue\n\t\t\t\t}\n\t\t\t}\n\t\t}\n\n\t\tif fieldLabel == f.Label {\n\t\t\treturn f.Value\n\t\t}\n\t}\n\n\treturn \"\"\n}", "func (m *RoomInfoMutation) Field(name string) (ent.Value, bool) {\n\tswitch name {\n\tcase roominfo.FieldInfo:\n\t\treturn m.Info()\n\t}\n\treturn nil, false\n}", "func (m *TokenMutation) Field(name string) (ent.Value, bool) {\n\tswitch name {\n\tcase token.FieldCreatedAt:\n\t\treturn m.CreatedAt()\n\tcase token.FieldUpdatedAt:\n\t\treturn m.UpdatedAt()\n\tcase token.FieldValue:\n\t\treturn m.Value()\n\t}\n\treturn nil, false\n}", "func (m *ResourceMutation) Field(name string) (ent.Value, bool) {\n\tswitch name {\n\tcase resource.FieldName:\n\t\treturn m.Name()\n\tcase resource.FieldType:\n\t\treturn m.GetType()\n\t}\n\treturn nil, false\n}", "func (e MovableObjectValidationError) Field() string { return e.field }", "func (m *CardMutation) Field(name string) (ent.Value, bool) {\n\tswitch name {\n\tcase card.FieldCreateTime:\n\t\treturn m.CreateTime()\n\tcase card.FieldUpdateTime:\n\t\treturn m.UpdateTime()\n\tcase card.FieldSuit:\n\t\treturn m.Suit()\n\tcase card.FieldValue:\n\t\treturn m.Value()\n\t}\n\treturn nil, false\n}", "func (m *EventMutation) Field(name string) (ent.Value, bool) {\n\tswitch name {\n\tcase event.FieldName:\n\t\treturn m.Name()\n\t}\n\treturn nil, false\n}", "func (m *BillMutation) Field(name string) (ent.Value, bool) {\n\tswitch name {\n\tcase bill.FieldQuantity:\n\t\treturn m.Quantity()\n\tcase bill.FieldAddedTime:\n\t\treturn m.AddedTime()\n\t}\n\treturn nil, false\n}", "func (m *StreetMutation) Field(name string) (ent.Value, bool) {\n\tswitch name {\n\tcase street.FieldName:\n\t\treturn m.Name()\n\t}\n\treturn nil, false\n}", "func (m *LengthtimeMutation) Field(name string) (ent.Value, bool) {\n\tswitch name {\n\tcase lengthtime.FieldLengthtime:\n\t\treturn m.Lengthtime()\n\t}\n\treturn nil, false\n}", "func (e AssessmentResultValidationError) Field() string { return e.field }", "func (s UserSet) FieldGet(field models.FieldName) *models.FieldInfo {\n\tres := s.Collection().Call(\"FieldGet\", field)\n\tresTyped, _ := res.(*models.FieldInfo)\n\treturn resTyped\n}", "func (e GetUserResponseValidationError) Field() string { return e.field }", "func (m *PatientrightsMutation) Field(name string) (ent.Value, bool) {\n\tswitch name {\n\tcase patientrights.FieldPermissionDate:\n\t\treturn m.PermissionDate()\n\t}\n\treturn nil, false\n}", "func (e GetStreamResponseValidationError) Field() string { return e.field }", "func (m *EquipmentrentalMutation) Field(name string) (ent.Value, bool) {\n\tswitch name {\n\tcase equipmentrental.FieldRENTALAMOUNT:\n\t\treturn m.RENTALAMOUNT()\n\tcase equipmentrental.FieldRENTALDATE:\n\t\treturn m.RENTALDATE()\n\tcase equipmentrental.FieldRETURNDATE:\n\t\treturn m.RETURNDATE()\n\t}\n\treturn nil, false\n}", "func (f *FieldHandler) Value(initZero bool) reflect.Value {\n\treturn f.field.reflectValueGetter(f.expr.ptr, initZero)\n}", "func (m *PurposeMutation) Field(name string) (ent.Value, bool) {\n\tswitch name {\n\tcase purpose.FieldObjective:\n\t\treturn m.Objective()\n\t}\n\treturn nil, false\n}", "func (e ApplicationPubSubsValidationError) Field() string { return e.field }", "func (f Unstructured) 
Field(field string) Fragment {\n\tif f.fields != nil {\n\t\treturn f.fields[field]\n\t}\n\treturn nil\n}", "func (e BodyResponseValidationError) Field() string { return e.field }", "func (m *CarMutation) Field(name string) (ent.Value, bool) {\n\tswitch name {\n\tcase car.FieldCreateTime:\n\t\treturn m.CreateTime()\n\tcase car.FieldUpdateTime:\n\t\treturn m.UpdateTime()\n\tcase car.FieldModel:\n\t\treturn m.Model()\n\tcase car.FieldRegisteredAt:\n\t\treturn m.RegisteredAt()\n\t}\n\treturn nil, false\n}", "func (m *CardMutation) Field(name string) (ent.Value, bool) {\n\tswitch name {\n\tcase card.FieldNumber:\n\t\treturn m.Number()\n\tcase card.FieldName:\n\t\treturn m.Name()\n\tcase card.FieldOwnerID:\n\t\treturn m.OwnerID()\n\t}\n\treturn nil, false\n}" ]
[ "0.71079886", "0.705458", "0.70306563", "0.70252305", "0.6945119", "0.69039124", "0.689789", "0.68854237", "0.68611896", "0.68137765", "0.6811531", "0.67632294", "0.6716657", "0.67018616", "0.66822076", "0.6671346", "0.66659707", "0.6661343", "0.66608155", "0.6660421", "0.665608", "0.6647752", "0.66360617", "0.6625801", "0.6617159", "0.66153616", "0.66153616", "0.661111", "0.6608895", "0.66083837", "0.6604208", "0.66008335", "0.65927887", "0.6587402", "0.65803015", "0.65671533", "0.6567071", "0.6564914", "0.65632343", "0.65630984", "0.654184", "0.6536053", "0.6530546", "0.6530526", "0.6528864", "0.65260595", "0.65179527", "0.6516745", "0.6516154", "0.6510159", "0.6510078", "0.65042776", "0.6501439", "0.6499975", "0.64988506", "0.649665", "0.6496221", "0.64947623", "0.649354", "0.6489089", "0.6488793", "0.64882225", "0.64859617", "0.6483642", "0.6479889", "0.64790434", "0.6472379", "0.6465228", "0.6459204", "0.6457627", "0.6452723", "0.64507645", "0.64495903", "0.64487314", "0.6448028", "0.64479464", "0.64474", "0.64456683", "0.64455897", "0.6444573", "0.64437336", "0.6443306", "0.6441888", "0.6441613", "0.6441039", "0.6439085", "0.6438874", "0.6434375", "0.64315784", "0.6430702", "0.6429934", "0.64209116", "0.6417538", "0.64174324", "0.6417134", "0.6411201", "0.64086837", "0.6406251", "0.6405251", "0.6404929", "0.64009386" ]
0.0
-1
Reason function returns reason value.
func (e UpsertEventRequest_QuestionValidationError) Reason() string { return e.reason }
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "func GetReason(from Getter, t string) string {\n\tif c := Get(from, t); c != nil {\n\t\treturn c.Reason\n\t}\n\treturn \"\"\n}", "func GetReason(message report.IMessage) int32 {\n\tswitch message.MessageType() {\n\tcase \"+RSP\", \"+BSP\":\n\t\treturn getLocationReason(message)\n\tcase \"+EVT\", \"+BVT\":\n\t\treturn getEventCode(message)\n\n\t}\n\treturn int32(6)\n}", "func GetReason(message report.IMessage) int32 {\n\tswitch message.MessageType() {\n\tcase \"+RSP\", \"+BSP\":\n\t\treturn getLocationReason(message)\n\tcase \"+EVT\", \"+BVT\":\n\t\treturn getEventCode(message)\n\n\t}\n\treturn int32(6)\n}", "func (b *Base) GetReason() string {\n\treturn b.Reason\n}", "func (o ValidationOutput) Reason() pulumi.StringPtrOutput {\n\treturn o.ApplyT(func(v Validation) *string { return v.Reason }).(pulumi.StringPtrOutput)\n}", "func (s *Subscription) GetReason() string {\n\tif s == nil || s.Reason == nil {\n\t\treturn \"\"\n\t}\n\treturn *s.Reason\n}", "func GetReason(message report.IMessage) int32 {\n\tidt, found := message.GetValue(fields.DeviceType)\n\tif !found {\n\t\treturn 6 //periodical\n\t}\n\n\tdeviceType, valid := idt.(byte)\n\tif !valid {\n\t\treturn 6 //periodical\n\t}\n\n\tswitch deviceType {\n\tcase devicetypes.GV320:\n\t\treturn gv300.GetReason(message)\n\n\tcase devicetypes.GV55, devicetypes.GV55N:\n\t\treturn gv55.GetReason(message)\n\n\tcase devicetypes.GV55Lite, devicetypes.GV55NLite:\n\t\treturn gv55.GetReasonLite(message)\n\n\tcase devicetypes.GV75, devicetypes.GV75W:\n\t\treturn gv75.GetReason(message)\n\n\tcase devicetypes.GV55W:\n\t\treturn gv55w.GetReason(message)\n\n\tcase devicetypes.GV600W:\n\t\treturn gv600.GetReason(message)\n\tcase devicetypes.GV300W:\n\t\treturn gv300w.GetReason(message)\n\tdefault:\n\t\treturn gv55.GetReason(message)\n\t}\n}", "func (e MessageDValidationError) Reason() string { return e.reason }", "func (o LienOutput) Reason() pulumi.StringOutput {\n\treturn o.ApplyT(func(v *Lien) pulumi.StringOutput { return v.Reason }).(pulumi.StringOutput)\n}", "func (e BitStringValidationError) Reason() string { return e.reason }", "func (o JobConditionOutput) Reason() pulumi.StringPtrOutput {\n\treturn o.ApplyT(func(v JobCondition) *string { return v.Reason }).(pulumi.StringPtrOutput)\n}", "func Reason(v string) predicate.ProfileUKM {\n\treturn predicate.ProfileUKM(func(s *sql.Selector) {\n\t\ts.Where(sql.EQ(s.C(FieldReason), v))\n\t})\n}", "func (e MessageFValidationError) Reason() string { return e.reason }", "func (o ValidationPatchOutput) Reason() pulumi.StringPtrOutput {\n\treturn o.ApplyT(func(v ValidationPatch) *string { return v.Reason }).(pulumi.StringPtrOutput)\n}", "func (e ActiveHealthCheckValidationError) Reason() string { return e.reason }", "func (o *SecurityProblemEvent) GetReason() string {\n\tif o == nil || o.Reason == nil {\n\t\tvar ret string\n\t\treturn ret\n\t}\n\treturn *o.Reason\n}", "func (e EutracgiValidationError) Reason() string { return e.reason }", "func (resp *Response) Reason() string {\n\treturn resp.Status\n}", "func (n *Notification) GetReason() string {\n\tif n == nil || n.Reason == nil {\n\t\treturn \"\"\n\t}\n\treturn *n.Reason\n}", "func (s *SessionTrackerV1) GetReason() string {\n\treturn s.Spec.Reason\n}", "func (e MessageEValidationError) Reason() string { return e.reason }", "func (e RequirementRuleValidationError) Reason() string { return e.reason }", "func Reason(err error) string {\n\tif err == nil {\n\t\treturn \"\"\n\t}\n\tif reasoner, ok := err.(Reasoner); ok {\n\t\treturn reasoner.Reason()\n\t}\n\treturn 
\"\"\n}", "func (o MachineInstanceStatusConditionsOutput) Reason() pulumi.StringOutput {\n\treturn o.ApplyT(func(v MachineInstanceStatusConditions) string { return v.Reason }).(pulumi.StringOutput)\n}", "func (e NrtValidationError) Reason() string { return e.reason }", "func (o BuildStatusOutput) Reason() pulumi.StringPtrOutput {\n\treturn o.ApplyT(func(v BuildStatus) *string { return v.Reason }).(pulumi.StringPtrOutput)\n}", "func (e GetMessageResponseValidationError) Reason() string { return e.reason }", "func (o ApplicationStatusConditionsOutput) Reason() pulumi.StringOutput {\n\treturn o.ApplyT(func(v ApplicationStatusConditions) string { return v.Reason }).(pulumi.StringOutput)\n}", "func (e PassiveHealthCheckValidationError) Reason() string { return e.reason }", "func (e CardValidationError) Reason() string { return e.reason }", "func (e StatsdValidationError) Reason() string { return e.reason }", "func (e PciValidationError) Reason() string { return e.reason }", "func (o ApplicationStatusWorkflowStepsOutput) Reason() pulumi.StringPtrOutput {\n\treturn o.ApplyT(func(v ApplicationStatusWorkflowSteps) *string { return v.Reason }).(pulumi.StringPtrOutput)\n}", "func (o *AccessRequestData) GetReason() string {\n\tif o == nil || o.Reason == nil {\n\t\tvar ret string\n\t\treturn ret\n\t}\n\treturn *o.Reason\n}", "func (e LanguageValidationError) Reason() string { return e.reason }", "func (e CreditValidationError) Reason() string { return e.reason }", "func (e PaymentValidationError) Reason() string { return e.reason }", "func (e ResponseValidationError) Reason() string { return e.reason }", "func (e RdsValidationError) Reason() string { return e.reason }", "func (e CardHolderValidationError) Reason() string { return e.reason }", "func (e ActionValidationError) Reason() string { return e.reason }", "func (e SimpleResponseValidationError) Reason() string { return e.reason }", "func (e StatusResponseValidationError) Reason() string { return e.reason }", "func (o *V0037Node) GetReason() string {\n\tif o == nil || o.Reason == nil {\n\t\tvar ret string\n\t\treturn ret\n\t}\n\treturn *o.Reason\n}", "func (e ChannelPayRequestValidationError) Reason() string { return e.reason }", "func (e ChannelPayResponseValidationError) Reason() string { return e.reason }", "func (e RicControlMessagePriorityValidationError) Reason() string { return e.reason }", "func (e MaxPciValidationError) Reason() string { return e.reason }", "func (e LivenessResponseValidationError) Reason() string { return e.reason }", "func (e MaxPlmnValidationError) Reason() string { return e.reason }", "func (e SimpleRequestValidationError) Reason() string { return e.reason }", "func (e MessageCValidationError) Reason() string { return e.reason }", "func (se *StatusError) Reason() string {\n\treturn se.message\n}", "func (o *DeploymentsCondition) GetReason() string {\n\tif o == nil || o.Reason == nil {\n\t\tvar ret string\n\t\treturn ret\n\t}\n\treturn *o.Reason\n}", "func (e SkillValidationError) Reason() string { return e.reason }", "func (e GetDisscusRespValidationError) Reason() string { return e.reason }", "func (o BuildStatusPtrOutput) Reason() pulumi.StringPtrOutput {\n\treturn o.ApplyT(func(v *BuildStatus) *string {\n\t\tif v == nil {\n\t\t\treturn nil\n\t\t}\n\t\treturn v.Reason\n\t}).(pulumi.StringPtrOutput)\n}", "func (c *ContainerStatusResolver) Reason() *string {\n\treturn c.reason\n}", "func (e EarfcnValidationError) Reason() string { return e.reason }", "func (e CalculateComplianceRequestValidationError) Reason() string 
{ return e.reason }", "func (_this *CrashReportBody) Reason() *string {\n\tvar ret *string\n\tvalue := _this.Value_JS.Get(\"reason\")\n\tif value.Type() != js.TypeNull && value.Type() != js.TypeUndefined {\n\t\t__tmp := (value).String()\n\t\tret = &__tmp\n\t}\n\treturn ret\n}", "func (e HealthCheck_PayloadValidationError) Reason() string { return e.reason }", "func (e RetrieveMyCardsResponseValidationError) Reason() string { return e.reason }", "func (e CommonResponseValidationError) Reason() string { return e.reason }", "func (e GetMessageRequestValidationError) Reason() string { return e.reason }", "func (o StorageClusterStatusConditionsOutput) Reason() pulumi.StringPtrOutput {\n\treturn o.ApplyT(func(v StorageClusterStatusConditions) *string { return v.Reason }).(pulumi.StringPtrOutput)\n}", "func (e StateMachineResponseValidationError) Reason() string { return e.reason }", "func (e ArfcnValidationError) Reason() string { return e.reason }", "func (e NetworkPolicyValidationError) Reason() string { return e.reason }", "func (o *DataPlaneClusterUpdateStatusRequestConditions) GetReason() string {\n\tif o == nil || o.Reason == nil {\n\t\tvar ret string\n\t\treturn ret\n\t}\n\treturn *o.Reason\n}", "func (e MetricValidationError) Reason() string { return e.reason }", "func (o BuildRunStatusOutput) Reason() pulumi.StringPtrOutput {\n\treturn o.ApplyT(func(v BuildRunStatus) *string { return v.Reason }).(pulumi.StringPtrOutput)\n}", "func (e RecoverableError) Reason() string {\n\treturn e.reason\n}", "func (e MaxofMessageProtocolTestsValidationError) Reason() string { return e.reason }", "func (e ChannelNotifyResponseValidationError) Reason() string { return e.reason }", "func (e ResultValidationError) Reason() string { return e.reason }", "func (e TestSpecificationValidationError) Reason() string { return e.reason }", "func (e NonRecoverableError) Reason() string {\n\treturn e.reason\n}", "func (o JobStatusErrorOutput) Reason() pulumi.StringPtrOutput {\n\treturn o.ApplyT(func(v JobStatusError) *string { return v.Reason }).(pulumi.StringPtrOutput)\n}", "func (a Acknowledgement) Reason() error {\n\tswitch {\n\tcase a.State == ACK:\n\t\treturn nil\n\tcase a.State == NACK:\n\t\treturn errors.New(string(a.Message))\n\tdefault:\n\t\treturn errors.New(\"unknown acknowledgement status\")\n\t}\n}", "func (e UpdateMessageResponseValidationError) Reason() string { return e.reason }", "func (e WordValidationError) Reason() string { return e.reason }", "func (e GetDisscusReqValidationError) Reason() string { return e.reason }", "func (e CreatMessageResponseValidationError) Reason() string { return e.reason }", "func (o HorizontalPodAutoscalerConditionPatchOutput) Reason() pulumi.StringPtrOutput {\n\treturn o.ApplyT(func(v HorizontalPodAutoscalerConditionPatch) *string { return v.Reason }).(pulumi.StringPtrOutput)\n}", "func (o HorizontalPodAutoscalerConditionPatchOutput) Reason() pulumi.StringPtrOutput {\n\treturn o.ApplyT(func(v HorizontalPodAutoscalerConditionPatch) *string { return v.Reason }).(pulumi.StringPtrOutput)\n}", "func (e MetricImplementationValidationError) Reason() string { return e.reason }", "func (e CiliumCFValidationError) Reason() string { return e.reason }", "func (e FilterStateRuleValidationError) Reason() string { return e.reason }", "func (e CreateDisscusRespValidationError) Reason() string { return e.reason }", "func (e HealthCheckValidationError) Reason() string { return e.reason }", "func (e HealthCheckValidationError) Reason() string { return e.reason }", "func (e 
TwoOneofsValidationError) Reason() string { return e.reason }", "func (e AdminValidationError) Reason() string { return e.reason }", "func (o HorizontalPodAutoscalerConditionOutput) Reason() pulumi.StringPtrOutput {\n\treturn o.ApplyT(func(v HorizontalPodAutoscalerCondition) *string { return v.Reason }).(pulumi.StringPtrOutput)\n}", "func (o HorizontalPodAutoscalerConditionOutput) Reason() pulumi.StringPtrOutput {\n\treturn o.ApplyT(func(v HorizontalPodAutoscalerCondition) *string { return v.Reason }).(pulumi.StringPtrOutput)\n}", "func (e LivenessRequestValidationError) Reason() string { return e.reason }", "func (r *ReportStoryRequest) GetReason() (value ReportReasonClass) {\n\tif r == nil {\n\t\treturn\n\t}\n\treturn r.Reason\n}", "func (e AssessmentResultValidationError) Reason() string { return e.reason }", "func (e L7NetworkPolicyRuleValidationError) Reason() string { return e.reason }", "func (e NrarfcnValidationError) Reason() string { return e.reason }" ]
[ "0.78512263", "0.7759013", "0.7759013", "0.758723", "0.74332446", "0.74091107", "0.740494", "0.73673135", "0.73432285", "0.7330937", "0.7329657", "0.73138005", "0.72980094", "0.7293151", "0.72837216", "0.7275913", "0.7252345", "0.7230593", "0.72234565", "0.7222608", "0.7196587", "0.7186926", "0.7177811", "0.71720684", "0.71702856", "0.7168882", "0.7168033", "0.71623784", "0.7160162", "0.7157901", "0.7156796", "0.71499187", "0.71483266", "0.71435404", "0.7138927", "0.7134093", "0.7131485", "0.71212435", "0.7113703", "0.71134007", "0.7110416", "0.71102226", "0.71073544", "0.71044487", "0.7097571", "0.709562", "0.70931906", "0.7092116", "0.7085098", "0.70789874", "0.7077606", "0.707535", "0.7071573", "0.706842", "0.7067343", "0.70658314", "0.7065663", "0.70604813", "0.70554", "0.70413375", "0.7038985", "0.7036392", "0.70291436", "0.70268923", "0.7026706", "0.70261866", "0.7018986", "0.7011388", "0.70111495", "0.7009085", "0.7005406", "0.70025146", "0.7000965", "0.69991565", "0.6995616", "0.6992607", "0.6992276", "0.69910586", "0.6989737", "0.69873315", "0.6984515", "0.6983248", "0.6979003", "0.6976954", "0.69759", "0.69759", "0.6974406", "0.69741553", "0.6972589", "0.69723344", "0.69695055", "0.69695055", "0.69690573", "0.69686645", "0.69659555", "0.69659555", "0.69656986", "0.69630307", "0.69612694", "0.69515", "0.69511986" ]
0.0
-1
Cause function returns cause value.
func (e UpsertEventRequest_QuestionValidationError) Cause() error { return e.cause }
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "func Cause(err error) error {\n\tswitch err.(type) {\n\tcase Causable:\n\t\treturn err.(Causable).Cause()\n\t}\n\treturn nil\n}", "func (e errWithCause) Cause() error {\n\treturn e.cause\n}", "func Cause(e error) error {\n\tswitch e := e.(type) {\n\tcase *wrap:\n\t\treturn e.Cause()\n\tcase UserError:\n\t\treturn e.Cause()\n\tdefault:\n\t\treturn e\n\t}\n}", "func (e *Error) Cause() error {\n\treturn e.Unwrap()\n}", "func (e *wrap) Cause() error {\n\treturn e.cause\n}", "func Cause(err error) error {\n\treturn errors.Cause(err)\n}", "func Cause(err error) error {\n\treturn errors.Cause(err)\n}", "func Cause(err error) error {\n\treturn errors.Cause(err)\n}", "func Cause(err error) error {\n\treturn errors.Cause(err)\n}", "func Cause(err error) error {\n\treturn errors.Cause(err)\n}", "func Cause(err error) error {\n\treturn errors.Cause(err)\n}", "func Cause(err error) error {\n\tif err == nil {\n\t\treturn nil\n\t}\n\tif e, ok := err.(iCause); ok {\n\t\treturn e.Cause()\n\t}\n\tif e, ok := err.(iNext); ok {\n\t\treturn Cause(e.Next())\n\t}\n\tif e, ok := err.(iUnwrap); ok {\n\t\treturn Cause(e.Unwrap())\n\t}\n\treturn err\n}", "func (e *Error) Cause() error {\n\treturn e.err\n}", "func (e *errorT) Cause() error {\n\treturn e.err\n}", "func (s *Error) Cause() error {\n\treturn s.underlying\n}", "func (e *Error) Cause() error {\n\treturn e.Err\n}", "func (ec Error) Cause() error {\n\treturn ec.error\n}", "func Cause(err error) error {\n\tif err, ok := err.(*wrappedError); ok {\n\t\treturn err.Cause()\n\t}\n\treturn err\n}", "func (e *Err) Cause() error {\n\treturn e.cause\n}", "func Cause(err error) error {\n\ttype causer interface {\n\t\tCause() error\n\t}\n\n\tfor err != nil {\n\t\te, ok := err.(causer)\n\t\tif !ok {\n\t\t\treturn err\n\t\t}\n\t\terr = e.Cause()\n\t}\n\treturn nil\n}", "func (e Error) Cause() error {\n\treturn e.cause\n}", "func (e *RunError) Cause() error {\n\tif e.Inner != nil {\n\t\treturn e.Inner\n\t}\n\treturn e\n}", "func (e *wrappedError) Cause() error {\n\tif e.previous == nil {\n\t\treturn e\n\t}\n\tswitch err := e.previous.(type) {\n\tcase *wrappedError:\n\t\treturn err.Cause()\n\tdefault:\n\t\treturn err\n\t}\n}", "func Cause(err error) error {\n\tvar (\n\t\tcauser Causer\n\t\tok bool\n\t)\n\tfor err != nil {\n\t\tcauser, ok = err.(Causer)\n\t\tif !ok {\n\t\t\tbreak\n\t\t}\n\t\terr = causer.Cause()\n\t}\n\treturn err\n}", "func (e *OpError) Cause() error {\n\treturn e.Err\n}", "func (err *gooseError) Cause() error {\n\treturn err.cause\n}", "func (e *detailedError) Cause() error {\n\treturn e.cause\n}", "func (err *ExitError) Cause() error {\n\treturn err.Err\n}", "func (ce *ClientError) Cause() error {\n\treturn ce.err\n}", "func Cause(err error) error {\n\tif w, ok := err.(*Wrapped); ok {\n\t\t// if root level error\n\t\tif len(w.Errors) > 0 {\n\t\t\treturn w.Errors[0]\n\t\t}\n\t\t// already extracted error\n\t\treturn w\n\t}\n\treturn err\n}", "func Cause(err error) (error, bool) { // nolint: golint, staticcheck, stylecheck\n\terrWithContext, ok := err.(ContextError)\n\tif !ok {\n\t\treturn nil, false\n\t}\n\treturn errWithContext.Cause(), true\n}", "func Cause(err error) error {\n\tfor err != nil {\n\t\te, ok := err.(causer)\n\t\tif !ok {\n\t\t\tbreak\n\t\t}\n\t\terr = e.Cause()\n\t}\n\treturn err\n}", "func Cause(err error) error {\n\tfor err != nil {\n\t\tcause, ok := err.(causer)\n\t\tif !ok {\n\t\t\tbreak\n\t\t}\n\t\terr = cause.Cause()\n\t}\n\treturn err\n}", "func (e UnencodableValue) Cause() error {\n\treturn e.Err\n}", "func Cause(err error) 
error {\n\ttype wrapper interface {\n\t\tUnwrap() error\n\t}\n\tfor err != nil {\n\t\tcause, ok := err.(wrapper)\n\t\tif !ok {\n\t\t\tbreak\n\t\t}\n\t\terr = cause.Unwrap()\n\t}\n\treturn err\n}", "func (w *pipeError) Cause() error { return errors.Cause(w.error) }", "func (e *opaqueWrapper) Cause() error { return e.cause }", "func (e *opaqueWrapper) Cause() error { return e.cause }", "func (e ResolveRequestValidationError) Cause() error { return e.cause }", "func Cause(err error) error {\n\tfor {\n\t\tuerr := Unwrap(err)\n\t\tif uerr == nil {\n\t\t\treturn err\n\t\t}\n\t\terr = uerr\n\t}\n}", "func Cause(err error) error {\n\tfor {\n\t\tif e, ok := err.(errorCause); ok {\n\t\t\tif cause := e.Cause(); cause != nil {\n\t\t\t\terr = cause\n\t\t\t\tcontinue\n\t\t\t}\n\t\t}\n\t\treturn err\n\t}\n}", "func (e InternalUpstreamTransportValidationError) Cause() error { return e.cause }", "func (e EutracgiValidationError) Cause() error { return e.cause }", "func (w *withCode) Cause() error { return w.cause }", "func (e UpsertEventRequestValidationError) Cause() error { return e.cause }", "func (e PciValidationError) Cause() error { return e.cause }", "func (e NoOneofsValidationError) Cause() error { return e.cause }", "func (e SimpleRequestValidationError) Cause() error { return e.cause }", "func Cause(err error) error {\n\tfor err != nil {\n\t\tmrpErr, ok := err.(Error)\n\t\tif !ok {\n\t\t\tbreak\n\t\t}\n\t\terr = gErrors.Cause(mrpErr.originalError)\n\t}\n\treturn err\n}", "func (e *withDomain) Cause() error { return e.cause }", "func (e LoggingValidationError) Cause() error { return e.cause }", "func (e CiliumCFValidationError) Cause() error { return e.cause }", "func (e AssessmentResultValidationError) Cause() error { return e.cause }", "func (e LoggingCFValidationError) Cause() error { return e.cause }", "func Cause(err error) error {\n\tfor err != nil {\n\t\tunwraped := errors.Unwrap(err)\n\t\tif unwraped == nil {\n\t\t\treturn err\n\t\t}\n\t\terr = unwraped\n\t}\n\treturn err\n}", "func (e NrtValidationError) Cause() error { return e.cause }", "func (e ResolveResponseValidationError) Cause() error { return e.cause }", "func (e StateChangeValidationError) Cause() error { return e.cause }", "func (e SXGValidationError) Cause() error { return e.cause }", "func (e EutracellIdentityValidationError) Cause() error { return e.cause }", "func (e WorkflowComponentValidationError) Cause() error { return e.cause }", "func (e MessageFValidationError) Cause() error { return e.cause }", "func (e EarfcnValidationError) Cause() error { return e.cause }", "func (e ActiveHealthCheckValidationError) Cause() error { return e.cause }", "func Cause(e interface{}) ECode {\n\tif e == nil {\n\t\treturn &ecode{code: 0}\n\t}\n\tif str, ok := e.(string); ok {\n\t\treturn &ecode{code: 500, message: str}\n\t}\n\terr, ok := e.(error)\n\tif !ok {\n\t\treturn &ecode{code: 500, message: reflect.TypeOf(e).Name()}\n\t}\n\tec, ok := errors.Cause(err).(ECode)\n\tif ok {\n\t\treturn ec\n\t}\n\treturn &ecode{code: 500, message: err.Error()}\n}", "func (e UpstreamValidationError) Cause() error { return e.cause }", "func (e UpstreamValidationError) Cause() error { return e.cause }", "func (e TransactionValidationError) Cause() error { return e.cause }", "func (e MessageCValidationError) Cause() error { return e.cause }", "func WithCause(err, cause error) error {\n\treturn errWithCause{\n\t\terror: err,\n\t\tcause: cause,\n\t}\n}", "func (e ActionValidationError) Cause() error { return e.cause }", "func (e 
AssessEvidenceRequestValidationError) Cause() error { return e.cause }", "func (e Upstream_TimeoutValidationError) Cause() error { return e.cause }", "func (e BootstrapValidationError) Cause() error { return e.cause }", "func (e TwoValidOneofsValidationError) Cause() error { return e.cause }", "func (e RdsValidationError) Cause() error { return e.cause }", "func (e MaxPciValidationError) Cause() error { return e.cause }", "func (e AdminValidationError) Cause() error { return e.cause }", "func (e RequirementRuleValidationError) Cause() error { return e.cause }", "func (e ResultValidationError) Cause() error { return e.cause }", "func (e InternalUpstreamTransport_MetadataValueSourceValidationError) Cause() error { return e.cause }", "func (e MaintemplateComponentValidationError) Cause() error { return e.cause }", "func (e RedactedValidationError) Cause() error { return e.cause }", "func (e CreatMessageRequestValidationError) Cause() error { return e.cause }", "func (e NrcgiValidationError) Cause() error { return e.cause }", "func (e UpsertEventResponseValidationError) Cause() error { return e.cause }", "func (e NrarfcnValidationError) Cause() error { return e.cause }", "func (e TwoOneofsValidationError) Cause() error { return e.cause }", "func (e PassiveHealthCheckValidationError) Cause() error { return e.cause }", "func (e MessageEValidationError) Cause() error { return e.cause }", "func (e GetEventByIDRequestValidationError) Cause() error { return e.cause }", "func (e ArfcnValidationError) Cause() error { return e.cause }", "func (e TenantValidationError) Cause() error { return e.cause }", "func (e GetRequestValidationError) Cause() error { return e.cause }", "func (e GetRequestValidationError) Cause() error { return e.cause }", "func (e StateValidationError) Cause() error { return e.cause }", "func (e MinioComponentValidationError) Cause() error { return e.cause }", "func (e LatencyFaultValidationError) Cause() error { return e.cause }", "func (e GetDisscusReqValidationError) Cause() error { return e.cause }", "func (e UpdateTodoRequestValidationError) Cause() error { return e.cause }", "func (e ManifestProjectCFValidationError) Cause() error { return e.cause }" ]
[ "0.8261931", "0.79593104", "0.7896341", "0.7866004", "0.77969515", "0.7710267", "0.7710267", "0.7710267", "0.7710267", "0.7710267", "0.7710267", "0.7710245", "0.76848143", "0.7658625", "0.76571184", "0.7650075", "0.76476574", "0.7625474", "0.7623792", "0.7621357", "0.7582015", "0.74775916", "0.74656785", "0.7424877", "0.7423645", "0.7384076", "0.73215586", "0.7306271", "0.7286286", "0.72688353", "0.7258698", "0.7210708", "0.7192562", "0.7107885", "0.7104621", "0.7038758", "0.701369", "0.701369", "0.69629866", "0.6927608", "0.692207", "0.69208515", "0.68938124", "0.6858123", "0.684976", "0.6846449", "0.6830235", "0.6825922", "0.68016034", "0.6800864", "0.6791525", "0.6778742", "0.67324674", "0.673176", "0.67316306", "0.6729585", "0.67155087", "0.6714904", "0.67148", "0.66955864", "0.668878", "0.66879916", "0.66822165", "0.66821957", "0.66791916", "0.6673011", "0.6673011", "0.6668595", "0.66512465", "0.66507614", "0.66484874", "0.6636346", "0.6633876", "0.66313785", "0.66304046", "0.6622965", "0.66204447", "0.6618046", "0.6617173", "0.66125673", "0.66055393", "0.6603956", "0.66004616", "0.6600119", "0.6587435", "0.6580937", "0.6578089", "0.6569218", "0.656675", "0.65664583", "0.6565433", "0.6560722", "0.65606016", "0.6553194", "0.6553194", "0.65503496", "0.6549731", "0.6546909", "0.6544467", "0.65359867", "0.6531173" ]
0.0
-1
Key function returns key value.
func (e UpsertEventRequest_QuestionValidationError) Key() bool { return e.key }
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "func (m *KeyValue) GetKey()(*string) {\n return m.key\n}", "func (f binaryEqualsFunc) key() Key {\n\treturn f.k\n}", "func (m *KeyUint) Key() driver.Value { return driver.Value(m.ID) }", "func (m *OMap) Key(n int) string {\n\treturn m.keys[n]\n}", "func (t *Type) Key() *Type", "func (f nullFunc) key() Key {\n\treturn f.k\n}", "func (v Variable) Key() string {\n\treturn (string)(v)\n}", "func (i GinJwtSignAlgorithm) Key() string {\n\tif val, ok := _GinJwtSignAlgorithmValueToKeyMap[i]; ok {\n\t\t// found\n\t\treturn val\n\t} else {\n\t\t// not found\n\t\treturn \"\"\n\t}\n}", "func (g *Generator) GetKey(K string) interface{} {\n\treturn g.data[K]\n}", "func (m *SearchBucket) GetKey()(*string) {\n return m.key\n}", "func (f *Filter) getKey(key string) string {\n\tif f.HashKeys {\n\t\th := sha1.New()\n\t\ts := h.Sum([]byte(key))\n\t\treturn fmt.Sprintf(\"%x\", s)\n\t}\n\treturn key\n}", "func getKey(ing *extensions.Ingress, t *testing.T) string {\n\tkey, err := keyFunc(ing)\n\tif err != nil {\n\t\tt.Fatalf(\"Unexpected error getting key for Ingress %v: %v\", ing.Name, err)\n\t}\n\treturn key\n}", "func (f *field) Key() string {\n\treturn f.k\n}", "func (i GinBindType) Key() string {\n\tif val, ok := _GinBindTypeValueToKeyMap[i]; ok {\n\t\t// found\n\t\treturn val\n\t} else {\n\t\t// not found\n\t\treturn \"\"\n\t}\n}", "func (c Node) GetKey() string {\n\treturn c.key\n}", "func (m *RegistryKeyState) GetKey()(*string) {\n return m.key\n}", "func (akv StringKeyValue) Key() string {\n\treturn akv.orig.Key\n}", "func (a AddItem) Key() string { return string(a) }", "func (area *MineArea) GetKey() string {\n\treturn GetKey(area.X, area.Y)\n}", "func (d *Disk) getKey(p *DiskParams) []byte {\n\treturn []byte(time_util.TimeToName(time.Unix(p.ExicutionTime, 0), fmt.Sprintf(\"%x\", d.hasher.Sum(nil))))\n}", "func (e *OrderedMapElement[K, V]) Key() K {\n\treturn e.element.key\n}", "func getKey(cluster *clusteroperator.Cluster, t *testing.T) string {\n\tif key, err := controller.KeyFunc(cluster); err != nil {\n\t\tt.Errorf(\"Unexpected error getting key for Cluster %v: %v\", cluster.Name, err)\n\t\treturn \"\"\n\t} else {\n\t\treturn key\n\t}\n}", "func cacheKeyFunc(obj interface{}) (string, error) {\n\tkey := obj.(*cacheEntry).key\n\treturn key, nil\n}", "func (node *Node) Key() interface{} {\n\treturn fmt.Sprintf(\"%v\", node.contents)\n}", "func (s *Mem) Key(key interface{}) string {\n\treturn fmt.Sprintf(\"%v-%v\", s.prefix, key)\n}", "func (vrfs *VRFShare) GetKey() datastore.Key {\n\treturn datastore.ToKey(fmt.Sprintf(\"%v\", vrfs.Round))\n}", "func stringKeyFunc(obj interface{}) (string, error) {\n\tkey := obj.(*nodeidentity.Info).InstanceID\n\treturn key, nil\n}", "func (e Enum) GetKey(value any) string {\n\tfor k, v := range e {\n\t\tif reflect.DeepEqual(v, value) {\n\t\t\treturn k\n\t\t}\n\t}\n\treturn \"\"\n}", "func (m *Map) Key() Type { return m.key }", "func getKey(w http.ResponseWriter, ps httprouter.Params) (string, bool){\n\treturn ps.ByName(\"id\"), true\n}", "func (v *Value) GetKey() *string {\n\tret := C.zj_GetKey(v.V)\n\tif ret == nil {\n\t\treturn nil\n\t}\n\tretStr := C.GoString(ret)\n\treturn &retStr\n}", "func (f *Factor) Key() string { return f.ID }", "func (c *KeyValueChanger) Key() (string, error) {\n\tif c.err != nil {\n\t\treturn \"\", c.err\n\t}\n\treturn c.node.content.key().(string), nil\n}", "func (a DataNodeKV) Key() string {\n\treturn a.K\n}", "func GetKey(allkeys [][]byte, loc Where) []byte {\n\tif loc == Left {\n\t\treturn allkeys[0]\n\t}\n\tif loc == Right 
{\n\t\treturn allkeys[len(allkeys)-1]\n\t}\n\t// select a random index between 1 and allkeys-2\n\t// nolint:gosec\n\tidx := rand.Int()%(len(allkeys)-2) + 1\n\treturn allkeys[idx]\n}", "func KeyFunc(name, namespace string) string {\n\tif len(namespace) == 0 {\n\t\treturn name\n\t}\n\treturn namespace + \"/\" + name\n}", "func (it *Iterator) Key() string { return it.n.k }", "func (s *session) getKey() string {\n\treturn s.uuid\n}", "func (o SchedulingNodeAffinityOutput) Key() pulumi.StringPtrOutput {\n\treturn o.ApplyT(func(v SchedulingNodeAffinity) *string { return v.Key }).(pulumi.StringPtrOutput)\n}", "func (i SNSProtocol) Key() string {\n\tif val, ok := _SNSProtocolValueToKeyMap[i]; ok {\n\t\t// found\n\t\treturn val\n\t} else {\n\t\t// not found\n\t\treturn \"\"\n\t}\n}", "func (it *Iterator) Key() interface{} { return it.n.k }", "func getkey(key ...interface{}) interface{} {\n\tif len(key) > 0 {\n\t\treturn key[0]\n\t}\n\n\treturn nil\n}", "func (i SNSSubscribeAttribute) Key() string {\n\tif val, ok := _SNSSubscribeAttributeValueToKeyMap[i]; ok {\n\t\t// found\n\t\treturn val\n\t} else {\n\t\t// not found\n\t\treturn \"\"\n\t}\n}", "func (it *iterator) Key() []byte {\n\tif len(it.keys) > 0 {\n\t\treturn []byte(it.keys[0])\n\t}\n\treturn nil\n}", "func (this *DefaultHandler) GetKey(xesRedis redo.XesRedisBase) (ret string) {\n\tdefer func() {\n\t\tif xesRedis.GetCtx() == nil {\n\t\t\treturn\n\t\t}\n\t\tbench := xesRedis.GetCtx().Value(\"IS_BENCHMARK\")\n\t\tif cast.ToString(bench) == \"1\" {\n\t\t\tret = \"benchmark_\" + ret\n\t\t}\n\t}()\n\n\tkeyInfo := this.getKeyInfo(xesRedis)\n\tkey := cast.ToString(keyInfo[\"key\"])\n\tif key == \"\" {\n\t\tret = xesRedis.GetKeyName()\n\t\treturn\n\t}\n\tret = fmt.Sprintf(key, (xesRedis.GetKeyParams())...)\n\treturn\n}", "func (st *MemStorage) GetKey(gun, role string) (algorithm string, public []byte, err error) {\n\t// no need for lock. 
It's ok to return nil if an update\n\t// wasn't observed\n\tg, ok := st.keys[gun]\n\tif !ok {\n\t\treturn \"\", nil, &ErrNoKey{gun: gun}\n\t}\n\tk, ok := g[role]\n\tif !ok {\n\t\treturn \"\", nil, &ErrNoKey{gun: gun}\n\t}\n\n\treturn k.algorithm, k.public, nil\n}", "func (e *EntrySet) Get(key string) string {\n return e.keys[key]\n}", "func (v *V) Key() string {\n\treturn v.key\n}", "func (it *Iter) Key() byte { return it.top().key }", "func (s Stash) Key() string {\n\tvals := utils.MapValues(s.payload)\n\tif len(vals) < 1 {\n\t\treturn \"\"\n\t}\n\n\treturn fmt.Sprintf(\"$%s\", vals[0])\n}", "func (i SNSPlatformApplicationAttribute) Key() string {\n\tif val, ok := _SNSPlatformApplicationAttributeValueToKeyMap[i]; ok {\n\t\t// found\n\t\treturn val\n\t} else {\n\t\t// not found\n\t\treturn \"\"\n\t}\n}", "func (o Operator) Key() string {\n\treturn fmt.Sprintf(\"operator.%s\", o.Aid)\n}", "func (i *StringIterator) Key() Object {\n\treturn &Int{Value: int64(i.i - 1)}\n}", "func (mci *XMCacheIterator) Key() []byte {\n\tif mci.err != nil || mci.dir == dirReleased {\n\t\treturn nil\n\t}\n\tswitch mci.index {\n\tcase 0, 1:\n\t\treturn mci.iters[mci.index].Key()\n\tcase 2:\n\t\tif mci.mc.isPenetrate {\n\t\t\treturn mci.mIter.Key()\n\t\t}\n\t\treturn nil\n\t}\n\treturn nil\n}", "func (s *Arena) getKey(offset uint32, size uint16) []byte {\n\treturn s.data[offset : offset+uint32(size)]\n}", "func (o ReservationAffinityOutput) Key() pulumi.StringPtrOutput {\n\treturn o.ApplyT(func(v ReservationAffinity) *string { return v.Key }).(pulumi.StringPtrOutput)\n}", "func (f DefaultField) Key() string {\n\treturn f.K\n}", "func Key(v string) predicate.Blob {\n\treturn predicate.Blob(\n\t\tfunc(s *sql.Selector) {\n\t\t\ts.Where(sql.EQ(s.C(FieldKey), v))\n\t\t},\n\t)\n}", "func (m Match) Key() string {\n\treturn fmt.Sprintf(\"match:%s\", m.ID())\n}", "func (d *Activity) KeyVal() string {\n\treturn d.ExteralID\n}", "func (key twofishKey) Key() []byte {\n\treturn key[:]\n}", "func getKey(data string) string {\n\tsign := md5.Sum([]byte(data))\n\tsignStr := fmt.Sprintf(\"%x\", sign)\n\treturn signStr[:7]\n}", "func (l *LangPackStringPluralized) GetKey() (value string) {\n\tif l == nil {\n\t\treturn\n\t}\n\treturn l.Key\n}", "func (t Task) Key() string {\n\treturn fmt.Sprintf(\"%s:%s\", t.Name, t.ID)\n}", "func (k Keys) RangeKey() interface{} { return k[1] }", "func (d *DStarLite) keyFor(s *dStarLiteNode) key {\n\t/*\n\t procedure CalculateKey(s)\n\t {01”} return [min(g(s), rhs(s)) + h(s_start, s) + k_m; min(g(s), rhs(s))];\n\t*/\n\tk := key{1: math.Min(s.g, s.rhs)}\n\tk[0] = k[1] + d.heuristic(d.s.Node, s.Node) + d.keyModifier\n\treturn k\n}", "func (stateID StateID) Key() string {\n\treturn string(stateID.LastAppHash)\n}", "func (m *Metric) GetKey() string {\n\tif m == nil || m.Key == nil {\n\t\treturn \"\"\n\t}\n\treturn *m.Key\n}", "func (u User) Key() interface{} {\n\treturn u.ID\n}", "func (b *BitSet) Key() string {\n\tif b == nil {\n\t\treturn \"\"\n\t} else {\n\t\treturn string(b.Bits.Bytes())\n\t}\n}", "func (e EnumByte) Key() EnumByteKey {\n return EnumByteKey(e)\n}", "func (n *lnode) key() []byte {\n\tbuf := (*[maxAllocSize]byte)(unsafe.Pointer(n))\n\treturn buf[n.pos : n.pos+n.ksize]\n}", "func (p *pv) key() pvKey {\n\treturn newPVKey(p.Cluster, p.Name)\n}", "func (i *MapIterator) Key() Object {\n\tk := i.k[i.i-1]\n\treturn &String{Value: k}\n}", "func (k *KVItem) Key() (interface{}, error) {\n\tvar cKey unsafe.Pointer\n\tvar keySize C.uint64_t\n\tvar keyType C.tiledb_datatype_t\n\tret := 
C.tiledb_kv_item_get_key(k.context.tiledbContext, k.tiledbKVItem, &cKey, &keyType, &keySize)\n\n\tif ret != C.TILEDB_OK {\n\t\treturn nil, fmt.Errorf(\"Error getting key for KVItem: %s\", k.context.LastError())\n\t}\n\n\tswitch Datatype(keyType) {\n\tcase TILEDB_INT8:\n\t\t// If the key size is greater than the size of a single value in bytes it is an array\n\t\telements := int(keySize) / C.sizeof_int8_t\n\t\tif elements > 1 {\n\t\t\ttmpslice := (*[1 << 30]C.int8_t)(unsafe.Pointer(cKey))[:elements:elements]\n\t\t\tretSlice := make([]int8, elements)\n\t\t\tfor i, s := range tmpslice {\n\t\t\t\tretSlice[i] = int8(s)\n\t\t\t}\n\t\t\treturn retSlice, nil\n\t\t} else {\n\t\t\treturn int8(*(*C.int8_t)(cKey)), nil\n\t\t}\n\tcase TILEDB_INT16:\n\t\t// If the key size is greater than the size of a single value in bytes it is an array\n\t\telements := int(keySize) / C.sizeof_int16_t\n\t\tif elements > 1 {\n\t\t\ttmpslice := (*[1 << 30]C.int16_t)(unsafe.Pointer(cKey))[:elements:elements]\n\t\t\tretSlice := make([]int16, elements)\n\t\t\tfor i, s := range tmpslice {\n\t\t\t\tretSlice[i] = int16(s)\n\t\t\t}\n\t\t\treturn retSlice, nil\n\t\t} else {\n\t\t\treturn int16(*(*C.int16_t)(cKey)), nil\n\t\t}\n\tcase TILEDB_INT32:\n\t\t// If the key size is greater than the size of a single value in bytes it is an array\n\t\telements := int(keySize) / C.sizeof_int32_t\n\t\tif elements > 1 {\n\t\t\ttmpslice := (*[1 << 30]C.int32_t)(unsafe.Pointer(cKey))[:elements:elements]\n\t\t\tretSlice := make([]int32, elements)\n\t\t\tfor i, s := range tmpslice {\n\t\t\t\tretSlice[i] = int32(s)\n\t\t\t}\n\t\t\treturn retSlice, nil\n\t\t} else {\n\t\t\treturn int32(*(*C.int32_t)(cKey)), nil\n\t\t}\n\tcase TILEDB_INT64:\n\t\t// If the key size is greater than the size of a single value in bytes it is an array\n\t\telements := int(keySize) / C.sizeof_int64_t\n\t\tif elements > 1 {\n\t\t\ttmpslice := (*[1 << 30]C.int64_t)(unsafe.Pointer(cKey))[:elements:elements]\n\t\t\tretSlice := make([]int64, elements)\n\t\t\tfor i, s := range tmpslice {\n\t\t\t\tretSlice[i] = int64(s)\n\t\t\t}\n\t\t\treturn retSlice, nil\n\t\t} else {\n\t\t\treturn int64(*(*C.int64_t)(cKey)), nil\n\t\t}\n\tcase TILEDB_UINT8:\n\t\t// If the key size is greater than the size of a single value in bytes it is an array\n\t\telements := int(keySize) / C.sizeof_uint8_t\n\t\tif elements > 1 {\n\t\t\ttmpslice := (*[1 << 30]C.uint8_t)(unsafe.Pointer(cKey))[:elements:elements]\n\t\t\tretSlice := make([]uint8, elements)\n\t\t\tfor i, s := range tmpslice {\n\t\t\t\tretSlice[i] = uint8(s)\n\t\t\t}\n\t\t\treturn retSlice, nil\n\t\t} else {\n\t\t\treturn int8(*(*C.uint8_t)(cKey)), nil\n\t\t}\n\tcase TILEDB_UINT16:\n\t\t// If the key size is greater than the size of a single value in bytes it is an array\n\t\telements := int(keySize) / C.sizeof_uint16_t\n\t\tif elements > 1 {\n\t\t\ttmpslice := (*[1 << 30]C.uint16_t)(unsafe.Pointer(cKey))[:elements:elements]\n\t\t\tretSlice := make([]uint16, elements)\n\t\t\tfor i, s := range tmpslice {\n\t\t\t\tretSlice[i] = uint16(s)\n\t\t\t}\n\t\t\treturn retSlice, nil\n\t\t} else {\n\t\t\treturn int16(*(*C.uint16_t)(cKey)), nil\n\t\t}\n\tcase TILEDB_UINT32:\n\t\t// If the key size is greater than the size of a single value in bytes it is an array\n\t\telements := int(keySize) / C.sizeof_uint32_t\n\t\tif elements > 1 {\n\t\t\ttmpslice := (*[1 << 30]C.uint32_t)(unsafe.Pointer(cKey))[:elements:elements]\n\t\t\tretSlice := make([]uint32, elements)\n\t\t\tfor i, s := range tmpslice {\n\t\t\t\tretSlice[i] = uint32(s)\n\t\t\t}\n\t\t\treturn 
retSlice, nil\n\t\t} else {\n\t\t\treturn int32(*(*C.uint32_t)(cKey)), nil\n\t\t}\n\tcase TILEDB_UINT64:\n\t\t// If the key size is greater than the size of a single value in bytes it is an array\n\t\telements := int(keySize) / C.sizeof_uint64_t\n\t\tif elements > 1 {\n\t\t\ttmpslice := (*[1 << 30]C.uint64_t)(unsafe.Pointer(cKey))[:elements:elements]\n\t\t\tretSlice := make([]uint64, elements)\n\t\t\tfor i, s := range tmpslice {\n\t\t\t\tretSlice[i] = uint64(s)\n\t\t\t}\n\t\t\treturn retSlice, nil\n\t\t} else {\n\t\t\treturn int64(*(*C.uint64_t)(cKey)), nil\n\t\t}\n\tcase TILEDB_FLOAT32:\n\t\t// If the key size is greater than the size of a single value in bytes it is an array\n\t\telements := int(keySize) / C.sizeof_float\n\t\tif elements > 1 {\n\t\t\ttmpslice := (*[1 << 30]C.float)(unsafe.Pointer(cKey))[:elements:elements]\n\t\t\tretSlice := make([]float32, elements)\n\t\t\tfor i, s := range tmpslice {\n\t\t\t\tretSlice[i] = float32(s)\n\t\t\t}\n\t\t\treturn retSlice, nil\n\t\t} else {\n\t\t\treturn float32(*(*C.float)(cKey)), nil\n\t\t}\n\tcase TILEDB_FLOAT64:\n\t\t// If the key size is greater than the size of a single value in bytes it is an array\n\t\telements := int(keySize) / C.sizeof_double\n\t\tif elements > 1 {\n\t\t\ttmpslice := (*[1 << 30]C.double)(unsafe.Pointer(cKey))[:elements:elements]\n\t\t\tretSlice := make([]float64, elements)\n\t\t\tfor i, s := range tmpslice {\n\t\t\t\tretSlice[i] = float64(s)\n\t\t\t}\n\t\t\treturn retSlice, nil\n\t\t} else {\n\t\t\treturn float64(*(*C.double)(cKey)), nil\n\t\t}\n\tcase TILEDB_CHAR:\n\t\telements := int(keySize) / C.sizeof_char\n\t\treturn C.GoStringN((*C.char)(cKey), C.int(elements)), nil\n\n\tdefault:\n\t\treturn nil, fmt.Errorf(\"Unsupported tiledb key type: %v\", keyType)\n\t}\n\n\treturn nil, fmt.Errorf(\"Error getting key for KVItem\")\n}", "func (u Users) Key(luid *windows.LUID) (int64, error) {\r\n\tif luid == nil {\r\n\t\treturn 0, errors.New(\"got empty LUID pointer\")\r\n\t}\r\n\tkey := int64(int64(luid.HighPart<<32) + int64(luid.LowPart))\r\n\treturn key, nil\r\n}", "func (a *Anime) Key() string {\n\treturn fmt.Sprintf(\"anime:%d\", a.ID)\n}", "func (m MapEntry) Key() interface{} {\n\treturn m.key\n}", "func (f KeyMakerFunc) KeyFor(r *http.Request) string {\n\treturn f(r)\n}", "func (t *TimeSeries) GetKey() string {\n\treturn t.key\n}", "func (m *Map) Get(key string) string {\n\tif m.IsEmpty() {\n\t\treturn \"\"\n\t}\n\thash := m.hash([]byte(key))\n\tn := node{hash: hash, key: key}\n\titer := floor(&m.nodes.Tree, &n)\n\tif iter == m.nodes.End() {\n\t\titer = m.nodes.Begin()\n\t}\n\treturn iter.Node().Key.(*node).key\n}", "func (t *ScheduledTask) Key() string {\n\treturn fmt.Sprintf(taskKeyFormat, keyPrefixScheduled, t.ID, t.score)\n}", "func (it *iterator) Key() []byte {\n\treturn it.current.key\n}", "func (eln *EmptyLeafNode) GetKey() []byte {\n\treturn nil\n}", "func (h dataUsageHash) Key() string {\n\treturn string(h)\n}", "func (c *Container) Key() string {\n\tc.Lock()\n\tdefer c.Unlock()\n\treturn c.ID\n}", "func (c Repository) GetKey(key string) string {\n\tval, err := c.Client.Get(key).Result()\n\n\tif err != nil {\n\t\tlog.Println(err)\n\t}\n\n\treturn val\n}", "func (f Base) Key() string {\n\treturn f.key\n}", "func (o StudioComponentScriptParameterKeyValueOutput) Key() pulumi.StringPtrOutput {\n\treturn o.ApplyT(func(v StudioComponentScriptParameterKeyValue) *string { return v.Key }).(pulumi.StringPtrOutput)\n}", "func (o *ResourceDefinitionFilter) GetKey() string {\n\tif o == nil {\n\t\tvar ret 
string\n\t\treturn ret\n\t}\n\n\treturn o.Key\n}", "func (it *KeyAccess_Iterator) Key() interface{} {\n\treturn it.node.key\n}", "func (b Bucket) Key() interface{} {\n\treturn b[\"key\"]\n}", "func (m *Map) Get(key string) string {\n\tif m.IsEmpty() {\n\t\treturn \"\"\n\t}\n\n\thash := int(m.hash([]byte(key)))\n\n\t// Binary search for appropriate replica.\n\tidx := sort.Search(len(m.keys), func(i int) bool { return m.keys[i] >= hash })\n\n\t// Means we have cycled back to the first replica.\n\tif idx == len(m.keys) {\n\t\tidx = 0\n\t}\n\n\treturn m.hashMap[m.keys[idx]]\n}", "func (c *Counter) GetKey() string {\n\treturn c.key\n}", "func Key(id string, fallback string) Reference {\n\treturn key{id, fallback}\n}", "func (a *PositionalAttribute) Key() string {\n\treturn AttrPositionalIndex + strconv.Itoa(a.Index)\n}", "func (n *Node) Key() interface{} {\n\treturn n.key\n}", "func (e Timing) Key() string {\n\treturn e.Name\n}", "func Key(key string) query.Extractor {\n\treturn &keyExtractor{key}\n}", "func (i *Iterator) Key() []byte {\n\treturn i.iterator.Item().KeyCopy(nil)\n}", "func (m *Metric) Key() string {\n\treturn fmt.Sprintf(\"<%s%d%s>\", m.Name, m.Timestamp, m.Tags)\n}" ]
[ "0.7397974", "0.703695", "0.7026126", "0.69730234", "0.69701165", "0.69472975", "0.682121", "0.67752403", "0.6702173", "0.6691155", "0.66223186", "0.6602185", "0.66009104", "0.65937275", "0.65673846", "0.6555592", "0.65304273", "0.6521155", "0.6511681", "0.65062934", "0.64982766", "0.64867014", "0.6477575", "0.6462233", "0.6456774", "0.6456152", "0.6448241", "0.6435275", "0.6423325", "0.6412427", "0.64096636", "0.6403262", "0.6395327", "0.63929945", "0.6382585", "0.6378694", "0.63715774", "0.63671046", "0.635377", "0.63430053", "0.63418114", "0.6339266", "0.63258415", "0.6319039", "0.630293", "0.6300368", "0.6298253", "0.6296133", "0.6295445", "0.6281786", "0.6279424", "0.6277453", "0.6277033", "0.62735796", "0.6269087", "0.6262938", "0.62600297", "0.6259835", "0.6242855", "0.62427336", "0.6239893", "0.6226979", "0.62228185", "0.6216291", "0.62118614", "0.6209014", "0.62075627", "0.619765", "0.6197426", "0.61971486", "0.6196739", "0.6192416", "0.6191223", "0.6183839", "0.6179522", "0.6177141", "0.6172575", "0.61719537", "0.6170614", "0.6162783", "0.61570954", "0.6154456", "0.6152929", "0.615149", "0.61509156", "0.61395836", "0.6138672", "0.61365676", "0.613636", "0.61338246", "0.6133771", "0.6129422", "0.61284614", "0.612092", "0.6119081", "0.61121005", "0.611087", "0.6106958", "0.6106701", "0.61020154", "0.6100722" ]
0.0
-1
Validate checks the field values on UpsertEventRequest_Option with the rules defined in the proto definition for this message. If any rules are violated, an error is returned.
func (m *UpsertEventRequest_Option) Validate() error { if m == nil { return nil } if utf8.RuneCountInString(m.GetContent()) < 1 { return UpsertEventRequest_OptionValidationError{ field: "Content", reason: "value length must be at least 1 runes", } } return nil }
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "func (e UpsertEventRequest_OptionValidationError) Cause() error { return e.cause }", "func (m *UpsertEventRequest) Validate() error {\n\tif m == nil {\n\t\treturn nil\n\t}\n\n\t// no validation rules for Id\n\n\tif utf8.RuneCountInString(m.GetName()) < 1 {\n\t\treturn UpsertEventRequestValidationError{\n\t\t\tfield: \"Name\",\n\t\t\treason: \"value length must be at least 1 runes\",\n\t\t}\n\t}\n\n\tif utf8.RuneCountInString(m.GetParticipants()) < 4 {\n\t\treturn UpsertEventRequestValidationError{\n\t\t\tfield: \"Participants\",\n\t\t\treason: \"value length must be at least 4 runes\",\n\t\t}\n\t}\n\n\tfor idx, item := range m.GetSections() {\n\t\t_, _ = idx, item\n\n\t\tif v, ok := interface{}(item).(interface{ Validate() error }); ok {\n\t\t\tif err := v.Validate(); err != nil {\n\t\t\t\treturn UpsertEventRequestValidationError{\n\t\t\t\t\tfield: fmt.Sprintf(\"Sections[%v]\", idx),\n\t\t\t\t\treason: \"embedded message failed validation\",\n\t\t\t\t\tcause: err,\n\t\t\t\t}\n\t\t\t}\n\t\t}\n\n\t}\n\n\treturn nil\n}", "func (e UpsertEventRequest_OptionValidationError) Reason() string { return e.reason }", "func (m *UpsertEventRequest_Section) Validate() error {\n\tif m == nil {\n\t\treturn nil\n\t}\n\n\tif utf8.RuneCountInString(m.GetName()) < 1 {\n\t\treturn UpsertEventRequest_SectionValidationError{\n\t\t\tfield: \"Name\",\n\t\t\treason: \"value length must be at least 1 runes\",\n\t\t}\n\t}\n\n\tif utf8.RuneCountInString(m.GetDescription()) < 1 {\n\t\treturn UpsertEventRequest_SectionValidationError{\n\t\t\tfield: \"Description\",\n\t\t\treason: \"value length must be at least 1 runes\",\n\t\t}\n\t}\n\n\t// no validation rules for Position\n\n\tfor idx, item := range m.GetQuestions() {\n\t\t_, _ = idx, item\n\n\t\tif v, ok := interface{}(item).(interface{ Validate() error }); ok {\n\t\t\tif err := v.Validate(); err != nil {\n\t\t\t\treturn UpsertEventRequest_SectionValidationError{\n\t\t\t\t\tfield: fmt.Sprintf(\"Questions[%v]\", idx),\n\t\t\t\t\treason: \"embedded message failed validation\",\n\t\t\t\t\tcause: err,\n\t\t\t\t}\n\t\t\t}\n\t\t}\n\n\t}\n\n\treturn nil\n}", "func (opt OptUpsert) Option(d *bson.Document) error {\n\td.Append(bson.EC.Boolean(\"upsert\", bool(opt)))\n\treturn nil\n}", "func (s *TestBase) UpsertRequestCancelState(updatedInfo *p.WorkflowExecutionInfo, updatedStats *p.ExecutionStats, updatedVersionHistories *p.VersionHistories,\n\tcondition int64, upsertCancelInfos []*p.RequestCancelInfo) error {\n\treturn s.UpdateWorkflowExecutionWithRangeID(updatedInfo, updatedStats, updatedVersionHistories, nil, nil,\n\t\ts.ShardInfo.RangeID, condition, nil, nil, nil,\n\t\tnil, nil, nil, nil, upsertCancelInfos, nil,\n\t\tnil, nil, nil, \"\")\n}", "func (e UpsertEventRequestValidationError) Cause() error { return e.cause }", "func (req *UpsertRequest) Opts(opts UpsertOpts) *UpsertRequest {\n\treq.opts = opts\n\treturn req\n}", "func (uo *UpdateOptions) SetUpsert(b bool) *UpdateOptions {\n\tuo.Upsert = &b\n\treturn uo\n}", "func (opt EditPullRequestOption) Validate(c *Client) error {\n\tif len(opt.Title) != 0 && len(strings.TrimSpace(opt.Title)) == 0 {\n\t\treturn fmt.Errorf(\"title is empty\")\n\t}\n\tif len(opt.Base) != 0 {\n\t\tif err := c.checkServerVersionGreaterThanOrEqual(version1_12_0); err != nil {\n\t\t\treturn fmt.Errorf(\"can not change base gitea to old\")\n\t\t}\n\t}\n\treturn nil\n}", "func (u *UpdateOption) Valid() error {\n\tif u.CronExpression == nil && u.JobOtherInputs == nil {\n\t\treturn ErrUpdateOptionInvalid\n\t}\n\tif u.CronExpression != nil {\n\t\tif 
_, err := u.schedule(); err != nil {\n\t\t\treturn err\n\t\t}\n\t}\n\treturn nil\n}", "func (e UpsertEventRequest_OptionValidationError) Key() bool { return e.key }", "func (uom *UpdateOneModel) SetUpsert(upsert bool) *UpdateOneModel {\n\tuom.Upsert = &upsert\n\treturn uom\n}", "func (e UpsertEventResponseValidationError) Cause() error { return e.cause }", "func InputEvent(event cloudevents.Event) EventRecordOption {\n\tencodedEvent, err := json.Marshal(event)\n\tif err != nil {\n\t\treturn func(pod *corev1.Pod, client *testlib.Client) error {\n\t\t\treturn err\n\t\t}\n\t}\n\treturn envOption(\"INPUT_EVENT\", string(encodedEvent))\n}", "func (e GetEventByIDResponse_OptionValidationError) Cause() error { return e.cause }", "func ConvertUserEventRequestToUsageEvent(req CreateUserEventRequest) (*usageeventsv1.UsageEventOneOf, error) {\n\tswitch req.Event {\n\tcase bannerClickEvent:\n\t\treturn &usageeventsv1.UsageEventOneOf{Event: &usageeventsv1.UsageEventOneOf_UiBannerClick{\n\t\t\t\tUiBannerClick: &usageeventsv1.UIBannerClickEvent{\n\t\t\t\t\tAlert: req.Alert,\n\t\t\t\t},\n\t\t\t}},\n\t\t\tnil\n\n\tcase addFirstResourceClickEvent:\n\t\treturn &usageeventsv1.UsageEventOneOf{Event: &usageeventsv1.UsageEventOneOf_UiOnboardAddFirstResourceClick{\n\t\t\t\tUiOnboardAddFirstResourceClick: &usageeventsv1.UIOnboardAddFirstResourceClickEvent{},\n\t\t\t}},\n\t\t\tnil\n\n\tcase addFirstResourceLaterClickEvent:\n\t\treturn &usageeventsv1.UsageEventOneOf{Event: &usageeventsv1.UsageEventOneOf_UiOnboardAddFirstResourceLaterClick{\n\t\t\t\tUiOnboardAddFirstResourceLaterClick: &usageeventsv1.UIOnboardAddFirstResourceLaterClickEvent{},\n\t\t\t}},\n\t\t\tnil\n\n\tcase uiIntegrationEnrollStartEvent,\n\t\tuiIntegrationEnrollCompleteEvent:\n\n\t\tvar event IntegrationEnrollEventData\n\t\tif err := json.Unmarshal([]byte(*req.EventData), &event); err != nil {\n\t\t\treturn nil, trace.BadParameter(\"eventData is invalid: %v\", err)\n\t\t}\n\n\t\tkindEnum, ok := usageeventsv1.IntegrationEnrollKind_value[event.Kind]\n\t\tif !ok {\n\t\t\treturn nil, trace.BadParameter(\"invalid integration enroll kind %s\", event.Kind)\n\t\t}\n\n\t\tswitch req.Event {\n\t\tcase uiIntegrationEnrollStartEvent:\n\t\t\treturn &usageeventsv1.UsageEventOneOf{Event: &usageeventsv1.UsageEventOneOf_UiIntegrationEnrollStartEvent{\n\t\t\t\tUiIntegrationEnrollStartEvent: &usageeventsv1.UIIntegrationEnrollStartEvent{\n\t\t\t\t\tMetadata: &usageeventsv1.IntegrationEnrollMetadata{\n\t\t\t\t\t\tId: event.ID,\n\t\t\t\t\t\tKind: usageeventsv1.IntegrationEnrollKind(kindEnum),\n\t\t\t\t\t},\n\t\t\t\t},\n\t\t\t}}, nil\n\t\tcase uiIntegrationEnrollCompleteEvent:\n\t\t\treturn &usageeventsv1.UsageEventOneOf{Event: &usageeventsv1.UsageEventOneOf_UiIntegrationEnrollCompleteEvent{\n\t\t\t\tUiIntegrationEnrollCompleteEvent: &usageeventsv1.UIIntegrationEnrollCompleteEvent{\n\t\t\t\t\tMetadata: &usageeventsv1.IntegrationEnrollMetadata{\n\t\t\t\t\t\tId: event.ID,\n\t\t\t\t\t\tKind: usageeventsv1.IntegrationEnrollKind(kindEnum),\n\t\t\t\t\t},\n\t\t\t\t},\n\t\t\t}}, nil\n\t\t}\n\n\tcase 
uiDiscoverStartedEvent,\n\t\tuiDiscoverResourceSelectionEvent,\n\t\tuiDiscoverIntegrationAWSOIDCConnectEvent,\n\t\tuiDiscoverDatabaseRDSEnrollEvent,\n\t\tuiDiscoverDeployServiceEvent,\n\t\tuiDiscoverDatabaseRegisterEvent,\n\t\tuiDiscoverDatabaseConfigureMTLSEvent,\n\t\tuiDiscoverDatabaseConfigureIAMPolicyEvent,\n\t\tuiDiscoverDesktopActiveDirectoryToolsInstallEvent,\n\t\tuiDiscoverDesktopActiveDirectoryConfigureEvent,\n\t\tuiDiscoverAutoDiscoveredResourcesEvent,\n\t\tuiDiscoverPrincipalsConfigureEvent,\n\t\tuiDiscoverTestConnectionEvent,\n\t\tuiDiscoverCompletedEvent:\n\n\t\tvar discoverEvent DiscoverEventData\n\t\tif err := json.Unmarshal([]byte(*req.EventData), &discoverEvent); err != nil {\n\t\t\treturn nil, trace.BadParameter(\"eventData is invalid: %v\", err)\n\t\t}\n\n\t\tevent, err := discoverEvent.ToUsageEvent(req.Event)\n\t\tif err != nil {\n\t\t\treturn nil, trace.BadParameter(\"failed to convert eventData: %v\", err)\n\t\t}\n\t\treturn event, nil\n\n\tcase createNewRoleClickEvent:\n\t\treturn &usageeventsv1.UsageEventOneOf{Event: &usageeventsv1.UsageEventOneOf_UiCreateNewRoleClick{\n\t\t\t\tUiCreateNewRoleClick: &usageeventsv1.UICreateNewRoleClickEvent{},\n\t\t\t}},\n\t\t\tnil\n\n\tcase createNewRoleSaveClickEvent:\n\t\treturn &usageeventsv1.UsageEventOneOf{Event: &usageeventsv1.UsageEventOneOf_UiCreateNewRoleSaveClick{\n\t\t\t\tUiCreateNewRoleSaveClick: &usageeventsv1.UICreateNewRoleSaveClickEvent{},\n\t\t\t}},\n\t\t\tnil\n\n\tcase createNewRoleCancelClickEvent:\n\t\treturn &usageeventsv1.UsageEventOneOf{Event: &usageeventsv1.UsageEventOneOf_UiCreateNewRoleCancelClick{\n\t\t\t\tUiCreateNewRoleCancelClick: &usageeventsv1.UICreateNewRoleCancelClickEvent{},\n\t\t\t}},\n\t\t\tnil\n\n\tcase createNewRoleViewDocumentationClickEvent:\n\t\treturn &usageeventsv1.UsageEventOneOf{Event: &usageeventsv1.UsageEventOneOf_UiCreateNewRoleViewDocumentationClick{\n\t\t\t\tUiCreateNewRoleViewDocumentationClick: &usageeventsv1.UICreateNewRoleViewDocumentationClickEvent{},\n\t\t\t}},\n\t\t\tnil\n\n\tcase uiCallToActionClickEvent:\n\t\tvar cta int32\n\t\tif err := json.Unmarshal([]byte(*req.EventData), &cta); err != nil {\n\t\t\treturn nil, trace.BadParameter(\"eventData is invalid: %v\", err)\n\t\t}\n\n\t\treturn &usageeventsv1.UsageEventOneOf{Event: &usageeventsv1.UsageEventOneOf_UiCallToActionClickEvent{\n\t\t\t\tUiCallToActionClickEvent: &usageeventsv1.UICallToActionClickEvent{\n\t\t\t\t\tCta: usageeventsv1.CTA(cta),\n\t\t\t\t}}},\n\t\t\tnil\n\n\tcase questionnaireSubmitEvent:\n\t\treturn &usageeventsv1.UsageEventOneOf{Event: &usageeventsv1.UsageEventOneOf_UiOnboardQuestionnaireSubmit{\n\t\t\t\tUiOnboardQuestionnaireSubmit: &usageeventsv1.UIOnboardQuestionnaireSubmitEvent{},\n\t\t\t}},\n\t\t\tnil\n\n\tcase featureRecommendationEvent:\n\t\tevent := struct {\n\t\t\tFeature int32 `json:\"feature\"`\n\t\t\tFeatureRecommendationStatus int32 `json:\"featureRecommendationStatus\"`\n\t\t}{}\n\n\t\tif err := json.Unmarshal([]byte(*req.EventData), &event); err != nil {\n\t\t\treturn nil, trace.BadParameter(\"eventData is invalid: %v\", err)\n\t\t}\n\n\t\treturn &usageeventsv1.UsageEventOneOf{Event: &usageeventsv1.UsageEventOneOf_FeatureRecommendationEvent{\n\t\t\t\tFeatureRecommendationEvent: &usageeventsv1.FeatureRecommendationEvent{\n\t\t\t\t\tFeature: usageeventsv1.Feature(event.Feature),\n\t\t\t\t\tFeatureRecommendationStatus: usageeventsv1.FeatureRecommendationStatus(event.FeatureRecommendationStatus),\n\t\t\t\t},\n\t\t\t}},\n\t\t\tnil\n\t}\n\n\treturn nil, trace.BadParameter(\"invalid event %s\", 
req.Event)\n}", "func (req *UpsertObjectRequest) Opts(opts UpsertObjectOpts) *UpsertObjectRequest {\n\treq.opts = opts\n\treturn req\n}", "func (e UpsertEventRequestValidationError) Reason() string { return e.reason }", "func (ro *ReplaceOptions) SetUpsert(b bool) *ReplaceOptions {\n\tro.Upsert = &b\n\treturn ro\n}", "func (m *UpsertEventRequest_Question) Validate() error {\n\tif m == nil {\n\t\treturn nil\n\t}\n\n\tif utf8.RuneCountInString(m.GetContent()) < 1 {\n\t\treturn UpsertEventRequest_QuestionValidationError{\n\t\t\tfield: \"Content\",\n\t\t\treason: \"value length must be at least 1 runes\",\n\t\t}\n\t}\n\n\t// no validation rules for Position\n\n\t// no validation rules for Type\n\n\t// no validation rules for IsRequired\n\n\t// no validation rules for LimitedChoice\n\n\tfor idx, item := range m.GetOptions() {\n\t\t_, _ = idx, item\n\n\t\tif v, ok := interface{}(item).(interface{ Validate() error }); ok {\n\t\t\tif err := v.Validate(); err != nil {\n\t\t\t\treturn UpsertEventRequest_QuestionValidationError{\n\t\t\t\t\tfield: fmt.Sprintf(\"Options[%v]\", idx),\n\t\t\t\t\treason: \"embedded message failed validation\",\n\t\t\t\t\tcause: err,\n\t\t\t\t}\n\t\t\t}\n\t\t}\n\n\t}\n\n\treturn nil\n}", "func (o *Offer) Upsert(exec boil.Executor, updateOnConflict bool, conflictColumns []string, updateColumns, insertColumns boil.Columns) error {\n\tif o == nil {\n\t\treturn errors.New(\"stellarcore: no offers provided for upsert\")\n\t}\n\n\tif err := o.doBeforeUpsertHooks(exec); err != nil {\n\t\treturn err\n\t}\n\n\tnzDefaults := queries.NonZeroDefaultSet(offerColumnsWithDefault, o)\n\n\t// Build cache key in-line uglily - mysql vs psql problems\n\tbuf := strmangle.GetBuffer()\n\tif updateOnConflict {\n\t\tbuf.WriteByte('t')\n\t} else {\n\t\tbuf.WriteByte('f')\n\t}\n\tbuf.WriteByte('.')\n\tfor _, c := range conflictColumns {\n\t\tbuf.WriteString(c)\n\t}\n\tbuf.WriteByte('.')\n\tbuf.WriteString(strconv.Itoa(updateColumns.Kind))\n\tfor _, c := range updateColumns.Cols {\n\t\tbuf.WriteString(c)\n\t}\n\tbuf.WriteByte('.')\n\tbuf.WriteString(strconv.Itoa(insertColumns.Kind))\n\tfor _, c := range insertColumns.Cols {\n\t\tbuf.WriteString(c)\n\t}\n\tbuf.WriteByte('.')\n\tfor _, c := range nzDefaults {\n\t\tbuf.WriteString(c)\n\t}\n\tkey := buf.String()\n\tstrmangle.PutBuffer(buf)\n\n\tofferUpsertCacheMut.RLock()\n\tcache, cached := offerUpsertCache[key]\n\tofferUpsertCacheMut.RUnlock()\n\n\tvar err error\n\n\tif !cached {\n\t\tinsert, ret := insertColumns.InsertColumnSet(\n\t\t\tofferColumns,\n\t\t\tofferColumnsWithDefault,\n\t\t\tofferColumnsWithoutDefault,\n\t\t\tnzDefaults,\n\t\t)\n\t\tupdate := updateColumns.UpdateColumnSet(\n\t\t\tofferColumns,\n\t\t\tofferPrimaryKeyColumns,\n\t\t)\n\n\t\tif len(update) == 0 {\n\t\t\treturn errors.New(\"stellarcore: unable to upsert offers, could not build update column list\")\n\t\t}\n\n\t\tconflict := conflictColumns\n\t\tif len(conflict) == 0 {\n\t\t\tconflict = make([]string, len(offerPrimaryKeyColumns))\n\t\t\tcopy(conflict, offerPrimaryKeyColumns)\n\t\t}\n\t\tcache.query = buildUpsertQueryPostgres(dialect, \"\\\"offers\\\"\", updateOnConflict, ret, update, conflict, insert)\n\n\t\tcache.valueMapping, err = queries.BindMapping(offerType, offerMapping, insert)\n\t\tif err != nil {\n\t\t\treturn err\n\t\t}\n\t\tif len(ret) != 0 {\n\t\t\tcache.retMapping, err = queries.BindMapping(offerType, offerMapping, ret)\n\t\t\tif err != nil {\n\t\t\t\treturn err\n\t\t\t}\n\t\t}\n\t}\n\n\tvalue := reflect.Indirect(reflect.ValueOf(o))\n\tvals := 
queries.ValuesFromMapping(value, cache.valueMapping)\n\tvar returns []interface{}\n\tif len(cache.retMapping) != 0 {\n\t\treturns = queries.PtrsFromMapping(value, cache.retMapping)\n\t}\n\n\tif boil.DebugMode {\n\t\tfmt.Fprintln(boil.DebugWriter, cache.query)\n\t\tfmt.Fprintln(boil.DebugWriter, vals)\n\t}\n\n\tif len(cache.retMapping) != 0 {\n\t\terr = exec.QueryRow(cache.query, vals...).Scan(returns...)\n\t\tif err == sql.ErrNoRows {\n\t\t\terr = nil // Postgres doesn't return anything when there's no update\n\t\t}\n\t} else {\n\t\t_, err = exec.Exec(cache.query, vals...)\n\t}\n\tif err != nil {\n\t\treturn errors.Wrap(err, \"stellarcore: unable to upsert offers\")\n\t}\n\n\tif !cached {\n\t\tofferUpsertCacheMut.Lock()\n\t\tofferUpsertCache[key] = cache\n\t\tofferUpsertCacheMut.Unlock()\n\t}\n\n\treturn o.doAfterUpsertHooks(exec)\n}", "func WithUpsert() Option {\n\treturn func(o *Options) {\n\t\to.Upsert = true\n\t}\n}", "func ValidateOptions(options []*commonpb.KeyValuePair) error {\n\toptionMap := funcutil.KeyValuePair2Map(options)\n\t// StartTs should be int\n\t_, ok := optionMap[StartTs]\n\tvar startTs uint64\n\tvar endTs uint64 = math.MaxInt64\n\tvar err error\n\tif ok {\n\t\tstartTs, err = strconv.ParseUint(optionMap[StartTs], 10, 64)\n\t\tif err != nil {\n\t\t\treturn err\n\t\t}\n\t}\n\t// EndTs should be int\n\t_, ok = optionMap[EndTs]\n\tif ok {\n\t\tendTs, err = strconv.ParseUint(optionMap[EndTs], 10, 64)\n\t\tif err != nil {\n\t\t\treturn err\n\t\t}\n\t}\n\tif startTs > endTs {\n\t\treturn errors.New(\"start_ts shouldn't be larger than end_ts\")\n\t}\n\treturn nil\n}", "func (ocHandler *OrderConstraintsOverridesHandler) Upsert(pair *model.TradingPair, override *model.OrderConstraintsOverride) {\n\texistingOverride, exists := ocHandler.overrides[pair.String()]\n\tif !exists {\n\t\tocHandler.overrides[pair.String()] = override\n\t\treturn\n\t}\n\n\texistingOverride.Augment(override)\n\tocHandler.overrides[pair.String()] = existingOverride\n}", "func (m *GetEventByIDResponse_Option) Validate() error {\n\tif m == nil {\n\t\treturn nil\n\t}\n\n\t// no validation rules for Id\n\n\tif utf8.RuneCountInString(m.GetContent()) < 1 {\n\t\treturn GetEventByIDResponse_OptionValidationError{\n\t\t\tfield: \"Content\",\n\t\t\treason: \"value length must be at least 1 runes\",\n\t\t}\n\t}\n\n\treturn nil\n}", "func (m *UpsertEventResponse) Validate() error {\n\tif m == nil {\n\t\treturn nil\n\t}\n\n\t// no validation rules for Id\n\n\treturn nil\n}", "func (e UpsertEventRequest_SectionValidationError) Cause() error { return e.cause }", "func (o *CatalogProductCustomOptionRepositoryV1SavePutBody) Validate(formats strfmt.Registry) error {\n\tvar res []error\n\n\tif err := o.validateOption(formats); err != nil {\n\t\tres = append(res, err)\n\t}\n\n\tif len(res) > 0 {\n\t\treturn errors.CompositeValidationError(res...)\n\t}\n\treturn nil\n}", "func (mr *MockVirtualMeshCertificateSigningRequestClientMockRecorder) UpsertVirtualMeshCertificateSigningRequest(ctx, obj interface{}, transitionFuncs ...interface{}) *gomock.Call {\n\tmr.mock.ctrl.T.Helper()\n\tvarargs := append([]interface{}{ctx, obj}, transitionFuncs...)\n\treturn mr.mock.ctrl.RecordCallWithMethodType(mr.mock, \"UpsertVirtualMeshCertificateSigningRequest\", reflect.TypeOf((*MockVirtualMeshCertificateSigningRequestClient)(nil).UpsertVirtualMeshCertificateSigningRequest), varargs...)\n}", "func (e UpsertEventRequest_QuestionValidationError) Cause() error { return e.cause }", "func (o *CMFFamilyUserPoliciesTake) Upsert(ctx context.Context, 
exec boil.ContextExecutor, updateColumns, insertColumns boil.Columns) error {\n\tif o == nil {\n\t\treturn errors.New(\"models: no cmf_family_user_policies_take provided for upsert\")\n\t}\n\tif !boil.TimestampsAreSkipped(ctx) {\n\t\tcurrTime := time.Now().In(boil.GetLocation())\n\n\t\tif o.CreatedAt.IsZero() {\n\t\t\to.CreatedAt = currTime\n\t\t}\n\t\to.UpdatedAt = currTime\n\t}\n\n\tif err := o.doBeforeUpsertHooks(ctx, exec); err != nil {\n\t\treturn err\n\t}\n\n\tnzDefaults := queries.NonZeroDefaultSet(cmfFamilyUserPoliciesTakeColumnsWithDefault, o)\n\tnzUniques := queries.NonZeroDefaultSet(mySQLCMFFamilyUserPoliciesTakeUniqueColumns, o)\n\n\tif len(nzUniques) == 0 {\n\t\treturn errors.New(\"cannot upsert with a table that cannot conflict on a unique column\")\n\t}\n\n\t// Build cache key in-line uglily - mysql vs psql problems\n\tbuf := strmangle.GetBuffer()\n\tbuf.WriteString(strconv.Itoa(updateColumns.Kind))\n\tfor _, c := range updateColumns.Cols {\n\t\tbuf.WriteString(c)\n\t}\n\tbuf.WriteByte('.')\n\tbuf.WriteString(strconv.Itoa(insertColumns.Kind))\n\tfor _, c := range insertColumns.Cols {\n\t\tbuf.WriteString(c)\n\t}\n\tbuf.WriteByte('.')\n\tfor _, c := range nzDefaults {\n\t\tbuf.WriteString(c)\n\t}\n\tbuf.WriteByte('.')\n\tfor _, c := range nzUniques {\n\t\tbuf.WriteString(c)\n\t}\n\tkey := buf.String()\n\tstrmangle.PutBuffer(buf)\n\n\tcmfFamilyUserPoliciesTakeUpsertCacheMut.RLock()\n\tcache, cached := cmfFamilyUserPoliciesTakeUpsertCache[key]\n\tcmfFamilyUserPoliciesTakeUpsertCacheMut.RUnlock()\n\n\tvar err error\n\n\tif !cached {\n\t\tinsert, ret := insertColumns.InsertColumnSet(\n\t\t\tcmfFamilyUserPoliciesTakeAllColumns,\n\t\t\tcmfFamilyUserPoliciesTakeColumnsWithDefault,\n\t\t\tcmfFamilyUserPoliciesTakeColumnsWithoutDefault,\n\t\t\tnzDefaults,\n\t\t)\n\t\tupdate := updateColumns.UpdateColumnSet(\n\t\t\tcmfFamilyUserPoliciesTakeAllColumns,\n\t\t\tcmfFamilyUserPoliciesTakePrimaryKeyColumns,\n\t\t)\n\n\t\tif !updateColumns.IsNone() && len(update) == 0 {\n\t\t\treturn errors.New(\"models: unable to upsert cmf_family_user_policies_take, could not build update column list\")\n\t\t}\n\n\t\tret = strmangle.SetComplement(ret, nzUniques)\n\t\tcache.query = buildUpsertQueryMySQL(dialect, \"`cmf_family_user_policies_take`\", update, insert)\n\t\tcache.retQuery = fmt.Sprintf(\n\t\t\t\"SELECT %s FROM `cmf_family_user_policies_take` WHERE %s\",\n\t\t\tstrings.Join(strmangle.IdentQuoteSlice(dialect.LQ, dialect.RQ, ret), \",\"),\n\t\t\tstrmangle.WhereClause(\"`\", \"`\", 0, nzUniques),\n\t\t)\n\n\t\tcache.valueMapping, err = queries.BindMapping(cmfFamilyUserPoliciesTakeType, cmfFamilyUserPoliciesTakeMapping, insert)\n\t\tif err != nil {\n\t\t\treturn err\n\t\t}\n\t\tif len(ret) != 0 {\n\t\t\tcache.retMapping, err = queries.BindMapping(cmfFamilyUserPoliciesTakeType, cmfFamilyUserPoliciesTakeMapping, ret)\n\t\t\tif err != nil {\n\t\t\t\treturn err\n\t\t\t}\n\t\t}\n\t}\n\n\tvalue := reflect.Indirect(reflect.ValueOf(o))\n\tvals := queries.ValuesFromMapping(value, cache.valueMapping)\n\tvar returns []interface{}\n\tif len(cache.retMapping) != 0 {\n\t\treturns = queries.PtrsFromMapping(value, cache.retMapping)\n\t}\n\n\tif boil.IsDebug(ctx) {\n\t\twriter := boil.DebugWriterFrom(ctx)\n\t\tfmt.Fprintln(writer, cache.query)\n\t\tfmt.Fprintln(writer, vals)\n\t}\n\tresult, err := exec.ExecContext(ctx, cache.query, vals...)\n\n\tif err != nil {\n\t\treturn errors.Wrap(err, \"models: unable to upsert for cmf_family_user_policies_take\")\n\t}\n\n\tvar lastID int64\n\tvar uniqueMap []uint64\n\tvar 
nzUniqueCols []interface{}\n\n\tif len(cache.retMapping) == 0 {\n\t\tgoto CacheNoHooks\n\t}\n\n\tlastID, err = result.LastInsertId()\n\tif err != nil {\n\t\treturn ErrSyncFail\n\t}\n\n\to.ID = int(lastID)\n\tif lastID != 0 && len(cache.retMapping) == 1 && cache.retMapping[0] == cmfFamilyUserPoliciesTakeMapping[\"id\"] {\n\t\tgoto CacheNoHooks\n\t}\n\n\tuniqueMap, err = queries.BindMapping(cmfFamilyUserPoliciesTakeType, cmfFamilyUserPoliciesTakeMapping, nzUniques)\n\tif err != nil {\n\t\treturn errors.Wrap(err, \"models: unable to retrieve unique values for cmf_family_user_policies_take\")\n\t}\n\tnzUniqueCols = queries.ValuesFromMapping(reflect.Indirect(reflect.ValueOf(o)), uniqueMap)\n\n\tif boil.IsDebug(ctx) {\n\t\twriter := boil.DebugWriterFrom(ctx)\n\t\tfmt.Fprintln(writer, cache.retQuery)\n\t\tfmt.Fprintln(writer, nzUniqueCols...)\n\t}\n\terr = exec.QueryRowContext(ctx, cache.retQuery, nzUniqueCols...).Scan(returns...)\n\tif err != nil {\n\t\treturn errors.Wrap(err, \"models: unable to populate default values for cmf_family_user_policies_take\")\n\t}\n\nCacheNoHooks:\n\tif !cached {\n\t\tcmfFamilyUserPoliciesTakeUpsertCacheMut.Lock()\n\t\tcmfFamilyUserPoliciesTakeUpsertCache[key] = cache\n\t\tcmfFamilyUserPoliciesTakeUpsertCacheMut.Unlock()\n\t}\n\n\treturn o.doAfterUpsertHooks(ctx, exec)\n}", "func (e UpsertEventRequest_OptionValidationError) Field() string { return e.field }", "func (opt MergePullRequestOption) Validate(c *Client) error {\n\tif opt.Style == MergeStyleSquash {\n\t\tif err := c.checkServerVersionGreaterThanOrEqual(version1_11_5); err != nil {\n\t\t\treturn err\n\t\t}\n\t}\n\treturn nil\n}", "func (mr *MockVirtualMeshCertificateSigningRequestWriterMockRecorder) UpsertVirtualMeshCertificateSigningRequest(ctx, obj interface{}, transitionFuncs ...interface{}) *gomock.Call {\n\tmr.mock.ctrl.T.Helper()\n\tvarargs := append([]interface{}{ctx, obj}, transitionFuncs...)\n\treturn mr.mock.ctrl.RecordCallWithMethodType(mr.mock, \"UpsertVirtualMeshCertificateSigningRequest\", reflect.TypeOf((*MockVirtualMeshCertificateSigningRequestWriter)(nil).UpsertVirtualMeshCertificateSigningRequest), varargs...)\n}", "func (e *endpoint) SetSockOpt(opt interface{}) *tcpip.Error {\n\tswitch v := opt.(type) {\n\tcase tcpip.TimestampOption:\n\t\te.rcvMu.Lock()\n\t\te.rcvTimestamp = v != 0\n\t\te.rcvMu.Unlock()\n\t}\n\treturn nil\n}", "func (o *Vendor) Upsert(ctx context.Context, exec boil.ContextExecutor, updateColumns, insertColumns boil.Columns) error {\n\tif o == nil {\n\t\treturn errors.New(\"models: no vendors provided for upsert\")\n\t}\n\tif !boil.TimestampsAreSkipped(ctx) {\n\t\tcurrTime := time.Now().In(boil.GetLocation())\n\n\t\tif queries.MustTime(o.CreatedAt).IsZero() {\n\t\t\tqueries.SetScanner(&o.CreatedAt, currTime)\n\t\t}\n\t\tqueries.SetScanner(&o.UpdatedAt, currTime)\n\t}\n\n\tif err := o.doBeforeUpsertHooks(ctx, exec); err != nil {\n\t\treturn err\n\t}\n\n\tnzDefaults := queries.NonZeroDefaultSet(vendorColumnsWithDefault, o)\n\tnzUniques := queries.NonZeroDefaultSet(mySQLVendorUniqueColumns, o)\n\n\tif len(nzUniques) == 0 {\n\t\treturn errors.New(\"cannot upsert with a table that cannot conflict on a unique column\")\n\t}\n\n\t// Build cache key in-line uglily - mysql vs psql problems\n\tbuf := strmangle.GetBuffer()\n\tbuf.WriteString(strconv.Itoa(updateColumns.Kind))\n\tfor _, c := range updateColumns.Cols {\n\t\tbuf.WriteString(c)\n\t}\n\tbuf.WriteByte('.')\n\tbuf.WriteString(strconv.Itoa(insertColumns.Kind))\n\tfor _, c := range insertColumns.Cols 
{\n\t\tbuf.WriteString(c)\n\t}\n\tbuf.WriteByte('.')\n\tfor _, c := range nzDefaults {\n\t\tbuf.WriteString(c)\n\t}\n\tbuf.WriteByte('.')\n\tfor _, c := range nzUniques {\n\t\tbuf.WriteString(c)\n\t}\n\tkey := buf.String()\n\tstrmangle.PutBuffer(buf)\n\n\tvendorUpsertCacheMut.RLock()\n\tcache, cached := vendorUpsertCache[key]\n\tvendorUpsertCacheMut.RUnlock()\n\n\tvar err error\n\n\tif !cached {\n\t\tinsert, ret := insertColumns.InsertColumnSet(\n\t\t\tvendorColumns,\n\t\t\tvendorColumnsWithDefault,\n\t\t\tvendorColumnsWithoutDefault,\n\t\t\tnzDefaults,\n\t\t)\n\t\tupdate := updateColumns.UpdateColumnSet(\n\t\t\tvendorColumns,\n\t\t\tvendorPrimaryKeyColumns,\n\t\t)\n\n\t\tif len(update) == 0 {\n\t\t\treturn errors.New(\"models: unable to upsert vendors, could not build update column list\")\n\t\t}\n\n\t\tret = strmangle.SetComplement(ret, nzUniques)\n\t\tcache.query = buildUpsertQueryMySQL(dialect, \"vendors\", update, insert)\n\t\tcache.retQuery = fmt.Sprintf(\n\t\t\t\"SELECT %s FROM `vendors` WHERE %s\",\n\t\t\tstrings.Join(strmangle.IdentQuoteSlice(dialect.LQ, dialect.RQ, ret), \",\"),\n\t\t\tstrmangle.WhereClause(\"`\", \"`\", 0, nzUniques),\n\t\t)\n\n\t\tcache.valueMapping, err = queries.BindMapping(vendorType, vendorMapping, insert)\n\t\tif err != nil {\n\t\t\treturn err\n\t\t}\n\t\tif len(ret) != 0 {\n\t\t\tcache.retMapping, err = queries.BindMapping(vendorType, vendorMapping, ret)\n\t\t\tif err != nil {\n\t\t\t\treturn err\n\t\t\t}\n\t\t}\n\t}\n\n\tvalue := reflect.Indirect(reflect.ValueOf(o))\n\tvals := queries.ValuesFromMapping(value, cache.valueMapping)\n\tvar returns []interface{}\n\tif len(cache.retMapping) != 0 {\n\t\treturns = queries.PtrsFromMapping(value, cache.retMapping)\n\t}\n\n\tif boil.DebugMode {\n\t\tfmt.Fprintln(boil.DebugWriter, cache.query)\n\t\tfmt.Fprintln(boil.DebugWriter, vals)\n\t}\n\n\tresult, err := exec.ExecContext(ctx, cache.query, vals...)\n\n\tif err != nil {\n\t\treturn errors.Wrap(err, \"models: unable to upsert for vendors\")\n\t}\n\n\tvar lastID int64\n\tvar uniqueMap []uint64\n\tvar nzUniqueCols []interface{}\n\n\tif len(cache.retMapping) == 0 {\n\t\tgoto CacheNoHooks\n\t}\n\n\tlastID, err = result.LastInsertId()\n\tif err != nil {\n\t\treturn ErrSyncFail\n\t}\n\n\to.ID = int(lastID)\n\tif lastID != 0 && len(cache.retMapping) == 1 && cache.retMapping[0] == vendorMapping[\"id\"] {\n\t\tgoto CacheNoHooks\n\t}\n\n\tuniqueMap, err = queries.BindMapping(vendorType, vendorMapping, nzUniques)\n\tif err != nil {\n\t\treturn errors.Wrap(err, \"models: unable to retrieve unique values for vendors\")\n\t}\n\tnzUniqueCols = queries.ValuesFromMapping(reflect.Indirect(reflect.ValueOf(o)), uniqueMap)\n\n\tif boil.DebugMode {\n\t\tfmt.Fprintln(boil.DebugWriter, cache.retQuery)\n\t\tfmt.Fprintln(boil.DebugWriter, nzUniqueCols...)\n\t}\n\n\terr = exec.QueryRowContext(ctx, cache.retQuery, nzUniqueCols...).Scan(returns...)\n\tif err != nil {\n\t\treturn errors.Wrap(err, \"models: unable to populate default values for vendors\")\n\t}\n\nCacheNoHooks:\n\tif !cached {\n\t\tvendorUpsertCacheMut.Lock()\n\t\tvendorUpsertCache[key] = cache\n\t\tvendorUpsertCacheMut.Unlock()\n\t}\n\n\treturn o.doAfterUpsertHooks(ctx, exec)\n}", "func (d *DHCPv4) UpdateOption(opt Option) {\n\tif d.Options == nil {\n\t\td.Options = make(Options)\n\t}\n\td.Options.Update(opt)\n}", "func (a *Client) UpsertNetworkLocations(params *UpsertNetworkLocationsParams, opts ...ClientOption) (*UpsertNetworkLocationsOK, error) {\n\t// TODO: Validate the params before sending\n\tif params == nil 
{\n\t\tparams = NewUpsertNetworkLocationsParams()\n\t}\n\top := &runtime.ClientOperation{\n\t\tID: \"upsert-network-locations\",\n\t\tMethod: \"PUT\",\n\t\tPathPattern: \"/fwmgr/entities/network-locations/v1\",\n\t\tProducesMediaTypes: []string{\"application/json\"},\n\t\tConsumesMediaTypes: []string{\"application/json\"},\n\t\tSchemes: []string{\"https\"},\n\t\tParams: params,\n\t\tReader: &UpsertNetworkLocationsReader{formats: a.formats},\n\t\tContext: params.Context,\n\t\tClient: params.HTTPClient,\n\t}\n\tfor _, opt := range opts {\n\t\topt(op)\n\t}\n\n\tresult, err := a.transport.Submit(op)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\tsuccess, ok := result.(*UpsertNetworkLocationsOK)\n\tif ok {\n\t\treturn success, nil\n\t}\n\t// unexpected success response\n\t// safeguard: normally, absent a default response, unknown success responses return an error above: so this is a codegen issue\n\tmsg := fmt.Sprintf(\"unexpected success response for upsert-network-locations: API contract not enforced by server. Client expected to get an error, but got: %T\", result)\n\tpanic(msg)\n}", "func (o *MultiAddressesSet) Upsert(ctx context.Context, exec boil.ContextExecutor, updateOnConflict bool, conflictColumns []string, updateColumns, insertColumns boil.Columns) error {\n\tif o == nil {\n\t\treturn errors.New(\"models: no multi_addresses_sets provided for upsert\")\n\t}\n\tif !boil.TimestampsAreSkipped(ctx) {\n\t\tcurrTime := time.Now().In(boil.GetLocation())\n\n\t\tif o.CreatedAt.IsZero() {\n\t\t\to.CreatedAt = currTime\n\t\t}\n\t\to.UpdatedAt = currTime\n\t}\n\n\tif err := o.doBeforeUpsertHooks(ctx, exec); err != nil {\n\t\treturn err\n\t}\n\n\tnzDefaults := queries.NonZeroDefaultSet(multiAddressesSetColumnsWithDefault, o)\n\n\t// Build cache key in-line uglily - mysql vs psql problems\n\tbuf := strmangle.GetBuffer()\n\tif updateOnConflict {\n\t\tbuf.WriteByte('t')\n\t} else {\n\t\tbuf.WriteByte('f')\n\t}\n\tbuf.WriteByte('.')\n\tfor _, c := range conflictColumns {\n\t\tbuf.WriteString(c)\n\t}\n\tbuf.WriteByte('.')\n\tbuf.WriteString(strconv.Itoa(updateColumns.Kind))\n\tfor _, c := range updateColumns.Cols {\n\t\tbuf.WriteString(c)\n\t}\n\tbuf.WriteByte('.')\n\tbuf.WriteString(strconv.Itoa(insertColumns.Kind))\n\tfor _, c := range insertColumns.Cols {\n\t\tbuf.WriteString(c)\n\t}\n\tbuf.WriteByte('.')\n\tfor _, c := range nzDefaults {\n\t\tbuf.WriteString(c)\n\t}\n\tkey := buf.String()\n\tstrmangle.PutBuffer(buf)\n\n\tmultiAddressesSetUpsertCacheMut.RLock()\n\tcache, cached := multiAddressesSetUpsertCache[key]\n\tmultiAddressesSetUpsertCacheMut.RUnlock()\n\n\tvar err error\n\n\tif !cached {\n\t\tinsert, ret := insertColumns.InsertColumnSet(\n\t\t\tmultiAddressesSetAllColumns,\n\t\t\tmultiAddressesSetColumnsWithDefault,\n\t\t\tmultiAddressesSetColumnsWithoutDefault,\n\t\t\tnzDefaults,\n\t\t)\n\t\tupdate := updateColumns.UpdateColumnSet(\n\t\t\tmultiAddressesSetAllColumns,\n\t\t\tmultiAddressesSetPrimaryKeyColumns,\n\t\t)\n\n\t\tif updateOnConflict && len(update) == 0 {\n\t\t\treturn errors.New(\"models: unable to upsert multi_addresses_sets, could not build update column list\")\n\t\t}\n\n\t\tconflict := conflictColumns\n\t\tif len(conflict) == 0 {\n\t\t\tconflict = make([]string, len(multiAddressesSetPrimaryKeyColumns))\n\t\t\tcopy(conflict, multiAddressesSetPrimaryKeyColumns)\n\t\t}\n\t\tcache.query = buildUpsertQueryPostgres(dialect, \"\\\"multi_addresses_sets\\\"\", updateOnConflict, ret, update, conflict, insert)\n\n\t\tcache.valueMapping, err = queries.BindMapping(multiAddressesSetType, 
multiAddressesSetMapping, insert)\n\t\tif err != nil {\n\t\t\treturn err\n\t\t}\n\t\tif len(ret) != 0 {\n\t\t\tcache.retMapping, err = queries.BindMapping(multiAddressesSetType, multiAddressesSetMapping, ret)\n\t\t\tif err != nil {\n\t\t\t\treturn err\n\t\t\t}\n\t\t}\n\t}\n\n\tvalue := reflect.Indirect(reflect.ValueOf(o))\n\tvals := queries.ValuesFromMapping(value, cache.valueMapping)\n\tvar returns []interface{}\n\tif len(cache.retMapping) != 0 {\n\t\treturns = queries.PtrsFromMapping(value, cache.retMapping)\n\t}\n\n\tif boil.IsDebug(ctx) {\n\t\twriter := boil.DebugWriterFrom(ctx)\n\t\tfmt.Fprintln(writer, cache.query)\n\t\tfmt.Fprintln(writer, vals)\n\t}\n\tif len(cache.retMapping) != 0 {\n\t\terr = exec.QueryRowContext(ctx, cache.query, vals...).Scan(returns...)\n\t\tif err == sql.ErrNoRows {\n\t\t\terr = nil // Postgres doesn't return anything when there's no update\n\t\t}\n\t} else {\n\t\t_, err = exec.ExecContext(ctx, cache.query, vals...)\n\t}\n\tif err != nil {\n\t\treturn errors.Wrap(err, \"models: unable to upsert multi_addresses_sets\")\n\t}\n\n\tif !cached {\n\t\tmultiAddressesSetUpsertCacheMut.Lock()\n\t\tmultiAddressesSetUpsertCache[key] = cache\n\t\tmultiAddressesSetUpsertCacheMut.Unlock()\n\t}\n\n\treturn o.doAfterUpsertHooks(ctx, exec)\n}", "func (umm *UpdateManyModel) SetUpsert(upsert bool) *UpdateManyModel {\n\tumm.Upsert = &upsert\n\treturn umm\n}", "func NormalizeVoteOption(option string) string {\n\tswitch option {\n\tcase \"Yes\", \"yes\":\n\t\treturn v1beta1.OptionYes.String()\n\n\tcase \"Abstain\", \"abstain\":\n\t\treturn v1beta1.OptionAbstain.String()\n\n\tcase \"No\", \"no\":\n\t\treturn v1beta1.OptionNo.String()\n\n\tcase \"NoWithVeto\", \"no_with_veto\":\n\t\treturn v1beta1.OptionNoWithVeto.String()\n\n\tdefault:\n\t\treturn option\n\t}\n}", "func WithEvent(value string) OptFn {\n\treturn func(o *Opt) {\n\t\to.event = value\n\t}\n}", "func (m *PhasePriceOverride) Validate(formats strfmt.Registry) error {\n\tvar res []error\n\n\tif err := m.validateUsagePriceOverrides(formats); err != nil {\n\t\t// prop\n\t\tres = append(res, err)\n\t}\n\n\tif len(res) > 0 {\n\t\treturn errors.CompositeValidationError(res...)\n\t}\n\treturn nil\n}", "func OplogAlwaysUpsert(alwaysUpsert bool) Option {\n\treturn func(args *Options) {\n\t\targs.OplogAlwaysUpsert = &alwaysUpsert\n\t}\n}", "func (m *AddonUpdateRequest) Validate(formats strfmt.Registry) error {\n\tvar res []error\n\n\tif err := m.validateChargeType(formats); err != nil {\n\t\t// prop\n\t\tres = append(res, err)\n\t}\n\n\tif err := m.validatePeriodUnit(formats); err != nil {\n\t\t// prop\n\t\tres = append(res, err)\n\t}\n\n\tif err := m.validateType(formats); err != nil {\n\t\t// prop\n\t\tres = append(res, err)\n\t}\n\n\tif len(res) > 0 {\n\t\treturn errors.CompositeValidationError(res...)\n\t}\n\treturn nil\n}", "func (fc *FakeCollection) Upsert(col string, value interface{}, opts *gocb.UpsertOptions) (*gocb.MutationResult, error) {\n\tif fc.Force == \"error\" {\n\t\treturn &gocb.MutationResult{}, errors.New(\"Forced collection upsert error\")\n\t}\n\treturn &gocb.MutationResult{}, nil\n}", "func (req *CreateItemRequest) ToUpsertItemSpec() *spec.UpsertItemSpec {\n\tvar upsertItemSpec spec.UpsertItemSpec\n\tupsertItemSpec.Name = req.Name\n\tupsertItemSpec.Description = req.Description\n\tupsertItemSpec.Tags = req.Tags\n\n\treturn &upsertItemSpec\n}", "func (o ProfileOptionMatchesRule) Validate(v interface{}) error {\n\toption, ok := v.(isQueryRequest_Options)\n\tif !ok {\n\t\treturn fmt.Errorf(\"query request option 
is not a query request option\")\n\t}\n\n\tswitch o.mode {\n\tcase QueryRequest_MODE_SINGLE_UNSPECIFIED:\n\t\tif _, ok := option.(*QueryRequest_Single); !ok {\n\t\t\treturn fmt.Errorf(\"invalid option for mode\")\n\t\t}\n\t\treturn nil\n\tcase QueryRequest_MODE_DIFF:\n\t\tif _, ok := option.(*QueryRequest_Diff); !ok {\n\t\t\treturn fmt.Errorf(\"invalid option for mode\")\n\t\t}\n\t\treturn nil\n\tcase QueryRequest_MODE_MERGE:\n\t\tif _, ok := option.(*QueryRequest_Merge); !ok {\n\t\t\treturn fmt.Errorf(\"invalid option for mode\")\n\t\t}\n\t\treturn nil\n\tdefault:\n\t\treturn fmt.Errorf(\"invalid query request mode\")\n\t}\n}", "func (o *Utxo) Upsert(ctx context.Context, exec boil.ContextExecutor, updateOnConflict bool, conflictColumns []string, updateColumns, insertColumns boil.Columns) error {\n\tif o == nil {\n\t\treturn errors.New(\"models: no utxo provided for upsert\")\n\t}\n\n\tif err := o.doBeforeUpsertHooks(ctx, exec); err != nil {\n\t\treturn err\n\t}\n\n\tnzDefaults := queries.NonZeroDefaultSet(utxoColumnsWithDefault, o)\n\n\t// Build cache key in-line uglily - mysql vs psql problems\n\tbuf := strmangle.GetBuffer()\n\tif updateOnConflict {\n\t\tbuf.WriteByte('t')\n\t} else {\n\t\tbuf.WriteByte('f')\n\t}\n\tbuf.WriteByte('.')\n\tfor _, c := range conflictColumns {\n\t\tbuf.WriteString(c)\n\t}\n\tbuf.WriteByte('.')\n\tbuf.WriteString(strconv.Itoa(updateColumns.Kind))\n\tfor _, c := range updateColumns.Cols {\n\t\tbuf.WriteString(c)\n\t}\n\tbuf.WriteByte('.')\n\tbuf.WriteString(strconv.Itoa(insertColumns.Kind))\n\tfor _, c := range insertColumns.Cols {\n\t\tbuf.WriteString(c)\n\t}\n\tbuf.WriteByte('.')\n\tfor _, c := range nzDefaults {\n\t\tbuf.WriteString(c)\n\t}\n\tkey := buf.String()\n\tstrmangle.PutBuffer(buf)\n\n\tutxoUpsertCacheMut.RLock()\n\tcache, cached := utxoUpsertCache[key]\n\tutxoUpsertCacheMut.RUnlock()\n\n\tvar err error\n\n\tif !cached {\n\t\tinsert, ret := insertColumns.InsertColumnSet(\n\t\t\tutxoAllColumns,\n\t\t\tutxoColumnsWithDefault,\n\t\t\tutxoColumnsWithoutDefault,\n\t\t\tnzDefaults,\n\t\t)\n\t\tupdate := updateColumns.UpdateColumnSet(\n\t\t\tutxoAllColumns,\n\t\t\tutxoPrimaryKeyColumns,\n\t\t)\n\n\t\tif updateOnConflict && len(update) == 0 {\n\t\t\treturn errors.New(\"models: unable to upsert utxo, could not build update column list\")\n\t\t}\n\n\t\tconflict := conflictColumns\n\t\tif len(conflict) == 0 {\n\t\t\tconflict = make([]string, len(utxoPrimaryKeyColumns))\n\t\t\tcopy(conflict, utxoPrimaryKeyColumns)\n\t\t}\n\t\tcache.query = buildUpsertQueryPostgres(dialect, \"\\\"utxo\\\"\", updateOnConflict, ret, update, conflict, insert)\n\n\t\tcache.valueMapping, err = queries.BindMapping(utxoType, utxoMapping, insert)\n\t\tif err != nil {\n\t\t\treturn err\n\t\t}\n\t\tif len(ret) != 0 {\n\t\t\tcache.retMapping, err = queries.BindMapping(utxoType, utxoMapping, ret)\n\t\t\tif err != nil {\n\t\t\t\treturn err\n\t\t\t}\n\t\t}\n\t}\n\n\tvalue := reflect.Indirect(reflect.ValueOf(o))\n\tvals := queries.ValuesFromMapping(value, cache.valueMapping)\n\tvar returns []interface{}\n\tif len(cache.retMapping) != 0 {\n\t\treturns = queries.PtrsFromMapping(value, cache.retMapping)\n\t}\n\n\tif boil.DebugMode {\n\t\tfmt.Fprintln(boil.DebugWriter, cache.query)\n\t\tfmt.Fprintln(boil.DebugWriter, vals)\n\t}\n\n\tif len(cache.retMapping) != 0 {\n\t\terr = exec.QueryRowContext(ctx, cache.query, vals...).Scan(returns...)\n\t\tif err == sql.ErrNoRows {\n\t\t\terr = nil // Postgres doesn't return anything when there's no update\n\t\t}\n\t} else {\n\t\t_, err = 
exec.ExecContext(ctx, cache.query, vals...)\n\t}\n\tif err != nil {\n\t\treturn errors.Wrap(err, \"models: unable to upsert utxo\")\n\t}\n\n\tif !cached {\n\t\tutxoUpsertCacheMut.Lock()\n\t\tutxoUpsertCache[key] = cache\n\t\tutxoUpsertCacheMut.Unlock()\n\t}\n\n\treturn o.doAfterUpsertHooks(ctx, exec)\n}", "func ParseOption(c *caddy.Controller, options *Options) bool {\n\tv := c.Val()\n\tswitch v {\n\tcase \"command\":\n\t\targs := c.RemainingArgs()\n\t\tif len(args) == 1 {\n\t\t\toptions.Command = args[0]\n\t\t} else {\n\t\t\tlog.Printf(\"Option 'command' expects 1 argument\\n\")\n\t\t\treturn false\n\t\t}\n\t\tbreak\n\tcase \"args\":\n\t\toptions.Args = c.RemainingArgs()\n\t\tbreak\n\tcase \"dir\":\n\t\targs := c.RemainingArgs()\n\t\tif len(args) == 1 {\n\t\t\toptions.Dir = args[0]\n\t\t} else {\n\t\t\tlog.Printf(\"Option 'dir' expects 1 argument\\n\")\n\t\t\treturn false\n\t\t}\n\t\tbreak\n\tcase \"redirect_stdout\":\n\t\tif c.NextArg() {\n\t\t\toptions.RedirectStdout = c.Val()\n\t\t} else {\n\t\t\toptions.RedirectStdout = \"stdout\"\n\t\t}\n\t\tbreak\n\tcase \"redirect_stderr\":\n\t\tif c.NextArg() {\n\t\t\toptions.RedirectStderr = c.Val()\n\t\t} else {\n\t\t\toptions.RedirectStderr = \"stderr\"\n\t\t}\n\t\tbreak\n\tcase \"restart_policy\":\n\t\targs := c.RemainingArgs()\n\t\tif len(args) == 1 {\n\t\t\tswitch args[0] {\n\t\t\tcase \"always\":\n\t\t\t\toptions.RestartPolicy = RestartAlways\n\t\t\t\tbreak\n\t\t\tcase \"on_failure\":\n\t\t\t\toptions.RestartPolicy = RestartOnFailure\n\t\t\t\tbreak\n\t\t\tcase \"never\":\n\t\t\t\toptions.RestartPolicy = RestartNever\n\t\t\t\tbreak\n\t\t\tdefault:\n\t\t\t\toptions.RestartPolicy = RestartNever\n\t\t\t\tlog.Printf(\"Invalid 'restart' option %v\\n\", options.RestartPolicy)\n\t\t\t\treturn false\n\t\t\t}\n\t\t} else {\n\t\t\tlog.Printf(\"Option 'restart' expects 1 argument\\n\")\n\t\t\treturn false\n\t\t}\n\tcase \"termination_grace_period\":\n\t\targs := c.RemainingArgs()\n\t\tif len(args) == 1 {\n\t\t\tperiod, err := time.ParseDuration(args[0])\n\t\t\tif err == nil {\n\t\t\t\toptions.TerminationGracePeriod = period\n\t\t\t} else {\n\t\t\t\tlog.Printf(\"Invalid 'termination_grace_period' value %v\\n\", args[0])\n\t\t\t\treturn false\n\t\t\t}\n\t\t} else {\n\t\t\tlog.Printf(\"Option 'termination_grace_period' expects 1 argument\\n\")\n\t\t\treturn false\n\t\t}\n\tcase \"env\":\n\t\targs := c.RemainingArgs()\n\t\tif len(args) == 2 {\n\t\t\toptions.Env = append(options.Env, args[0]+\"=\"+args[1])\n\t\t} else if len(args) == 1 && strings.Contains(args[0], \"=\") {\n\t\t\toptions.Env = append(options.Env, args[0])\n\t\t} else {\n\t\t\tlog.Printf(\"Option 'env' expects 2 argument in format KEY VALUE or 1 argument in format KEY=VALUE\\n\")\n\t\t\treturn false\n\t\t}\n\t\tbreak\n\tcase \"replicas\":\n\t\targs := c.RemainingArgs()\n\t\tif len(args) == 1 {\n\t\t\treplicas, err := strconv.Atoi(args[0])\n\t\t\tif err == nil {\n\t\t\t\toptions.Replicas = replicas\n\t\t\t} else {\n\t\t\t\tlog.Printf(\"Invalid 'replicas' value %v\\n\", args[0])\n\t\t\t\treturn false\n\t\t\t}\n\t\t} else {\n\t\t\tlog.Printf(\"Option 'replicas' expects 1 argument\\n\")\n\t\t\treturn false\n\t\t}\n\t\tbreak\n\t}\n\n\treturn true\n}", "func (o *Employee) Upsert(ctx context.Context, exec boil.ContextExecutor, updateOnConflict bool, conflictColumns []string, updateColumns, insertColumns boil.Columns) error {\n\tif o == nil {\n\t\treturn errors.New(\"models: no employee provided for upsert\")\n\t}\n\n\tif err := o.doBeforeUpsertHooks(ctx, exec); err != nil {\n\t\treturn 
err\n\t}\n\n\tnzDefaults := queries.NonZeroDefaultSet(employeeColumnsWithDefault, o)\n\n\t// Build cache key in-line uglily - mysql vs psql problems\n\tbuf := strmangle.GetBuffer()\n\tif updateOnConflict {\n\t\tbuf.WriteByte('t')\n\t} else {\n\t\tbuf.WriteByte('f')\n\t}\n\tbuf.WriteByte('.')\n\tfor _, c := range conflictColumns {\n\t\tbuf.WriteString(c)\n\t}\n\tbuf.WriteByte('.')\n\tbuf.WriteString(strconv.Itoa(updateColumns.Kind))\n\tfor _, c := range updateColumns.Cols {\n\t\tbuf.WriteString(c)\n\t}\n\tbuf.WriteByte('.')\n\tbuf.WriteString(strconv.Itoa(insertColumns.Kind))\n\tfor _, c := range insertColumns.Cols {\n\t\tbuf.WriteString(c)\n\t}\n\tbuf.WriteByte('.')\n\tfor _, c := range nzDefaults {\n\t\tbuf.WriteString(c)\n\t}\n\tkey := buf.String()\n\tstrmangle.PutBuffer(buf)\n\n\temployeeUpsertCacheMut.RLock()\n\tcache, cached := employeeUpsertCache[key]\n\temployeeUpsertCacheMut.RUnlock()\n\n\tvar err error\n\n\tif !cached {\n\t\tinsert, ret := insertColumns.InsertColumnSet(\n\t\t\temployeeAllColumns,\n\t\t\temployeeColumnsWithDefault,\n\t\t\temployeeColumnsWithoutDefault,\n\t\t\tnzDefaults,\n\t\t)\n\t\tupdate := updateColumns.UpdateColumnSet(\n\t\t\temployeeAllColumns,\n\t\t\temployeePrimaryKeyColumns,\n\t\t)\n\n\t\tif updateOnConflict && len(update) == 0 {\n\t\t\treturn errors.New(\"models: unable to upsert employee, could not build update column list\")\n\t\t}\n\n\t\tconflict := conflictColumns\n\t\tif len(conflict) == 0 {\n\t\t\tconflict = make([]string, len(employeePrimaryKeyColumns))\n\t\t\tcopy(conflict, employeePrimaryKeyColumns)\n\t\t}\n\t\tcache.query = buildUpsertQueryPostgres(dialect, \"\\\"employee\\\"\", updateOnConflict, ret, update, conflict, insert)\n\n\t\tcache.valueMapping, err = queries.BindMapping(employeeType, employeeMapping, insert)\n\t\tif err != nil {\n\t\t\treturn err\n\t\t}\n\t\tif len(ret) != 0 {\n\t\t\tcache.retMapping, err = queries.BindMapping(employeeType, employeeMapping, ret)\n\t\t\tif err != nil {\n\t\t\t\treturn err\n\t\t\t}\n\t\t}\n\t}\n\n\tvalue := reflect.Indirect(reflect.ValueOf(o))\n\tvals := queries.ValuesFromMapping(value, cache.valueMapping)\n\tvar returns []interface{}\n\tif len(cache.retMapping) != 0 {\n\t\treturns = queries.PtrsFromMapping(value, cache.retMapping)\n\t}\n\n\tif boil.IsDebug(ctx) {\n\t\twriter := boil.DebugWriterFrom(ctx)\n\t\tfmt.Fprintln(writer, cache.query)\n\t\tfmt.Fprintln(writer, vals)\n\t}\n\tif len(cache.retMapping) != 0 {\n\t\terr = exec.QueryRowContext(ctx, cache.query, vals...).Scan(returns...)\n\t\tif err == sql.ErrNoRows {\n\t\t\terr = nil // Postgres doesn't return anything when there's no update\n\t\t}\n\t} else {\n\t\t_, err = exec.ExecContext(ctx, cache.query, vals...)\n\t}\n\tif err != nil {\n\t\treturn errors.Wrap(err, \"models: unable to upsert employee\")\n\t}\n\n\tif !cached {\n\t\temployeeUpsertCacheMut.Lock()\n\t\temployeeUpsertCache[key] = cache\n\t\temployeeUpsertCacheMut.Unlock()\n\t}\n\n\treturn o.doAfterUpsertHooks(ctx, exec)\n}", "func (m *RequestEmailUpdateRequest) Validate(formats strfmt.Registry) error {\n\tvar res []error\n\n\tif err := m.validateEmail(formats); err != nil {\n\t\tres = append(res, err)\n\t}\n\n\tif err := m.validateID(formats); err != nil {\n\t\tres = append(res, err)\n\t}\n\n\tif len(res) > 0 {\n\t\treturn errors.CompositeValidationError(res...)\n\t}\n\treturn nil\n}", "func ValidateUpdateRequest(message *taskspb.UpdateRequest) (err error) {\n\tif message.Task == nil {\n\t\terr = goa.MergeErrors(err, goa.MissingFieldError(\"task\", \"message\"))\n\t}\n\tif message.Task != 
nil {\n\t\tif err2 := ValidateStoredTask(message.Task); err2 != nil {\n\t\t\terr = goa.MergeErrors(err, err2)\n\t\t}\n\t}\n\treturn\n}", "func (o *Peer) Upsert(ctx context.Context, exec boil.ContextExecutor, updateColumns, insertColumns boil.Columns) error {\n\tif o == nil {\n\t\treturn errors.New(\"model: no peers provided for upsert\")\n\t}\n\tif !boil.TimestampsAreSkipped(ctx) {\n\t\tcurrTime := time.Now().In(boil.GetLocation())\n\n\t\tif queries.MustTime(o.CreatedAt).IsZero() {\n\t\t\tqueries.SetScanner(&o.CreatedAt, currTime)\n\t\t}\n\t\tqueries.SetScanner(&o.UpdatedAt, currTime)\n\t}\n\n\tif err := o.doBeforeUpsertHooks(ctx, exec); err != nil {\n\t\treturn err\n\t}\n\n\tnzDefaults := queries.NonZeroDefaultSet(peerColumnsWithDefault, o)\n\tnzUniques := queries.NonZeroDefaultSet(mySQLPeerUniqueColumns, o)\n\n\tif len(nzUniques) == 0 {\n\t\treturn errors.New(\"cannot upsert with a table that cannot conflict on a unique column\")\n\t}\n\n\t// Build cache key in-line uglily - mysql vs psql problems\n\tbuf := strmangle.GetBuffer()\n\tbuf.WriteString(strconv.Itoa(updateColumns.Kind))\n\tfor _, c := range updateColumns.Cols {\n\t\tbuf.WriteString(c)\n\t}\n\tbuf.WriteByte('.')\n\tbuf.WriteString(strconv.Itoa(insertColumns.Kind))\n\tfor _, c := range insertColumns.Cols {\n\t\tbuf.WriteString(c)\n\t}\n\tbuf.WriteByte('.')\n\tfor _, c := range nzDefaults {\n\t\tbuf.WriteString(c)\n\t}\n\tbuf.WriteByte('.')\n\tfor _, c := range nzUniques {\n\t\tbuf.WriteString(c)\n\t}\n\tkey := buf.String()\n\tstrmangle.PutBuffer(buf)\n\n\tpeerUpsertCacheMut.RLock()\n\tcache, cached := peerUpsertCache[key]\n\tpeerUpsertCacheMut.RUnlock()\n\n\tvar err error\n\n\tif !cached {\n\t\tinsert, ret := insertColumns.InsertColumnSet(\n\t\t\tpeerAllColumns,\n\t\t\tpeerColumnsWithDefault,\n\t\t\tpeerColumnsWithoutDefault,\n\t\t\tnzDefaults,\n\t\t)\n\t\tupdate := updateColumns.UpdateColumnSet(\n\t\t\tpeerAllColumns,\n\t\t\tpeerPrimaryKeyColumns,\n\t\t)\n\n\t\tif len(update) == 0 {\n\t\t\treturn errors.New(\"model: unable to upsert peers, could not build update column list\")\n\t\t}\n\n\t\tret = strmangle.SetComplement(ret, nzUniques)\n\t\tcache.query = buildUpsertQueryMySQL(dialect, \"peers\", update, insert)\n\t\tcache.retQuery = fmt.Sprintf(\n\t\t\t\"SELECT %s FROM `peers` WHERE %s\",\n\t\t\tstrings.Join(strmangle.IdentQuoteSlice(dialect.LQ, dialect.RQ, ret), \",\"),\n\t\t\tstrmangle.WhereClause(\"`\", \"`\", 0, nzUniques),\n\t\t)\n\n\t\tcache.valueMapping, err = queries.BindMapping(peerType, peerMapping, insert)\n\t\tif err != nil {\n\t\t\treturn err\n\t\t}\n\t\tif len(ret) != 0 {\n\t\t\tcache.retMapping, err = queries.BindMapping(peerType, peerMapping, ret)\n\t\t\tif err != nil {\n\t\t\t\treturn err\n\t\t\t}\n\t\t}\n\t}\n\n\tvalue := reflect.Indirect(reflect.ValueOf(o))\n\tvals := queries.ValuesFromMapping(value, cache.valueMapping)\n\tvar returns []interface{}\n\tif len(cache.retMapping) != 0 {\n\t\treturns = queries.PtrsFromMapping(value, cache.retMapping)\n\t}\n\n\tif boil.IsDebug(ctx) {\n\t\twriter := boil.DebugWriterFrom(ctx)\n\t\tfmt.Fprintln(writer, cache.query)\n\t\tfmt.Fprintln(writer, vals)\n\t}\n\tresult, err := exec.ExecContext(ctx, cache.query, vals...)\n\n\tif err != nil {\n\t\treturn errors.Wrap(err, \"model: unable to upsert for peers\")\n\t}\n\n\tvar lastID int64\n\tvar uniqueMap []uint64\n\tvar nzUniqueCols []interface{}\n\n\tif len(cache.retMapping) == 0 {\n\t\tgoto CacheNoHooks\n\t}\n\n\tlastID, err = result.LastInsertId()\n\tif err != nil {\n\t\treturn ErrSyncFail\n\t}\n\n\to.ID = uint(lastID)\n\tif 
lastID != 0 && len(cache.retMapping) == 1 && cache.retMapping[0] == peerMapping[\"id\"] {\n\t\tgoto CacheNoHooks\n\t}\n\n\tuniqueMap, err = queries.BindMapping(peerType, peerMapping, nzUniques)\n\tif err != nil {\n\t\treturn errors.Wrap(err, \"model: unable to retrieve unique values for peers\")\n\t}\n\tnzUniqueCols = queries.ValuesFromMapping(reflect.Indirect(reflect.ValueOf(o)), uniqueMap)\n\n\tif boil.IsDebug(ctx) {\n\t\twriter := boil.DebugWriterFrom(ctx)\n\t\tfmt.Fprintln(writer, cache.retQuery)\n\t\tfmt.Fprintln(writer, nzUniqueCols...)\n\t}\n\terr = exec.QueryRowContext(ctx, cache.retQuery, nzUniqueCols...).Scan(returns...)\n\tif err != nil {\n\t\treturn errors.Wrap(err, \"model: unable to populate default values for peers\")\n\t}\n\nCacheNoHooks:\n\tif !cached {\n\t\tpeerUpsertCacheMut.Lock()\n\t\tpeerUpsertCache[key] = cache\n\t\tpeerUpsertCacheMut.Unlock()\n\t}\n\n\treturn o.doAfterUpsertHooks(ctx, exec)\n}", "func (m *FreeIpaUpscaleV1Request) Validate(formats strfmt.Registry) error {\n\tvar res []error\n\n\tif err := m.validateEnvironmentCrn(formats); err != nil {\n\t\tres = append(res, err)\n\t}\n\n\tif err := m.validateTargetAvailabilityType(formats); err != nil {\n\t\tres = append(res, err)\n\t}\n\n\tif len(res) > 0 {\n\t\treturn errors.CompositeValidationError(res...)\n\t}\n\treturn nil\n}", "func (m *LolPlayerLevelUpPlayerLevelUpEvent) Validate(formats strfmt.Registry) error {\n\treturn nil\n}", "func (o *PeerProperty) Upsert(ctx context.Context, exec boil.ContextExecutor, updateOnConflict bool, conflictColumns []string, updateColumns, insertColumns boil.Columns) error {\n\tif o == nil {\n\t\treturn errors.New(\"models: no peer_properties provided for upsert\")\n\t}\n\tif !boil.TimestampsAreSkipped(ctx) {\n\t\tcurrTime := time.Now().In(boil.GetLocation())\n\n\t\tif o.CreatedAt.IsZero() {\n\t\t\to.CreatedAt = currTime\n\t\t}\n\t\to.UpdatedAt = currTime\n\t}\n\n\tif err := o.doBeforeUpsertHooks(ctx, exec); err != nil {\n\t\treturn err\n\t}\n\n\tnzDefaults := queries.NonZeroDefaultSet(peerPropertyColumnsWithDefault, o)\n\n\t// Build cache key in-line uglily - mysql vs psql problems\n\tbuf := strmangle.GetBuffer()\n\tif updateOnConflict {\n\t\tbuf.WriteByte('t')\n\t} else {\n\t\tbuf.WriteByte('f')\n\t}\n\tbuf.WriteByte('.')\n\tfor _, c := range conflictColumns {\n\t\tbuf.WriteString(c)\n\t}\n\tbuf.WriteByte('.')\n\tbuf.WriteString(strconv.Itoa(updateColumns.Kind))\n\tfor _, c := range updateColumns.Cols {\n\t\tbuf.WriteString(c)\n\t}\n\tbuf.WriteByte('.')\n\tbuf.WriteString(strconv.Itoa(insertColumns.Kind))\n\tfor _, c := range insertColumns.Cols {\n\t\tbuf.WriteString(c)\n\t}\n\tbuf.WriteByte('.')\n\tfor _, c := range nzDefaults {\n\t\tbuf.WriteString(c)\n\t}\n\tkey := buf.String()\n\tstrmangle.PutBuffer(buf)\n\n\tpeerPropertyUpsertCacheMut.RLock()\n\tcache, cached := peerPropertyUpsertCache[key]\n\tpeerPropertyUpsertCacheMut.RUnlock()\n\n\tvar err error\n\n\tif !cached {\n\t\tinsert, ret := insertColumns.InsertColumnSet(\n\t\t\tpeerPropertyAllColumns,\n\t\t\tpeerPropertyColumnsWithDefault,\n\t\t\tpeerPropertyColumnsWithoutDefault,\n\t\t\tnzDefaults,\n\t\t)\n\t\tupdate := updateColumns.UpdateColumnSet(\n\t\t\tpeerPropertyAllColumns,\n\t\t\tpeerPropertyPrimaryKeyColumns,\n\t\t)\n\n\t\tif updateOnConflict && len(update) == 0 {\n\t\t\treturn errors.New(\"models: unable to upsert peer_properties, could not build update column list\")\n\t\t}\n\n\t\tconflict := conflictColumns\n\t\tif len(conflict) == 0 {\n\t\t\tconflict = make([]string, 
len(peerPropertyPrimaryKeyColumns))\n\t\t\tcopy(conflict, peerPropertyPrimaryKeyColumns)\n\t\t}\n\t\tcache.query = buildUpsertQueryPostgres(dialect, \"\\\"peer_properties\\\"\", updateOnConflict, ret, update, conflict, insert)\n\n\t\tcache.valueMapping, err = queries.BindMapping(peerPropertyType, peerPropertyMapping, insert)\n\t\tif err != nil {\n\t\t\treturn err\n\t\t}\n\t\tif len(ret) != 0 {\n\t\t\tcache.retMapping, err = queries.BindMapping(peerPropertyType, peerPropertyMapping, ret)\n\t\t\tif err != nil {\n\t\t\t\treturn err\n\t\t\t}\n\t\t}\n\t}\n\n\tvalue := reflect.Indirect(reflect.ValueOf(o))\n\tvals := queries.ValuesFromMapping(value, cache.valueMapping)\n\tvar returns []interface{}\n\tif len(cache.retMapping) != 0 {\n\t\treturns = queries.PtrsFromMapping(value, cache.retMapping)\n\t}\n\n\tif boil.IsDebug(ctx) {\n\t\twriter := boil.DebugWriterFrom(ctx)\n\t\tfmt.Fprintln(writer, cache.query)\n\t\tfmt.Fprintln(writer, vals)\n\t}\n\tif len(cache.retMapping) != 0 {\n\t\terr = exec.QueryRowContext(ctx, cache.query, vals...).Scan(returns...)\n\t\tif err == sql.ErrNoRows {\n\t\t\terr = nil // Postgres doesn't return anything when there's no update\n\t\t}\n\t} else {\n\t\t_, err = exec.ExecContext(ctx, cache.query, vals...)\n\t}\n\tif err != nil {\n\t\treturn errors.Wrap(err, \"models: unable to upsert peer_properties\")\n\t}\n\n\tif !cached {\n\t\tpeerPropertyUpsertCacheMut.Lock()\n\t\tpeerPropertyUpsertCache[key] = cache\n\t\tpeerPropertyUpsertCacheMut.Unlock()\n\t}\n\n\treturn o.doAfterUpsertHooks(ctx, exec)\n}", "func (e UpsertEventResponseValidationError) Reason() string { return e.reason }", "func (m *PayoutLocationUpdateRequest) Validate(formats strfmt.Registry) error {\n\tvar res []error\n\n\tif err := m.validateID(formats); err != nil {\n\t\tres = append(res, err)\n\t}\n\n\tif err := m.validateUpdatedPayload(formats); err != nil {\n\t\tres = append(res, err)\n\t}\n\n\tif len(res) > 0 {\n\t\treturn errors.CompositeValidationError(res...)\n\t}\n\treturn nil\n}", "func (m *SecurityPolicyUpdateParams) Validate(formats strfmt.Registry) error {\n\tvar res []error\n\n\tif err := m.validateApplyTo(formats); err != nil {\n\t\tres = append(res, err)\n\t}\n\n\tif err := m.validateEgress(formats); err != nil {\n\t\tres = append(res, err)\n\t}\n\n\tif err := m.validateIngress(formats); err != nil {\n\t\tres = append(res, err)\n\t}\n\n\tif err := m.validatePolicyMode(formats); err != nil {\n\t\tres = append(res, err)\n\t}\n\n\tif len(res) > 0 {\n\t\treturn errors.CompositeValidationError(res...)\n\t}\n\treturn nil\n}", "func (o *Email) Upsert(ctx context.Context, exec boil.ContextExecutor, updateColumns, insertColumns boil.Columns) error {\n\tif o == nil {\n\t\treturn errors.New(\"mysql: no email provided for upsert\")\n\t}\n\n\tif err := o.doBeforeUpsertHooks(ctx, exec); err != nil {\n\t\treturn err\n\t}\n\n\tnzDefaults := queries.NonZeroDefaultSet(emailColumnsWithDefault, o)\n\tnzUniques := queries.NonZeroDefaultSet(mySQLEmailUniqueColumns, o)\n\n\tif len(nzUniques) == 0 {\n\t\treturn errors.New(\"cannot upsert with a table that cannot conflict on a unique column\")\n\t}\n\n\t// Build cache key in-line uglily - mysql vs psql problems\n\tbuf := strmangle.GetBuffer()\n\tbuf.WriteString(strconv.Itoa(updateColumns.Kind))\n\tfor _, c := range updateColumns.Cols {\n\t\tbuf.WriteString(c)\n\t}\n\tbuf.WriteByte('.')\n\tbuf.WriteString(strconv.Itoa(insertColumns.Kind))\n\tfor _, c := range insertColumns.Cols {\n\t\tbuf.WriteString(c)\n\t}\n\tbuf.WriteByte('.')\n\tfor _, c := range nzDefaults 
{\n\t\tbuf.WriteString(c)\n\t}\n\tbuf.WriteByte('.')\n\tfor _, c := range nzUniques {\n\t\tbuf.WriteString(c)\n\t}\n\tkey := buf.String()\n\tstrmangle.PutBuffer(buf)\n\n\temailUpsertCacheMut.RLock()\n\tcache, cached := emailUpsertCache[key]\n\temailUpsertCacheMut.RUnlock()\n\n\tvar err error\n\n\tif !cached {\n\t\tinsert, ret := insertColumns.InsertColumnSet(\n\t\t\temailColumns,\n\t\t\temailColumnsWithDefault,\n\t\t\temailColumnsWithoutDefault,\n\t\t\tnzDefaults,\n\t\t)\n\t\tupdate := updateColumns.UpdateColumnSet(\n\t\t\temailColumns,\n\t\t\temailPrimaryKeyColumns,\n\t\t)\n\n\t\tif len(update) == 0 {\n\t\t\treturn errors.New(\"mysql: unable to upsert email, could not build update column list\")\n\t\t}\n\n\t\tret = strmangle.SetComplement(ret, nzUniques)\n\t\tcache.query = buildUpsertQueryMySQL(dialect, \"email\", update, insert)\n\t\tcache.retQuery = fmt.Sprintf(\n\t\t\t\"SELECT %s FROM `email` WHERE %s\",\n\t\t\tstrings.Join(strmangle.IdentQuoteSlice(dialect.LQ, dialect.RQ, ret), \",\"),\n\t\t\tstrmangle.WhereClause(\"`\", \"`\", 0, nzUniques),\n\t\t)\n\n\t\tcache.valueMapping, err = queries.BindMapping(emailType, emailMapping, insert)\n\t\tif err != nil {\n\t\t\treturn err\n\t\t}\n\t\tif len(ret) != 0 {\n\t\t\tcache.retMapping, err = queries.BindMapping(emailType, emailMapping, ret)\n\t\t\tif err != nil {\n\t\t\t\treturn err\n\t\t\t}\n\t\t}\n\t}\n\n\tvalue := reflect.Indirect(reflect.ValueOf(o))\n\tvals := queries.ValuesFromMapping(value, cache.valueMapping)\n\tvar returns []interface{}\n\tif len(cache.retMapping) != 0 {\n\t\treturns = queries.PtrsFromMapping(value, cache.retMapping)\n\t}\n\n\tif boil.DebugMode {\n\t\tfmt.Fprintln(boil.DebugWriter, cache.query)\n\t\tfmt.Fprintln(boil.DebugWriter, vals)\n\t}\n\n\tresult, err := exec.ExecContext(ctx, cache.query, vals...)\n\n\tif err != nil {\n\t\treturn errors.Wrap(err, \"mysql: unable to upsert for email\")\n\t}\n\n\tvar lastID int64\n\tvar uniqueMap []uint64\n\tvar nzUniqueCols []interface{}\n\n\tif len(cache.retMapping) == 0 {\n\t\tgoto CacheNoHooks\n\t}\n\n\tlastID, err = result.LastInsertId()\n\tif err != nil {\n\t\treturn ErrSyncFail\n\t}\n\n\to.ID = int64(lastID)\n\tif lastID != 0 && len(cache.retMapping) == 1 && cache.retMapping[0] == emailMapping[\"id\"] {\n\t\tgoto CacheNoHooks\n\t}\n\n\tuniqueMap, err = queries.BindMapping(emailType, emailMapping, nzUniques)\n\tif err != nil {\n\t\treturn errors.Wrap(err, \"mysql: unable to retrieve unique values for email\")\n\t}\n\tnzUniqueCols = queries.ValuesFromMapping(reflect.Indirect(reflect.ValueOf(o)), uniqueMap)\n\n\tif boil.DebugMode {\n\t\tfmt.Fprintln(boil.DebugWriter, cache.retQuery)\n\t\tfmt.Fprintln(boil.DebugWriter, nzUniqueCols...)\n\t}\n\n\terr = exec.QueryRowContext(ctx, cache.retQuery, nzUniqueCols...).Scan(returns...)\n\tif err != nil {\n\t\treturn errors.Wrap(err, \"mysql: unable to populate default values for email\")\n\t}\n\nCacheNoHooks:\n\tif !cached {\n\t\temailUpsertCacheMut.Lock()\n\t\temailUpsertCache[key] = cache\n\t\temailUpsertCacheMut.Unlock()\n\t}\n\n\treturn o.doAfterUpsertHooks(ctx, exec)\n}", "func (v UpdateReputationProfileActionRequest) Validate() error {\n\treturn validation.Errors{\n\t\t\"ConfigID\": validation.Validate(v.ConfigID, validation.Required),\n\t\t\"Version\": validation.Validate(v.Version, validation.Required),\n\t\t\"PolicyID\": validation.Validate(v.PolicyID, validation.Required),\n\t\t\"ReputationProfileID\": validation.Validate(v.ReputationProfileID, validation.Required),\n\t}.Filter()\n}", "func MergeUpdateOptions(opts 
...*UpdateOptions) *UpdateOptions {\n\tuOpts := Update()\n\tfor _, uo := range opts {\n\t\tif uo == nil {\n\t\t\tcontinue\n\t\t}\n\t\tif uo.ArrayFilters != nil {\n\t\t\tuOpts.ArrayFilters = uo.ArrayFilters\n\t\t}\n\t\tif uo.BypassDocumentValidation != nil {\n\t\t\tuOpts.BypassDocumentValidation = uo.BypassDocumentValidation\n\t\t}\n\t\tif uo.Upsert != nil {\n\t\t\tuOpts.Upsert = uo.Upsert\n\t\t}\n\t}\n\n\treturn uOpts\n}", "func (m *RuleConfigManager) Upsert(key string, r *RuleConfig, opts ...RequestOption) (err error) {\n\treturn m.Request(\"PUT\", m.URI(\"rules-configs\", key), r, opts...)\n}", "func (e GetEventByIDResponse_OptionValidationError) Reason() string { return e.reason }", "func (e UpsertEventRequestValidationError) Key() bool { return e.key }", "func (o *Origin) Upsert(ctx context.Context, exec boil.ContextExecutor, updateOnConflict bool, conflictColumns []string, updateColumns, insertColumns boil.Columns) error {\n\tif o == nil {\n\t\treturn errors.New(\"models: no origins provided for upsert\")\n\t}\n\tif !boil.TimestampsAreSkipped(ctx) {\n\t\tcurrTime := time.Now().In(boil.GetLocation())\n\n\t\tif queries.MustTime(o.CreatedAt).IsZero() {\n\t\t\tqueries.SetScanner(&o.CreatedAt, currTime)\n\t\t}\n\t\tqueries.SetScanner(&o.UpdatedAt, currTime)\n\t}\n\n\tif err := o.doBeforeUpsertHooks(ctx, exec); err != nil {\n\t\treturn err\n\t}\n\n\tnzDefaults := queries.NonZeroDefaultSet(originColumnsWithDefault, o)\n\n\t// Build cache key in-line uglily - mysql vs psql problems\n\tbuf := strmangle.GetBuffer()\n\tif updateOnConflict {\n\t\tbuf.WriteByte('t')\n\t} else {\n\t\tbuf.WriteByte('f')\n\t}\n\tbuf.WriteByte('.')\n\tfor _, c := range conflictColumns {\n\t\tbuf.WriteString(c)\n\t}\n\tbuf.WriteByte('.')\n\tbuf.WriteString(strconv.Itoa(updateColumns.Kind))\n\tfor _, c := range updateColumns.Cols {\n\t\tbuf.WriteString(c)\n\t}\n\tbuf.WriteByte('.')\n\tbuf.WriteString(strconv.Itoa(insertColumns.Kind))\n\tfor _, c := range insertColumns.Cols {\n\t\tbuf.WriteString(c)\n\t}\n\tbuf.WriteByte('.')\n\tfor _, c := range nzDefaults {\n\t\tbuf.WriteString(c)\n\t}\n\tkey := buf.String()\n\tstrmangle.PutBuffer(buf)\n\n\toriginUpsertCacheMut.RLock()\n\tcache, cached := originUpsertCache[key]\n\toriginUpsertCacheMut.RUnlock()\n\n\tvar err error\n\n\tif !cached {\n\t\tinsert, ret := insertColumns.InsertColumnSet(\n\t\t\toriginColumns,\n\t\t\toriginColumnsWithDefault,\n\t\t\toriginColumnsWithoutDefault,\n\t\t\tnzDefaults,\n\t\t)\n\t\tupdate := updateColumns.UpdateColumnSet(\n\t\t\toriginColumns,\n\t\t\toriginPrimaryKeyColumns,\n\t\t)\n\n\t\tif updateOnConflict && len(update) == 0 {\n\t\t\treturn errors.New(\"models: unable to upsert origins, could not build update column list\")\n\t\t}\n\n\t\tconflict := conflictColumns\n\t\tif len(conflict) == 0 {\n\t\t\tconflict = make([]string, len(originPrimaryKeyColumns))\n\t\t\tcopy(conflict, originPrimaryKeyColumns)\n\t\t}\n\t\tcache.query = buildUpsertQueryPostgres(dialect, \"\\\"origins\\\"\", updateOnConflict, ret, update, conflict, insert)\n\n\t\tcache.valueMapping, err = queries.BindMapping(originType, originMapping, insert)\n\t\tif err != nil {\n\t\t\treturn err\n\t\t}\n\t\tif len(ret) != 0 {\n\t\t\tcache.retMapping, err = queries.BindMapping(originType, originMapping, ret)\n\t\t\tif err != nil {\n\t\t\t\treturn err\n\t\t\t}\n\t\t}\n\t}\n\n\tvalue := reflect.Indirect(reflect.ValueOf(o))\n\tvals := queries.ValuesFromMapping(value, cache.valueMapping)\n\tvar returns []interface{}\n\tif len(cache.retMapping) != 0 {\n\t\treturns = 
queries.PtrsFromMapping(value, cache.retMapping)\n\t}\n\n\tif boil.DebugMode {\n\t\tfmt.Fprintln(boil.DebugWriter, cache.query)\n\t\tfmt.Fprintln(boil.DebugWriter, vals)\n\t}\n\n\tif len(cache.retMapping) != 0 {\n\t\terr = exec.QueryRowContext(ctx, cache.query, vals...).Scan(returns...)\n\t\tif err == sql.ErrNoRows {\n\t\t\terr = nil // Postgres doesn't return anything when there's no update\n\t\t}\n\t} else {\n\t\t_, err = exec.ExecContext(ctx, cache.query, vals...)\n\t}\n\tif err != nil {\n\t\treturn errors.Wrap(err, \"models: unable to upsert origins\")\n\t}\n\n\tif !cached {\n\t\toriginUpsertCacheMut.Lock()\n\t\toriginUpsertCache[key] = cache\n\t\toriginUpsertCacheMut.Unlock()\n\t}\n\n\treturn o.doAfterUpsertHooks(ctx, exec)\n}", "func (o *Vote) Upsert(ctx context.Context, exec boil.ContextExecutor, updateOnConflict bool, conflictColumns []string, updateColumns, insertColumns boil.Columns) error {\n\tif o == nil {\n\t\treturn errors.New(\"models: no vote provided for upsert\")\n\t}\n\n\tnzDefaults := queries.NonZeroDefaultSet(voteColumnsWithDefault, o)\n\n\t// Build cache key in-line uglily - mysql vs psql problems\n\tbuf := strmangle.GetBuffer()\n\tif updateOnConflict {\n\t\tbuf.WriteByte('t')\n\t} else {\n\t\tbuf.WriteByte('f')\n\t}\n\tbuf.WriteByte('.')\n\tfor _, c := range conflictColumns {\n\t\tbuf.WriteString(c)\n\t}\n\tbuf.WriteByte('.')\n\tbuf.WriteString(strconv.Itoa(updateColumns.Kind))\n\tfor _, c := range updateColumns.Cols {\n\t\tbuf.WriteString(c)\n\t}\n\tbuf.WriteByte('.')\n\tbuf.WriteString(strconv.Itoa(insertColumns.Kind))\n\tfor _, c := range insertColumns.Cols {\n\t\tbuf.WriteString(c)\n\t}\n\tbuf.WriteByte('.')\n\tfor _, c := range nzDefaults {\n\t\tbuf.WriteString(c)\n\t}\n\tkey := buf.String()\n\tstrmangle.PutBuffer(buf)\n\n\tvoteUpsertCacheMut.RLock()\n\tcache, cached := voteUpsertCache[key]\n\tvoteUpsertCacheMut.RUnlock()\n\n\tvar err error\n\n\tif !cached {\n\t\tinsert, ret := insertColumns.InsertColumnSet(\n\t\t\tvoteAllColumns,\n\t\t\tvoteColumnsWithDefault,\n\t\t\tvoteColumnsWithoutDefault,\n\t\t\tnzDefaults,\n\t\t)\n\t\tupdate := updateColumns.UpdateColumnSet(\n\t\t\tvoteAllColumns,\n\t\t\tvotePrimaryKeyColumns,\n\t\t)\n\n\t\tif updateOnConflict && len(update) == 0 {\n\t\t\treturn errors.New(\"models: unable to upsert vote, could not build update column list\")\n\t\t}\n\n\t\tconflict := conflictColumns\n\t\tif len(conflict) == 0 {\n\t\t\tconflict = make([]string, len(votePrimaryKeyColumns))\n\t\t\tcopy(conflict, votePrimaryKeyColumns)\n\t\t}\n\t\tcache.query = buildUpsertQueryPostgres(dialect, \"\\\"vote\\\"\", updateOnConflict, ret, update, conflict, insert)\n\n\t\tcache.valueMapping, err = queries.BindMapping(voteType, voteMapping, insert)\n\t\tif err != nil {\n\t\t\treturn err\n\t\t}\n\t\tif len(ret) != 0 {\n\t\t\tcache.retMapping, err = queries.BindMapping(voteType, voteMapping, ret)\n\t\t\tif err != nil {\n\t\t\t\treturn err\n\t\t\t}\n\t\t}\n\t}\n\n\tvalue := reflect.Indirect(reflect.ValueOf(o))\n\tvals := queries.ValuesFromMapping(value, cache.valueMapping)\n\tvar returns []interface{}\n\tif len(cache.retMapping) != 0 {\n\t\treturns = queries.PtrsFromMapping(value, cache.retMapping)\n\t}\n\n\tif boil.IsDebug(ctx) {\n\t\twriter := boil.DebugWriterFrom(ctx)\n\t\tfmt.Fprintln(writer, cache.query)\n\t\tfmt.Fprintln(writer, vals)\n\t}\n\tif len(cache.retMapping) != 0 {\n\t\terr = exec.QueryRowContext(ctx, cache.query, vals...).Scan(returns...)\n\t\tif err == sql.ErrNoRows {\n\t\t\terr = nil // Postgres doesn't return anything when there's no 
update\n\t\t}\n\t} else {\n\t\t_, err = exec.ExecContext(ctx, cache.query, vals...)\n\t}\n\tif err != nil {\n\t\treturn errors.Wrap(err, \"models: unable to upsert vote\")\n\t}\n\n\tif !cached {\n\t\tvoteUpsertCacheMut.Lock()\n\t\tvoteUpsertCache[key] = cache\n\t\tvoteUpsertCacheMut.Unlock()\n\t}\n\n\treturn nil\n}", "func (p *AutoCommitter) Upsert(pair []interface{}) (e error) {\n\tif p.started {\n\t\tp.docsUpsert <- pair\n\t} else {\n\t\te = errors.New(fmt.Sprintf(\"AutoCommitter-%s(%s)_is_closed\", p.name, p.coll))\n\t}\n\treturn\n}", "func (o *Options) Validate() error {\n\n\tif len(o.OrchestratorTopologyUser) == 0 {\n\t\to.OrchestratorTopologyUser = getFromEnvOrDefault(\"ORC_TOPOLOGY_USER\", \"\")\n\t}\n\tif len(o.OrchestratorTopologyPassword) == 0 {\n\t\to.OrchestratorTopologyPassword = getFromEnvOrDefault(\"ORC_TOPOLOGY_PASSWORD\", \"\")\n\t}\n\treturn nil\n}", "func (o *CMFFamiliesPolicy) Upsert(ctx context.Context, exec boil.ContextExecutor, updateColumns, insertColumns boil.Columns) error {\n\tif o == nil {\n\t\treturn errors.New(\"models: no cmf_families_policies provided for upsert\")\n\t}\n\tif !boil.TimestampsAreSkipped(ctx) {\n\t\tcurrTime := time.Now().In(boil.GetLocation())\n\n\t\tif o.CreatedAt.IsZero() {\n\t\t\to.CreatedAt = currTime\n\t\t}\n\t\to.UpdatedAt = currTime\n\t}\n\n\tif err := o.doBeforeUpsertHooks(ctx, exec); err != nil {\n\t\treturn err\n\t}\n\n\tnzDefaults := queries.NonZeroDefaultSet(cmfFamiliesPolicyColumnsWithDefault, o)\n\tnzUniques := queries.NonZeroDefaultSet(mySQLCMFFamiliesPolicyUniqueColumns, o)\n\n\tif len(nzUniques) == 0 {\n\t\treturn errors.New(\"cannot upsert with a table that cannot conflict on a unique column\")\n\t}\n\n\t// Build cache key in-line uglily - mysql vs psql problems\n\tbuf := strmangle.GetBuffer()\n\tbuf.WriteString(strconv.Itoa(updateColumns.Kind))\n\tfor _, c := range updateColumns.Cols {\n\t\tbuf.WriteString(c)\n\t}\n\tbuf.WriteByte('.')\n\tbuf.WriteString(strconv.Itoa(insertColumns.Kind))\n\tfor _, c := range insertColumns.Cols {\n\t\tbuf.WriteString(c)\n\t}\n\tbuf.WriteByte('.')\n\tfor _, c := range nzDefaults {\n\t\tbuf.WriteString(c)\n\t}\n\tbuf.WriteByte('.')\n\tfor _, c := range nzUniques {\n\t\tbuf.WriteString(c)\n\t}\n\tkey := buf.String()\n\tstrmangle.PutBuffer(buf)\n\n\tcmfFamiliesPolicyUpsertCacheMut.RLock()\n\tcache, cached := cmfFamiliesPolicyUpsertCache[key]\n\tcmfFamiliesPolicyUpsertCacheMut.RUnlock()\n\n\tvar err error\n\n\tif !cached {\n\t\tinsert, ret := insertColumns.InsertColumnSet(\n\t\t\tcmfFamiliesPolicyAllColumns,\n\t\t\tcmfFamiliesPolicyColumnsWithDefault,\n\t\t\tcmfFamiliesPolicyColumnsWithoutDefault,\n\t\t\tnzDefaults,\n\t\t)\n\t\tupdate := updateColumns.UpdateColumnSet(\n\t\t\tcmfFamiliesPolicyAllColumns,\n\t\t\tcmfFamiliesPolicyPrimaryKeyColumns,\n\t\t)\n\n\t\tif !updateColumns.IsNone() && len(update) == 0 {\n\t\t\treturn errors.New(\"models: unable to upsert cmf_families_policies, could not build update column list\")\n\t\t}\n\n\t\tret = strmangle.SetComplement(ret, nzUniques)\n\t\tcache.query = buildUpsertQueryMySQL(dialect, \"`cmf_families_policies`\", update, insert)\n\t\tcache.retQuery = fmt.Sprintf(\n\t\t\t\"SELECT %s FROM `cmf_families_policies` WHERE %s\",\n\t\t\tstrings.Join(strmangle.IdentQuoteSlice(dialect.LQ, dialect.RQ, ret), \",\"),\n\t\t\tstrmangle.WhereClause(\"`\", \"`\", 0, nzUniques),\n\t\t)\n\n\t\tcache.valueMapping, err = queries.BindMapping(cmfFamiliesPolicyType, cmfFamiliesPolicyMapping, insert)\n\t\tif err != nil {\n\t\t\treturn err\n\t\t}\n\t\tif len(ret) != 0 
{\n\t\t\tcache.retMapping, err = queries.BindMapping(cmfFamiliesPolicyType, cmfFamiliesPolicyMapping, ret)\n\t\t\tif err != nil {\n\t\t\t\treturn err\n\t\t\t}\n\t\t}\n\t}\n\n\tvalue := reflect.Indirect(reflect.ValueOf(o))\n\tvals := queries.ValuesFromMapping(value, cache.valueMapping)\n\tvar returns []interface{}\n\tif len(cache.retMapping) != 0 {\n\t\treturns = queries.PtrsFromMapping(value, cache.retMapping)\n\t}\n\n\tif boil.IsDebug(ctx) {\n\t\twriter := boil.DebugWriterFrom(ctx)\n\t\tfmt.Fprintln(writer, cache.query)\n\t\tfmt.Fprintln(writer, vals)\n\t}\n\tresult, err := exec.ExecContext(ctx, cache.query, vals...)\n\n\tif err != nil {\n\t\treturn errors.Wrap(err, \"models: unable to upsert for cmf_families_policies\")\n\t}\n\n\tvar lastID int64\n\tvar uniqueMap []uint64\n\tvar nzUniqueCols []interface{}\n\n\tif len(cache.retMapping) == 0 {\n\t\tgoto CacheNoHooks\n\t}\n\n\tlastID, err = result.LastInsertId()\n\tif err != nil {\n\t\treturn ErrSyncFail\n\t}\n\n\to.ID = int(lastID)\n\tif lastID != 0 && len(cache.retMapping) == 1 && cache.retMapping[0] == cmfFamiliesPolicyMapping[\"id\"] {\n\t\tgoto CacheNoHooks\n\t}\n\n\tuniqueMap, err = queries.BindMapping(cmfFamiliesPolicyType, cmfFamiliesPolicyMapping, nzUniques)\n\tif err != nil {\n\t\treturn errors.Wrap(err, \"models: unable to retrieve unique values for cmf_families_policies\")\n\t}\n\tnzUniqueCols = queries.ValuesFromMapping(reflect.Indirect(reflect.ValueOf(o)), uniqueMap)\n\n\tif boil.IsDebug(ctx) {\n\t\twriter := boil.DebugWriterFrom(ctx)\n\t\tfmt.Fprintln(writer, cache.retQuery)\n\t\tfmt.Fprintln(writer, nzUniqueCols...)\n\t}\n\terr = exec.QueryRowContext(ctx, cache.retQuery, nzUniqueCols...).Scan(returns...)\n\tif err != nil {\n\t\treturn errors.Wrap(err, \"models: unable to populate default values for cmf_families_policies\")\n\t}\n\nCacheNoHooks:\n\tif !cached {\n\t\tcmfFamiliesPolicyUpsertCacheMut.Lock()\n\t\tcmfFamiliesPolicyUpsertCache[key] = cache\n\t\tcmfFamiliesPolicyUpsertCacheMut.Unlock()\n\t}\n\n\treturn o.doAfterUpsertHooks(ctx, exec)\n}", "func (pdu *Pdu) AddOption(key OptionKey, val interface{}) {\n var option Option\n var err error\n\tiv := reflect.ValueOf(val)\n\tif iv.Kind() == reflect.String {\n if key == C.COAP_OPTION_ETAG {\n option, err = key.Opaque(val.(string))\n if err != nil {\n log.Errorf(\"Binary read data failed: %+v\", err)\n }\n } else {\n option = key.String(val.(string))\n }\n\t} else if iv.Kind() == reflect.Uint8 || iv.Kind() == reflect.Uint16 || iv.Kind() == reflect.Uint32 {\n option, err = key.Uint(val)\n if err != nil {\n log.Errorf(\"Binary read data failed: %+v\", err)\n }\n } else {\n log.Warnf(\"Unsupported type of option value. 
Current value type: %+v\\n\", iv.Kind().String())\n return\n\t}\n\tpdu.Options = append(pdu.Options, option)\n}", "func (payload *PutEventPayload) Validate() (err error) {\n\tif payload.Etype == \"\" {\n\t\terr = goa.MergeErrors(err, goa.MissingAttributeError(`raw`, \"etype\"))\n\t}\n\tif payload.Action == \"\" {\n\t\terr = goa.MergeErrors(err, goa.MissingAttributeError(`raw`, \"action\"))\n\t}\n\tif payload.From == \"\" {\n\t\terr = goa.MergeErrors(err, goa.MissingAttributeError(`raw`, \"from\"))\n\t}\n\treturn\n}", "func (v UpdateTransactionalEndpointRequest) Validate() error {\n\treturn validation.Errors{\n\t\t\"ConfigID\": validation.Validate(v.ConfigID, validation.Required),\n\t\t\"Version\": validation.Validate(v.Version, validation.Required),\n\t\t\"SecurityPolicyID\": validation.Validate(v.SecurityPolicyID, validation.Required),\n\t\t\"OperationID\": validation.Validate(v.OperationID, validation.Required),\n\t\t\"JsonPayload\": validation.Validate(v.JsonPayload, validation.Required),\n\t}.Filter()\n}", "func (client *Client) PutEventsWithOptions(eventList []*CloudEvent, runtime *util.RuntimeOptions) (_result *PutEventsResponse, _err error) {\n\tfor _, cloudEvent := range eventList {\n\t\tif tea.BoolValue(util.IsUnset(cloudEvent.Specversion)) {\n\t\t\tcloudEvent.Specversion = tea.String(\"1.0\")\n\t\t}\n\n\t\tif tea.BoolValue(util.IsUnset(cloudEvent.Datacontenttype)) {\n\t\t\tcloudEvent.Datacontenttype = tea.String(\"application/json; charset=utf-8\")\n\t\t}\n\n\t\t_err = util.ValidateModel(cloudEvent)\n\t\tif _err != nil {\n\t\t\treturn _result, _err\n\t\t}\n\t}\n\tbody := eventbridgeutil.Serialize(eventList)\n\t_result = &PutEventsResponse{}\n\t_body, _err := client.DoRequest(tea.String(\"putEvents\"), tea.String(\"HTTP\"), tea.String(\"POST\"), tea.String(\"/openapi/putEvents\"), nil, body, runtime)\n\tif _err != nil {\n\t\treturn _result, _err\n\t}\n\t_err = tea.Convert(_body, &_result)\n\treturn _result, _err\n}", "func (*UpsertApplicationEquipRequest) Descriptor() ([]byte, []int) {\n\treturn file_application_proto_rawDescGZIP(), []int{6}\n}", "func (payload *putEventPayload) Validate() (err error) {\n\tif payload.Etype == nil {\n\t\terr = goa.MergeErrors(err, goa.MissingAttributeError(`raw`, \"etype\"))\n\t}\n\tif payload.Action == nil {\n\t\terr = goa.MergeErrors(err, goa.MissingAttributeError(`raw`, \"action\"))\n\t}\n\tif payload.From == nil {\n\t\terr = goa.MergeErrors(err, goa.MissingAttributeError(`raw`, \"from\"))\n\t}\n\treturn\n}", "func (mt *EasypostOptions) Validate() (err error) {\n\tif mt.CodMethod != nil {\n\t\tif !(*mt.CodMethod == \"CASH\" || *mt.CodMethod == \"CHECK\" || *mt.CodMethod == \"MONEY_ORDER\") {\n\t\t\terr = goa.MergeErrors(err, goa.InvalidEnumValueError(`response.cod_method`, *mt.CodMethod, []interface{}{\"CASH\", \"CHECK\", \"MONEY_ORDER\"}))\n\t\t}\n\t}\n\tif mt.HandlingInstructions != nil {\n\t\tif !(*mt.HandlingInstructions == \"ORMD\" || *mt.HandlingInstructions == \"LIMITED_QUANTITY\") {\n\t\t\terr = goa.MergeErrors(err, goa.InvalidEnumValueError(`response.handling_instructions`, *mt.HandlingInstructions, []interface{}{\"ORMD\", \"LIMITED_QUANTITY\"}))\n\t\t}\n\t}\n\tif mt.LabelFormat != nil {\n\t\tif !(*mt.LabelFormat == \"PNG\" || *mt.LabelFormat == \"PDF\" || *mt.LabelFormat == \"ZPL\" || *mt.LabelFormat == \"EPL2\") {\n\t\t\terr = goa.MergeErrors(err, goa.InvalidEnumValueError(`response.label_format`, *mt.LabelFormat, []interface{}{\"PNG\", \"PDF\", \"ZPL\", \"EPL2\"}))\n\t\t}\n\t}\n\treturn\n}", "func (o *TrainingCost) Upsert(ctx 
context.Context, exec boil.ContextExecutor, updateOnConflict bool, conflictColumns []string, updateColumns, insertColumns boil.Columns) error {\n\tif o == nil {\n\t\treturn errors.New(\"models: no training_costs provided for upsert\")\n\t}\n\tif !boil.TimestampsAreSkipped(ctx) {\n\t\tcurrTime := time.Now().In(boil.GetLocation())\n\n\t\tif o.CreatedAt.IsZero() {\n\t\t\to.CreatedAt = currTime\n\t\t}\n\t\to.UpdatedAt = currTime\n\t}\n\n\tif err := o.doBeforeUpsertHooks(ctx, exec); err != nil {\n\t\treturn err\n\t}\n\n\tnzDefaults := queries.NonZeroDefaultSet(trainingCostColumnsWithDefault, o)\n\n\t// Build cache key in-line uglily - mysql vs psql problems\n\tbuf := strmangle.GetBuffer()\n\tif updateOnConflict {\n\t\tbuf.WriteByte('t')\n\t} else {\n\t\tbuf.WriteByte('f')\n\t}\n\tbuf.WriteByte('.')\n\tfor _, c := range conflictColumns {\n\t\tbuf.WriteString(c)\n\t}\n\tbuf.WriteByte('.')\n\tbuf.WriteString(strconv.Itoa(updateColumns.Kind))\n\tfor _, c := range updateColumns.Cols {\n\t\tbuf.WriteString(c)\n\t}\n\tbuf.WriteByte('.')\n\tbuf.WriteString(strconv.Itoa(insertColumns.Kind))\n\tfor _, c := range insertColumns.Cols {\n\t\tbuf.WriteString(c)\n\t}\n\tbuf.WriteByte('.')\n\tfor _, c := range nzDefaults {\n\t\tbuf.WriteString(c)\n\t}\n\tkey := buf.String()\n\tstrmangle.PutBuffer(buf)\n\n\ttrainingCostUpsertCacheMut.RLock()\n\tcache, cached := trainingCostUpsertCache[key]\n\ttrainingCostUpsertCacheMut.RUnlock()\n\n\tvar err error\n\n\tif !cached {\n\t\tinsert, ret := insertColumns.InsertColumnSet(\n\t\t\ttrainingCostAllColumns,\n\t\t\ttrainingCostColumnsWithDefault,\n\t\t\ttrainingCostColumnsWithoutDefault,\n\t\t\tnzDefaults,\n\t\t)\n\t\tupdate := updateColumns.UpdateColumnSet(\n\t\t\ttrainingCostAllColumns,\n\t\t\ttrainingCostPrimaryKeyColumns,\n\t\t)\n\n\t\tif updateOnConflict && len(update) == 0 {\n\t\t\treturn errors.New(\"models: unable to upsert training_costs, could not build update column list\")\n\t\t}\n\n\t\tconflict := conflictColumns\n\t\tif len(conflict) == 0 {\n\t\t\tconflict = make([]string, len(trainingCostPrimaryKeyColumns))\n\t\t\tcopy(conflict, trainingCostPrimaryKeyColumns)\n\t\t}\n\t\tcache.query = buildUpsertQueryPostgres(dialect, \"\\\"training_costs\\\"\", updateOnConflict, ret, update, conflict, insert)\n\n\t\tcache.valueMapping, err = queries.BindMapping(trainingCostType, trainingCostMapping, insert)\n\t\tif err != nil {\n\t\t\treturn err\n\t\t}\n\t\tif len(ret) != 0 {\n\t\t\tcache.retMapping, err = queries.BindMapping(trainingCostType, trainingCostMapping, ret)\n\t\t\tif err != nil {\n\t\t\t\treturn err\n\t\t\t}\n\t\t}\n\t}\n\n\tvalue := reflect.Indirect(reflect.ValueOf(o))\n\tvals := queries.ValuesFromMapping(value, cache.valueMapping)\n\tvar returns []interface{}\n\tif len(cache.retMapping) != 0 {\n\t\treturns = queries.PtrsFromMapping(value, cache.retMapping)\n\t}\n\n\tif boil.IsDebug(ctx) {\n\t\twriter := boil.DebugWriterFrom(ctx)\n\t\tfmt.Fprintln(writer, cache.query)\n\t\tfmt.Fprintln(writer, vals)\n\t}\n\tif len(cache.retMapping) != 0 {\n\t\terr = exec.QueryRowContext(ctx, cache.query, vals...).Scan(returns...)\n\t\tif err == sql.ErrNoRows {\n\t\t\terr = nil // Postgres doesn't return anything when there's no update\n\t\t}\n\t} else {\n\t\t_, err = exec.ExecContext(ctx, cache.query, vals...)\n\t}\n\tif err != nil {\n\t\treturn errors.Wrap(err, \"models: unable to upsert training_costs\")\n\t}\n\n\tif !cached {\n\t\ttrainingCostUpsertCacheMut.Lock()\n\t\ttrainingCostUpsertCache[key] = cache\n\t\ttrainingCostUpsertCacheMut.Unlock()\n\t}\n\n\treturn 
o.doAfterUpsertHooks(ctx, exec)\n}", "func (db *MongoDatabase) Upsert(collection_name string, selector interface{}, update interface{}) (*ChangeResults, error) {\n\tcurrent_session := db.GetSession()\n\tdefer current_session.Close()\n\n\tcollection := current_session.DB(db.name).C(collection_name)\n\n\tchange_info, err := collection.Upsert(selector, update)\n\n\tchange_results := ChangeResults{\n\t\tUpdated: change_info.Updated,\n\t\tDeleted: change_info.Removed,\n\t}\n\n\treturn &change_results, convertMgoError(err)\n}", "func (o *VSP) Upsert(ctx context.Context, exec boil.ContextExecutor, updateOnConflict bool, conflictColumns []string, updateColumns, insertColumns boil.Columns) error {\n\tif o == nil {\n\t\treturn errors.New(\"models: no vsp provided for upsert\")\n\t}\n\n\tnzDefaults := queries.NonZeroDefaultSet(vspColumnsWithDefault, o)\n\n\t// Build cache key in-line uglily - mysql vs psql problems\n\tbuf := strmangle.GetBuffer()\n\tif updateOnConflict {\n\t\tbuf.WriteByte('t')\n\t} else {\n\t\tbuf.WriteByte('f')\n\t}\n\tbuf.WriteByte('.')\n\tfor _, c := range conflictColumns {\n\t\tbuf.WriteString(c)\n\t}\n\tbuf.WriteByte('.')\n\tbuf.WriteString(strconv.Itoa(updateColumns.Kind))\n\tfor _, c := range updateColumns.Cols {\n\t\tbuf.WriteString(c)\n\t}\n\tbuf.WriteByte('.')\n\tbuf.WriteString(strconv.Itoa(insertColumns.Kind))\n\tfor _, c := range insertColumns.Cols {\n\t\tbuf.WriteString(c)\n\t}\n\tbuf.WriteByte('.')\n\tfor _, c := range nzDefaults {\n\t\tbuf.WriteString(c)\n\t}\n\tkey := buf.String()\n\tstrmangle.PutBuffer(buf)\n\n\tvspUpsertCacheMut.RLock()\n\tcache, cached := vspUpsertCache[key]\n\tvspUpsertCacheMut.RUnlock()\n\n\tvar err error\n\n\tif !cached {\n\t\tinsert, ret := insertColumns.InsertColumnSet(\n\t\t\tvspAllColumns,\n\t\t\tvspColumnsWithDefault,\n\t\t\tvspColumnsWithoutDefault,\n\t\t\tnzDefaults,\n\t\t)\n\t\tupdate := updateColumns.UpdateColumnSet(\n\t\t\tvspAllColumns,\n\t\t\tvspPrimaryKeyColumns,\n\t\t)\n\n\t\tif updateOnConflict && len(update) == 0 {\n\t\t\treturn errors.New(\"models: unable to upsert vsp, could not build update column list\")\n\t\t}\n\n\t\tconflict := conflictColumns\n\t\tif len(conflict) == 0 {\n\t\t\tconflict = make([]string, len(vspPrimaryKeyColumns))\n\t\t\tcopy(conflict, vspPrimaryKeyColumns)\n\t\t}\n\t\tcache.query = buildUpsertQueryPostgres(dialect, \"\\\"vsp\\\"\", updateOnConflict, ret, update, conflict, insert)\n\n\t\tcache.valueMapping, err = queries.BindMapping(vspType, vspMapping, insert)\n\t\tif err != nil {\n\t\t\treturn err\n\t\t}\n\t\tif len(ret) != 0 {\n\t\t\tcache.retMapping, err = queries.BindMapping(vspType, vspMapping, ret)\n\t\t\tif err != nil {\n\t\t\t\treturn err\n\t\t\t}\n\t\t}\n\t}\n\n\tvalue := reflect.Indirect(reflect.ValueOf(o))\n\tvals := queries.ValuesFromMapping(value, cache.valueMapping)\n\tvar returns []interface{}\n\tif len(cache.retMapping) != 0 {\n\t\treturns = queries.PtrsFromMapping(value, cache.retMapping)\n\t}\n\n\tif boil.IsDebug(ctx) {\n\t\twriter := boil.DebugWriterFrom(ctx)\n\t\tfmt.Fprintln(writer, cache.query)\n\t\tfmt.Fprintln(writer, vals)\n\t}\n\tif len(cache.retMapping) != 0 {\n\t\terr = exec.QueryRowContext(ctx, cache.query, vals...).Scan(returns...)\n\t\tif err == sql.ErrNoRows {\n\t\t\terr = nil // Postgres doesn't return anything when there's no update\n\t\t}\n\t} else {\n\t\t_, err = exec.ExecContext(ctx, cache.query, vals...)\n\t}\n\tif err != nil {\n\t\treturn errors.Wrap(err, \"models: unable to upsert vsp\")\n\t}\n\n\tif !cached {\n\t\tvspUpsertCacheMut.Lock()\n\t\tvspUpsertCache[key] 
= cache\n\t\tvspUpsertCacheMut.Unlock()\n\t}\n\n\treturn nil\n}", "func UpdateRenewOption(ctx context.Context, a *RenewOption) error {\n\tfields := []interface{}{\n\t\ta.ROLID,\n\t\ta.Dt,\n\t\ta.Opt,\n\t\ta.Rent,\n\t\ta.FLAGS,\n\t\ta.LastModBy,\n\t\ta.ROID,\n\t}\n\n\tvar err error\n\ta.LastModBy, err = genericUpdate(ctx, Wdb.Prepstmt.UpdateRenewOption, fields)\n\treturn updateError(err, \"RenewOption\", *a)\n}", "func NewUpsertRequest(space string) *UpsertRequest {\n\treq := new(UpsertRequest)\n\treq.initImpl(\"crud.upsert\")\n\treq.setSpace(space)\n\treq.tuple = Tuple{}\n\treq.operations = []Operation{}\n\treq.opts = UpsertOpts{}\n\treturn req\n}", "func (o *CMFUserExperienceLog) Upsert(ctx context.Context, exec boil.ContextExecutor, updateColumns, insertColumns boil.Columns) error {\n\tif o == nil {\n\t\treturn errors.New(\"models: no cmf_user_experience_log provided for upsert\")\n\t}\n\n\tif err := o.doBeforeUpsertHooks(ctx, exec); err != nil {\n\t\treturn err\n\t}\n\n\tnzDefaults := queries.NonZeroDefaultSet(cmfUserExperienceLogColumnsWithDefault, o)\n\tnzUniques := queries.NonZeroDefaultSet(mySQLCMFUserExperienceLogUniqueColumns, o)\n\n\tif len(nzUniques) == 0 {\n\t\treturn errors.New(\"cannot upsert with a table that cannot conflict on a unique column\")\n\t}\n\n\t// Build cache key in-line uglily - mysql vs psql problems\n\tbuf := strmangle.GetBuffer()\n\tbuf.WriteString(strconv.Itoa(updateColumns.Kind))\n\tfor _, c := range updateColumns.Cols {\n\t\tbuf.WriteString(c)\n\t}\n\tbuf.WriteByte('.')\n\tbuf.WriteString(strconv.Itoa(insertColumns.Kind))\n\tfor _, c := range insertColumns.Cols {\n\t\tbuf.WriteString(c)\n\t}\n\tbuf.WriteByte('.')\n\tfor _, c := range nzDefaults {\n\t\tbuf.WriteString(c)\n\t}\n\tbuf.WriteByte('.')\n\tfor _, c := range nzUniques {\n\t\tbuf.WriteString(c)\n\t}\n\tkey := buf.String()\n\tstrmangle.PutBuffer(buf)\n\n\tcmfUserExperienceLogUpsertCacheMut.RLock()\n\tcache, cached := cmfUserExperienceLogUpsertCache[key]\n\tcmfUserExperienceLogUpsertCacheMut.RUnlock()\n\n\tvar err error\n\n\tif !cached {\n\t\tinsert, ret := insertColumns.InsertColumnSet(\n\t\t\tcmfUserExperienceLogAllColumns,\n\t\t\tcmfUserExperienceLogColumnsWithDefault,\n\t\t\tcmfUserExperienceLogColumnsWithoutDefault,\n\t\t\tnzDefaults,\n\t\t)\n\t\tupdate := updateColumns.UpdateColumnSet(\n\t\t\tcmfUserExperienceLogAllColumns,\n\t\t\tcmfUserExperienceLogPrimaryKeyColumns,\n\t\t)\n\n\t\tif !updateColumns.IsNone() && len(update) == 0 {\n\t\t\treturn errors.New(\"models: unable to upsert cmf_user_experience_log, could not build update column list\")\n\t\t}\n\n\t\tret = strmangle.SetComplement(ret, nzUniques)\n\t\tcache.query = buildUpsertQueryMySQL(dialect, \"`cmf_user_experience_log`\", update, insert)\n\t\tcache.retQuery = fmt.Sprintf(\n\t\t\t\"SELECT %s FROM `cmf_user_experience_log` WHERE %s\",\n\t\t\tstrings.Join(strmangle.IdentQuoteSlice(dialect.LQ, dialect.RQ, ret), \",\"),\n\t\t\tstrmangle.WhereClause(\"`\", \"`\", 0, nzUniques),\n\t\t)\n\n\t\tcache.valueMapping, err = queries.BindMapping(cmfUserExperienceLogType, cmfUserExperienceLogMapping, insert)\n\t\tif err != nil {\n\t\t\treturn err\n\t\t}\n\t\tif len(ret) != 0 {\n\t\t\tcache.retMapping, err = queries.BindMapping(cmfUserExperienceLogType, cmfUserExperienceLogMapping, ret)\n\t\t\tif err != nil {\n\t\t\t\treturn err\n\t\t\t}\n\t\t}\n\t}\n\n\tvalue := reflect.Indirect(reflect.ValueOf(o))\n\tvals := queries.ValuesFromMapping(value, cache.valueMapping)\n\tvar returns []interface{}\n\tif len(cache.retMapping) != 0 {\n\t\treturns = 
queries.PtrsFromMapping(value, cache.retMapping)\n\t}\n\n\tif boil.IsDebug(ctx) {\n\t\twriter := boil.DebugWriterFrom(ctx)\n\t\tfmt.Fprintln(writer, cache.query)\n\t\tfmt.Fprintln(writer, vals)\n\t}\n\tresult, err := exec.ExecContext(ctx, cache.query, vals...)\n\n\tif err != nil {\n\t\treturn errors.Wrap(err, \"models: unable to upsert for cmf_user_experience_log\")\n\t}\n\n\tvar lastID int64\n\tvar uniqueMap []uint64\n\tvar nzUniqueCols []interface{}\n\n\tif len(cache.retMapping) == 0 {\n\t\tgoto CacheNoHooks\n\t}\n\n\tlastID, err = result.LastInsertId()\n\tif err != nil {\n\t\treturn ErrSyncFail\n\t}\n\n\to.ID = int64(lastID)\n\tif lastID != 0 && len(cache.retMapping) == 1 && cache.retMapping[0] == cmfUserExperienceLogMapping[\"id\"] {\n\t\tgoto CacheNoHooks\n\t}\n\n\tuniqueMap, err = queries.BindMapping(cmfUserExperienceLogType, cmfUserExperienceLogMapping, nzUniques)\n\tif err != nil {\n\t\treturn errors.Wrap(err, \"models: unable to retrieve unique values for cmf_user_experience_log\")\n\t}\n\tnzUniqueCols = queries.ValuesFromMapping(reflect.Indirect(reflect.ValueOf(o)), uniqueMap)\n\n\tif boil.IsDebug(ctx) {\n\t\twriter := boil.DebugWriterFrom(ctx)\n\t\tfmt.Fprintln(writer, cache.retQuery)\n\t\tfmt.Fprintln(writer, nzUniqueCols...)\n\t}\n\terr = exec.QueryRowContext(ctx, cache.retQuery, nzUniqueCols...).Scan(returns...)\n\tif err != nil {\n\t\treturn errors.Wrap(err, \"models: unable to populate default values for cmf_user_experience_log\")\n\t}\n\nCacheNoHooks:\n\tif !cached {\n\t\tcmfUserExperienceLogUpsertCacheMut.Lock()\n\t\tcmfUserExperienceLogUpsertCache[key] = cache\n\t\tcmfUserExperienceLogUpsertCacheMut.Unlock()\n\t}\n\n\treturn o.doAfterUpsertHooks(ctx, exec)\n}", "func (o *OrganizerInvitation) ValidateUpdate(tx *pop.Connection) (*validate.Errors, error) {\n\treturn validate.NewErrors(), nil\n}", "func (e SetApplicationPubSubRequestValidationError) Cause() error { return e.cause }", "func (o *AuthItemGroup) Upsert(ctx context.Context, exec boil.ContextExecutor, updateColumns, insertColumns boil.Columns) error {\n\tif o == nil {\n\t\treturn errors.New(\"models: no auth_item_groups provided for upsert\")\n\t}\n\tif !boil.TimestampsAreSkipped(ctx) {\n\t\tcurrTime := time.Now().In(boil.GetLocation())\n\n\t\tif o.CreatedAt.IsZero() {\n\t\t\to.CreatedAt = currTime\n\t\t}\n\t\tqueries.SetScanner(&o.UpdatedAt, currTime)\n\t}\n\n\tif err := o.doBeforeUpsertHooks(ctx, exec); err != nil {\n\t\treturn err\n\t}\n\n\tnzDefaults := queries.NonZeroDefaultSet(authItemGroupColumnsWithDefault, o)\n\tnzUniques := queries.NonZeroDefaultSet(mySQLAuthItemGroupUniqueColumns, o)\n\n\tif len(nzUniques) == 0 {\n\t\treturn errors.New(\"cannot upsert with a table that cannot conflict on a unique column\")\n\t}\n\n\t// Build cache key in-line uglily - mysql vs psql problems\n\tbuf := strmangle.GetBuffer()\n\tbuf.WriteString(strconv.Itoa(updateColumns.Kind))\n\tfor _, c := range updateColumns.Cols {\n\t\tbuf.WriteString(c)\n\t}\n\tbuf.WriteByte('.')\n\tbuf.WriteString(strconv.Itoa(insertColumns.Kind))\n\tfor _, c := range insertColumns.Cols {\n\t\tbuf.WriteString(c)\n\t}\n\tbuf.WriteByte('.')\n\tfor _, c := range nzDefaults {\n\t\tbuf.WriteString(c)\n\t}\n\tbuf.WriteByte('.')\n\tfor _, c := range nzUniques {\n\t\tbuf.WriteString(c)\n\t}\n\tkey := buf.String()\n\tstrmangle.PutBuffer(buf)\n\n\tauthItemGroupUpsertCacheMut.RLock()\n\tcache, cached := authItemGroupUpsertCache[key]\n\tauthItemGroupUpsertCacheMut.RUnlock()\n\n\tvar err error\n\n\tif !cached {\n\t\tinsert, ret := 
insertColumns.InsertColumnSet(\n\t\t\tauthItemGroupAllColumns,\n\t\t\tauthItemGroupColumnsWithDefault,\n\t\t\tauthItemGroupColumnsWithoutDefault,\n\t\t\tnzDefaults,\n\t\t)\n\t\tupdate := updateColumns.UpdateColumnSet(\n\t\t\tauthItemGroupAllColumns,\n\t\t\tauthItemGroupPrimaryKeyColumns,\n\t\t)\n\n\t\tif len(update) == 0 {\n\t\t\treturn errors.New(\"models: unable to upsert auth_item_groups, could not build update column list\")\n\t\t}\n\n\t\tret = strmangle.SetComplement(ret, nzUniques)\n\t\tcache.query = buildUpsertQueryMySQL(dialect, \"auth_item_groups\", update, insert)\n\t\tcache.retQuery = fmt.Sprintf(\n\t\t\t\"SELECT %s FROM `auth_item_groups` WHERE %s\",\n\t\t\tstrings.Join(strmangle.IdentQuoteSlice(dialect.LQ, dialect.RQ, ret), \",\"),\n\t\t\tstrmangle.WhereClause(\"`\", \"`\", 0, nzUniques),\n\t\t)\n\n\t\tcache.valueMapping, err = queries.BindMapping(authItemGroupType, authItemGroupMapping, insert)\n\t\tif err != nil {\n\t\t\treturn err\n\t\t}\n\t\tif len(ret) != 0 {\n\t\t\tcache.retMapping, err = queries.BindMapping(authItemGroupType, authItemGroupMapping, ret)\n\t\t\tif err != nil {\n\t\t\t\treturn err\n\t\t\t}\n\t\t}\n\t}\n\n\tvalue := reflect.Indirect(reflect.ValueOf(o))\n\tvals := queries.ValuesFromMapping(value, cache.valueMapping)\n\tvar returns []interface{}\n\tif len(cache.retMapping) != 0 {\n\t\treturns = queries.PtrsFromMapping(value, cache.retMapping)\n\t}\n\n\tif boil.IsDebug(ctx) {\n\t\twriter := boil.DebugWriterFrom(ctx)\n\t\tfmt.Fprintln(writer, cache.query)\n\t\tfmt.Fprintln(writer, vals)\n\t}\n\tresult, err := exec.ExecContext(ctx, cache.query, vals...)\n\n\tif err != nil {\n\t\treturn errors.Wrap(err, \"models: unable to upsert for auth_item_groups\")\n\t}\n\n\tvar lastID int64\n\tvar uniqueMap []uint64\n\tvar nzUniqueCols []interface{}\n\n\tif len(cache.retMapping) == 0 {\n\t\tgoto CacheNoHooks\n\t}\n\n\tlastID, err = result.LastInsertId()\n\tif err != nil {\n\t\treturn ErrSyncFail\n\t}\n\n\to.ID = int(lastID)\n\tif lastID != 0 && len(cache.retMapping) == 1 && cache.retMapping[0] == authItemGroupMapping[\"id\"] {\n\t\tgoto CacheNoHooks\n\t}\n\n\tuniqueMap, err = queries.BindMapping(authItemGroupType, authItemGroupMapping, nzUniques)\n\tif err != nil {\n\t\treturn errors.Wrap(err, \"models: unable to retrieve unique values for auth_item_groups\")\n\t}\n\tnzUniqueCols = queries.ValuesFromMapping(reflect.Indirect(reflect.ValueOf(o)), uniqueMap)\n\n\tif boil.IsDebug(ctx) {\n\t\twriter := boil.DebugWriterFrom(ctx)\n\t\tfmt.Fprintln(writer, cache.retQuery)\n\t\tfmt.Fprintln(writer, nzUniqueCols...)\n\t}\n\terr = exec.QueryRowContext(ctx, cache.retQuery, nzUniqueCols...).Scan(returns...)\n\tif err != nil {\n\t\treturn errors.Wrap(err, \"models: unable to populate default values for auth_item_groups\")\n\t}\n\nCacheNoHooks:\n\tif !cached {\n\t\tauthItemGroupUpsertCacheMut.Lock()\n\t\tauthItemGroupUpsertCache[key] = cache\n\t\tauthItemGroupUpsertCacheMut.Unlock()\n\t}\n\n\treturn o.doAfterUpsertHooks(ctx, exec)\n}", "func (o *Ticket) Upsert(ctx context.Context, exec boil.ContextExecutor, updateOnConflict bool, conflictColumns []string, updateColumns, insertColumns boil.Columns) error {\n\tif o == nil {\n\t\treturn errors.New(\"models: no tickets provided for upsert\")\n\t}\n\tif !boil.TimestampsAreSkipped(ctx) {\n\t\tcurrTime := time.Now().In(boil.GetLocation())\n\n\t\tif o.CreatedAt.IsZero() {\n\t\t\to.CreatedAt = currTime\n\t\t}\n\t}\n\n\tnzDefaults := queries.NonZeroDefaultSet(ticketColumnsWithDefault, o)\n\n\t// Build cache key in-line uglily - mysql vs psql 
problems\n\tbuf := strmangle.GetBuffer()\n\tif updateOnConflict {\n\t\tbuf.WriteByte('t')\n\t} else {\n\t\tbuf.WriteByte('f')\n\t}\n\tbuf.WriteByte('.')\n\tfor _, c := range conflictColumns {\n\t\tbuf.WriteString(c)\n\t}\n\tbuf.WriteByte('.')\n\tbuf.WriteString(strconv.Itoa(updateColumns.Kind))\n\tfor _, c := range updateColumns.Cols {\n\t\tbuf.WriteString(c)\n\t}\n\tbuf.WriteByte('.')\n\tbuf.WriteString(strconv.Itoa(insertColumns.Kind))\n\tfor _, c := range insertColumns.Cols {\n\t\tbuf.WriteString(c)\n\t}\n\tbuf.WriteByte('.')\n\tfor _, c := range nzDefaults {\n\t\tbuf.WriteString(c)\n\t}\n\tkey := buf.String()\n\tstrmangle.PutBuffer(buf)\n\n\tticketUpsertCacheMut.RLock()\n\tcache, cached := ticketUpsertCache[key]\n\tticketUpsertCacheMut.RUnlock()\n\n\tvar err error\n\n\tif !cached {\n\t\tinsert, ret := insertColumns.InsertColumnSet(\n\t\t\tticketAllColumns,\n\t\t\tticketColumnsWithDefault,\n\t\t\tticketColumnsWithoutDefault,\n\t\t\tnzDefaults,\n\t\t)\n\n\t\tupdate := updateColumns.UpdateColumnSet(\n\t\t\tticketAllColumns,\n\t\t\tticketPrimaryKeyColumns,\n\t\t)\n\n\t\tif updateOnConflict && len(update) == 0 {\n\t\t\treturn errors.New(\"models: unable to upsert tickets, could not build update column list\")\n\t\t}\n\n\t\tconflict := conflictColumns\n\t\tif len(conflict) == 0 {\n\t\t\tconflict = make([]string, len(ticketPrimaryKeyColumns))\n\t\t\tcopy(conflict, ticketPrimaryKeyColumns)\n\t\t}\n\t\tcache.query = buildUpsertQueryPostgres(dialect, \"\\\"tickets\\\"\", updateOnConflict, ret, update, conflict, insert)\n\n\t\tcache.valueMapping, err = queries.BindMapping(ticketType, ticketMapping, insert)\n\t\tif err != nil {\n\t\t\treturn err\n\t\t}\n\t\tif len(ret) != 0 {\n\t\t\tcache.retMapping, err = queries.BindMapping(ticketType, ticketMapping, ret)\n\t\t\tif err != nil {\n\t\t\t\treturn err\n\t\t\t}\n\t\t}\n\t}\n\n\tvalue := reflect.Indirect(reflect.ValueOf(o))\n\tvals := queries.ValuesFromMapping(value, cache.valueMapping)\n\tvar returns []interface{}\n\tif len(cache.retMapping) != 0 {\n\t\treturns = queries.PtrsFromMapping(value, cache.retMapping)\n\t}\n\n\tif boil.IsDebug(ctx) {\n\t\twriter := boil.DebugWriterFrom(ctx)\n\t\tfmt.Fprintln(writer, cache.query)\n\t\tfmt.Fprintln(writer, vals)\n\t}\n\tif len(cache.retMapping) != 0 {\n\t\terr = exec.QueryRowContext(ctx, cache.query, vals...).Scan(returns...)\n\t\tif errors.Is(err, sql.ErrNoRows) {\n\t\t\terr = nil // Postgres doesn't return anything when there's no update\n\t\t}\n\t} else {\n\t\t_, err = exec.ExecContext(ctx, cache.query, vals...)\n\t}\n\tif err != nil {\n\t\treturn errors.Wrap(err, \"models: unable to upsert tickets\")\n\t}\n\n\tif !cached {\n\t\tticketUpsertCacheMut.Lock()\n\t\tticketUpsertCache[key] = cache\n\t\tticketUpsertCacheMut.Unlock()\n\t}\n\n\treturn nil\n}", "func InputEncoding(encoding cloudevents.Encoding) EventRecordOption {\n\treturn envOption(\"EVENT_ENCODING\", encoding.String())\n}", "func (o *Job) Upsert(ctx context.Context, exec boil.ContextExecutor, updateOnConflict bool, conflictColumns []string, updateColumns, insertColumns boil.Columns) error {\n\tif o == nil {\n\t\treturn errors.New(\"models: no jobs provided for upsert\")\n\t}\n\tif !boil.TimestampsAreSkipped(ctx) {\n\t\tcurrTime := time.Now().In(boil.GetLocation())\n\n\t\tif queries.MustTime(o.CreatedAt).IsZero() {\n\t\t\tqueries.SetScanner(&o.CreatedAt, currTime)\n\t\t}\n\t\tqueries.SetScanner(&o.UpdatedAt, currTime)\n\t}\n\n\tif err := o.doBeforeUpsertHooks(ctx, exec); err != nil {\n\t\treturn err\n\t}\n\n\tnzDefaults := 
queries.NonZeroDefaultSet(jobColumnsWithDefault, o)\n\n\t// Build cache key in-line uglily - mysql vs psql problems\n\tbuf := strmangle.GetBuffer()\n\tif updateOnConflict {\n\t\tbuf.WriteByte('t')\n\t} else {\n\t\tbuf.WriteByte('f')\n\t}\n\tbuf.WriteByte('.')\n\tfor _, c := range conflictColumns {\n\t\tbuf.WriteString(c)\n\t}\n\tbuf.WriteByte('.')\n\tbuf.WriteString(strconv.Itoa(updateColumns.Kind))\n\tfor _, c := range updateColumns.Cols {\n\t\tbuf.WriteString(c)\n\t}\n\tbuf.WriteByte('.')\n\tbuf.WriteString(strconv.Itoa(insertColumns.Kind))\n\tfor _, c := range insertColumns.Cols {\n\t\tbuf.WriteString(c)\n\t}\n\tbuf.WriteByte('.')\n\tfor _, c := range nzDefaults {\n\t\tbuf.WriteString(c)\n\t}\n\tkey := buf.String()\n\tstrmangle.PutBuffer(buf)\n\n\tjobUpsertCacheMut.RLock()\n\tcache, cached := jobUpsertCache[key]\n\tjobUpsertCacheMut.RUnlock()\n\n\tvar err error\n\n\tif !cached {\n\t\tinsert, ret := insertColumns.InsertColumnSet(\n\t\t\tjobColumns,\n\t\t\tjobColumnsWithDefault,\n\t\t\tjobColumnsWithoutDefault,\n\t\t\tnzDefaults,\n\t\t)\n\t\tupdate := updateColumns.UpdateColumnSet(\n\t\t\tjobColumns,\n\t\t\tjobPrimaryKeyColumns,\n\t\t)\n\n\t\tif updateOnConflict && len(update) == 0 {\n\t\t\treturn errors.New(\"models: unable to upsert jobs, could not build update column list\")\n\t\t}\n\n\t\tconflict := conflictColumns\n\t\tif len(conflict) == 0 {\n\t\t\tconflict = make([]string, len(jobPrimaryKeyColumns))\n\t\t\tcopy(conflict, jobPrimaryKeyColumns)\n\t\t}\n\t\tcache.query = buildUpsertQueryPostgres(dialect, \"\\\"jobs\\\"\", updateOnConflict, ret, update, conflict, insert)\n\n\t\tcache.valueMapping, err = queries.BindMapping(jobType, jobMapping, insert)\n\t\tif err != nil {\n\t\t\treturn err\n\t\t}\n\t\tif len(ret) != 0 {\n\t\t\tcache.retMapping, err = queries.BindMapping(jobType, jobMapping, ret)\n\t\t\tif err != nil {\n\t\t\t\treturn err\n\t\t\t}\n\t\t}\n\t}\n\n\tvalue := reflect.Indirect(reflect.ValueOf(o))\n\tvals := queries.ValuesFromMapping(value, cache.valueMapping)\n\tvar returns []interface{}\n\tif len(cache.retMapping) != 0 {\n\t\treturns = queries.PtrsFromMapping(value, cache.retMapping)\n\t}\n\n\tif boil.DebugMode {\n\t\tfmt.Fprintln(boil.DebugWriter, cache.query)\n\t\tfmt.Fprintln(boil.DebugWriter, vals)\n\t}\n\n\tif len(cache.retMapping) != 0 {\n\t\terr = exec.QueryRowContext(ctx, cache.query, vals...).Scan(returns...)\n\t\tif err == sql.ErrNoRows {\n\t\t\terr = nil // Postgres doesn't return anything when there's no update\n\t\t}\n\t} else {\n\t\t_, err = exec.ExecContext(ctx, cache.query, vals...)\n\t}\n\tif err != nil {\n\t\treturn errors.Wrap(err, \"models: unable to upsert jobs\")\n\t}\n\n\tif !cached {\n\t\tjobUpsertCacheMut.Lock()\n\t\tjobUpsertCache[key] = cache\n\t\tjobUpsertCacheMut.Unlock()\n\t}\n\n\treturn o.doAfterUpsertHooks(ctx, exec)\n}", "func (o *Latency) Upsert(ctx context.Context, exec boil.ContextExecutor, updateOnConflict bool, conflictColumns []string, updateColumns, insertColumns boil.Columns) error {\n\tif o == nil {\n\t\treturn errors.New(\"models: no latencies provided for upsert\")\n\t}\n\tif !boil.TimestampsAreSkipped(ctx) {\n\t\tcurrTime := time.Now().In(boil.GetLocation())\n\n\t\to.UpdatedAt = currTime\n\t\tif o.CreatedAt.IsZero() {\n\t\t\to.CreatedAt = currTime\n\t\t}\n\t}\n\n\tif err := o.doBeforeUpsertHooks(ctx, exec); err != nil {\n\t\treturn err\n\t}\n\n\tnzDefaults := queries.NonZeroDefaultSet(latencyColumnsWithDefault, o)\n\n\t// Build cache key in-line uglily - mysql vs psql problems\n\tbuf := strmangle.GetBuffer()\n\tif 
updateOnConflict {\n\t\tbuf.WriteByte('t')\n\t} else {\n\t\tbuf.WriteByte('f')\n\t}\n\tbuf.WriteByte('.')\n\tfor _, c := range conflictColumns {\n\t\tbuf.WriteString(c)\n\t}\n\tbuf.WriteByte('.')\n\tbuf.WriteString(strconv.Itoa(updateColumns.Kind))\n\tfor _, c := range updateColumns.Cols {\n\t\tbuf.WriteString(c)\n\t}\n\tbuf.WriteByte('.')\n\tbuf.WriteString(strconv.Itoa(insertColumns.Kind))\n\tfor _, c := range insertColumns.Cols {\n\t\tbuf.WriteString(c)\n\t}\n\tbuf.WriteByte('.')\n\tfor _, c := range nzDefaults {\n\t\tbuf.WriteString(c)\n\t}\n\tkey := buf.String()\n\tstrmangle.PutBuffer(buf)\n\n\tlatencyUpsertCacheMut.RLock()\n\tcache, cached := latencyUpsertCache[key]\n\tlatencyUpsertCacheMut.RUnlock()\n\n\tvar err error\n\n\tif !cached {\n\t\tinsert, ret := insertColumns.InsertColumnSet(\n\t\t\tlatencyAllColumns,\n\t\t\tlatencyColumnsWithDefault,\n\t\t\tlatencyColumnsWithoutDefault,\n\t\t\tnzDefaults,\n\t\t)\n\t\tupdate := updateColumns.UpdateColumnSet(\n\t\t\tlatencyAllColumns,\n\t\t\tlatencyPrimaryKeyColumns,\n\t\t)\n\n\t\tif updateOnConflict && len(update) == 0 {\n\t\t\treturn errors.New(\"models: unable to upsert latencies, could not build update column list\")\n\t\t}\n\n\t\tconflict := conflictColumns\n\t\tif len(conflict) == 0 {\n\t\t\tconflict = make([]string, len(latencyPrimaryKeyColumns))\n\t\t\tcopy(conflict, latencyPrimaryKeyColumns)\n\t\t}\n\t\tcache.query = buildUpsertQueryPostgres(dialect, \"\\\"latencies\\\"\", updateOnConflict, ret, update, conflict, insert)\n\n\t\tcache.valueMapping, err = queries.BindMapping(latencyType, latencyMapping, insert)\n\t\tif err != nil {\n\t\t\treturn err\n\t\t}\n\t\tif len(ret) != 0 {\n\t\t\tcache.retMapping, err = queries.BindMapping(latencyType, latencyMapping, ret)\n\t\t\tif err != nil {\n\t\t\t\treturn err\n\t\t\t}\n\t\t}\n\t}\n\n\tvalue := reflect.Indirect(reflect.ValueOf(o))\n\tvals := queries.ValuesFromMapping(value, cache.valueMapping)\n\tvar returns []interface{}\n\tif len(cache.retMapping) != 0 {\n\t\treturns = queries.PtrsFromMapping(value, cache.retMapping)\n\t}\n\n\tif boil.IsDebug(ctx) {\n\t\twriter := boil.DebugWriterFrom(ctx)\n\t\tfmt.Fprintln(writer, cache.query)\n\t\tfmt.Fprintln(writer, vals)\n\t}\n\tif len(cache.retMapping) != 0 {\n\t\terr = exec.QueryRowContext(ctx, cache.query, vals...).Scan(returns...)\n\t\tif err == sql.ErrNoRows {\n\t\t\terr = nil // Postgres doesn't return anything when there's no update\n\t\t}\n\t} else {\n\t\t_, err = exec.ExecContext(ctx, cache.query, vals...)\n\t}\n\tif err != nil {\n\t\treturn errors.Wrap(err, \"models: unable to upsert latencies\")\n\t}\n\n\tif !cached {\n\t\tlatencyUpsertCacheMut.Lock()\n\t\tlatencyUpsertCache[key] = cache\n\t\tlatencyUpsertCacheMut.Unlock()\n\t}\n\n\treturn o.doAfterUpsertHooks(ctx, exec)\n}", "func (e UpdateMeetingV1RequestValidationError) Cause() error { return e.cause }", "func (rom *ReplaceOneModel) SetUpsert(upsert bool) *ReplaceOneModel {\n\trom.Upsert = &upsert\n\treturn rom\n}", "func (oee *OtxEpubEpub) Upsert(db XODB) error {\n\tvar err error\n\n\t// if already exist, bail\n\tif oee._exists {\n\t\treturn errors.New(\"insert failed: already exists\")\n\t}\n\n\t// sql query\n\tconst sqlstr = `INSERT INTO public.otx_epub_epub (` +\n\t\t`document_ptr_id, publisher, source, oebps_folder, manifest, contents` +\n\t\t`) VALUES (` +\n\t\t`$1, $2, $3, $4, $5, $6` +\n\t\t`) ON CONFLICT (document_ptr_id) DO UPDATE SET (` +\n\t\t`document_ptr_id, publisher, source, oebps_folder, manifest, contents` +\n\t\t`) = (` +\n\t\t`EXCLUDED.document_ptr_id, 
EXCLUDED.publisher, EXCLUDED.source, EXCLUDED.oebps_folder, EXCLUDED.manifest, EXCLUDED.contents` +\n\t\t`)`\n\n\t// run query\n\tXOLog(sqlstr, oee.DocumentPtrID, oee.Publisher, oee.Source, oee.OebpsFolder, oee.Manifest, oee.Contents)\n\t_, err = db.Exec(sqlstr, oee.DocumentPtrID, oee.Publisher, oee.Source, oee.OebpsFolder, oee.Manifest, oee.Contents)\n\tif err != nil {\n\t\treturn err\n\t}\n\n\t// set existence\n\toee._exists = true\n\n\treturn nil\n}", "func ExpectUdevEventOnStop() Option {\n\treturn func(o *config) error {\n\t\to.expectUdevEventOnStop = true\n\t\treturn nil\n\t}\n}", "func (o *UpdateEventParams) WriteToRequest(r runtime.ClientRequest, reg strfmt.Registry) error {\n\n\tif err := r.SetTimeout(o.timeout); err != nil {\n\t\treturn err\n\t}\n\tvar res []error\n\n\tif o.Body != nil {\n\t\tif err := r.SetBodyParam(o.Body); err != nil {\n\t\t\treturn err\n\t\t}\n\t}\n\n\t// path param eventId\n\tif err := r.SetPathParam(\"eventId\", o.EventID); err != nil {\n\t\treturn err\n\t}\n\n\t// path param koronaAccountId\n\tif err := r.SetPathParam(\"koronaAccountId\", o.KoronaAccountID); err != nil {\n\t\treturn err\n\t}\n\n\tif len(res) > 0 {\n\t\treturn errors.CompositeValidationError(res...)\n\t}\n\treturn nil\n}", "func ServiceRequest() RequestOption {\n\treturn func(o *RequestOptions) {\n\t\to.RequestType = inter.ServiceRequestMessageType\n\t}\n}", "func (o *PremiumSlot) Upsert(ctx context.Context, exec boil.ContextExecutor, updateOnConflict bool, conflictColumns []string, updateColumns, insertColumns boil.Columns) error {\n\tif o == nil {\n\t\treturn errors.New(\"models: no premium_slots provided for upsert\")\n\t}\n\tif !boil.TimestampsAreSkipped(ctx) {\n\t\tcurrTime := time.Now().In(boil.GetLocation())\n\n\t\tif o.CreatedAt.IsZero() {\n\t\t\to.CreatedAt = currTime\n\t\t}\n\t}\n\n\tnzDefaults := queries.NonZeroDefaultSet(premiumSlotColumnsWithDefault, o)\n\n\t// Build cache key in-line uglily - mysql vs psql problems\n\tbuf := strmangle.GetBuffer()\n\tif updateOnConflict {\n\t\tbuf.WriteByte('t')\n\t} else {\n\t\tbuf.WriteByte('f')\n\t}\n\tbuf.WriteByte('.')\n\tfor _, c := range conflictColumns {\n\t\tbuf.WriteString(c)\n\t}\n\tbuf.WriteByte('.')\n\tbuf.WriteString(strconv.Itoa(updateColumns.Kind))\n\tfor _, c := range updateColumns.Cols {\n\t\tbuf.WriteString(c)\n\t}\n\tbuf.WriteByte('.')\n\tbuf.WriteString(strconv.Itoa(insertColumns.Kind))\n\tfor _, c := range insertColumns.Cols {\n\t\tbuf.WriteString(c)\n\t}\n\tbuf.WriteByte('.')\n\tfor _, c := range nzDefaults {\n\t\tbuf.WriteString(c)\n\t}\n\tkey := buf.String()\n\tstrmangle.PutBuffer(buf)\n\n\tpremiumSlotUpsertCacheMut.RLock()\n\tcache, cached := premiumSlotUpsertCache[key]\n\tpremiumSlotUpsertCacheMut.RUnlock()\n\n\tvar err error\n\n\tif !cached {\n\t\tinsert, ret := insertColumns.InsertColumnSet(\n\t\t\tpremiumSlotAllColumns,\n\t\t\tpremiumSlotColumnsWithDefault,\n\t\t\tpremiumSlotColumnsWithoutDefault,\n\t\t\tnzDefaults,\n\t\t)\n\t\tupdate := updateColumns.UpdateColumnSet(\n\t\t\tpremiumSlotAllColumns,\n\t\t\tpremiumSlotPrimaryKeyColumns,\n\t\t)\n\n\t\tif updateOnConflict && len(update) == 0 {\n\t\t\treturn errors.New(\"models: unable to upsert premium_slots, could not build update column list\")\n\t\t}\n\n\t\tconflict := conflictColumns\n\t\tif len(conflict) == 0 {\n\t\t\tconflict = make([]string, len(premiumSlotPrimaryKeyColumns))\n\t\t\tcopy(conflict, premiumSlotPrimaryKeyColumns)\n\t\t}\n\t\tcache.query = buildUpsertQueryPostgres(dialect, \"\\\"premium_slots\\\"\", updateOnConflict, ret, update, conflict, 
insert)\n\n\t\tcache.valueMapping, err = queries.BindMapping(premiumSlotType, premiumSlotMapping, insert)\n\t\tif err != nil {\n\t\t\treturn err\n\t\t}\n\t\tif len(ret) != 0 {\n\t\t\tcache.retMapping, err = queries.BindMapping(premiumSlotType, premiumSlotMapping, ret)\n\t\t\tif err != nil {\n\t\t\t\treturn err\n\t\t\t}\n\t\t}\n\t}\n\n\tvalue := reflect.Indirect(reflect.ValueOf(o))\n\tvals := queries.ValuesFromMapping(value, cache.valueMapping)\n\tvar returns []interface{}\n\tif len(cache.retMapping) != 0 {\n\t\treturns = queries.PtrsFromMapping(value, cache.retMapping)\n\t}\n\n\tif boil.IsDebug(ctx) {\n\t\twriter := boil.DebugWriterFrom(ctx)\n\t\tfmt.Fprintln(writer, cache.query)\n\t\tfmt.Fprintln(writer, vals)\n\t}\n\tif len(cache.retMapping) != 0 {\n\t\terr = exec.QueryRowContext(ctx, cache.query, vals...).Scan(returns...)\n\t\tif err == sql.ErrNoRows {\n\t\t\terr = nil // Postgres doesn't return anything when there's no update\n\t\t}\n\t} else {\n\t\t_, err = exec.ExecContext(ctx, cache.query, vals...)\n\t}\n\tif err != nil {\n\t\treturn errors.Wrap(err, \"models: unable to upsert premium_slots\")\n\t}\n\n\tif !cached {\n\t\tpremiumSlotUpsertCacheMut.Lock()\n\t\tpremiumSlotUpsertCache[key] = cache\n\t\tpremiumSlotUpsertCacheMut.Unlock()\n\t}\n\n\treturn nil\n}", "func (mc *MongoCollection) Upsert(selector interface{}, update interface{}) (num int, err error) {\n\tinfo, err := mc.Collection.Upsert(selector, update)\n\tif err != nil {\n\t\treturn 0, err\n\t}\n\treturn info.Updated, nil\n}" ]
[ "0.60016966", "0.5814329", "0.54932946", "0.5230759", "0.5167224", "0.50555146", "0.49800754", "0.49221444", "0.48955178", "0.48686874", "0.48431334", "0.47825623", "0.46803236", "0.46420425", "0.4600674", "0.45458597", "0.4509187", "0.44892007", "0.4457135", "0.4454905", "0.44404316", "0.44319963", "0.44312966", "0.440623", "0.4386149", "0.4385352", "0.43656585", "0.43567538", "0.42840338", "0.42289728", "0.42224526", "0.42199895", "0.4218402", "0.42144027", "0.42130512", "0.4201192", "0.41838226", "0.41604507", "0.4151199", "0.41501284", "0.41494724", "0.41430673", "0.41344085", "0.41166717", "0.40957448", "0.40941384", "0.40876657", "0.40861854", "0.4082072", "0.40798524", "0.40777326", "0.40767097", "0.40543377", "0.40485406", "0.40344825", "0.40195164", "0.4006709", "0.3993963", "0.39759666", "0.3968731", "0.39668933", "0.3951463", "0.3946125", "0.3945916", "0.39308363", "0.39233768", "0.3910559", "0.39077216", "0.39074963", "0.39010853", "0.38749078", "0.3873398", "0.38600332", "0.3854577", "0.38481027", "0.38480982", "0.38405326", "0.38286042", "0.38285324", "0.38259327", "0.38034025", "0.37962496", "0.37932867", "0.3790082", "0.37566075", "0.37558126", "0.3752778", "0.37432495", "0.37421677", "0.37407342", "0.3738683", "0.37378928", "0.3735802", "0.37357542", "0.37353373", "0.37351212", "0.37260765", "0.37250713", "0.3723189", "0.37227705" ]
0.7035956
0
Field function returns field value.
func (e UpsertEventRequest_OptionValidationError) Field() string { return e.field }
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "func GetFieldValue(v interface{}, field string) (r string) {\n\tvar immutable reflect.Value\n\timmutable = GetReflectValue(v)\n\tval := immutable.FieldByName(field)\n\tswitch val.Kind() {\n\tcase reflect.Int64, reflect.Int32, reflect.Int:\n\t\tr = fmt.Sprintf(\"%d\", val.Int())\n\tcase reflect.Float64, reflect.Float32:\n\t\tr = fmt.Sprintf(\"%.2f\", val.Float())\n\tdefault:\n\t\t// process time\n\t\tvi := val.Interface()\n\t\tif vc, ok := vi.(time.Time); ok {\n\t\t\tr = FormatTime(vc)\n\t\t\tbreak\n\t\t}\n\t\tr = fmt.Sprintf(\"%v\", val)\n\t}\n\treturn\n}", "func (f *field) Val() interface{} {\n\treturn f.v\n}", "func (f Fields) ValueForField(fieldName string) string {\n\treturn f.ValueForFieldOfType(fieldName, \"\")\n}", "func (v *ClassValue) field(s *scope, name string) Value {\n\tfield, ok := v.Fields[name]\n\tif !ok {\n\t\tpanic(fmt.Errorf(\"ClassValue %v did not contain field %v\", v.Type().Name(), name))\n\t}\n\treturn field\n}", "func (f *Field) Value() interface{} {\n\treturn f.value.Interface()\n}", "func (f *Fieldx) Value() interface{} {\n\treturn f.value.Interface()\n}", "func (i Item) GetField(name string) interface{} {\n\treturn getField(name, i.Payload)\n}", "func FieldValue(field *InputField) string {\n\treturn field.value\n}", "func (e RanparameterValueValidationError) Field() string { return e.field }", "func (i I)Field(r,c int, value string)string{\n return value\n}", "func (s *StructField) Field(name string) (*StructField, error) {\n\treturn Field(s.Value(), name)\n}", "func (entry *Entry) Field(name string) (value string, err error) {\n\tvalue, ok := entry.fields[name]\n\tif !ok {\n\t\terr = fmt.Errorf(\"field '%v' does not found in record %+v\", name, *entry)\n\t}\n\treturn\n}", "func (m *NodeMutation) Field(name string) (ent.Value, bool) {\n\tswitch name {\n\tcase node.FieldValue:\n\t\treturn m.Value()\n\t}\n\treturn nil, false\n}", "func (u *User) GetFieldValue(field *mapping.StructField) (interface{}, error) {\n\tswitch field.Index[0] {\n\tcase 0: // ID\n\t\treturn u.ID, nil\n\tcase 1: // Name\n\t\treturn u.Name, nil\n\tcase 2: // CreatedAt\n\t\treturn u.CreatedAt, nil\n\tcase 3: // CreatedAtIso\n\t\treturn u.CreatedAtIso, nil\n\tcase 5: // MotherID\n\t\treturn u.MotherID, nil\n\tcase 7: // FatherID\n\t\treturn u.FatherID, nil\n\t}\n\treturn nil, errors.Wrapf(mapping.ErrInvalidModelField, \"provided invalid field: '%s' for given model: User'\", field.Name())\n}", "func (m *NumberTokenMutation) Field(name string) (ent.Value, bool) {\n\tswitch name {\n\tcase numbertoken.FieldValue:\n\t\treturn m.Value()\n\t}\n\treturn nil, false\n}", "func (f *Field) Field(name string) *Field {\n\tfield, ok := f.FieldOk(name)\n\tif !ok {\n\t\tpanic(\"field not found\")\n\t}\n\n\treturn field\n}", "func (e GetInstanceRequestValidationError) Field() string { return e.field }", "func (e RanparameterItemValidationError) Field() string { return e.field }", "func (e ApplicationPubSubValidationError) Field() string { return e.field }", "func (res Result) GetField(fields ...string) interface{} {\n\tif len(fields) == 0 {\n\t\treturn res\n\t}\n\n\treturn res.get(fields)\n}", "func (t *Type) Field(i int) *Field", "func (m *CarRepairrecordMutation) Field(name string) (ent.Value, bool) {\n\tswitch name {\n\tcase carrepairrecord.FieldDatetime:\n\t\treturn m.Datetime()\n\tcase carrepairrecord.FieldRepairdetail:\n\t\treturn m.Repairdetail()\n\tcase carrepairrecord.FieldRepaircost:\n\t\treturn m.Repaircost()\n\tcase carrepairrecord.FieldCarmaintenance:\n\t\treturn m.Carmaintenance()\n\t}\n\treturn 
nil, false\n}", "func (b *box) getFieldValue(x, y int) int {\n\treturn b.values[x+y*3]\n}", "func (e GetEventByIDRequestValidationError) Field() string { return e.field }", "func (msg *Message) Field(fieldName string) *Field {\n\treturn msg.fieldByName[fieldName]\n}", "func (e GetRequestValidationError) Field() string { return e.field }", "func (e GetRequestValidationError) Field() string { return e.field }", "func (e GetInstanceResponseValidationError) Field() string { return e.field }", "func (e BitStringValidationError) Field() string { return e.field }", "func (e GetResponseValidationError) Field() string { return e.field }", "func (e GetApplicationPubSubRequestValidationError) Field() string { return e.field }", "func (e ResultValidationError) Field() string { return e.field }", "func (e GetEventByIDResponseValidationError) Field() string { return e.field }", "func (e RanparameterDefItemValidationError) Field() string { return e.field }", "func (e ArfcnValidationError) Field() string { return e.field }", "func (p *Pet) GetFieldValue(field *mapping.StructField) (interface{}, error) {\n\tswitch field.Index[0] {\n\tcase 0: // ID\n\t\treturn p.ID, nil\n\tcase 1: // Name\n\t\treturn p.Name, nil\n\tcase 3: // OwnerID\n\t\treturn p.OwnerID, nil\n\t}\n\treturn nil, errors.Wrapf(mapping.ErrInvalidModelField, \"provided invalid field: '%s' for given model: Pet'\", field.Name())\n}", "func (e RanparameterIdValidationError) Field() string { return e.field }", "func (e RetrieveResponseValidationError) Field() string { return e.field }", "func (m *RepairinvoiceMutation) Field(name string) (ent.Value, bool) {\n\tswitch name {\n\tcase repairinvoice.FieldBequipment:\n\t\treturn m.Bequipment()\n\tcase repairinvoice.FieldEmtell:\n\t\treturn m.Emtell()\n\tcase repairinvoice.FieldNum:\n\t\treturn m.Num()\n\t}\n\treturn nil, false\n}", "func (m *CleaningroomMutation) Field(name string) (ent.Value, bool) {\n\tswitch name {\n\tcase cleaningroom.FieldNote:\n\t\treturn m.Note()\n\tcase cleaningroom.FieldDateandstarttime:\n\t\treturn m.Dateandstarttime()\n\tcase cleaningroom.FieldPhonenumber:\n\t\treturn m.Phonenumber()\n\tcase cleaningroom.FieldNumofem:\n\t\treturn m.Numofem()\n\t}\n\treturn nil, false\n}", "func Field(name, from, reference string) (string, error) {\n\treturn makeRequest(\"field\", name, from, reference)\n}", "func (e GetMovableObjectRequestValidationError) Field() string { return e.field }", "func (e ResolveResponseValidationError) Field() string { return e.field }", "func (e PublishResponseValidationError) Field() string { return e.field }", "func (e GetMessageRequestValidationError) Field() string { return e.field }", "func (e GetMessageResponseValidationError) Field() string { return e.field }", "func (m *RepairinvoiceMutation) Field(name string) (ent.Value, bool) {\n\tswitch name {\n\tcase repairinvoice.FieldSymptomid:\n\t\treturn m.Symptomid()\n\tcase repairinvoice.FieldDeviceid:\n\t\treturn m.Deviceid()\n\tcase repairinvoice.FieldUserid:\n\t\treturn m.Userid()\n\tcase repairinvoice.FieldStatusrepairid:\n\t\treturn m.Statusrepairid()\n\t}\n\treturn nil, false\n}", "func (e SimpleRequestValidationError) Field() string { return e.field }", "func (e CacheValidationError) Field() string { return e.field }", "func (e PciValidationError) Field() string { return e.field }", "func (e ChannelPayRequestValidationError) Field() string { return e.field }", "func (e GetMovableObjectResponseValidationError) Field() string { return e.field }", "func (e RetrieveRequestValidationError) Field() string { 
return e.field }", "func (m *ExchangeMutation) Field(name string) (ent.Value, bool) {\n\tswitch name {\n\tcase exchange.FieldCode:\n\t\treturn m.Code()\n\tcase exchange.FieldName:\n\t\treturn m.Name()\n\t}\n\treturn nil, false\n}", "func (e PublishRequestValidationError) Field() string { return e.field }", "func (m *PetruleMutation) Field(name string) (ent.Value, bool) {\n\tswitch name {\n\tcase petrule.FieldPetrule:\n\t\treturn m.Petrule()\n\t}\n\treturn nil, false\n}", "func (e GitopsCFValidationError) Field() string { return e.field }", "func (e SimpleResponseValidationError) Field() string { return e.field }", "func (e ChannelPayResponseValidationError) Field() string { return e.field }", "func (f *Field) Get(l *Location) (string, error) {\n\tif l.Comp == -1 {\n\t\treturn string(f.Value), nil\n\t}\n\tcomp, err := f.Component(l.Comp)\n\tif err != nil {\n\t\treturn \"\", err\n\t}\n\treturn comp.Get(l)\n}", "func (m *RepairingMutation) Field(name string) (ent.Value, bool) {\n\tswitch name {\n\tcase repairing.FieldRepairpart:\n\t\treturn m.Repairpart()\n\t}\n\treturn nil, false\n}", "func (e RanfunctionNameValidationError) Field() string { return e.field }", "func (m *BillMutation) Field(name string) (ent.Value, bool) {\n\tswitch name {\n\tcase bill.FieldPrice:\n\t\treturn m.Price()\n\tcase bill.FieldTime:\n\t\treturn m.Time()\n\t}\n\treturn nil, false\n}", "func (m *EventRSVPMutation) Field(name string) (ent.Value, bool) {\n\treturn nil, false\n}", "func Field(v interface{}, name string) (*Fieldx, bool) {\n\treturn New(v).Field(name)\n}", "func (e GetStreamRequestValidationError) Field() string { return e.field }", "func (e RdsValidationError) Field() string { return e.field }", "func (f *TagField) Value() string {\n\treturn f.value\n}", "func (m *LeaseMutation) Field(name string) (ent.Value, bool) {\n\tswitch name {\n\tcase lease.FieldAddedtime:\n\t\treturn m.Addedtime()\n\tcase lease.FieldTenant:\n\t\treturn m.Tenant()\n\tcase lease.FieldNumbtenant:\n\t\treturn m.Numbtenant()\n\tcase lease.FieldIdtenant:\n\t\treturn m.Idtenant()\n\tcase lease.FieldAgetenant:\n\t\treturn m.Agetenant()\n\t}\n\treturn nil, false\n}", "func (e RetrieveCurrentRequestValidationError) Field() string { return e.field }", "func (fn AdapterFunc) Field(fieldpath []string) (string, bool) {\n\treturn fn(fieldpath)\n}", "func (e EarfcnValidationError) Field() string { return e.field }", "func (e Response_DataValidationError) Field() string { return e.field }", "func (e ScopedRdsValidationError) Field() string { return e.field }", "func (e ResolveRequestValidationError) Field() string { return e.field }", "func (e PaymentInputValidationError) Field() string { return e.field }", "func (m *PatientrecordMutation) Field(name string) (ent.Value, bool) {\n\tswitch name {\n\tcase patientrecord.FieldName:\n\t\treturn m.Name()\n\t}\n\treturn nil, false\n}", "func (e BatchGetResponseValidationError) Field() string { return e.field }", "func (i *Item) GetValue(field string) string {\n\tif i == nil || len(i.Fields) == 0 {\n\t\treturn \"\"\n\t}\n\n\tsectionFilter := false\n\tsectionLabel := \"\"\n\tfieldLabel := field\n\tif strings.Contains(field, \".\") {\n\t\tparts := strings.Split(field, \".\")\n\n\t\t// Test to make sure the . 
isn't the last character\n\t\tif len(parts) == 2 {\n\t\t\tsectionFilter = true\n\t\t\tsectionLabel = parts[0]\n\t\t\tfieldLabel = parts[1]\n\t\t}\n\t}\n\n\tfor _, f := range i.Fields {\n\t\tif sectionFilter {\n\t\t\tif f.Section != nil {\n\t\t\t\tif sectionLabel != i.SectionLabelForID(f.Section.ID) {\n\t\t\t\t\tcontinue\n\t\t\t\t}\n\t\t\t}\n\t\t}\n\n\t\tif fieldLabel == f.Label {\n\t\t\treturn f.Value\n\t\t}\n\t}\n\n\treturn \"\"\n}", "func (m *RoomInfoMutation) Field(name string) (ent.Value, bool) {\n\tswitch name {\n\tcase roominfo.FieldInfo:\n\t\treturn m.Info()\n\t}\n\treturn nil, false\n}", "func (m *TokenMutation) Field(name string) (ent.Value, bool) {\n\tswitch name {\n\tcase token.FieldCreatedAt:\n\t\treturn m.CreatedAt()\n\tcase token.FieldUpdatedAt:\n\t\treturn m.UpdatedAt()\n\tcase token.FieldValue:\n\t\treturn m.Value()\n\t}\n\treturn nil, false\n}", "func (m *ResourceMutation) Field(name string) (ent.Value, bool) {\n\tswitch name {\n\tcase resource.FieldName:\n\t\treturn m.Name()\n\tcase resource.FieldType:\n\t\treturn m.GetType()\n\t}\n\treturn nil, false\n}", "func (e MovableObjectValidationError) Field() string { return e.field }", "func (m *CardMutation) Field(name string) (ent.Value, bool) {\n\tswitch name {\n\tcase card.FieldCreateTime:\n\t\treturn m.CreateTime()\n\tcase card.FieldUpdateTime:\n\t\treturn m.UpdateTime()\n\tcase card.FieldSuit:\n\t\treturn m.Suit()\n\tcase card.FieldValue:\n\t\treturn m.Value()\n\t}\n\treturn nil, false\n}", "func (m *EventMutation) Field(name string) (ent.Value, bool) {\n\tswitch name {\n\tcase event.FieldName:\n\t\treturn m.Name()\n\t}\n\treturn nil, false\n}", "func (m *BillMutation) Field(name string) (ent.Value, bool) {\n\tswitch name {\n\tcase bill.FieldQuantity:\n\t\treturn m.Quantity()\n\tcase bill.FieldAddedTime:\n\t\treturn m.AddedTime()\n\t}\n\treturn nil, false\n}", "func (m *StreetMutation) Field(name string) (ent.Value, bool) {\n\tswitch name {\n\tcase street.FieldName:\n\t\treturn m.Name()\n\t}\n\treturn nil, false\n}", "func (m *LengthtimeMutation) Field(name string) (ent.Value, bool) {\n\tswitch name {\n\tcase lengthtime.FieldLengthtime:\n\t\treturn m.Lengthtime()\n\t}\n\treturn nil, false\n}", "func (e AssessmentResultValidationError) Field() string { return e.field }", "func (s UserSet) FieldGet(field models.FieldName) *models.FieldInfo {\n\tres := s.Collection().Call(\"FieldGet\", field)\n\tresTyped, _ := res.(*models.FieldInfo)\n\treturn resTyped\n}", "func (e GetUserResponseValidationError) Field() string { return e.field }", "func (m *PatientrightsMutation) Field(name string) (ent.Value, bool) {\n\tswitch name {\n\tcase patientrights.FieldPermissionDate:\n\t\treturn m.PermissionDate()\n\t}\n\treturn nil, false\n}", "func (e GetStreamResponseValidationError) Field() string { return e.field }", "func (m *EquipmentrentalMutation) Field(name string) (ent.Value, bool) {\n\tswitch name {\n\tcase equipmentrental.FieldRENTALAMOUNT:\n\t\treturn m.RENTALAMOUNT()\n\tcase equipmentrental.FieldRENTALDATE:\n\t\treturn m.RENTALDATE()\n\tcase equipmentrental.FieldRETURNDATE:\n\t\treturn m.RETURNDATE()\n\t}\n\treturn nil, false\n}", "func (f *FieldHandler) Value(initZero bool) reflect.Value {\n\treturn f.field.reflectValueGetter(f.expr.ptr, initZero)\n}", "func (m *PurposeMutation) Field(name string) (ent.Value, bool) {\n\tswitch name {\n\tcase purpose.FieldObjective:\n\t\treturn m.Objective()\n\t}\n\treturn nil, false\n}", "func (e ApplicationPubSubsValidationError) Field() string { return e.field }", "func (f Unstructured) 
Field(field string) Fragment {\n\tif f.fields != nil {\n\t\treturn f.fields[field]\n\t}\n\treturn nil\n}", "func (e BodyResponseValidationError) Field() string { return e.field }", "func (m *CarMutation) Field(name string) (ent.Value, bool) {\n\tswitch name {\n\tcase car.FieldCreateTime:\n\t\treturn m.CreateTime()\n\tcase car.FieldUpdateTime:\n\t\treturn m.UpdateTime()\n\tcase car.FieldModel:\n\t\treturn m.Model()\n\tcase car.FieldRegisteredAt:\n\t\treturn m.RegisteredAt()\n\t}\n\treturn nil, false\n}", "func (m *CardMutation) Field(name string) (ent.Value, bool) {\n\tswitch name {\n\tcase card.FieldNumber:\n\t\treturn m.Number()\n\tcase card.FieldName:\n\t\treturn m.Name()\n\tcase card.FieldOwnerID:\n\t\treturn m.OwnerID()\n\t}\n\treturn nil, false\n}" ]
[ "0.71079886", "0.705458", "0.70306563", "0.70252305", "0.6945119", "0.69039124", "0.689789", "0.68854237", "0.68611896", "0.68137765", "0.6811531", "0.67632294", "0.6716657", "0.67018616", "0.66822076", "0.6671346", "0.66659707", "0.6661343", "0.66608155", "0.6660421", "0.665608", "0.6647752", "0.66360617", "0.6625801", "0.6617159", "0.66153616", "0.66153616", "0.661111", "0.6608895", "0.66083837", "0.6604208", "0.66008335", "0.65927887", "0.6587402", "0.65803015", "0.65671533", "0.6567071", "0.6564914", "0.65632343", "0.65630984", "0.654184", "0.6536053", "0.6530546", "0.6530526", "0.6528864", "0.65260595", "0.65179527", "0.6516745", "0.6516154", "0.6510159", "0.6510078", "0.65042776", "0.6501439", "0.6499975", "0.64988506", "0.649665", "0.6496221", "0.64947623", "0.649354", "0.6489089", "0.6488793", "0.64882225", "0.64859617", "0.6483642", "0.6479889", "0.64790434", "0.6472379", "0.6465228", "0.6459204", "0.6457627", "0.6452723", "0.64507645", "0.64495903", "0.64487314", "0.6448028", "0.64479464", "0.64474", "0.64456683", "0.64455897", "0.6444573", "0.64437336", "0.6443306", "0.6441888", "0.6441613", "0.6441039", "0.6439085", "0.6438874", "0.6434375", "0.64315784", "0.6430702", "0.6429934", "0.64209116", "0.6417538", "0.64174324", "0.6417134", "0.6411201", "0.64086837", "0.6406251", "0.6405251", "0.6404929", "0.64009386" ]
0.0
-1
Reason function returns reason value.
func (e UpsertEventRequest_OptionValidationError) Reason() string { return e.reason }
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "func GetReason(from Getter, t string) string {\n\tif c := Get(from, t); c != nil {\n\t\treturn c.Reason\n\t}\n\treturn \"\"\n}", "func GetReason(message report.IMessage) int32 {\n\tswitch message.MessageType() {\n\tcase \"+RSP\", \"+BSP\":\n\t\treturn getLocationReason(message)\n\tcase \"+EVT\", \"+BVT\":\n\t\treturn getEventCode(message)\n\n\t}\n\treturn int32(6)\n}", "func GetReason(message report.IMessage) int32 {\n\tswitch message.MessageType() {\n\tcase \"+RSP\", \"+BSP\":\n\t\treturn getLocationReason(message)\n\tcase \"+EVT\", \"+BVT\":\n\t\treturn getEventCode(message)\n\n\t}\n\treturn int32(6)\n}", "func (b *Base) GetReason() string {\n\treturn b.Reason\n}", "func (o ValidationOutput) Reason() pulumi.StringPtrOutput {\n\treturn o.ApplyT(func(v Validation) *string { return v.Reason }).(pulumi.StringPtrOutput)\n}", "func (s *Subscription) GetReason() string {\n\tif s == nil || s.Reason == nil {\n\t\treturn \"\"\n\t}\n\treturn *s.Reason\n}", "func GetReason(message report.IMessage) int32 {\n\tidt, found := message.GetValue(fields.DeviceType)\n\tif !found {\n\t\treturn 6 //periodical\n\t}\n\n\tdeviceType, valid := idt.(byte)\n\tif !valid {\n\t\treturn 6 //periodical\n\t}\n\n\tswitch deviceType {\n\tcase devicetypes.GV320:\n\t\treturn gv300.GetReason(message)\n\n\tcase devicetypes.GV55, devicetypes.GV55N:\n\t\treturn gv55.GetReason(message)\n\n\tcase devicetypes.GV55Lite, devicetypes.GV55NLite:\n\t\treturn gv55.GetReasonLite(message)\n\n\tcase devicetypes.GV75, devicetypes.GV75W:\n\t\treturn gv75.GetReason(message)\n\n\tcase devicetypes.GV55W:\n\t\treturn gv55w.GetReason(message)\n\n\tcase devicetypes.GV600W:\n\t\treturn gv600.GetReason(message)\n\tcase devicetypes.GV300W:\n\t\treturn gv300w.GetReason(message)\n\tdefault:\n\t\treturn gv55.GetReason(message)\n\t}\n}", "func (e MessageDValidationError) Reason() string { return e.reason }", "func (o LienOutput) Reason() pulumi.StringOutput {\n\treturn o.ApplyT(func(v *Lien) pulumi.StringOutput { return v.Reason }).(pulumi.StringOutput)\n}", "func (e BitStringValidationError) Reason() string { return e.reason }", "func (o JobConditionOutput) Reason() pulumi.StringPtrOutput {\n\treturn o.ApplyT(func(v JobCondition) *string { return v.Reason }).(pulumi.StringPtrOutput)\n}", "func Reason(v string) predicate.ProfileUKM {\n\treturn predicate.ProfileUKM(func(s *sql.Selector) {\n\t\ts.Where(sql.EQ(s.C(FieldReason), v))\n\t})\n}", "func (e MessageFValidationError) Reason() string { return e.reason }", "func (o ValidationPatchOutput) Reason() pulumi.StringPtrOutput {\n\treturn o.ApplyT(func(v ValidationPatch) *string { return v.Reason }).(pulumi.StringPtrOutput)\n}", "func (e ActiveHealthCheckValidationError) Reason() string { return e.reason }", "func (o *SecurityProblemEvent) GetReason() string {\n\tif o == nil || o.Reason == nil {\n\t\tvar ret string\n\t\treturn ret\n\t}\n\treturn *o.Reason\n}", "func (e EutracgiValidationError) Reason() string { return e.reason }", "func (resp *Response) Reason() string {\n\treturn resp.Status\n}", "func (n *Notification) GetReason() string {\n\tif n == nil || n.Reason == nil {\n\t\treturn \"\"\n\t}\n\treturn *n.Reason\n}", "func (s *SessionTrackerV1) GetReason() string {\n\treturn s.Spec.Reason\n}", "func (e MessageEValidationError) Reason() string { return e.reason }", "func (e RequirementRuleValidationError) Reason() string { return e.reason }", "func Reason(err error) string {\n\tif err == nil {\n\t\treturn \"\"\n\t}\n\tif reasoner, ok := err.(Reasoner); ok {\n\t\treturn reasoner.Reason()\n\t}\n\treturn 
\"\"\n}", "func (o MachineInstanceStatusConditionsOutput) Reason() pulumi.StringOutput {\n\treturn o.ApplyT(func(v MachineInstanceStatusConditions) string { return v.Reason }).(pulumi.StringOutput)\n}", "func (e NrtValidationError) Reason() string { return e.reason }", "func (o BuildStatusOutput) Reason() pulumi.StringPtrOutput {\n\treturn o.ApplyT(func(v BuildStatus) *string { return v.Reason }).(pulumi.StringPtrOutput)\n}", "func (e GetMessageResponseValidationError) Reason() string { return e.reason }", "func (o ApplicationStatusConditionsOutput) Reason() pulumi.StringOutput {\n\treturn o.ApplyT(func(v ApplicationStatusConditions) string { return v.Reason }).(pulumi.StringOutput)\n}", "func (e PassiveHealthCheckValidationError) Reason() string { return e.reason }", "func (e CardValidationError) Reason() string { return e.reason }", "func (e StatsdValidationError) Reason() string { return e.reason }", "func (e PciValidationError) Reason() string { return e.reason }", "func (o ApplicationStatusWorkflowStepsOutput) Reason() pulumi.StringPtrOutput {\n\treturn o.ApplyT(func(v ApplicationStatusWorkflowSteps) *string { return v.Reason }).(pulumi.StringPtrOutput)\n}", "func (o *AccessRequestData) GetReason() string {\n\tif o == nil || o.Reason == nil {\n\t\tvar ret string\n\t\treturn ret\n\t}\n\treturn *o.Reason\n}", "func (e LanguageValidationError) Reason() string { return e.reason }", "func (e CreditValidationError) Reason() string { return e.reason }", "func (e PaymentValidationError) Reason() string { return e.reason }", "func (e ResponseValidationError) Reason() string { return e.reason }", "func (e RdsValidationError) Reason() string { return e.reason }", "func (e CardHolderValidationError) Reason() string { return e.reason }", "func (e ActionValidationError) Reason() string { return e.reason }", "func (e SimpleResponseValidationError) Reason() string { return e.reason }", "func (e StatusResponseValidationError) Reason() string { return e.reason }", "func (o *V0037Node) GetReason() string {\n\tif o == nil || o.Reason == nil {\n\t\tvar ret string\n\t\treturn ret\n\t}\n\treturn *o.Reason\n}", "func (e ChannelPayRequestValidationError) Reason() string { return e.reason }", "func (e ChannelPayResponseValidationError) Reason() string { return e.reason }", "func (e RicControlMessagePriorityValidationError) Reason() string { return e.reason }", "func (e MaxPciValidationError) Reason() string { return e.reason }", "func (e LivenessResponseValidationError) Reason() string { return e.reason }", "func (e MaxPlmnValidationError) Reason() string { return e.reason }", "func (e SimpleRequestValidationError) Reason() string { return e.reason }", "func (e MessageCValidationError) Reason() string { return e.reason }", "func (se *StatusError) Reason() string {\n\treturn se.message\n}", "func (o *DeploymentsCondition) GetReason() string {\n\tif o == nil || o.Reason == nil {\n\t\tvar ret string\n\t\treturn ret\n\t}\n\treturn *o.Reason\n}", "func (e SkillValidationError) Reason() string { return e.reason }", "func (e GetDisscusRespValidationError) Reason() string { return e.reason }", "func (o BuildStatusPtrOutput) Reason() pulumi.StringPtrOutput {\n\treturn o.ApplyT(func(v *BuildStatus) *string {\n\t\tif v == nil {\n\t\t\treturn nil\n\t\t}\n\t\treturn v.Reason\n\t}).(pulumi.StringPtrOutput)\n}", "func (c *ContainerStatusResolver) Reason() *string {\n\treturn c.reason\n}", "func (e EarfcnValidationError) Reason() string { return e.reason }", "func (e CalculateComplianceRequestValidationError) Reason() string 
{ return e.reason }", "func (_this *CrashReportBody) Reason() *string {\n\tvar ret *string\n\tvalue := _this.Value_JS.Get(\"reason\")\n\tif value.Type() != js.TypeNull && value.Type() != js.TypeUndefined {\n\t\t__tmp := (value).String()\n\t\tret = &__tmp\n\t}\n\treturn ret\n}", "func (e HealthCheck_PayloadValidationError) Reason() string { return e.reason }", "func (e RetrieveMyCardsResponseValidationError) Reason() string { return e.reason }", "func (e CommonResponseValidationError) Reason() string { return e.reason }", "func (e GetMessageRequestValidationError) Reason() string { return e.reason }", "func (o StorageClusterStatusConditionsOutput) Reason() pulumi.StringPtrOutput {\n\treturn o.ApplyT(func(v StorageClusterStatusConditions) *string { return v.Reason }).(pulumi.StringPtrOutput)\n}", "func (e StateMachineResponseValidationError) Reason() string { return e.reason }", "func (e ArfcnValidationError) Reason() string { return e.reason }", "func (e NetworkPolicyValidationError) Reason() string { return e.reason }", "func (o *DataPlaneClusterUpdateStatusRequestConditions) GetReason() string {\n\tif o == nil || o.Reason == nil {\n\t\tvar ret string\n\t\treturn ret\n\t}\n\treturn *o.Reason\n}", "func (e MetricValidationError) Reason() string { return e.reason }", "func (o BuildRunStatusOutput) Reason() pulumi.StringPtrOutput {\n\treturn o.ApplyT(func(v BuildRunStatus) *string { return v.Reason }).(pulumi.StringPtrOutput)\n}", "func (e RecoverableError) Reason() string {\n\treturn e.reason\n}", "func (e MaxofMessageProtocolTestsValidationError) Reason() string { return e.reason }", "func (e ChannelNotifyResponseValidationError) Reason() string { return e.reason }", "func (e ResultValidationError) Reason() string { return e.reason }", "func (e TestSpecificationValidationError) Reason() string { return e.reason }", "func (e NonRecoverableError) Reason() string {\n\treturn e.reason\n}", "func (o JobStatusErrorOutput) Reason() pulumi.StringPtrOutput {\n\treturn o.ApplyT(func(v JobStatusError) *string { return v.Reason }).(pulumi.StringPtrOutput)\n}", "func (a Acknowledgement) Reason() error {\n\tswitch {\n\tcase a.State == ACK:\n\t\treturn nil\n\tcase a.State == NACK:\n\t\treturn errors.New(string(a.Message))\n\tdefault:\n\t\treturn errors.New(\"unknown acknowledgement status\")\n\t}\n}", "func (e UpdateMessageResponseValidationError) Reason() string { return e.reason }", "func (e WordValidationError) Reason() string { return e.reason }", "func (e GetDisscusReqValidationError) Reason() string { return e.reason }", "func (e CreatMessageResponseValidationError) Reason() string { return e.reason }", "func (o HorizontalPodAutoscalerConditionPatchOutput) Reason() pulumi.StringPtrOutput {\n\treturn o.ApplyT(func(v HorizontalPodAutoscalerConditionPatch) *string { return v.Reason }).(pulumi.StringPtrOutput)\n}", "func (o HorizontalPodAutoscalerConditionPatchOutput) Reason() pulumi.StringPtrOutput {\n\treturn o.ApplyT(func(v HorizontalPodAutoscalerConditionPatch) *string { return v.Reason }).(pulumi.StringPtrOutput)\n}", "func (e MetricImplementationValidationError) Reason() string { return e.reason }", "func (e CiliumCFValidationError) Reason() string { return e.reason }", "func (e FilterStateRuleValidationError) Reason() string { return e.reason }", "func (e CreateDisscusRespValidationError) Reason() string { return e.reason }", "func (e HealthCheckValidationError) Reason() string { return e.reason }", "func (e HealthCheckValidationError) Reason() string { return e.reason }", "func (e 
TwoOneofsValidationError) Reason() string { return e.reason }", "func (e AdminValidationError) Reason() string { return e.reason }", "func (o HorizontalPodAutoscalerConditionOutput) Reason() pulumi.StringPtrOutput {\n\treturn o.ApplyT(func(v HorizontalPodAutoscalerCondition) *string { return v.Reason }).(pulumi.StringPtrOutput)\n}", "func (o HorizontalPodAutoscalerConditionOutput) Reason() pulumi.StringPtrOutput {\n\treturn o.ApplyT(func(v HorizontalPodAutoscalerCondition) *string { return v.Reason }).(pulumi.StringPtrOutput)\n}", "func (e LivenessRequestValidationError) Reason() string { return e.reason }", "func (r *ReportStoryRequest) GetReason() (value ReportReasonClass) {\n\tif r == nil {\n\t\treturn\n\t}\n\treturn r.Reason\n}", "func (e AssessmentResultValidationError) Reason() string { return e.reason }", "func (e L7NetworkPolicyRuleValidationError) Reason() string { return e.reason }", "func (e NrarfcnValidationError) Reason() string { return e.reason }" ]
[ "0.78512263", "0.7759013", "0.7759013", "0.758723", "0.74332446", "0.74091107", "0.740494", "0.73673135", "0.73432285", "0.7330937", "0.7329657", "0.73138005", "0.72980094", "0.7293151", "0.72837216", "0.7275913", "0.7252345", "0.7230593", "0.72234565", "0.7222608", "0.7196587", "0.7186926", "0.7177811", "0.71720684", "0.71702856", "0.7168882", "0.7168033", "0.71623784", "0.7160162", "0.7157901", "0.7156796", "0.71499187", "0.71483266", "0.71435404", "0.7138927", "0.7134093", "0.7131485", "0.71212435", "0.7113703", "0.71134007", "0.7110416", "0.71102226", "0.71073544", "0.71044487", "0.7097571", "0.709562", "0.70931906", "0.7092116", "0.7085098", "0.70789874", "0.7077606", "0.707535", "0.7071573", "0.706842", "0.7067343", "0.70658314", "0.7065663", "0.70604813", "0.70554", "0.70413375", "0.7038985", "0.7036392", "0.70291436", "0.70268923", "0.7026706", "0.70261866", "0.7018986", "0.7011388", "0.70111495", "0.7009085", "0.7005406", "0.70025146", "0.7000965", "0.69991565", "0.6995616", "0.6992607", "0.6992276", "0.69910586", "0.6989737", "0.69873315", "0.6984515", "0.6983248", "0.6979003", "0.6976954", "0.69759", "0.69759", "0.6974406", "0.69741553", "0.6972589", "0.69723344", "0.69695055", "0.69695055", "0.69690573", "0.69686645", "0.69659555", "0.69659555", "0.69656986", "0.69630307", "0.69612694", "0.69515", "0.69511986" ]
0.0
-1
Cause function returns cause value.
func (e UpsertEventRequest_OptionValidationError) Cause() error { return e.cause }
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "func Cause(err error) error {\n\tswitch err.(type) {\n\tcase Causable:\n\t\treturn err.(Causable).Cause()\n\t}\n\treturn nil\n}", "func (e errWithCause) Cause() error {\n\treturn e.cause\n}", "func Cause(e error) error {\n\tswitch e := e.(type) {\n\tcase *wrap:\n\t\treturn e.Cause()\n\tcase UserError:\n\t\treturn e.Cause()\n\tdefault:\n\t\treturn e\n\t}\n}", "func (e *Error) Cause() error {\n\treturn e.Unwrap()\n}", "func (e *wrap) Cause() error {\n\treturn e.cause\n}", "func Cause(err error) error {\n\treturn errors.Cause(err)\n}", "func Cause(err error) error {\n\treturn errors.Cause(err)\n}", "func Cause(err error) error {\n\treturn errors.Cause(err)\n}", "func Cause(err error) error {\n\treturn errors.Cause(err)\n}", "func Cause(err error) error {\n\treturn errors.Cause(err)\n}", "func Cause(err error) error {\n\treturn errors.Cause(err)\n}", "func Cause(err error) error {\n\tif err == nil {\n\t\treturn nil\n\t}\n\tif e, ok := err.(iCause); ok {\n\t\treturn e.Cause()\n\t}\n\tif e, ok := err.(iNext); ok {\n\t\treturn Cause(e.Next())\n\t}\n\tif e, ok := err.(iUnwrap); ok {\n\t\treturn Cause(e.Unwrap())\n\t}\n\treturn err\n}", "func (e *Error) Cause() error {\n\treturn e.err\n}", "func (e *errorT) Cause() error {\n\treturn e.err\n}", "func (s *Error) Cause() error {\n\treturn s.underlying\n}", "func (e *Error) Cause() error {\n\treturn e.Err\n}", "func (ec Error) Cause() error {\n\treturn ec.error\n}", "func Cause(err error) error {\n\tif err, ok := err.(*wrappedError); ok {\n\t\treturn err.Cause()\n\t}\n\treturn err\n}", "func (e *Err) Cause() error {\n\treturn e.cause\n}", "func Cause(err error) error {\n\ttype causer interface {\n\t\tCause() error\n\t}\n\n\tfor err != nil {\n\t\te, ok := err.(causer)\n\t\tif !ok {\n\t\t\treturn err\n\t\t}\n\t\terr = e.Cause()\n\t}\n\treturn nil\n}", "func (e Error) Cause() error {\n\treturn e.cause\n}", "func (e *RunError) Cause() error {\n\tif e.Inner != nil {\n\t\treturn e.Inner\n\t}\n\treturn e\n}", "func (e *wrappedError) Cause() error {\n\tif e.previous == nil {\n\t\treturn e\n\t}\n\tswitch err := e.previous.(type) {\n\tcase *wrappedError:\n\t\treturn err.Cause()\n\tdefault:\n\t\treturn err\n\t}\n}", "func Cause(err error) error {\n\tvar (\n\t\tcauser Causer\n\t\tok bool\n\t)\n\tfor err != nil {\n\t\tcauser, ok = err.(Causer)\n\t\tif !ok {\n\t\t\tbreak\n\t\t}\n\t\terr = causer.Cause()\n\t}\n\treturn err\n}", "func (e *OpError) Cause() error {\n\treturn e.Err\n}", "func (err *gooseError) Cause() error {\n\treturn err.cause\n}", "func (e *detailedError) Cause() error {\n\treturn e.cause\n}", "func (err *ExitError) Cause() error {\n\treturn err.Err\n}", "func (ce *ClientError) Cause() error {\n\treturn ce.err\n}", "func Cause(err error) error {\n\tif w, ok := err.(*Wrapped); ok {\n\t\t// if root level error\n\t\tif len(w.Errors) > 0 {\n\t\t\treturn w.Errors[0]\n\t\t}\n\t\t// already extracted error\n\t\treturn w\n\t}\n\treturn err\n}", "func Cause(err error) (error, bool) { // nolint: golint, staticcheck, stylecheck\n\terrWithContext, ok := err.(ContextError)\n\tif !ok {\n\t\treturn nil, false\n\t}\n\treturn errWithContext.Cause(), true\n}", "func Cause(err error) error {\n\tfor err != nil {\n\t\te, ok := err.(causer)\n\t\tif !ok {\n\t\t\tbreak\n\t\t}\n\t\terr = e.Cause()\n\t}\n\treturn err\n}", "func Cause(err error) error {\n\tfor err != nil {\n\t\tcause, ok := err.(causer)\n\t\tif !ok {\n\t\t\tbreak\n\t\t}\n\t\terr = cause.Cause()\n\t}\n\treturn err\n}", "func (e UnencodableValue) Cause() error {\n\treturn e.Err\n}", "func Cause(err error) 
error {\n\ttype wrapper interface {\n\t\tUnwrap() error\n\t}\n\tfor err != nil {\n\t\tcause, ok := err.(wrapper)\n\t\tif !ok {\n\t\t\tbreak\n\t\t}\n\t\terr = cause.Unwrap()\n\t}\n\treturn err\n}", "func (w *pipeError) Cause() error { return errors.Cause(w.error) }", "func (e *opaqueWrapper) Cause() error { return e.cause }", "func (e *opaqueWrapper) Cause() error { return e.cause }", "func (e ResolveRequestValidationError) Cause() error { return e.cause }", "func Cause(err error) error {\n\tfor {\n\t\tuerr := Unwrap(err)\n\t\tif uerr == nil {\n\t\t\treturn err\n\t\t}\n\t\terr = uerr\n\t}\n}", "func Cause(err error) error {\n\tfor {\n\t\tif e, ok := err.(errorCause); ok {\n\t\t\tif cause := e.Cause(); cause != nil {\n\t\t\t\terr = cause\n\t\t\t\tcontinue\n\t\t\t}\n\t\t}\n\t\treturn err\n\t}\n}", "func (e InternalUpstreamTransportValidationError) Cause() error { return e.cause }", "func (e EutracgiValidationError) Cause() error { return e.cause }", "func (w *withCode) Cause() error { return w.cause }", "func (e UpsertEventRequestValidationError) Cause() error { return e.cause }", "func (e PciValidationError) Cause() error { return e.cause }", "func (e NoOneofsValidationError) Cause() error { return e.cause }", "func (e SimpleRequestValidationError) Cause() error { return e.cause }", "func Cause(err error) error {\n\tfor err != nil {\n\t\tmrpErr, ok := err.(Error)\n\t\tif !ok {\n\t\t\tbreak\n\t\t}\n\t\terr = gErrors.Cause(mrpErr.originalError)\n\t}\n\treturn err\n}", "func (e *withDomain) Cause() error { return e.cause }", "func (e LoggingValidationError) Cause() error { return e.cause }", "func (e CiliumCFValidationError) Cause() error { return e.cause }", "func (e AssessmentResultValidationError) Cause() error { return e.cause }", "func (e LoggingCFValidationError) Cause() error { return e.cause }", "func Cause(err error) error {\n\tfor err != nil {\n\t\tunwraped := errors.Unwrap(err)\n\t\tif unwraped == nil {\n\t\t\treturn err\n\t\t}\n\t\terr = unwraped\n\t}\n\treturn err\n}", "func (e NrtValidationError) Cause() error { return e.cause }", "func (e ResolveResponseValidationError) Cause() error { return e.cause }", "func (e StateChangeValidationError) Cause() error { return e.cause }", "func (e SXGValidationError) Cause() error { return e.cause }", "func (e EutracellIdentityValidationError) Cause() error { return e.cause }", "func (e WorkflowComponentValidationError) Cause() error { return e.cause }", "func (e MessageFValidationError) Cause() error { return e.cause }", "func (e EarfcnValidationError) Cause() error { return e.cause }", "func (e ActiveHealthCheckValidationError) Cause() error { return e.cause }", "func Cause(e interface{}) ECode {\n\tif e == nil {\n\t\treturn &ecode{code: 0}\n\t}\n\tif str, ok := e.(string); ok {\n\t\treturn &ecode{code: 500, message: str}\n\t}\n\terr, ok := e.(error)\n\tif !ok {\n\t\treturn &ecode{code: 500, message: reflect.TypeOf(e).Name()}\n\t}\n\tec, ok := errors.Cause(err).(ECode)\n\tif ok {\n\t\treturn ec\n\t}\n\treturn &ecode{code: 500, message: err.Error()}\n}", "func (e UpstreamValidationError) Cause() error { return e.cause }", "func (e UpstreamValidationError) Cause() error { return e.cause }", "func (e TransactionValidationError) Cause() error { return e.cause }", "func (e MessageCValidationError) Cause() error { return e.cause }", "func WithCause(err, cause error) error {\n\treturn errWithCause{\n\t\terror: err,\n\t\tcause: cause,\n\t}\n}", "func (e ActionValidationError) Cause() error { return e.cause }", "func (e 
AssessEvidenceRequestValidationError) Cause() error { return e.cause }", "func (e Upstream_TimeoutValidationError) Cause() error { return e.cause }", "func (e BootstrapValidationError) Cause() error { return e.cause }", "func (e TwoValidOneofsValidationError) Cause() error { return e.cause }", "func (e RdsValidationError) Cause() error { return e.cause }", "func (e MaxPciValidationError) Cause() error { return e.cause }", "func (e AdminValidationError) Cause() error { return e.cause }", "func (e RequirementRuleValidationError) Cause() error { return e.cause }", "func (e ResultValidationError) Cause() error { return e.cause }", "func (e InternalUpstreamTransport_MetadataValueSourceValidationError) Cause() error { return e.cause }", "func (e MaintemplateComponentValidationError) Cause() error { return e.cause }", "func (e RedactedValidationError) Cause() error { return e.cause }", "func (e CreatMessageRequestValidationError) Cause() error { return e.cause }", "func (e NrcgiValidationError) Cause() error { return e.cause }", "func (e UpsertEventResponseValidationError) Cause() error { return e.cause }", "func (e NrarfcnValidationError) Cause() error { return e.cause }", "func (e TwoOneofsValidationError) Cause() error { return e.cause }", "func (e PassiveHealthCheckValidationError) Cause() error { return e.cause }", "func (e MessageEValidationError) Cause() error { return e.cause }", "func (e GetEventByIDRequestValidationError) Cause() error { return e.cause }", "func (e ArfcnValidationError) Cause() error { return e.cause }", "func (e TenantValidationError) Cause() error { return e.cause }", "func (e GetRequestValidationError) Cause() error { return e.cause }", "func (e GetRequestValidationError) Cause() error { return e.cause }", "func (e StateValidationError) Cause() error { return e.cause }", "func (e MinioComponentValidationError) Cause() error { return e.cause }", "func (e LatencyFaultValidationError) Cause() error { return e.cause }", "func (e GetDisscusReqValidationError) Cause() error { return e.cause }", "func (e UpdateTodoRequestValidationError) Cause() error { return e.cause }", "func (e ManifestProjectCFValidationError) Cause() error { return e.cause }" ]
[ "0.8261931", "0.79593104", "0.7896341", "0.7866004", "0.77969515", "0.7710267", "0.7710267", "0.7710267", "0.7710267", "0.7710267", "0.7710267", "0.7710245", "0.76848143", "0.7658625", "0.76571184", "0.7650075", "0.76476574", "0.7625474", "0.7623792", "0.7621357", "0.7582015", "0.74775916", "0.74656785", "0.7424877", "0.7423645", "0.7384076", "0.73215586", "0.7306271", "0.7286286", "0.72688353", "0.7258698", "0.7210708", "0.7192562", "0.7107885", "0.7104621", "0.7038758", "0.701369", "0.701369", "0.69629866", "0.6927608", "0.692207", "0.69208515", "0.68938124", "0.6858123", "0.684976", "0.6846449", "0.6830235", "0.6825922", "0.68016034", "0.6800864", "0.6791525", "0.6778742", "0.67324674", "0.673176", "0.67316306", "0.6729585", "0.67155087", "0.6714904", "0.67148", "0.66955864", "0.668878", "0.66879916", "0.66822165", "0.66821957", "0.66791916", "0.6673011", "0.6673011", "0.6668595", "0.66512465", "0.66507614", "0.66484874", "0.6636346", "0.6633876", "0.66313785", "0.66304046", "0.6622965", "0.66204447", "0.6618046", "0.6617173", "0.66125673", "0.66055393", "0.6603956", "0.66004616", "0.6600119", "0.6587435", "0.6580937", "0.6578089", "0.6569218", "0.656675", "0.65664583", "0.6565433", "0.6560722", "0.65606016", "0.6553194", "0.6553194", "0.65503496", "0.6549731", "0.6546909", "0.6544467", "0.65359867", "0.6531173" ]
0.0
-1
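Illustrative sketch (not part of the dataset row above; the `wrapped` type and the `main` function below are hypothetical and exist only for demonstration): the loop-based causer-chain pattern that several of the preceding negative snippets implement can be exercised roughly as follows in Go.

package main

import "fmt"

// causer is the interface walked by the Cause helpers shown in the
// negatives above: any error that exposes its underlying cause.
type causer interface {
	Cause() error
}

// wrapped is a hypothetical error type used only for this sketch.
type wrapped struct {
	msg   string
	cause error
}

func (w *wrapped) Error() string { return w.msg + ": " + w.cause.Error() }
func (w *wrapped) Cause() error  { return w.cause }

// Cause walks the chain until it reaches an error without a Cause()
// method, mirroring the loop-based variants in the row's negatives.
func Cause(err error) error {
	for err != nil {
		c, ok := err.(causer)
		if !ok {
			break
		}
		err = c.Cause()
	}
	return err
}

func main() {
	root := fmt.Errorf("disk full")
	err := &wrapped{msg: "save failed", cause: &wrapped{msg: "write failed", cause: root}}
	fmt.Println(Cause(err)) // prints: disk full
}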
Key function returns key value.
func (e UpsertEventRequest_OptionValidationError) Key() bool { return e.key }
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "func (m *KeyValue) GetKey()(*string) {\n return m.key\n}", "func (f binaryEqualsFunc) key() Key {\n\treturn f.k\n}", "func (m *KeyUint) Key() driver.Value { return driver.Value(m.ID) }", "func (m *OMap) Key(n int) string {\n\treturn m.keys[n]\n}", "func (t *Type) Key() *Type", "func (f nullFunc) key() Key {\n\treturn f.k\n}", "func (v Variable) Key() string {\n\treturn (string)(v)\n}", "func (i GinJwtSignAlgorithm) Key() string {\n\tif val, ok := _GinJwtSignAlgorithmValueToKeyMap[i]; ok {\n\t\t// found\n\t\treturn val\n\t} else {\n\t\t// not found\n\t\treturn \"\"\n\t}\n}", "func (g *Generator) GetKey(K string) interface{} {\n\treturn g.data[K]\n}", "func (m *SearchBucket) GetKey()(*string) {\n return m.key\n}", "func (f *Filter) getKey(key string) string {\n\tif f.HashKeys {\n\t\th := sha1.New()\n\t\ts := h.Sum([]byte(key))\n\t\treturn fmt.Sprintf(\"%x\", s)\n\t}\n\treturn key\n}", "func getKey(ing *extensions.Ingress, t *testing.T) string {\n\tkey, err := keyFunc(ing)\n\tif err != nil {\n\t\tt.Fatalf(\"Unexpected error getting key for Ingress %v: %v\", ing.Name, err)\n\t}\n\treturn key\n}", "func (f *field) Key() string {\n\treturn f.k\n}", "func (i GinBindType) Key() string {\n\tif val, ok := _GinBindTypeValueToKeyMap[i]; ok {\n\t\t// found\n\t\treturn val\n\t} else {\n\t\t// not found\n\t\treturn \"\"\n\t}\n}", "func (c Node) GetKey() string {\n\treturn c.key\n}", "func (m *RegistryKeyState) GetKey()(*string) {\n return m.key\n}", "func (akv StringKeyValue) Key() string {\n\treturn akv.orig.Key\n}", "func (a AddItem) Key() string { return string(a) }", "func (area *MineArea) GetKey() string {\n\treturn GetKey(area.X, area.Y)\n}", "func (d *Disk) getKey(p *DiskParams) []byte {\n\treturn []byte(time_util.TimeToName(time.Unix(p.ExicutionTime, 0), fmt.Sprintf(\"%x\", d.hasher.Sum(nil))))\n}", "func (e *OrderedMapElement[K, V]) Key() K {\n\treturn e.element.key\n}", "func getKey(cluster *clusteroperator.Cluster, t *testing.T) string {\n\tif key, err := controller.KeyFunc(cluster); err != nil {\n\t\tt.Errorf(\"Unexpected error getting key for Cluster %v: %v\", cluster.Name, err)\n\t\treturn \"\"\n\t} else {\n\t\treturn key\n\t}\n}", "func cacheKeyFunc(obj interface{}) (string, error) {\n\tkey := obj.(*cacheEntry).key\n\treturn key, nil\n}", "func (node *Node) Key() interface{} {\n\treturn fmt.Sprintf(\"%v\", node.contents)\n}", "func (s *Mem) Key(key interface{}) string {\n\treturn fmt.Sprintf(\"%v-%v\", s.prefix, key)\n}", "func (vrfs *VRFShare) GetKey() datastore.Key {\n\treturn datastore.ToKey(fmt.Sprintf(\"%v\", vrfs.Round))\n}", "func stringKeyFunc(obj interface{}) (string, error) {\n\tkey := obj.(*nodeidentity.Info).InstanceID\n\treturn key, nil\n}", "func (e Enum) GetKey(value any) string {\n\tfor k, v := range e {\n\t\tif reflect.DeepEqual(v, value) {\n\t\t\treturn k\n\t\t}\n\t}\n\treturn \"\"\n}", "func (m *Map) Key() Type { return m.key }", "func getKey(w http.ResponseWriter, ps httprouter.Params) (string, bool){\n\treturn ps.ByName(\"id\"), true\n}", "func (v *Value) GetKey() *string {\n\tret := C.zj_GetKey(v.V)\n\tif ret == nil {\n\t\treturn nil\n\t}\n\tretStr := C.GoString(ret)\n\treturn &retStr\n}", "func (f *Factor) Key() string { return f.ID }", "func (c *KeyValueChanger) Key() (string, error) {\n\tif c.err != nil {\n\t\treturn \"\", c.err\n\t}\n\treturn c.node.content.key().(string), nil\n}", "func (a DataNodeKV) Key() string {\n\treturn a.K\n}", "func GetKey(allkeys [][]byte, loc Where) []byte {\n\tif loc == Left {\n\t\treturn allkeys[0]\n\t}\n\tif loc == Right 
{\n\t\treturn allkeys[len(allkeys)-1]\n\t}\n\t// select a random index between 1 and allkeys-2\n\t// nolint:gosec\n\tidx := rand.Int()%(len(allkeys)-2) + 1\n\treturn allkeys[idx]\n}", "func KeyFunc(name, namespace string) string {\n\tif len(namespace) == 0 {\n\t\treturn name\n\t}\n\treturn namespace + \"/\" + name\n}", "func (it *Iterator) Key() string { return it.n.k }", "func (s *session) getKey() string {\n\treturn s.uuid\n}", "func (o SchedulingNodeAffinityOutput) Key() pulumi.StringPtrOutput {\n\treturn o.ApplyT(func(v SchedulingNodeAffinity) *string { return v.Key }).(pulumi.StringPtrOutput)\n}", "func (i SNSProtocol) Key() string {\n\tif val, ok := _SNSProtocolValueToKeyMap[i]; ok {\n\t\t// found\n\t\treturn val\n\t} else {\n\t\t// not found\n\t\treturn \"\"\n\t}\n}", "func (it *Iterator) Key() interface{} { return it.n.k }", "func getkey(key ...interface{}) interface{} {\n\tif len(key) > 0 {\n\t\treturn key[0]\n\t}\n\n\treturn nil\n}", "func (i SNSSubscribeAttribute) Key() string {\n\tif val, ok := _SNSSubscribeAttributeValueToKeyMap[i]; ok {\n\t\t// found\n\t\treturn val\n\t} else {\n\t\t// not found\n\t\treturn \"\"\n\t}\n}", "func (it *iterator) Key() []byte {\n\tif len(it.keys) > 0 {\n\t\treturn []byte(it.keys[0])\n\t}\n\treturn nil\n}", "func (this *DefaultHandler) GetKey(xesRedis redo.XesRedisBase) (ret string) {\n\tdefer func() {\n\t\tif xesRedis.GetCtx() == nil {\n\t\t\treturn\n\t\t}\n\t\tbench := xesRedis.GetCtx().Value(\"IS_BENCHMARK\")\n\t\tif cast.ToString(bench) == \"1\" {\n\t\t\tret = \"benchmark_\" + ret\n\t\t}\n\t}()\n\n\tkeyInfo := this.getKeyInfo(xesRedis)\n\tkey := cast.ToString(keyInfo[\"key\"])\n\tif key == \"\" {\n\t\tret = xesRedis.GetKeyName()\n\t\treturn\n\t}\n\tret = fmt.Sprintf(key, (xesRedis.GetKeyParams())...)\n\treturn\n}", "func (st *MemStorage) GetKey(gun, role string) (algorithm string, public []byte, err error) {\n\t// no need for lock. 
It's ok to return nil if an update\n\t// wasn't observed\n\tg, ok := st.keys[gun]\n\tif !ok {\n\t\treturn \"\", nil, &ErrNoKey{gun: gun}\n\t}\n\tk, ok := g[role]\n\tif !ok {\n\t\treturn \"\", nil, &ErrNoKey{gun: gun}\n\t}\n\n\treturn k.algorithm, k.public, nil\n}", "func (e *EntrySet) Get(key string) string {\n return e.keys[key]\n}", "func (v *V) Key() string {\n\treturn v.key\n}", "func (it *Iter) Key() byte { return it.top().key }", "func (s Stash) Key() string {\n\tvals := utils.MapValues(s.payload)\n\tif len(vals) < 1 {\n\t\treturn \"\"\n\t}\n\n\treturn fmt.Sprintf(\"$%s\", vals[0])\n}", "func (i SNSPlatformApplicationAttribute) Key() string {\n\tif val, ok := _SNSPlatformApplicationAttributeValueToKeyMap[i]; ok {\n\t\t// found\n\t\treturn val\n\t} else {\n\t\t// not found\n\t\treturn \"\"\n\t}\n}", "func (o Operator) Key() string {\n\treturn fmt.Sprintf(\"operator.%s\", o.Aid)\n}", "func (i *StringIterator) Key() Object {\n\treturn &Int{Value: int64(i.i - 1)}\n}", "func (mci *XMCacheIterator) Key() []byte {\n\tif mci.err != nil || mci.dir == dirReleased {\n\t\treturn nil\n\t}\n\tswitch mci.index {\n\tcase 0, 1:\n\t\treturn mci.iters[mci.index].Key()\n\tcase 2:\n\t\tif mci.mc.isPenetrate {\n\t\t\treturn mci.mIter.Key()\n\t\t}\n\t\treturn nil\n\t}\n\treturn nil\n}", "func (s *Arena) getKey(offset uint32, size uint16) []byte {\n\treturn s.data[offset : offset+uint32(size)]\n}", "func (o ReservationAffinityOutput) Key() pulumi.StringPtrOutput {\n\treturn o.ApplyT(func(v ReservationAffinity) *string { return v.Key }).(pulumi.StringPtrOutput)\n}", "func (f DefaultField) Key() string {\n\treturn f.K\n}", "func Key(v string) predicate.Blob {\n\treturn predicate.Blob(\n\t\tfunc(s *sql.Selector) {\n\t\t\ts.Where(sql.EQ(s.C(FieldKey), v))\n\t\t},\n\t)\n}", "func (m Match) Key() string {\n\treturn fmt.Sprintf(\"match:%s\", m.ID())\n}", "func (d *Activity) KeyVal() string {\n\treturn d.ExteralID\n}", "func (key twofishKey) Key() []byte {\n\treturn key[:]\n}", "func getKey(data string) string {\n\tsign := md5.Sum([]byte(data))\n\tsignStr := fmt.Sprintf(\"%x\", sign)\n\treturn signStr[:7]\n}", "func (l *LangPackStringPluralized) GetKey() (value string) {\n\tif l == nil {\n\t\treturn\n\t}\n\treturn l.Key\n}", "func (t Task) Key() string {\n\treturn fmt.Sprintf(\"%s:%s\", t.Name, t.ID)\n}", "func (k Keys) RangeKey() interface{} { return k[1] }", "func (d *DStarLite) keyFor(s *dStarLiteNode) key {\n\t/*\n\t procedure CalculateKey(s)\n\t {01”} return [min(g(s), rhs(s)) + h(s_start, s) + k_m; min(g(s), rhs(s))];\n\t*/\n\tk := key{1: math.Min(s.g, s.rhs)}\n\tk[0] = k[1] + d.heuristic(d.s.Node, s.Node) + d.keyModifier\n\treturn k\n}", "func (stateID StateID) Key() string {\n\treturn string(stateID.LastAppHash)\n}", "func (m *Metric) GetKey() string {\n\tif m == nil || m.Key == nil {\n\t\treturn \"\"\n\t}\n\treturn *m.Key\n}", "func (u User) Key() interface{} {\n\treturn u.ID\n}", "func (b *BitSet) Key() string {\n\tif b == nil {\n\t\treturn \"\"\n\t} else {\n\t\treturn string(b.Bits.Bytes())\n\t}\n}", "func (e EnumByte) Key() EnumByteKey {\n return EnumByteKey(e)\n}", "func (n *lnode) key() []byte {\n\tbuf := (*[maxAllocSize]byte)(unsafe.Pointer(n))\n\treturn buf[n.pos : n.pos+n.ksize]\n}", "func (p *pv) key() pvKey {\n\treturn newPVKey(p.Cluster, p.Name)\n}", "func (i *MapIterator) Key() Object {\n\tk := i.k[i.i-1]\n\treturn &String{Value: k}\n}", "func (k *KVItem) Key() (interface{}, error) {\n\tvar cKey unsafe.Pointer\n\tvar keySize C.uint64_t\n\tvar keyType C.tiledb_datatype_t\n\tret := 
C.tiledb_kv_item_get_key(k.context.tiledbContext, k.tiledbKVItem, &cKey, &keyType, &keySize)\n\n\tif ret != C.TILEDB_OK {\n\t\treturn nil, fmt.Errorf(\"Error getting key for KVItem: %s\", k.context.LastError())\n\t}\n\n\tswitch Datatype(keyType) {\n\tcase TILEDB_INT8:\n\t\t// If the key size is greater than the size of a single value in bytes it is an array\n\t\telements := int(keySize) / C.sizeof_int8_t\n\t\tif elements > 1 {\n\t\t\ttmpslice := (*[1 << 30]C.int8_t)(unsafe.Pointer(cKey))[:elements:elements]\n\t\t\tretSlice := make([]int8, elements)\n\t\t\tfor i, s := range tmpslice {\n\t\t\t\tretSlice[i] = int8(s)\n\t\t\t}\n\t\t\treturn retSlice, nil\n\t\t} else {\n\t\t\treturn int8(*(*C.int8_t)(cKey)), nil\n\t\t}\n\tcase TILEDB_INT16:\n\t\t// If the key size is greater than the size of a single value in bytes it is an array\n\t\telements := int(keySize) / C.sizeof_int16_t\n\t\tif elements > 1 {\n\t\t\ttmpslice := (*[1 << 30]C.int16_t)(unsafe.Pointer(cKey))[:elements:elements]\n\t\t\tretSlice := make([]int16, elements)\n\t\t\tfor i, s := range tmpslice {\n\t\t\t\tretSlice[i] = int16(s)\n\t\t\t}\n\t\t\treturn retSlice, nil\n\t\t} else {\n\t\t\treturn int16(*(*C.int16_t)(cKey)), nil\n\t\t}\n\tcase TILEDB_INT32:\n\t\t// If the key size is greater than the size of a single value in bytes it is an array\n\t\telements := int(keySize) / C.sizeof_int32_t\n\t\tif elements > 1 {\n\t\t\ttmpslice := (*[1 << 30]C.int32_t)(unsafe.Pointer(cKey))[:elements:elements]\n\t\t\tretSlice := make([]int32, elements)\n\t\t\tfor i, s := range tmpslice {\n\t\t\t\tretSlice[i] = int32(s)\n\t\t\t}\n\t\t\treturn retSlice, nil\n\t\t} else {\n\t\t\treturn int32(*(*C.int32_t)(cKey)), nil\n\t\t}\n\tcase TILEDB_INT64:\n\t\t// If the key size is greater than the size of a single value in bytes it is an array\n\t\telements := int(keySize) / C.sizeof_int64_t\n\t\tif elements > 1 {\n\t\t\ttmpslice := (*[1 << 30]C.int64_t)(unsafe.Pointer(cKey))[:elements:elements]\n\t\t\tretSlice := make([]int64, elements)\n\t\t\tfor i, s := range tmpslice {\n\t\t\t\tretSlice[i] = int64(s)\n\t\t\t}\n\t\t\treturn retSlice, nil\n\t\t} else {\n\t\t\treturn int64(*(*C.int64_t)(cKey)), nil\n\t\t}\n\tcase TILEDB_UINT8:\n\t\t// If the key size is greater than the size of a single value in bytes it is an array\n\t\telements := int(keySize) / C.sizeof_uint8_t\n\t\tif elements > 1 {\n\t\t\ttmpslice := (*[1 << 30]C.uint8_t)(unsafe.Pointer(cKey))[:elements:elements]\n\t\t\tretSlice := make([]uint8, elements)\n\t\t\tfor i, s := range tmpslice {\n\t\t\t\tretSlice[i] = uint8(s)\n\t\t\t}\n\t\t\treturn retSlice, nil\n\t\t} else {\n\t\t\treturn int8(*(*C.uint8_t)(cKey)), nil\n\t\t}\n\tcase TILEDB_UINT16:\n\t\t// If the key size is greater than the size of a single value in bytes it is an array\n\t\telements := int(keySize) / C.sizeof_uint16_t\n\t\tif elements > 1 {\n\t\t\ttmpslice := (*[1 << 30]C.uint16_t)(unsafe.Pointer(cKey))[:elements:elements]\n\t\t\tretSlice := make([]uint16, elements)\n\t\t\tfor i, s := range tmpslice {\n\t\t\t\tretSlice[i] = uint16(s)\n\t\t\t}\n\t\t\treturn retSlice, nil\n\t\t} else {\n\t\t\treturn int16(*(*C.uint16_t)(cKey)), nil\n\t\t}\n\tcase TILEDB_UINT32:\n\t\t// If the key size is greater than the size of a single value in bytes it is an array\n\t\telements := int(keySize) / C.sizeof_uint32_t\n\t\tif elements > 1 {\n\t\t\ttmpslice := (*[1 << 30]C.uint32_t)(unsafe.Pointer(cKey))[:elements:elements]\n\t\t\tretSlice := make([]uint32, elements)\n\t\t\tfor i, s := range tmpslice {\n\t\t\t\tretSlice[i] = uint32(s)\n\t\t\t}\n\t\t\treturn 
retSlice, nil\n\t\t} else {\n\t\t\treturn int32(*(*C.uint32_t)(cKey)), nil\n\t\t}\n\tcase TILEDB_UINT64:\n\t\t// If the key size is greater than the size of a single value in bytes it is an array\n\t\telements := int(keySize) / C.sizeof_uint64_t\n\t\tif elements > 1 {\n\t\t\ttmpslice := (*[1 << 30]C.uint64_t)(unsafe.Pointer(cKey))[:elements:elements]\n\t\t\tretSlice := make([]uint64, elements)\n\t\t\tfor i, s := range tmpslice {\n\t\t\t\tretSlice[i] = uint64(s)\n\t\t\t}\n\t\t\treturn retSlice, nil\n\t\t} else {\n\t\t\treturn int64(*(*C.uint64_t)(cKey)), nil\n\t\t}\n\tcase TILEDB_FLOAT32:\n\t\t// If the key size is greater than the size of a single value in bytes it is an array\n\t\telements := int(keySize) / C.sizeof_float\n\t\tif elements > 1 {\n\t\t\ttmpslice := (*[1 << 30]C.float)(unsafe.Pointer(cKey))[:elements:elements]\n\t\t\tretSlice := make([]float32, elements)\n\t\t\tfor i, s := range tmpslice {\n\t\t\t\tretSlice[i] = float32(s)\n\t\t\t}\n\t\t\treturn retSlice, nil\n\t\t} else {\n\t\t\treturn float32(*(*C.float)(cKey)), nil\n\t\t}\n\tcase TILEDB_FLOAT64:\n\t\t// If the key size is greater than the size of a single value in bytes it is an array\n\t\telements := int(keySize) / C.sizeof_double\n\t\tif elements > 1 {\n\t\t\ttmpslice := (*[1 << 30]C.double)(unsafe.Pointer(cKey))[:elements:elements]\n\t\t\tretSlice := make([]float64, elements)\n\t\t\tfor i, s := range tmpslice {\n\t\t\t\tretSlice[i] = float64(s)\n\t\t\t}\n\t\t\treturn retSlice, nil\n\t\t} else {\n\t\t\treturn float64(*(*C.double)(cKey)), nil\n\t\t}\n\tcase TILEDB_CHAR:\n\t\telements := int(keySize) / C.sizeof_char\n\t\treturn C.GoStringN((*C.char)(cKey), C.int(elements)), nil\n\n\tdefault:\n\t\treturn nil, fmt.Errorf(\"Unsupported tiledb key type: %v\", keyType)\n\t}\n\n\treturn nil, fmt.Errorf(\"Error getting key for KVItem\")\n}", "func (u Users) Key(luid *windows.LUID) (int64, error) {\r\n\tif luid == nil {\r\n\t\treturn 0, errors.New(\"got empty LUID pointer\")\r\n\t}\r\n\tkey := int64(int64(luid.HighPart<<32) + int64(luid.LowPart))\r\n\treturn key, nil\r\n}", "func (a *Anime) Key() string {\n\treturn fmt.Sprintf(\"anime:%d\", a.ID)\n}", "func (m MapEntry) Key() interface{} {\n\treturn m.key\n}", "func (f KeyMakerFunc) KeyFor(r *http.Request) string {\n\treturn f(r)\n}", "func (t *TimeSeries) GetKey() string {\n\treturn t.key\n}", "func (m *Map) Get(key string) string {\n\tif m.IsEmpty() {\n\t\treturn \"\"\n\t}\n\thash := m.hash([]byte(key))\n\tn := node{hash: hash, key: key}\n\titer := floor(&m.nodes.Tree, &n)\n\tif iter == m.nodes.End() {\n\t\titer = m.nodes.Begin()\n\t}\n\treturn iter.Node().Key.(*node).key\n}", "func (t *ScheduledTask) Key() string {\n\treturn fmt.Sprintf(taskKeyFormat, keyPrefixScheduled, t.ID, t.score)\n}", "func (it *iterator) Key() []byte {\n\treturn it.current.key\n}", "func (eln *EmptyLeafNode) GetKey() []byte {\n\treturn nil\n}", "func (h dataUsageHash) Key() string {\n\treturn string(h)\n}", "func (c *Container) Key() string {\n\tc.Lock()\n\tdefer c.Unlock()\n\treturn c.ID\n}", "func (c Repository) GetKey(key string) string {\n\tval, err := c.Client.Get(key).Result()\n\n\tif err != nil {\n\t\tlog.Println(err)\n\t}\n\n\treturn val\n}", "func (f Base) Key() string {\n\treturn f.key\n}", "func (o StudioComponentScriptParameterKeyValueOutput) Key() pulumi.StringPtrOutput {\n\treturn o.ApplyT(func(v StudioComponentScriptParameterKeyValue) *string { return v.Key }).(pulumi.StringPtrOutput)\n}", "func (o *ResourceDefinitionFilter) GetKey() string {\n\tif o == nil {\n\t\tvar ret 
string\n\t\treturn ret\n\t}\n\n\treturn o.Key\n}", "func (it *KeyAccess_Iterator) Key() interface{} {\n\treturn it.node.key\n}", "func (b Bucket) Key() interface{} {\n\treturn b[\"key\"]\n}", "func (m *Map) Get(key string) string {\n\tif m.IsEmpty() {\n\t\treturn \"\"\n\t}\n\n\thash := int(m.hash([]byte(key)))\n\n\t// Binary search for appropriate replica.\n\tidx := sort.Search(len(m.keys), func(i int) bool { return m.keys[i] >= hash })\n\n\t// Means we have cycled back to the first replica.\n\tif idx == len(m.keys) {\n\t\tidx = 0\n\t}\n\n\treturn m.hashMap[m.keys[idx]]\n}", "func (c *Counter) GetKey() string {\n\treturn c.key\n}", "func Key(id string, fallback string) Reference {\n\treturn key{id, fallback}\n}", "func (a *PositionalAttribute) Key() string {\n\treturn AttrPositionalIndex + strconv.Itoa(a.Index)\n}", "func (n *Node) Key() interface{} {\n\treturn n.key\n}", "func (e Timing) Key() string {\n\treturn e.Name\n}", "func Key(key string) query.Extractor {\n\treturn &keyExtractor{key}\n}", "func (i *Iterator) Key() []byte {\n\treturn i.iterator.Item().KeyCopy(nil)\n}", "func (m *Metric) Key() string {\n\treturn fmt.Sprintf(\"<%s%d%s>\", m.Name, m.Timestamp, m.Tags)\n}" ]
[ "0.7397974", "0.703695", "0.7026126", "0.69730234", "0.69701165", "0.69472975", "0.682121", "0.67752403", "0.6702173", "0.6691155", "0.66223186", "0.6602185", "0.66009104", "0.65937275", "0.65673846", "0.6555592", "0.65304273", "0.6521155", "0.6511681", "0.65062934", "0.64982766", "0.64867014", "0.6477575", "0.6462233", "0.6456774", "0.6456152", "0.6448241", "0.6435275", "0.6423325", "0.6412427", "0.64096636", "0.6403262", "0.6395327", "0.63929945", "0.6382585", "0.6378694", "0.63715774", "0.63671046", "0.635377", "0.63430053", "0.63418114", "0.6339266", "0.63258415", "0.6319039", "0.630293", "0.6300368", "0.6298253", "0.6296133", "0.6295445", "0.6281786", "0.6279424", "0.6277453", "0.6277033", "0.62735796", "0.6269087", "0.6262938", "0.62600297", "0.6259835", "0.6242855", "0.62427336", "0.6239893", "0.6226979", "0.62228185", "0.6216291", "0.62118614", "0.6209014", "0.62075627", "0.619765", "0.6197426", "0.61971486", "0.6196739", "0.6192416", "0.6191223", "0.6183839", "0.6179522", "0.6177141", "0.6172575", "0.61719537", "0.6170614", "0.6162783", "0.61570954", "0.6154456", "0.6152929", "0.615149", "0.61509156", "0.61395836", "0.6138672", "0.61365676", "0.613636", "0.61338246", "0.6133771", "0.6129422", "0.61284614", "0.612092", "0.6119081", "0.61121005", "0.611087", "0.6106958", "0.6106701", "0.61020154", "0.6100722" ]
0.0
-1
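Illustrative sketch (a hypothetical type modelled on the Key()/Field()/Reason()/Cause() accessor style seen in this row's document and negatives; it is not generated code from any real proto definition): a minimal validation-error value whose accessors simply return the stored fields, matching the one-line accessor shape of the snippets above.

package main

import "fmt"

// optionValidationError is a hypothetical error type for this sketch;
// each accessor returns the corresponding stored field, as in the
// one-line Key()/Cause() methods listed in this row.
type optionValidationError struct {
	field  string
	reason string
	key    bool
	cause  error
}

func (e optionValidationError) Field() string  { return e.field }
func (e optionValidationError) Reason() string { return e.reason }
func (e optionValidationError) Key() bool      { return e.key }
func (e optionValidationError) Cause() error   { return e.cause }

func (e optionValidationError) Error() string {
	return fmt.Sprintf("invalid field %s: %s", e.field, e.reason)
}

func main() {
	err := optionValidationError{field: "Content", reason: "value length must be at least 1 runes"}
	fmt.Println(err.Key(), err.Error())
}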
Validate checks the field values on GetEventByIDResponse_Section with the rules defined in the proto definition for this message. If any rules are violated, an error is returned.
func (m *GetEventByIDResponse_Section) Validate() error { if m == nil { return nil } // no validation rules for Id // no validation rules for Name // no validation rules for Description // no validation rules for Position for idx, item := range m.GetQuestions() { _, _ = idx, item if v, ok := interface{}(item).(interface{ Validate() error }); ok { if err := v.Validate(); err != nil { return GetEventByIDResponse_SectionValidationError{ field: fmt.Sprintf("Questions[%v]", idx), reason: "embedded message failed validation", cause: err, } } } } return nil }
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "func (m *GetEventByIDResponse) Validate() error {\n\tif m == nil {\n\t\treturn nil\n\t}\n\n\t// no validation rules for Id\n\n\t// no validation rules for Name\n\n\t// no validation rules for Participants\n\n\tfor idx, item := range m.GetSections() {\n\t\t_, _ = idx, item\n\n\t\tif v, ok := interface{}(item).(interface{ Validate() error }); ok {\n\t\t\tif err := v.Validate(); err != nil {\n\t\t\t\treturn GetEventByIDResponseValidationError{\n\t\t\t\t\tfield: fmt.Sprintf(\"Sections[%v]\", idx),\n\t\t\t\t\treason: \"embedded message failed validation\",\n\t\t\t\t\tcause: err,\n\t\t\t\t}\n\t\t\t}\n\t\t}\n\n\t}\n\n\tif v, ok := interface{}(m.GetUpdatedAt()).(interface{ Validate() error }); ok {\n\t\tif err := v.Validate(); err != nil {\n\t\t\treturn GetEventByIDResponseValidationError{\n\t\t\t\tfield: \"UpdatedAt\",\n\t\t\t\treason: \"embedded message failed validation\",\n\t\t\t\tcause: err,\n\t\t\t}\n\t\t}\n\t}\n\n\t// no validation rules for IsOpened\n\n\t// no validation rules for IsApproved\n\n\treturn nil\n}", "func (o *ExpenseApplicationsIndexResponseExpenseApplications) HasSectionId() bool {\n\tif o != nil && o.SectionId.IsSet() {\n\t\treturn true\n\t}\n\n\treturn false\n}", "func (o *ExpenseApplicationsIndexResponseExpenseApplications) GetSectionId() int32 {\n\tif o == nil || o.SectionId.Get() == nil {\n\t\tvar ret int32\n\t\treturn ret\n\t}\n\treturn *o.SectionId.Get()\n}", "func (c *ClientWithResponses) GetSectionWithResponse(\n\tctx context.Context,\n\tid string,\n) (*getSectionResponse, error) {\n\trsp, err := c.GetSection(ctx, id)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\treturn ParseGetSectionResponse(rsp)\n}", "func (e GetEventByIDResponse_SectionValidationError) Cause() error { return e.cause }", "func (o *ExpenseApplicationsIndexResponseExpenseApplications) GetSectionIdOk() (*int32, bool) {\n\tif o == nil {\n\t\treturn nil, false\n\t}\n\treturn o.SectionId.Get(), o.SectionId.IsSet()\n}", "func (e GetEventByIDResponse_SectionValidationError) Reason() string { return e.reason }", "func (o *ExpenseApplicationsIndexResponseExpenseApplications) SetSectionId(v int32) {\n\to.SectionId.Set(&v)\n}", "func (m *UpsertEventRequest_Section) Validate() error {\n\tif m == nil {\n\t\treturn nil\n\t}\n\n\tif utf8.RuneCountInString(m.GetName()) < 1 {\n\t\treturn UpsertEventRequest_SectionValidationError{\n\t\t\tfield: \"Name\",\n\t\t\treason: \"value length must be at least 1 runes\",\n\t\t}\n\t}\n\n\tif utf8.RuneCountInString(m.GetDescription()) < 1 {\n\t\treturn UpsertEventRequest_SectionValidationError{\n\t\t\tfield: \"Description\",\n\t\t\treason: \"value length must be at least 1 runes\",\n\t\t}\n\t}\n\n\t// no validation rules for Position\n\n\tfor idx, item := range m.GetQuestions() {\n\t\t_, _ = idx, item\n\n\t\tif v, ok := interface{}(item).(interface{ Validate() error }); ok {\n\t\t\tif err := v.Validate(); err != nil {\n\t\t\t\treturn UpsertEventRequest_SectionValidationError{\n\t\t\t\t\tfield: fmt.Sprintf(\"Questions[%v]\", idx),\n\t\t\t\t\treason: \"embedded message failed validation\",\n\t\t\t\t\tcause: err,\n\t\t\t\t}\n\t\t\t}\n\t\t}\n\n\t}\n\n\treturn nil\n}", "func (e GetEventByIDResponse_SectionValidationError) Key() bool { return e.key }", "func (c *ClientWithResponses) GetSectionsWithResponse(\n\tctx context.Context,\n\tparams *GetSectionsParams,\n) (*getSectionsResponse, error) {\n\trsp, err := c.GetSections(ctx, params)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\treturn ParseGetSectionsResponse(rsp)\n}", "func (c *ClientWithResponses) 
GetCourseForSectionWithResponse(\n\tctx context.Context,\n\tid string,\n) (*getCourseForSectionResponse, error) {\n\trsp, err := c.GetCourseForSection(ctx, id)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\treturn ParseGetCourseForSectionResponse(rsp)\n}", "func (p *Plex) GetSectionIDs(machineID string) (sectionIDResponse, error) {\n\trequestInfo.headers.Token = p.token\n\n\tquery := fmt.Sprintf(\"%s/api/servers/%s\", plexURL, machineID)\n\n\tresp, respErr := requestInfo.get(query)\n\n\tif respErr != nil {\n\t\treturn sectionIDResponse{}, respErr\n\t}\n\n\tdefer resp.Body.Close()\n\n\tvar result sectionIDResponse\n\n\tif err := xml.NewDecoder(resp.Body).Decode(&result); err != nil {\n\t\tfmt.Println(err.Error())\n\n\t\treturn sectionIDResponse{}, err\n\t}\n\n\treturn result, nil\n}", "func (c *ClientWithResponses) GetStudentsForSectionWithResponse(\n\tctx context.Context,\n\tid string,\n\tparams *GetStudentsForSectionParams,\n) (*getStudentsForSectionResponse, error) {\n\trsp, err := c.GetStudentsForSection(ctx, id, params)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\treturn ParseGetStudentsForSectionResponse(rsp)\n}", "func (e GetEventByIDResponse_SectionValidationError) Field() string { return e.field }", "func (c *ClientWithResponses) GetSchoolForSectionWithResponse(\n\tctx context.Context,\n\tid string,\n) (*getSchoolForSectionResponse, error) {\n\trsp, err := c.GetSchoolForSection(ctx, id)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\treturn ParseGetSchoolForSectionResponse(rsp)\n}", "func (c *ClientWithResponses) GetDistrictForSectionWithResponse(\n\tctx context.Context,\n\tid string,\n) (*getDistrictForSectionResponse, error) {\n\trsp, err := c.GetDistrictForSection(ctx, id)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\treturn ParseGetDistrictForSectionResponse(rsp)\n}", "func (c *ClientWithResponses) GetTeachersForSectionWithResponse(\n\tctx context.Context,\n\tid string,\n\tparams *GetTeachersForSectionParams,\n) (*getTeachersForSectionResponse, error) {\n\trsp, err := c.GetTeachersForSection(ctx, id, params)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\treturn ParseGetTeachersForSectionResponse(rsp)\n}", "func (m *GetEventByIDResponse_Option) Validate() error {\n\tif m == nil {\n\t\treturn nil\n\t}\n\n\t// no validation rules for Id\n\n\tif utf8.RuneCountInString(m.GetContent()) < 1 {\n\t\treturn GetEventByIDResponse_OptionValidationError{\n\t\t\tfield: \"Content\",\n\t\t\treason: \"value length must be at least 1 runes\",\n\t\t}\n\t}\n\n\treturn nil\n}", "func (o *ExpenseApplicationsIndexResponseExpenseApplications) UnsetSectionId() {\n\to.SectionId.Unset()\n}", "func (c *ClientWithResponses) GetSectionsForCourseWithResponse(\n\tctx context.Context,\n\tid string,\n\tparams *GetSectionsForCourseParams,\n) (*getSectionsForCourseResponse, error) {\n\trsp, err := c.GetSectionsForCourse(ctx, id, params)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\treturn ParseGetSectionsForCourseResponse(rsp)\n}", "func ParseGetSectionResponse(\n\trsp *http.Response,\n) (*getSectionResponse, error) {\n\tbodyBytes, err := ioutil.ReadAll(rsp.Body)\n\tdefer rsp.Body.Close()\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\n\tresponse := &getSectionResponse{\n\t\tBody: bodyBytes,\n\t\tHTTPResponse: rsp,\n\t}\n\n\tswitch {\n\tcase strings.Contains(rsp.Header.Get(\"Content-Type\"), \"json\") && rsp.StatusCode == 200:\n\t\tvar dest SectionResponse\n\t\tif err := json.Unmarshal(bodyBytes, &dest); err != nil {\n\t\t\treturn nil, err\n\t\t}\n\t\tresponse.JSON200 = &dest\n\n\tcase 
strings.Contains(rsp.Header.Get(\"Content-Type\"), \"json\") && rsp.StatusCode == 404:\n\t\tvar dest NotFound\n\t\tif err := json.Unmarshal(bodyBytes, &dest); err != nil {\n\t\t\treturn nil, err\n\t\t}\n\t\tresponse.JSON404 = &dest\n\n\t}\n\n\treturn response, nil\n}", "func (c *ClientWithResponses) GetSectionsForSchoolWithResponse(\n\tctx context.Context,\n\tid string,\n\tparams *GetSectionsForSchoolParams,\n) (*getSectionsForSchoolResponse, error) {\n\trsp, err := c.GetSectionsForSchool(ctx, id, params)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\treturn ParseGetSectionsForSchoolResponse(rsp)\n}", "func (ini INI) SectionGet(sectionName string) (map[string]string, error) {\n\tif _, exist := ini[sectionName]; !exist {\n\t\treturn nil, ErrSectionMiss\n\t}\n\n\treturn ini[sectionName], nil\n}", "func (client *RoleAssignmentsClient) getByInvoiceSectionHandleResponse(resp *http.Response) (RoleAssignmentsClientGetByInvoiceSectionResponse, error) {\n\tresult := RoleAssignmentsClientGetByInvoiceSectionResponse{}\n\tif err := runtime.UnmarshalAsJSON(resp, &result.RoleAssignment); err != nil {\n\t\treturn RoleAssignmentsClientGetByInvoiceSectionResponse{}, err\n\t}\n\treturn result, nil\n}", "func ParseGetCourseForSectionResponse(\n\trsp *http.Response,\n) (*getCourseForSectionResponse, error) {\n\tbodyBytes, err := ioutil.ReadAll(rsp.Body)\n\tdefer rsp.Body.Close()\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\n\tresponse := &getCourseForSectionResponse{\n\t\tBody: bodyBytes,\n\t\tHTTPResponse: rsp,\n\t}\n\n\tswitch {\n\tcase strings.Contains(rsp.Header.Get(\"Content-Type\"), \"json\") && rsp.StatusCode == 200:\n\t\tvar dest CourseResponse\n\t\tif err := json.Unmarshal(bodyBytes, &dest); err != nil {\n\t\t\treturn nil, err\n\t\t}\n\t\tresponse.JSON200 = &dest\n\n\tcase strings.Contains(rsp.Header.Get(\"Content-Type\"), \"json\") && rsp.StatusCode == 404:\n\t\tvar dest NotFound\n\t\tif err := json.Unmarshal(bodyBytes, &dest); err != nil {\n\t\t\treturn nil, err\n\t\t}\n\t\tresponse.JSON404 = &dest\n\n\t}\n\n\treturn response, nil\n}", "func (client *RoleAssignmentsClient) getByInvoiceSectionHandleResponse(resp *http.Response) (RoleAssignmentsClientGetByInvoiceSectionResponse, error) {\n\tresult := RoleAssignmentsClientGetByInvoiceSectionResponse{RawResponse: resp}\n\tif err := runtime.UnmarshalAsJSON(resp, &result.RoleAssignment); err != nil {\n\t\treturn RoleAssignmentsClientGetByInvoiceSectionResponse{}, err\n\t}\n\treturn result, nil\n}", "func ParseGetDistrictForSectionResponse(\n\trsp *http.Response,\n) (*getDistrictForSectionResponse, error) {\n\tbodyBytes, err := ioutil.ReadAll(rsp.Body)\n\tdefer rsp.Body.Close()\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\n\tresponse := &getDistrictForSectionResponse{\n\t\tBody: bodyBytes,\n\t\tHTTPResponse: rsp,\n\t}\n\n\tswitch {\n\tcase strings.Contains(rsp.Header.Get(\"Content-Type\"), \"json\") && rsp.StatusCode == 200:\n\t\tvar dest DistrictResponse\n\t\tif err := json.Unmarshal(bodyBytes, &dest); err != nil {\n\t\t\treturn nil, err\n\t\t}\n\t\tresponse.JSON200 = &dest\n\n\tcase strings.Contains(rsp.Header.Get(\"Content-Type\"), \"json\") && rsp.StatusCode == 404:\n\t\tvar dest NotFound\n\t\tif err := json.Unmarshal(bodyBytes, &dest); err != nil {\n\t\t\treturn nil, err\n\t\t}\n\t\tresponse.JSON404 = &dest\n\n\t}\n\n\treturn response, nil\n}", "func (e *Entry) Section() string {\n\tconst slash = \"/\"\n\tsize := len(e.Request)\n\t//\n\tif size == 0 {\n\t\treturn slash\n\t}\n\n\tindexSlash := 0\n\tfor i := 0; i < size; i++ {\n\t\tif 
e.Request[i:i+1] == slash {\n\t\t\tindexSlash++\n\t\t\t//it meets the second slash, it can return the section\n\t\t\tif indexSlash == 2 {\n\t\t\t\treturn e.Request[:i]\n\t\t\t}\n\t\t}\n\t}\n\treturn e.Request\n}", "func (d *Data) GetSection(name string) (Section, bool) {\n\tfor _, s := range d.Sections {\n\t\tif s.Name == name {\n\t\t\treturn s, true\n\t\t}\n\t}\n\treturn Section{}, false\n}", "func (c *ConfigContainer) GetSection(section string) (map[string]string, error) {\n\tif v, ok := c.data[section]; ok {\n\t\tswitch val := v.(type) {\n\t\tcase map[string]interface{}:\n\t\t\tres := make(map[string]string, len(val))\n\t\t\tfor k2, v2 := range val {\n\t\t\t\tres[k2] = fmt.Sprintf(\"%v\", v2)\n\t\t\t}\n\t\t\treturn res, nil\n\t\tcase map[string]string:\n\t\t\treturn val, nil\n\t\tdefault:\n\t\t\treturn nil, fmt.Errorf(\"unexpected type: %v\", v)\n\t\t}\n\t}\n\treturn nil, errors.New(\"not exist section\")\n}", "func ParseGetSchoolForSectionResponse(\n\trsp *http.Response,\n) (*getSchoolForSectionResponse, error) {\n\tbodyBytes, err := ioutil.ReadAll(rsp.Body)\n\tdefer rsp.Body.Close()\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\n\tresponse := &getSchoolForSectionResponse{\n\t\tBody: bodyBytes,\n\t\tHTTPResponse: rsp,\n\t}\n\n\tswitch {\n\tcase strings.Contains(rsp.Header.Get(\"Content-Type\"), \"json\") && rsp.StatusCode == 200:\n\t\tvar dest SchoolResponse\n\t\tif err := json.Unmarshal(bodyBytes, &dest); err != nil {\n\t\t\treturn nil, err\n\t\t}\n\t\tresponse.JSON200 = &dest\n\n\tcase strings.Contains(rsp.Header.Get(\"Content-Type\"), \"json\") && rsp.StatusCode == 404:\n\t\tvar dest NotFound\n\t\tif err := json.Unmarshal(bodyBytes, &dest); err != nil {\n\t\t\treturn nil, err\n\t\t}\n\t\tresponse.JSON404 = &dest\n\n\t}\n\n\treturn response, nil\n}", "func (e UpsertEventRequest_SectionValidationError) Reason() string { return e.reason }", "func (mc *MessageCard) AddSection(section ...*MessageCardSection) error {\n\tfor _, s := range section {\n\t\t// bail if a completely nil section provided\n\t\tif s == nil {\n\t\t\treturn fmt.Errorf(\"func AddSection: nil MessageCardSection received\")\n\t\t}\n\n\t\t// Perform validation of all MessageCardSection fields in an effort to\n\t\t// avoid adding a MessageCardSection with zero value fields. This is\n\t\t// done to avoid generating an empty sections JSON array since the\n\t\t// Sections slice for the MessageCard type would technically not be at\n\t\t// a zero value state. 
Due to this non-zero value state, the\n\t\t// encoding/json package would end up including the Sections struct\n\t\t// field in the output JSON.\n\t\t// See also https://github.com/golang/go/issues/11939\n\t\tswitch {\n\t\t// If any of these cases trigger, skip over the `default` case\n\t\t// statement and add the section.\n\t\tcase s.Images != nil:\n\t\tcase s.Facts != nil:\n\t\tcase s.HeroImage != nil:\n\t\tcase s.StartGroup:\n\t\tcase s.Markdown:\n\t\tcase s.ActivityText != \"\":\n\t\tcase s.ActivitySubtitle != \"\":\n\t\tcase s.ActivityTitle != \"\":\n\t\tcase s.ActivityImage != \"\":\n\t\tcase s.Text != \"\":\n\t\tcase s.Title != \"\":\n\n\t\tdefault:\n\t\t\treturn fmt.Errorf(\"all fields found to be at zero-value, skipping section\")\n\t\t}\n\n\t\tmc.Sections = append(mc.Sections, s)\n\t}\n\n\treturn nil\n}", "func GetEdgercSection(c *cli.Context) string {\n\tedgercSection := c.String(\"section\")\n\tif edgercSection == \"\" {\n\t\treturn edgegrid.DefaultSection\n\t}\n\treturn edgercSection\n}", "func (s *Section) ID() string {\n\treturn s.id\n}", "func (c *ClientWithResponses) GetTermForSectionWithResponse(\n\tctx context.Context,\n\tid string,\n) (*getTermForSectionResponse, error) {\n\trsp, err := c.GetTermForSection(ctx, id)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\treturn ParseGetTermForSectionResponse(rsp)\n}", "func ParseGetSectionsResponse(\n\trsp *http.Response,\n) (*getSectionsResponse, error) {\n\tbodyBytes, err := ioutil.ReadAll(rsp.Body)\n\tdefer rsp.Body.Close()\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\n\tresponse := &getSectionsResponse{\n\t\tBody: bodyBytes,\n\t\tHTTPResponse: rsp,\n\t}\n\n\tswitch {\n\tcase strings.Contains(rsp.Header.Get(\"Content-Type\"), \"json\") && rsp.StatusCode == 200:\n\t\tvar dest SectionsResponse\n\t\tif err := json.Unmarshal(bodyBytes, &dest); err != nil {\n\t\t\treturn nil, err\n\t\t}\n\t\tresponse.JSON200 = &dest\n\n\t}\n\n\treturn response, nil\n}", "func (mc *MessageCard) AddSection(section ...*MessageCardSection) error {\n\tfor _, s := range section {\n\t\tlogger.Printf(\"AddSection: MessageCardSection received: %+v\\n\", s)\n\n\t\t// bail if a completely nil section provided\n\t\tif s == nil {\n\t\t\treturn fmt.Errorf(\"func AddSection: nil MessageCardSection received\")\n\t\t}\n\n\t\t// Perform validation of all MessageCardSection fields in an effort to\n\t\t// avoid adding a MessageCardSection with zero value fields. This is\n\t\t// done to avoid generating an empty sections JSON array since the\n\t\t// Sections slice for the MessageCard type would technically not be at\n\t\t// a zero value state. 
Due to this non-zero value state, the\n\t\t// encoding/json package would end up including the Sections struct\n\t\t// field in the output JSON.\n\t\t// See also https://github.com/golang/go/issues/11939\n\t\tswitch {\n\t\t// If any of these cases trigger, skip over the `default` case\n\t\t// statement and add the section.\n\t\tcase s.Images != nil:\n\t\tcase s.Facts != nil:\n\t\tcase s.HeroImage != nil:\n\t\tcase s.StartGroup:\n\t\tcase s.Markdown:\n\t\tcase s.ActivityText != \"\":\n\t\tcase s.ActivitySubtitle != \"\":\n\t\tcase s.ActivityTitle != \"\":\n\t\tcase s.ActivityImage != \"\":\n\t\tcase s.Text != \"\":\n\t\tcase s.Title != \"\":\n\n\t\tdefault:\n\t\t\tlogger.Println(\"AddSection: No cases matched, all fields assumed to be at zero-value, skipping section\")\n\t\t\treturn fmt.Errorf(\"all fields found to be at zero-value, skipping section\")\n\t\t}\n\n\t\tlogger.Println(\"AddSection: section contains at least one non-zero value, adding section\")\n\t\tmc.Sections = append(mc.Sections, s)\n\t}\n\n\treturn nil\n}", "func (o *ExpenseApplicationsIndexResponseExpenseApplications) SetSectionIdNil() {\n\to.SectionId.Set(nil)\n}", "func (c *ClientWithResponses) GetTeacherForSectionWithResponse(\n\tctx context.Context,\n\tid string,\n) (*getTeacherForSectionResponse, error) {\n\trsp, err := c.GetTeacherForSection(ctx, id)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\treturn ParseGetTeacherForSectionResponse(rsp)\n}", "func ParseGetSectionsForCourseResponse(\n\trsp *http.Response,\n) (*getSectionsForCourseResponse, error) {\n\tbodyBytes, err := ioutil.ReadAll(rsp.Body)\n\tdefer rsp.Body.Close()\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\n\tresponse := &getSectionsForCourseResponse{\n\t\tBody: bodyBytes,\n\t\tHTTPResponse: rsp,\n\t}\n\n\tswitch {\n\tcase strings.Contains(rsp.Header.Get(\"Content-Type\"), \"json\") && rsp.StatusCode == 200:\n\t\tvar dest SectionsResponse\n\t\tif err := json.Unmarshal(bodyBytes, &dest); err != nil {\n\t\t\treturn nil, err\n\t\t}\n\t\tresponse.JSON200 = &dest\n\n\tcase strings.Contains(rsp.Header.Get(\"Content-Type\"), \"json\") && rsp.StatusCode == 404:\n\t\tvar dest NotFound\n\t\tif err := json.Unmarshal(bodyBytes, &dest); err != nil {\n\t\t\treturn nil, err\n\t\t}\n\t\tresponse.JSON404 = &dest\n\n\t}\n\n\treturn response, nil\n}", "func ValidateCspaceEventResponse(body *CspaceEventResponse) (err error) {\n\tif body.ID == nil {\n\t\terr = goa.MergeErrors(err, goa.MissingFieldError(\"id\", \"body\"))\n\t}\n\tif body.Title == nil {\n\t\terr = goa.MergeErrors(err, goa.MissingFieldError(\"title\", \"body\"))\n\t}\n\tif body.Start == nil {\n\t\terr = goa.MergeErrors(err, goa.MissingFieldError(\"start\", \"body\"))\n\t}\n\tif body.Start != nil {\n\t\terr = goa.MergeErrors(err, goa.ValidateFormat(\"body.start\", *body.Start, goa.FormatDateTime))\n\t}\n\tif body.End != nil {\n\t\terr = goa.MergeErrors(err, goa.ValidateFormat(\"body.end\", *body.End, goa.FormatDateTime))\n\t}\n\treturn\n}", "func (e UpsertEventRequest_SectionValidationError) Key() bool { return e.key }", "func GetSections(tokenID string) func([]token.Token) []token.Token {\n\treturn func(tokens []token.Token) []token.Token {\n\t\tnewTokens := []token.Token{}\n\t\twrite := false\n\t\tfor _, tkn := range tokens {\n\t\t\tif tkn.ID == tokenID {\n\t\t\t\twrite = true\n\t\t\t\tcontinue\n\t\t\t} else if isNotTokenID(tokenID, tkn.ID) {\n\t\t\t\tif write {\n\t\t\t\t\treturn newTokens\n\t\t\t\t}\n\t\t\t\twrite = false\n\t\t\t}\n\t\t\tif write {\n\t\t\t\tnewTokens = append(newTokens, 
tkn)\n\t\t\t}\n\t\t}\n\n\t\treturn newTokens\n\t}\n}", "func (o *PatchEventsEventIDReader) ReadResponse(response runtime.ClientResponse, consumer runtime.Consumer) (interface{}, error) {\n\tswitch response.Code() {\n\n\tcase 204:\n\t\tresult := NewPatchEventsEventIDNoContent()\n\t\tif err := result.readResponse(response, consumer, o.formats); err != nil {\n\t\t\treturn nil, err\n\t\t}\n\t\treturn result, nil\n\n\tcase 401:\n\t\tresult := NewPatchEventsEventIDUnauthorized()\n\t\tif err := result.readResponse(response, consumer, o.formats); err != nil {\n\t\t\treturn nil, err\n\t\t}\n\t\treturn nil, result\n\n\tcase 404:\n\t\tresult := NewPatchEventsEventIDNotFound()\n\t\tif err := result.readResponse(response, consumer, o.formats); err != nil {\n\t\t\treturn nil, err\n\t\t}\n\t\treturn nil, result\n\n\tcase 422:\n\t\tresult := NewPatchEventsEventIDUnprocessableEntity()\n\t\tif err := result.readResponse(response, consumer, o.formats); err != nil {\n\t\t\treturn nil, err\n\t\t}\n\t\treturn nil, result\n\n\tdefault:\n\t\treturn nil, runtime.NewAPIError(\"unknown error\", response, response.Code())\n\t}\n}", "func (r *SectionGroupRequest) Get(ctx context.Context) (resObj *SectionGroup, err error) {\n\tvar query string\n\tif r.query != nil {\n\t\tquery = \"?\" + r.query.Encode()\n\t}\n\terr = r.JSONRequest(ctx, \"GET\", query, nil, &resObj)\n\treturn\n}", "func (c *Config) Section(section string) (map[string]string, error) {\n\tdict, err := c.cgm.LoadStore(section)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\treturn dict.(map[string]string), nil\n}", "func (sr *SectionResponse) ToModel() *types.Section {\r\n\treturn &types.Section{\r\n\t\tID: sr.ID, Name: sr.Name, Symbol: sr.Slug,\r\n\t\tModuleID: sr.ModuleID, Items: nil,\r\n\t}\r\n}", "func (client *RoleAssignmentsClient) listByInvoiceSectionHandleResponse(resp *http.Response) (RoleAssignmentsClientListByInvoiceSectionResponse, error) {\n\tresult := RoleAssignmentsClientListByInvoiceSectionResponse{}\n\tif err := runtime.UnmarshalAsJSON(resp, &result.RoleAssignmentListResult); err != nil {\n\t\treturn RoleAssignmentsClientListByInvoiceSectionResponse{}, err\n\t}\n\treturn result, nil\n}", "func (client *RoleAssignmentsClient) listByInvoiceSectionHandleResponse(resp *http.Response) (RoleAssignmentsClientListByInvoiceSectionResponse, error) {\n\tresult := RoleAssignmentsClientListByInvoiceSectionResponse{RawResponse: resp}\n\tif err := runtime.UnmarshalAsJSON(resp, &result.RoleAssignmentListResult); err != nil {\n\t\treturn RoleAssignmentsClientListByInvoiceSectionResponse{}, err\n\t}\n\treturn result, nil\n}", "func (in *Section) DeepCopy() *Section {\n\tif in == nil {\n\t\treturn nil\n\t}\n\tout := new(Section)\n\tin.DeepCopyInto(out)\n\treturn out\n}", "func (schema *Schema) HasSection(section string) (exists bool) {\n\t_, exists = schema.Map[section]\n\treturn\n}", "func (f *File) Section(name string) *Section {\n\tsec, err := f.GetSection(name)\n\tif err != nil {\n\t\t// Note: It's OK here because the only possible error is empty section name,\n\t\t// but if it's empty, this piece of code won't be executed.\n\t\tsec, _ = f.NewSection(name)\n\t\treturn sec\n\t}\n\treturn sec\n}", "func (client *RoleAssignmentsClient) deleteByInvoiceSectionHandleResponse(resp *http.Response) (RoleAssignmentsClientDeleteByInvoiceSectionResponse, error) {\n\tresult := RoleAssignmentsClientDeleteByInvoiceSectionResponse{RawResponse: resp}\n\tif err := runtime.UnmarshalAsJSON(resp, &result.RoleAssignment); err != nil {\n\t\treturn 
RoleAssignmentsClientDeleteByInvoiceSectionResponse{}, err\n\t}\n\treturn result, nil\n}", "func (client *RoleAssignmentsClient) deleteByInvoiceSectionHandleResponse(resp *http.Response) (RoleAssignmentsClientDeleteByInvoiceSectionResponse, error) {\n\tresult := RoleAssignmentsClientDeleteByInvoiceSectionResponse{}\n\tif err := runtime.UnmarshalAsJSON(resp, &result.RoleAssignment); err != nil {\n\t\treturn RoleAssignmentsClientDeleteByInvoiceSectionResponse{}, err\n\t}\n\treturn result, nil\n}", "func ParseGetStudentsForSectionResponse(\n\trsp *http.Response,\n) (*getStudentsForSectionResponse, error) {\n\tbodyBytes, err := ioutil.ReadAll(rsp.Body)\n\tdefer rsp.Body.Close()\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\n\tresponse := &getStudentsForSectionResponse{\n\t\tBody: bodyBytes,\n\t\tHTTPResponse: rsp,\n\t}\n\n\tswitch {\n\tcase strings.Contains(rsp.Header.Get(\"Content-Type\"), \"json\") && rsp.StatusCode == 200:\n\t\tvar dest StudentsResponse\n\t\tif err := json.Unmarshal(bodyBytes, &dest); err != nil {\n\t\t\treturn nil, err\n\t\t}\n\t\tresponse.JSON200 = &dest\n\n\tcase strings.Contains(rsp.Header.Get(\"Content-Type\"), \"json\") && rsp.StatusCode == 404:\n\t\tvar dest NotFound\n\t\tif err := json.Unmarshal(bodyBytes, &dest); err != nil {\n\t\t\treturn nil, err\n\t\t}\n\t\tresponse.JSON404 = &dest\n\n\t}\n\n\treturn response, nil\n}", "func (c *ClientWithResponses) GetSectionsForStudentWithResponse(\n\tctx context.Context,\n\tid string,\n\tparams *GetSectionsForStudentParams,\n) (*getSectionsForStudentResponse, error) {\n\trsp, err := c.GetSectionsForStudent(ctx, id, params)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\treturn ParseGetSectionsForStudentResponse(rsp)\n}", "func ParseGetSectionsForSchoolResponse(\n\trsp *http.Response,\n) (*getSectionsForSchoolResponse, error) {\n\tbodyBytes, err := ioutil.ReadAll(rsp.Body)\n\tdefer rsp.Body.Close()\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\n\tresponse := &getSectionsForSchoolResponse{\n\t\tBody: bodyBytes,\n\t\tHTTPResponse: rsp,\n\t}\n\n\tswitch {\n\tcase strings.Contains(rsp.Header.Get(\"Content-Type\"), \"json\") && rsp.StatusCode == 200:\n\t\tvar dest SectionsResponse\n\t\tif err := json.Unmarshal(bodyBytes, &dest); err != nil {\n\t\t\treturn nil, err\n\t\t}\n\t\tresponse.JSON200 = &dest\n\n\tcase strings.Contains(rsp.Header.Get(\"Content-Type\"), \"json\") && rsp.StatusCode == 404:\n\t\tvar dest NotFound\n\t\tif err := json.Unmarshal(bodyBytes, &dest); err != nil {\n\t\t\treturn nil, err\n\t\t}\n\t\tresponse.JSON404 = &dest\n\n\t}\n\n\treturn response, nil\n}", "func (o *SubmitReplayRequestEntity) HasEventId() bool {\n\tif o != nil && o.EventId != nil {\n\t\treturn true\n\t}\n\n\treturn false\n}", "func (obj *codeMatch) Section() string {\n\treturn obj.section\n}", "func ParseGetTeachersForSectionResponse(\n\trsp *http.Response,\n) (*getTeachersForSectionResponse, error) {\n\tbodyBytes, err := ioutil.ReadAll(rsp.Body)\n\tdefer rsp.Body.Close()\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\n\tresponse := &getTeachersForSectionResponse{\n\t\tBody: bodyBytes,\n\t\tHTTPResponse: rsp,\n\t}\n\n\tswitch {\n\tcase strings.Contains(rsp.Header.Get(\"Content-Type\"), \"json\") && rsp.StatusCode == 200:\n\t\tvar dest TeachersResponse\n\t\tif err := json.Unmarshal(bodyBytes, &dest); err != nil {\n\t\t\treturn nil, err\n\t\t}\n\t\tresponse.JSON200 = &dest\n\n\tcase strings.Contains(rsp.Header.Get(\"Content-Type\"), \"json\") && rsp.StatusCode == 404:\n\t\tvar dest NotFound\n\t\tif err := json.Unmarshal(bodyBytes, &dest); err != 
nil {\n\t\t\treturn nil, err\n\t\t}\n\t\tresponse.JSON404 = &dest\n\n\t}\n\n\treturn response, nil\n}", "func (c *Control) SectionHits() *hits.SectionHitCounter {\n\treturn c.sectionHits\n}", "func (schema *Schema) GetSection(section string) (kv *SchemaKV, getErr error) {\n\tvar exists bool\n\tif kv, exists = schema.Map[section]; !exists { // If the section does not exist\n\t\tkv = nil\n\t\tgetErr = ErrSectionDoesNotExist\n\t}\n\n\treturn\n}", "func (o *SummaryResponse) HasEvents() bool {\n\tif o != nil && o.Events != nil {\n\t\treturn true\n\t}\n\n\treturn false\n}", "func (o *GetCharactersCharacterIDCalendarEventIDReader) ReadResponse(response runtime.ClientResponse, consumer runtime.Consumer) (interface{}, error) {\n\tswitch response.Code() {\n\n\tcase 200:\n\t\tresult := NewGetCharactersCharacterIDCalendarEventIDOK()\n\t\tif err := result.readResponse(response, consumer, o.formats); err != nil {\n\t\t\treturn nil, err\n\t\t}\n\t\treturn result, nil\n\n\tcase 304:\n\t\tresult := NewGetCharactersCharacterIDCalendarEventIDNotModified()\n\t\tif err := result.readResponse(response, consumer, o.formats); err != nil {\n\t\t\treturn nil, err\n\t\t}\n\t\treturn nil, result\n\n\tcase 400:\n\t\tresult := NewGetCharactersCharacterIDCalendarEventIDBadRequest()\n\t\tif err := result.readResponse(response, consumer, o.formats); err != nil {\n\t\t\treturn nil, err\n\t\t}\n\t\treturn nil, result\n\n\tcase 401:\n\t\tresult := NewGetCharactersCharacterIDCalendarEventIDUnauthorized()\n\t\tif err := result.readResponse(response, consumer, o.formats); err != nil {\n\t\t\treturn nil, err\n\t\t}\n\t\treturn nil, result\n\n\tcase 403:\n\t\tresult := NewGetCharactersCharacterIDCalendarEventIDForbidden()\n\t\tif err := result.readResponse(response, consumer, o.formats); err != nil {\n\t\t\treturn nil, err\n\t\t}\n\t\treturn nil, result\n\n\tcase 404:\n\t\tresult := NewGetCharactersCharacterIDCalendarEventIDNotFound()\n\t\tif err := result.readResponse(response, consumer, o.formats); err != nil {\n\t\t\treturn nil, err\n\t\t}\n\t\treturn nil, result\n\n\tcase 420:\n\t\tresult := NewGetCharactersCharacterIDCalendarEventIDEnhanceYourCalm()\n\t\tif err := result.readResponse(response, consumer, o.formats); err != nil {\n\t\t\treturn nil, err\n\t\t}\n\t\treturn nil, result\n\n\tcase 500:\n\t\tresult := NewGetCharactersCharacterIDCalendarEventIDInternalServerError()\n\t\tif err := result.readResponse(response, consumer, o.formats); err != nil {\n\t\t\treturn nil, err\n\t\t}\n\t\treturn nil, result\n\n\tcase 503:\n\t\tresult := NewGetCharactersCharacterIDCalendarEventIDServiceUnavailable()\n\t\tif err := result.readResponse(response, consumer, o.formats); err != nil {\n\t\t\treturn nil, err\n\t\t}\n\t\treturn nil, result\n\n\tcase 504:\n\t\tresult := NewGetCharactersCharacterIDCalendarEventIDGatewayTimeout()\n\t\tif err := result.readResponse(response, consumer, o.formats); err != nil {\n\t\t\treturn nil, err\n\t\t}\n\t\treturn nil, result\n\n\tdefault:\n\t\treturn nil, runtime.NewAPIError(\"unknown error\", response, response.Code())\n\t}\n}", "func (e UpsertEventRequest_SectionValidationError) Field() string { return e.field }", "func (m *SectionGroup) GetSections()([]OnenoteSectionable) {\n return m.sections\n}", "func (i *ServerInfo) Section(name string) *info.Section { return i.registry.Section(name) }", "func (c *Config) HasSection(section string) bool {\n\tif c == nil || c.mx == nil {\n\t\treturn false\n\t}\n\n\tc.mx.RLock()\n\tdefer c.mx.RUnlock()\n\n\treturn c.data[strings.ToLower(section)] == \"!\"\n}", "func 
(o *DeleteAPIV2EventsEventSubscriptionIDReader) ReadResponse(response runtime.ClientResponse, consumer runtime.Consumer) (interface{}, error) {\n\tswitch response.Code() {\n\tcase 204:\n\t\tresult := NewDeleteAPIV2EventsEventSubscriptionIDNoContent()\n\t\tif err := result.readResponse(response, consumer, o.formats); err != nil {\n\t\t\treturn nil, err\n\t\t}\n\t\treturn result, nil\n\tcase 403:\n\t\tresult := NewDeleteAPIV2EventsEventSubscriptionIDForbidden()\n\t\tif err := result.readResponse(response, consumer, o.formats); err != nil {\n\t\t\treturn nil, err\n\t\t}\n\t\treturn nil, result\n\tcase 404:\n\t\tresult := NewDeleteAPIV2EventsEventSubscriptionIDNotFound()\n\t\tif err := result.readResponse(response, consumer, o.formats); err != nil {\n\t\t\treturn nil, err\n\t\t}\n\t\treturn nil, result\n\n\tdefault:\n\t\treturn nil, runtime.NewAPIError(\"response status code does not match any response statuses defined for this endpoint in the swagger spec\", response, response.Code())\n\t}\n}", "func (r *Reader) NextSection() error {\n\tbeginOffset, err := r.fl.Seek(int64(r.nextOffset), io.SeekStart)\n\tif err != nil {\n\t\treturn err\n\t}\n\n\tvals := make([]byte, 16)\n\tbytesRead, err := r.fl.Read(vals)\n\tif err != nil {\n\t\treturn err\n\t}\n\n\t// end marker\n\tif bytesRead == 8 && bytes.Equal(vals[:8], []byte{0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff}) {\n\t\treturn io.EOF\n\t}\n\n\tsectionSize := binary.LittleEndian.Uint64(vals[:8])\n\trowCount := binary.LittleEndian.Uint64(vals[8:16])\n\n\tstr, err := readZeroTerminatedString(r.fl)\n\tif err != nil {\n\t\tif err == io.EOF {\n\t\t\treturn fmt.Errorf(\"EOF while reading string section (partial: %s)\", str)\n\t\t}\n\t\treturn err\n\t}\n\n\tr.nextOffset = uint64(beginOffset) + sectionSize + 8 // well well, sectionSize includes the rowCount I guess?\n\n\tr.CurrentSection = &Section{\n\t\tName: SectionName(strings.TrimRight(str, string([]byte{0x00}))),\n\t\tOffset: uint64(beginOffset),\n\t\tSize: sectionSize,\n\t\tRowCount: rowCount,\n\t\tBufferSize: sectionSize - uint64(len(str)) - 1 /* str-pad 0x00 byte */ - 8,\n\t\tBuffer: r.fl,\n\t}\n\treturn nil\n}", "func (o *DeleteEventsEventIDReader) ReadResponse(response runtime.ClientResponse, consumer runtime.Consumer) (interface{}, error) {\n\tswitch response.Code() {\n\n\tcase 204:\n\t\tresult := NewDeleteEventsEventIDNoContent()\n\t\tif err := result.readResponse(response, consumer, o.formats); err != nil {\n\t\t\treturn nil, err\n\t\t}\n\t\treturn result, nil\n\n\tcase 401:\n\t\tresult := NewDeleteEventsEventIDUnauthorized()\n\t\tif err := result.readResponse(response, consumer, o.formats); err != nil {\n\t\t\treturn nil, err\n\t\t}\n\t\treturn nil, result\n\n\tcase 404:\n\t\tresult := NewDeleteEventsEventIDNotFound()\n\t\tif err := result.readResponse(response, consumer, o.formats); err != nil {\n\t\t\treturn nil, err\n\t\t}\n\t\treturn nil, result\n\n\tdefault:\n\t\treturn nil, runtime.NewAPIError(\"unknown error\", response, response.Code())\n\t}\n}", "func (r *Reader) ReadSection(s string, v interface{}) error {\n\tr.once.Do(r.init)\n\tif r.Err != nil {\n\t\treturn r.Err\n\t}\n\tc := r.config.Get(s)\n\tif c == nil {\n\t\treturn fmt.Errorf(\"section %s not found\", s)\n\t}\n\treturn read(reflect.ValueOf(v).Elem(), c, r.Strict)\n}", "func (e UpsertEventRequest_SectionValidationError) Cause() error { return e.cause }", "func (f *File) GetSection(name string) (*Section, error) {\n\tif len(name) == 0 {\n\t\tname = DEFAULT_SECTION\n\t}\n\n\tif f.BlockMode {\n\t\tf.lock.RLock()\n\t\tdefer 
f.lock.RUnlock()\n\t}\n\n\tsec := f.sections[name]\n\tif sec == nil {\n\t\treturn nil, fmt.Errorf(\"error when getting section: section '%s' not exists\", name)\n\t}\n\treturn sec, nil\n}", "func TestSections(t *testing.T) {\n\tm := map[string]string{\"root\": \"/tmp\",\n\t\t\"PROC\": \"4\",\n\t}\n\tconf := New(m)\n\tstr := \" # Comment \\nsome = true\\nother = false \\n [ section ] \\nitem=2 \\n[ section2 ]\\n item = 3\"\n\terr := conf.Load(strings.NewReader(str))\n\tif err != nil {\n\t\tt.Error(\"Echec Load\", err)\n\t} else {\n\t\tv := conf.Sections()\n\t\tif len(v) != 2 {\n\t\t\tt.Error(\"Sections() error : found \", v, \" expected len == 2\")\n\t\t}\n\n\t\tc := v[\"section\"]\n\t\tif c == nil {\n\t\t\tt.Error(\"Sections() error : subsection 'section' not found \", v)\n\t\t} else {\n\t\t\tv, err := c.String(\"item\")\n\t\t\tif err != nil || v != \"2\" {\n\t\t\t\tt.Error(\"Section('section2.item') error : found \", v, \" expected 2, for :\", c)\n\t\t\t}\n\t\t}\n\n\t\tc = v[\"section2\"]\n\t\tif c == nil {\n\t\t\tt.Error(\"Sections() error : subsection 'section2' not found \", v)\n\t\t}\n\n\t\tc = v[\"section3\"]\n\t\tif c != nil {\n\t\t\tt.Error(\"Sections() error : subsection 'section3' found \", v)\n\t\t}\n\t}\n}", "func (m *CourseSection) Validate(formats strfmt.Registry) error {\n\treturn nil\n}", "func (m *SectionGroup) GetSectionsUrl()(*string) {\n return m.sectionsUrl\n}", "func (o *DhcpRangeDataData) HasRangeId() bool {\n\tif o != nil && o.RangeId != nil {\n\t\treturn true\n\t}\n\n\treturn false\n}", "func (mr *MockAdminSectionRepoMockRecorder) GetSection(arg0, arg1 interface{}) *gomock.Call {\n\tmr.mock.ctrl.T.Helper()\n\treturn mr.mock.ctrl.RecordCallWithMethodType(mr.mock, \"GetSection\", reflect.TypeOf((*MockAdminSectionRepo)(nil).GetSection), arg0, arg1)\n}", "func (o *UserInvitationResponseData) HasId() bool {\n\treturn o != nil && o.Id != nil\n}", "func (secRepo SectionsRepositoryImpl) GetSections(ctx context.Context,\n\tcourseID uint32, sectionID uint32) (*models.Sections, error) {\n\n\t//databse connection\n\tconn := secRepo.dbConn\n\tsection := models.Sections{}\n\n\tif err := conn.Where(\"id=?\", sectionID).Find(&section).Error; err != nil {\n\t\tlog.Logger(ctx).Error(err)\n\t\treturn nil, err\n\t}\n\treturn &section, nil\n\n}", "func EventCommandsSection(cmdPrefix string, optionItems []OptionItem) Section {\n\tsection := Section{\n\t\tSelects: Selects{\n\t\t\tID: \"\",\n\t\t\tItems: []Select{\n\t\t\t\t{\n\t\t\t\t\tName: \"Run command...\",\n\t\t\t\t\tCommand: cmdPrefix,\n\t\t\t\t\tOptionGroups: []OptionGroup{\n\t\t\t\t\t\t{\n\t\t\t\t\t\t\tName: \"Supported commands\",\n\t\t\t\t\t\t\tOptions: optionItems,\n\t\t\t\t\t\t},\n\t\t\t\t\t},\n\t\t\t\t},\n\t\t\t},\n\t\t},\n\t}\n\n\treturn section\n}", "func (e GetEventByIDResponse_OptionValidationError) Key() bool { return e.key }", "func ParseGetSectionsForStudentResponse(\n\trsp *http.Response,\n) (*getSectionsForStudentResponse, error) {\n\tbodyBytes, err := ioutil.ReadAll(rsp.Body)\n\tdefer rsp.Body.Close()\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\n\tresponse := &getSectionsForStudentResponse{\n\t\tBody: bodyBytes,\n\t\tHTTPResponse: rsp,\n\t}\n\n\tswitch {\n\tcase strings.Contains(rsp.Header.Get(\"Content-Type\"), \"json\") && rsp.StatusCode == 200:\n\t\tvar dest SectionsResponse\n\t\tif err := json.Unmarshal(bodyBytes, &dest); err != nil {\n\t\t\treturn nil, err\n\t\t}\n\t\tresponse.JSON200 = &dest\n\n\tcase strings.Contains(rsp.Header.Get(\"Content-Type\"), \"json\") && rsp.StatusCode == 404:\n\t\tvar dest 
NotFound\n\t\tif err := json.Unmarshal(bodyBytes, &dest); err != nil {\n\t\t\treturn nil, err\n\t\t}\n\t\tresponse.JSON404 = &dest\n\n\t}\n\n\treturn response, nil\n}", "func (client *PermissionsClient) listByInvoiceSectionsHandleResponse(resp *http.Response) (PermissionsClientListByInvoiceSectionsResponse, error) {\n\tresult := PermissionsClientListByInvoiceSectionsResponse{RawResponse: resp}\n\tif err := runtime.UnmarshalAsJSON(resp, &result.PermissionsListResult); err != nil {\n\t\treturn PermissionsClientListByInvoiceSectionsResponse{}, err\n\t}\n\treturn result, nil\n}", "func (a *UsersOnenoteNotebooksOnenoteSectionApiService) UsersOnenoteNotebooksSectionsParentSectionGroupGetSections(ctx _context.Context, userId string, notebookId string, onenoteSectionId string, onenoteSectionId1 string, localVarOptionals *UsersOnenoteNotebooksSectionsParentSectionGroupGetSectionsOpts) (MicrosoftGraphOnenoteSection, *_nethttp.Response, error) {\n\tvar (\n\t\tlocalVarHTTPMethod = _nethttp.MethodGet\n\t\tlocalVarPostBody interface{}\n\t\tlocalVarFormFileName string\n\t\tlocalVarFileName string\n\t\tlocalVarFileBytes []byte\n\t\tlocalVarReturnValue MicrosoftGraphOnenoteSection\n\t)\n\n\t// create path and map variables\n\tlocalVarPath := a.client.cfg.BasePath + \"/users({user-id})/onenote/notebooks({notebook-id})/sections({onenoteSection-id})/parentSectionGroup/sections({onenoteSection-id1})\"\n\tlocalVarPath = strings.Replace(localVarPath, \"{\"+\"user-id\"+\"}\", _neturl.QueryEscape(fmt.Sprintf(\"%v\", userId)), -1)\n\tlocalVarPath = strings.Replace(localVarPath, \"{\"+\"notebook-id\"+\"}\", _neturl.QueryEscape(fmt.Sprintf(\"%v\", notebookId)), -1)\n\tlocalVarPath = strings.Replace(localVarPath, \"{\"+\"onenoteSection-id\"+\"}\", _neturl.QueryEscape(fmt.Sprintf(\"%v\", onenoteSectionId)), -1)\n\tlocalVarPath = strings.Replace(localVarPath, \"{\"+\"onenoteSection-id1\"+\"}\", _neturl.QueryEscape(fmt.Sprintf(\"%v\", onenoteSectionId1)), -1)\n\n\tlocalVarHeaderParams := make(map[string]string)\n\tlocalVarQueryParams := _neturl.Values{}\n\tlocalVarFormParams := _neturl.Values{}\n\n\tif localVarOptionals != nil && localVarOptionals.Select_.IsSet() {\n\t\tlocalVarQueryParams.Add(\"$select\", parameterToString(localVarOptionals.Select_.Value(), \"csv\"))\n\t}\n\tif localVarOptionals != nil && localVarOptionals.Expand.IsSet() {\n\t\tlocalVarQueryParams.Add(\"$expand\", parameterToString(localVarOptionals.Expand.Value(), \"csv\"))\n\t}\n\t// to determine the Content-Type header\n\tlocalVarHTTPContentTypes := []string{}\n\n\t// set Content-Type header\n\tlocalVarHTTPContentType := selectHeaderContentType(localVarHTTPContentTypes)\n\tif localVarHTTPContentType != \"\" {\n\t\tlocalVarHeaderParams[\"Content-Type\"] = localVarHTTPContentType\n\t}\n\n\t// to determine the Accept header\n\tlocalVarHTTPHeaderAccepts := []string{\"application/json\"}\n\n\t// set Accept header\n\tlocalVarHTTPHeaderAccept := selectHeaderAccept(localVarHTTPHeaderAccepts)\n\tif localVarHTTPHeaderAccept != \"\" {\n\t\tlocalVarHeaderParams[\"Accept\"] = localVarHTTPHeaderAccept\n\t}\n\tr, err := a.client.prepareRequest(ctx, localVarPath, localVarHTTPMethod, localVarPostBody, localVarHeaderParams, localVarQueryParams, localVarFormParams, localVarFormFileName, localVarFileName, localVarFileBytes)\n\tif err != nil {\n\t\treturn localVarReturnValue, nil, err\n\t}\n\n\tlocalVarHTTPResponse, err := a.client.callAPI(r)\n\tif err != nil || localVarHTTPResponse == nil {\n\t\treturn localVarReturnValue, localVarHTTPResponse, 
err\n\t}\n\n\tlocalVarBody, err := _ioutil.ReadAll(localVarHTTPResponse.Body)\n\tlocalVarHTTPResponse.Body.Close()\n\tif err != nil {\n\t\treturn localVarReturnValue, localVarHTTPResponse, err\n\t}\n\n\tif localVarHTTPResponse.StatusCode >= 300 {\n\t\tnewErr := GenericOpenAPIError{\n\t\t\tbody: localVarBody,\n\t\t\terror: localVarHTTPResponse.Status,\n\t\t}\n\t\tif localVarHTTPResponse.StatusCode == 200 {\n\t\t\tvar v MicrosoftGraphOnenoteSection\n\t\t\terr = a.client.decode(&v, localVarBody, localVarHTTPResponse.Header.Get(\"Content-Type\"))\n\t\t\tif err != nil {\n\t\t\t\tnewErr.error = err.Error()\n\t\t\t\treturn localVarReturnValue, localVarHTTPResponse, newErr\n\t\t\t}\n\t\t\tnewErr.model = v\n\t\t\treturn localVarReturnValue, localVarHTTPResponse, newErr\n\t\t}\n\t\tif localVarHTTPResponse.StatusCode == 0 {\n\t\t\tvar v OdataError\n\t\t\terr = a.client.decode(&v, localVarBody, localVarHTTPResponse.Header.Get(\"Content-Type\"))\n\t\t\tif err != nil {\n\t\t\t\tnewErr.error = err.Error()\n\t\t\t\treturn localVarReturnValue, localVarHTTPResponse, newErr\n\t\t\t}\n\t\t\tnewErr.model = v\n\t\t\treturn localVarReturnValue, localVarHTTPResponse, newErr\n\t\t}\n\t\treturn localVarReturnValue, localVarHTTPResponse, newErr\n\t}\n\n\terr = a.client.decode(&localVarReturnValue, localVarBody, localVarHTTPResponse.Header.Get(\"Content-Type\"))\n\tif err != nil {\n\t\tnewErr := GenericOpenAPIError{\n\t\t\tbody: localVarBody,\n\t\t\terror: err.Error(),\n\t\t}\n\t\treturn localVarReturnValue, localVarHTTPResponse, newErr\n\t}\n\n\treturn localVarReturnValue, localVarHTTPResponse, nil\n}", "func (ss *DrvSectionstore) Get(nid, sid string) (*secstore.Section, error) {\n\tcontent, err := download(ss.service, nid)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\n\tvar sections []*secstore.Section\n\tif len(content) > 0 {\n\t\tsections, err = unmarshal(content)\n\t\tif err != nil {\n\t\t\treturn nil, err\n\t\t}\n\t}\n\n\t// find the section in the array\n\tidx := indexOf(sections, sid)\n\tif idx == -1 {\n\t\tmsg := fmt.Sprintf(\"section with id '%s' not found\", sid)\n\t\treturn nil, errs.NewNotFoundError(msg)\n\t}\n\n\t// section found, so return the section\n\treturn sections[idx], nil\n}", "func (f *File) Section(name string) *Section {\n\tfor _, s := range f.Sections {\n\t\tif s.Name == name || (len(name) > 8 && s.Name == name[:8]) {\n\t\t\treturn s\n\t\t}\n\t}\n\treturn nil\n}", "func ReadSection4(f io.Reader, length int) (section Section4, err error) {\n\terr = read(f, &section.CoordinatesCount, &section.ProductDefinitionTemplateNumber)\n\tif err != nil {\n\t\treturn section, err\n\t}\n\n\tswitch section.ProductDefinitionTemplateNumber {\n\tcase 0:\n\t\terr = read(f, &section.ProductDefinitionTemplate)\n\tdefault:\n\t\t//return section, fmt.Errorf(\"Category definition template number %d not implemented yet\", section.ProductDefinitionTemplateNumber)\n\t\treturn section, nil\n\t}\n\n\tif err != nil {\n\t\treturn section, err\n\t}\n\n\tsection.Coordinates = make([]byte, section.CoordinatesCount)\n\n\treturn section, read(f, &section.Coordinates)\n}", "func (d *DebugData) GetElfSection(name string) ([]byte, uintptr, error) {\n\tsec := d.elfData.Section(\".\" + name)\n\tif sec != nil {\n\t\tdata, err := sec.Data()\n\t\treturn data, uintptr(sec.Addr), Error(err)\n\t}\n\n\tsec = d.elfData.Section(\".z\" + name)\n\tif sec == nil {\n\t\treturn nil, 0, Errorf(\"could not find .%s or .z%s section\", name, name)\n\t}\n\n\tb, err := sec.Data()\n\tif err != nil {\n\t\treturn nil, 0, Error(err)\n\t}\n\n\tdata, err 
:= decompressMaybe(b)\n\treturn data, uintptr(sec.Addr), err\n}", "func (me *XHasElem_Section) Walk() (err error) {\n\tif fn := WalkHandlers.XHasElem_Section; me != nil {\n\t\tif fn != nil {\n\t\t\tif err = fn(me, true); xsdt.OnWalkError(&err, &WalkErrors, WalkContinueOnError, WalkOnError) {\n\t\t\t\treturn\n\t\t\t}\n\t\t}\n\t\tif err = me.Section.Walk(); xsdt.OnWalkError(&err, &WalkErrors, WalkContinueOnError, WalkOnError) {\n\t\t\treturn\n\t\t}\n\t\tif fn != nil {\n\t\t\tif err = fn(me, false); xsdt.OnWalkError(&err, &WalkErrors, WalkContinueOnError, WalkOnError) {\n\t\t\t\treturn\n\t\t\t}\n\t\t}\n\t}\n\treturn\n}", "func (o *BoardsSectionsPosition) Section(mods ...qm.QueryMod) sectionQuery {\n\tqueryMods := []qm.QueryMod{\n\t\tqm.Where(\"`id` = ?\", o.SectionID),\n\t}\n\n\tqueryMods = append(queryMods, mods...)\n\n\tquery := Sections(queryMods...)\n\tqueries.SetFrom(query.Query, \"`sections`\")\n\n\treturn query\n}", "func (s *EvaluationFormItem) SetSection(v *EvaluationFormSection) *EvaluationFormItem {\n\ts.Section = v\n\treturn s\n}", "func (c *ClientWithResponses) GetSectionsForTeacherWithResponse(\n\tctx context.Context,\n\tid string,\n\tparams *GetSectionsForTeacherParams,\n) (*getSectionsForTeacherResponse, error) {\n\trsp, err := c.GetSectionsForTeacher(ctx, id, params)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\treturn ParseGetSectionsForTeacherResponse(rsp)\n}", "func (s *BaseAspidaListener) ExitSections(ctx *SectionsContext) {}", "func (m *SectionGroupCollectionResponse) GetValue()([]SectionGroupable) {\n return m.value\n}", "func (secRepo SectionsRepositoryImpl) CreateSection(ctx context.Context,\n\tsectionReq models.Sections) (interface{}, error) {\n\n\t//databse connection\n\tconn := secRepo.dbConn\n\tcreateOn := time.Now().In(time.UTC)\n\n\t//record create Time\n\tsectionReq.CreatedAt = createOn\n\n\t//record update time change\n\tsectionReq.UpdatedAt = createOn\n\n\td := conn.Create(&sectionReq)\n\tif d.Error != nil {\n\t\tlog.Logger(ctx).Error(d.Error)\n\t\treturn nil, d.Error\n\t}\n\n\treturn sectionReq.ID, nil\n}", "func UpdateSectionValidity(section MessageSectionWithSig, pkeyValidSince, pkeyValidUntil, sigValidSince, sigValidUntil int64, maxVal MaxCacheValidity) {\n\tif section != nil {\n\t\tvar maxValidity time.Duration\n\t\tswitch section.(type) {\n\t\tcase *AssertionSection:\n\t\t\tmaxValidity = maxVal.AssertionValidity\n\t\tcase *ShardSection:\n\t\t\tmaxValidity = maxVal.ShardValidity\n\t\tcase *ZoneSection:\n\t\t\tmaxValidity = maxVal.ZoneValidity\n\t\tcase *AddressAssertionSection:\n\t\t\tmaxValidity = maxVal.AddressAssertionValidity\n\t\tcase *AddressZoneSection:\n\t\t\tmaxValidity = maxVal.AddressZoneValidity\n\t\tdefault:\n\t\t\tlog.Warn(\"Not supported section\", \"type\", fmt.Sprintf(\"%T\", section))\n\t\t\treturn\n\t\t}\n\t\tif pkeyValidSince < sigValidSince {\n\t\t\tif pkeyValidUntil < sigValidUntil {\n\t\t\t\tsection.UpdateValidity(sigValidSince, pkeyValidUntil, maxValidity)\n\t\t\t} else {\n\t\t\t\tsection.UpdateValidity(sigValidSince, sigValidUntil, maxValidity)\n\t\t\t}\n\n\t\t} else {\n\t\t\tif pkeyValidUntil < sigValidUntil {\n\t\t\t\tsection.UpdateValidity(pkeyValidSince, pkeyValidUntil, maxValidity)\n\t\t\t} else {\n\t\t\t\tsection.UpdateValidity(pkeyValidSince, sigValidUntil, maxValidity)\n\t\t\t}\n\t\t}\n\t}\n}" ]
[ "0.65653867", "0.65613204", "0.6267581", "0.62241143", "0.61209005", "0.60969526", "0.6074486", "0.6038816", "0.5943432", "0.58214927", "0.5761712", "0.5556503", "0.55087125", "0.5488598", "0.54482377", "0.54351044", "0.54198366", "0.53738385", "0.5328823", "0.52608097", "0.51529944", "0.5117917", "0.503105", "0.49469808", "0.49119502", "0.49072626", "0.49028572", "0.4899861", "0.48904127", "0.48673615", "0.47808942", "0.4767668", "0.47508383", "0.47414434", "0.47413707", "0.47328112", "0.47281638", "0.47258565", "0.47258016", "0.47250593", "0.4716325", "0.46871307", "0.46802422", "0.46734336", "0.46530125", "0.4642347", "0.4639682", "0.46390563", "0.463165", "0.46265313", "0.46188807", "0.46108723", "0.46050292", "0.46042436", "0.46025705", "0.45928812", "0.4572293", "0.45710906", "0.45637593", "0.45630935", "0.45622113", "0.45459342", "0.4528346", "0.45108756", "0.45023617", "0.44973654", "0.44961944", "0.44902623", "0.44653845", "0.44423258", "0.44412404", "0.44384077", "0.4430362", "0.44295996", "0.44255355", "0.44227555", "0.44227424", "0.4406876", "0.44015053", "0.43899006", "0.438346", "0.43736666", "0.43733183", "0.43707877", "0.43696246", "0.43675244", "0.43644226", "0.43578675", "0.43559977", "0.4349303", "0.43429548", "0.43368223", "0.43350986", "0.43305302", "0.43251103", "0.42934373", "0.42870474", "0.42740768", "0.42685312", "0.42503893" ]
0.76326686
0
Field function returns field value.
func (e GetEventByIDResponse_SectionValidationError) Field() string { return e.field }
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "func GetFieldValue(v interface{}, field string) (r string) {\n\tvar immutable reflect.Value\n\timmutable = GetReflectValue(v)\n\tval := immutable.FieldByName(field)\n\tswitch val.Kind() {\n\tcase reflect.Int64, reflect.Int32, reflect.Int:\n\t\tr = fmt.Sprintf(\"%d\", val.Int())\n\tcase reflect.Float64, reflect.Float32:\n\t\tr = fmt.Sprintf(\"%.2f\", val.Float())\n\tdefault:\n\t\t// process time\n\t\tvi := val.Interface()\n\t\tif vc, ok := vi.(time.Time); ok {\n\t\t\tr = FormatTime(vc)\n\t\t\tbreak\n\t\t}\n\t\tr = fmt.Sprintf(\"%v\", val)\n\t}\n\treturn\n}", "func (f *field) Val() interface{} {\n\treturn f.v\n}", "func (f Fields) ValueForField(fieldName string) string {\n\treturn f.ValueForFieldOfType(fieldName, \"\")\n}", "func (v *ClassValue) field(s *scope, name string) Value {\n\tfield, ok := v.Fields[name]\n\tif !ok {\n\t\tpanic(fmt.Errorf(\"ClassValue %v did not contain field %v\", v.Type().Name(), name))\n\t}\n\treturn field\n}", "func (f *Field) Value() interface{} {\n\treturn f.value.Interface()\n}", "func (f *Fieldx) Value() interface{} {\n\treturn f.value.Interface()\n}", "func (i Item) GetField(name string) interface{} {\n\treturn getField(name, i.Payload)\n}", "func FieldValue(field *InputField) string {\n\treturn field.value\n}", "func (e RanparameterValueValidationError) Field() string { return e.field }", "func (i I)Field(r,c int, value string)string{\n return value\n}", "func (s *StructField) Field(name string) (*StructField, error) {\n\treturn Field(s.Value(), name)\n}", "func (entry *Entry) Field(name string) (value string, err error) {\n\tvalue, ok := entry.fields[name]\n\tif !ok {\n\t\terr = fmt.Errorf(\"field '%v' does not found in record %+v\", name, *entry)\n\t}\n\treturn\n}", "func (m *NodeMutation) Field(name string) (ent.Value, bool) {\n\tswitch name {\n\tcase node.FieldValue:\n\t\treturn m.Value()\n\t}\n\treturn nil, false\n}", "func (u *User) GetFieldValue(field *mapping.StructField) (interface{}, error) {\n\tswitch field.Index[0] {\n\tcase 0: // ID\n\t\treturn u.ID, nil\n\tcase 1: // Name\n\t\treturn u.Name, nil\n\tcase 2: // CreatedAt\n\t\treturn u.CreatedAt, nil\n\tcase 3: // CreatedAtIso\n\t\treturn u.CreatedAtIso, nil\n\tcase 5: // MotherID\n\t\treturn u.MotherID, nil\n\tcase 7: // FatherID\n\t\treturn u.FatherID, nil\n\t}\n\treturn nil, errors.Wrapf(mapping.ErrInvalidModelField, \"provided invalid field: '%s' for given model: User'\", field.Name())\n}", "func (m *NumberTokenMutation) Field(name string) (ent.Value, bool) {\n\tswitch name {\n\tcase numbertoken.FieldValue:\n\t\treturn m.Value()\n\t}\n\treturn nil, false\n}", "func (f *Field) Field(name string) *Field {\n\tfield, ok := f.FieldOk(name)\n\tif !ok {\n\t\tpanic(\"field not found\")\n\t}\n\n\treturn field\n}", "func (e GetInstanceRequestValidationError) Field() string { return e.field }", "func (e RanparameterItemValidationError) Field() string { return e.field }", "func (e ApplicationPubSubValidationError) Field() string { return e.field }", "func (res Result) GetField(fields ...string) interface{} {\n\tif len(fields) == 0 {\n\t\treturn res\n\t}\n\n\treturn res.get(fields)\n}", "func (t *Type) Field(i int) *Field", "func (m *CarRepairrecordMutation) Field(name string) (ent.Value, bool) {\n\tswitch name {\n\tcase carrepairrecord.FieldDatetime:\n\t\treturn m.Datetime()\n\tcase carrepairrecord.FieldRepairdetail:\n\t\treturn m.Repairdetail()\n\tcase carrepairrecord.FieldRepaircost:\n\t\treturn m.Repaircost()\n\tcase carrepairrecord.FieldCarmaintenance:\n\t\treturn m.Carmaintenance()\n\t}\n\treturn 
nil, false\n}", "func (b *box) getFieldValue(x, y int) int {\n\treturn b.values[x+y*3]\n}", "func (e GetEventByIDRequestValidationError) Field() string { return e.field }", "func (msg *Message) Field(fieldName string) *Field {\n\treturn msg.fieldByName[fieldName]\n}", "func (e GetRequestValidationError) Field() string { return e.field }", "func (e GetRequestValidationError) Field() string { return e.field }", "func (e GetInstanceResponseValidationError) Field() string { return e.field }", "func (e BitStringValidationError) Field() string { return e.field }", "func (e GetResponseValidationError) Field() string { return e.field }", "func (e GetApplicationPubSubRequestValidationError) Field() string { return e.field }", "func (e ResultValidationError) Field() string { return e.field }", "func (e GetEventByIDResponseValidationError) Field() string { return e.field }", "func (e RanparameterDefItemValidationError) Field() string { return e.field }", "func (e ArfcnValidationError) Field() string { return e.field }", "func (p *Pet) GetFieldValue(field *mapping.StructField) (interface{}, error) {\n\tswitch field.Index[0] {\n\tcase 0: // ID\n\t\treturn p.ID, nil\n\tcase 1: // Name\n\t\treturn p.Name, nil\n\tcase 3: // OwnerID\n\t\treturn p.OwnerID, nil\n\t}\n\treturn nil, errors.Wrapf(mapping.ErrInvalidModelField, \"provided invalid field: '%s' for given model: Pet'\", field.Name())\n}", "func (e RanparameterIdValidationError) Field() string { return e.field }", "func (e RetrieveResponseValidationError) Field() string { return e.field }", "func (m *RepairinvoiceMutation) Field(name string) (ent.Value, bool) {\n\tswitch name {\n\tcase repairinvoice.FieldBequipment:\n\t\treturn m.Bequipment()\n\tcase repairinvoice.FieldEmtell:\n\t\treturn m.Emtell()\n\tcase repairinvoice.FieldNum:\n\t\treturn m.Num()\n\t}\n\treturn nil, false\n}", "func (m *CleaningroomMutation) Field(name string) (ent.Value, bool) {\n\tswitch name {\n\tcase cleaningroom.FieldNote:\n\t\treturn m.Note()\n\tcase cleaningroom.FieldDateandstarttime:\n\t\treturn m.Dateandstarttime()\n\tcase cleaningroom.FieldPhonenumber:\n\t\treturn m.Phonenumber()\n\tcase cleaningroom.FieldNumofem:\n\t\treturn m.Numofem()\n\t}\n\treturn nil, false\n}", "func Field(name, from, reference string) (string, error) {\n\treturn makeRequest(\"field\", name, from, reference)\n}", "func (e GetMovableObjectRequestValidationError) Field() string { return e.field }", "func (e ResolveResponseValidationError) Field() string { return e.field }", "func (e PublishResponseValidationError) Field() string { return e.field }", "func (e GetMessageRequestValidationError) Field() string { return e.field }", "func (e GetMessageResponseValidationError) Field() string { return e.field }", "func (m *RepairinvoiceMutation) Field(name string) (ent.Value, bool) {\n\tswitch name {\n\tcase repairinvoice.FieldSymptomid:\n\t\treturn m.Symptomid()\n\tcase repairinvoice.FieldDeviceid:\n\t\treturn m.Deviceid()\n\tcase repairinvoice.FieldUserid:\n\t\treturn m.Userid()\n\tcase repairinvoice.FieldStatusrepairid:\n\t\treturn m.Statusrepairid()\n\t}\n\treturn nil, false\n}", "func (e SimpleRequestValidationError) Field() string { return e.field }", "func (e CacheValidationError) Field() string { return e.field }", "func (e PciValidationError) Field() string { return e.field }", "func (e ChannelPayRequestValidationError) Field() string { return e.field }", "func (e GetMovableObjectResponseValidationError) Field() string { return e.field }", "func (e RetrieveRequestValidationError) Field() string { 
return e.field }", "func (m *ExchangeMutation) Field(name string) (ent.Value, bool) {\n\tswitch name {\n\tcase exchange.FieldCode:\n\t\treturn m.Code()\n\tcase exchange.FieldName:\n\t\treturn m.Name()\n\t}\n\treturn nil, false\n}", "func (e PublishRequestValidationError) Field() string { return e.field }", "func (m *PetruleMutation) Field(name string) (ent.Value, bool) {\n\tswitch name {\n\tcase petrule.FieldPetrule:\n\t\treturn m.Petrule()\n\t}\n\treturn nil, false\n}", "func (e GitopsCFValidationError) Field() string { return e.field }", "func (e SimpleResponseValidationError) Field() string { return e.field }", "func (e ChannelPayResponseValidationError) Field() string { return e.field }", "func (f *Field) Get(l *Location) (string, error) {\n\tif l.Comp == -1 {\n\t\treturn string(f.Value), nil\n\t}\n\tcomp, err := f.Component(l.Comp)\n\tif err != nil {\n\t\treturn \"\", err\n\t}\n\treturn comp.Get(l)\n}", "func (m *RepairingMutation) Field(name string) (ent.Value, bool) {\n\tswitch name {\n\tcase repairing.FieldRepairpart:\n\t\treturn m.Repairpart()\n\t}\n\treturn nil, false\n}", "func (e RanfunctionNameValidationError) Field() string { return e.field }", "func (m *BillMutation) Field(name string) (ent.Value, bool) {\n\tswitch name {\n\tcase bill.FieldPrice:\n\t\treturn m.Price()\n\tcase bill.FieldTime:\n\t\treturn m.Time()\n\t}\n\treturn nil, false\n}", "func (m *EventRSVPMutation) Field(name string) (ent.Value, bool) {\n\treturn nil, false\n}", "func Field(v interface{}, name string) (*Fieldx, bool) {\n\treturn New(v).Field(name)\n}", "func (e GetStreamRequestValidationError) Field() string { return e.field }", "func (e RdsValidationError) Field() string { return e.field }", "func (f *TagField) Value() string {\n\treturn f.value\n}", "func (m *LeaseMutation) Field(name string) (ent.Value, bool) {\n\tswitch name {\n\tcase lease.FieldAddedtime:\n\t\treturn m.Addedtime()\n\tcase lease.FieldTenant:\n\t\treturn m.Tenant()\n\tcase lease.FieldNumbtenant:\n\t\treturn m.Numbtenant()\n\tcase lease.FieldIdtenant:\n\t\treturn m.Idtenant()\n\tcase lease.FieldAgetenant:\n\t\treturn m.Agetenant()\n\t}\n\treturn nil, false\n}", "func (e RetrieveCurrentRequestValidationError) Field() string { return e.field }", "func (fn AdapterFunc) Field(fieldpath []string) (string, bool) {\n\treturn fn(fieldpath)\n}", "func (e EarfcnValidationError) Field() string { return e.field }", "func (e Response_DataValidationError) Field() string { return e.field }", "func (e ScopedRdsValidationError) Field() string { return e.field }", "func (e ResolveRequestValidationError) Field() string { return e.field }", "func (e PaymentInputValidationError) Field() string { return e.field }", "func (m *PatientrecordMutation) Field(name string) (ent.Value, bool) {\n\tswitch name {\n\tcase patientrecord.FieldName:\n\t\treturn m.Name()\n\t}\n\treturn nil, false\n}", "func (e BatchGetResponseValidationError) Field() string { return e.field }", "func (i *Item) GetValue(field string) string {\n\tif i == nil || len(i.Fields) == 0 {\n\t\treturn \"\"\n\t}\n\n\tsectionFilter := false\n\tsectionLabel := \"\"\n\tfieldLabel := field\n\tif strings.Contains(field, \".\") {\n\t\tparts := strings.Split(field, \".\")\n\n\t\t// Test to make sure the . 
isn't the last character\n\t\tif len(parts) == 2 {\n\t\t\tsectionFilter = true\n\t\t\tsectionLabel = parts[0]\n\t\t\tfieldLabel = parts[1]\n\t\t}\n\t}\n\n\tfor _, f := range i.Fields {\n\t\tif sectionFilter {\n\t\t\tif f.Section != nil {\n\t\t\t\tif sectionLabel != i.SectionLabelForID(f.Section.ID) {\n\t\t\t\t\tcontinue\n\t\t\t\t}\n\t\t\t}\n\t\t}\n\n\t\tif fieldLabel == f.Label {\n\t\t\treturn f.Value\n\t\t}\n\t}\n\n\treturn \"\"\n}", "func (m *RoomInfoMutation) Field(name string) (ent.Value, bool) {\n\tswitch name {\n\tcase roominfo.FieldInfo:\n\t\treturn m.Info()\n\t}\n\treturn nil, false\n}", "func (m *TokenMutation) Field(name string) (ent.Value, bool) {\n\tswitch name {\n\tcase token.FieldCreatedAt:\n\t\treturn m.CreatedAt()\n\tcase token.FieldUpdatedAt:\n\t\treturn m.UpdatedAt()\n\tcase token.FieldValue:\n\t\treturn m.Value()\n\t}\n\treturn nil, false\n}", "func (m *ResourceMutation) Field(name string) (ent.Value, bool) {\n\tswitch name {\n\tcase resource.FieldName:\n\t\treturn m.Name()\n\tcase resource.FieldType:\n\t\treturn m.GetType()\n\t}\n\treturn nil, false\n}", "func (e MovableObjectValidationError) Field() string { return e.field }", "func (m *CardMutation) Field(name string) (ent.Value, bool) {\n\tswitch name {\n\tcase card.FieldCreateTime:\n\t\treturn m.CreateTime()\n\tcase card.FieldUpdateTime:\n\t\treturn m.UpdateTime()\n\tcase card.FieldSuit:\n\t\treturn m.Suit()\n\tcase card.FieldValue:\n\t\treturn m.Value()\n\t}\n\treturn nil, false\n}", "func (m *EventMutation) Field(name string) (ent.Value, bool) {\n\tswitch name {\n\tcase event.FieldName:\n\t\treturn m.Name()\n\t}\n\treturn nil, false\n}", "func (m *BillMutation) Field(name string) (ent.Value, bool) {\n\tswitch name {\n\tcase bill.FieldQuantity:\n\t\treturn m.Quantity()\n\tcase bill.FieldAddedTime:\n\t\treturn m.AddedTime()\n\t}\n\treturn nil, false\n}", "func (m *StreetMutation) Field(name string) (ent.Value, bool) {\n\tswitch name {\n\tcase street.FieldName:\n\t\treturn m.Name()\n\t}\n\treturn nil, false\n}", "func (m *LengthtimeMutation) Field(name string) (ent.Value, bool) {\n\tswitch name {\n\tcase lengthtime.FieldLengthtime:\n\t\treturn m.Lengthtime()\n\t}\n\treturn nil, false\n}", "func (e AssessmentResultValidationError) Field() string { return e.field }", "func (s UserSet) FieldGet(field models.FieldName) *models.FieldInfo {\n\tres := s.Collection().Call(\"FieldGet\", field)\n\tresTyped, _ := res.(*models.FieldInfo)\n\treturn resTyped\n}", "func (e GetUserResponseValidationError) Field() string { return e.field }", "func (m *PatientrightsMutation) Field(name string) (ent.Value, bool) {\n\tswitch name {\n\tcase patientrights.FieldPermissionDate:\n\t\treturn m.PermissionDate()\n\t}\n\treturn nil, false\n}", "func (e GetStreamResponseValidationError) Field() string { return e.field }", "func (m *EquipmentrentalMutation) Field(name string) (ent.Value, bool) {\n\tswitch name {\n\tcase equipmentrental.FieldRENTALAMOUNT:\n\t\treturn m.RENTALAMOUNT()\n\tcase equipmentrental.FieldRENTALDATE:\n\t\treturn m.RENTALDATE()\n\tcase equipmentrental.FieldRETURNDATE:\n\t\treturn m.RETURNDATE()\n\t}\n\treturn nil, false\n}", "func (f *FieldHandler) Value(initZero bool) reflect.Value {\n\treturn f.field.reflectValueGetter(f.expr.ptr, initZero)\n}", "func (m *PurposeMutation) Field(name string) (ent.Value, bool) {\n\tswitch name {\n\tcase purpose.FieldObjective:\n\t\treturn m.Objective()\n\t}\n\treturn nil, false\n}", "func (e ApplicationPubSubsValidationError) Field() string { return e.field }", "func (f Unstructured) 
Field(field string) Fragment {\n\tif f.fields != nil {\n\t\treturn f.fields[field]\n\t}\n\treturn nil\n}", "func (e BodyResponseValidationError) Field() string { return e.field }", "func (m *CarMutation) Field(name string) (ent.Value, bool) {\n\tswitch name {\n\tcase car.FieldCreateTime:\n\t\treturn m.CreateTime()\n\tcase car.FieldUpdateTime:\n\t\treturn m.UpdateTime()\n\tcase car.FieldModel:\n\t\treturn m.Model()\n\tcase car.FieldRegisteredAt:\n\t\treturn m.RegisteredAt()\n\t}\n\treturn nil, false\n}", "func (m *CardMutation) Field(name string) (ent.Value, bool) {\n\tswitch name {\n\tcase card.FieldNumber:\n\t\treturn m.Number()\n\tcase card.FieldName:\n\t\treturn m.Name()\n\tcase card.FieldOwnerID:\n\t\treturn m.OwnerID()\n\t}\n\treturn nil, false\n}" ]
[ "0.71079886", "0.705458", "0.70306563", "0.70252305", "0.6945119", "0.69039124", "0.689789", "0.68854237", "0.68611896", "0.68137765", "0.6811531", "0.67632294", "0.6716657", "0.67018616", "0.66822076", "0.6671346", "0.66659707", "0.6661343", "0.66608155", "0.6660421", "0.665608", "0.6647752", "0.66360617", "0.6625801", "0.6617159", "0.66153616", "0.66153616", "0.661111", "0.6608895", "0.66083837", "0.6604208", "0.66008335", "0.65927887", "0.6587402", "0.65803015", "0.65671533", "0.6567071", "0.6564914", "0.65632343", "0.65630984", "0.654184", "0.6536053", "0.6530546", "0.6530526", "0.6528864", "0.65260595", "0.65179527", "0.6516745", "0.6516154", "0.6510159", "0.6510078", "0.65042776", "0.6501439", "0.6499975", "0.64988506", "0.649665", "0.6496221", "0.64947623", "0.649354", "0.6489089", "0.6488793", "0.64882225", "0.64859617", "0.6483642", "0.6479889", "0.64790434", "0.6472379", "0.6465228", "0.6459204", "0.6457627", "0.6452723", "0.64507645", "0.64495903", "0.64487314", "0.6448028", "0.64479464", "0.64474", "0.64456683", "0.64455897", "0.6444573", "0.64437336", "0.6443306", "0.6441888", "0.6441613", "0.6441039", "0.6439085", "0.6438874", "0.6434375", "0.64315784", "0.6430702", "0.6429934", "0.64209116", "0.6417538", "0.64174324", "0.6417134", "0.6411201", "0.64086837", "0.6406251", "0.6405251", "0.6404929", "0.64009386" ]
0.0
-1
Reason function returns reason value.
func (e GetEventByIDResponse_SectionValidationError) Reason() string { return e.reason }
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "func GetReason(from Getter, t string) string {\n\tif c := Get(from, t); c != nil {\n\t\treturn c.Reason\n\t}\n\treturn \"\"\n}", "func GetReason(message report.IMessage) int32 {\n\tswitch message.MessageType() {\n\tcase \"+RSP\", \"+BSP\":\n\t\treturn getLocationReason(message)\n\tcase \"+EVT\", \"+BVT\":\n\t\treturn getEventCode(message)\n\n\t}\n\treturn int32(6)\n}", "func GetReason(message report.IMessage) int32 {\n\tswitch message.MessageType() {\n\tcase \"+RSP\", \"+BSP\":\n\t\treturn getLocationReason(message)\n\tcase \"+EVT\", \"+BVT\":\n\t\treturn getEventCode(message)\n\n\t}\n\treturn int32(6)\n}", "func (b *Base) GetReason() string {\n\treturn b.Reason\n}", "func (o ValidationOutput) Reason() pulumi.StringPtrOutput {\n\treturn o.ApplyT(func(v Validation) *string { return v.Reason }).(pulumi.StringPtrOutput)\n}", "func (s *Subscription) GetReason() string {\n\tif s == nil || s.Reason == nil {\n\t\treturn \"\"\n\t}\n\treturn *s.Reason\n}", "func GetReason(message report.IMessage) int32 {\n\tidt, found := message.GetValue(fields.DeviceType)\n\tif !found {\n\t\treturn 6 //periodical\n\t}\n\n\tdeviceType, valid := idt.(byte)\n\tif !valid {\n\t\treturn 6 //periodical\n\t}\n\n\tswitch deviceType {\n\tcase devicetypes.GV320:\n\t\treturn gv300.GetReason(message)\n\n\tcase devicetypes.GV55, devicetypes.GV55N:\n\t\treturn gv55.GetReason(message)\n\n\tcase devicetypes.GV55Lite, devicetypes.GV55NLite:\n\t\treturn gv55.GetReasonLite(message)\n\n\tcase devicetypes.GV75, devicetypes.GV75W:\n\t\treturn gv75.GetReason(message)\n\n\tcase devicetypes.GV55W:\n\t\treturn gv55w.GetReason(message)\n\n\tcase devicetypes.GV600W:\n\t\treturn gv600.GetReason(message)\n\tcase devicetypes.GV300W:\n\t\treturn gv300w.GetReason(message)\n\tdefault:\n\t\treturn gv55.GetReason(message)\n\t}\n}", "func (e MessageDValidationError) Reason() string { return e.reason }", "func (o LienOutput) Reason() pulumi.StringOutput {\n\treturn o.ApplyT(func(v *Lien) pulumi.StringOutput { return v.Reason }).(pulumi.StringOutput)\n}", "func (e BitStringValidationError) Reason() string { return e.reason }", "func (o JobConditionOutput) Reason() pulumi.StringPtrOutput {\n\treturn o.ApplyT(func(v JobCondition) *string { return v.Reason }).(pulumi.StringPtrOutput)\n}", "func Reason(v string) predicate.ProfileUKM {\n\treturn predicate.ProfileUKM(func(s *sql.Selector) {\n\t\ts.Where(sql.EQ(s.C(FieldReason), v))\n\t})\n}", "func (e MessageFValidationError) Reason() string { return e.reason }", "func (o ValidationPatchOutput) Reason() pulumi.StringPtrOutput {\n\treturn o.ApplyT(func(v ValidationPatch) *string { return v.Reason }).(pulumi.StringPtrOutput)\n}", "func (e ActiveHealthCheckValidationError) Reason() string { return e.reason }", "func (o *SecurityProblemEvent) GetReason() string {\n\tif o == nil || o.Reason == nil {\n\t\tvar ret string\n\t\treturn ret\n\t}\n\treturn *o.Reason\n}", "func (e EutracgiValidationError) Reason() string { return e.reason }", "func (resp *Response) Reason() string {\n\treturn resp.Status\n}", "func (n *Notification) GetReason() string {\n\tif n == nil || n.Reason == nil {\n\t\treturn \"\"\n\t}\n\treturn *n.Reason\n}", "func (s *SessionTrackerV1) GetReason() string {\n\treturn s.Spec.Reason\n}", "func (e MessageEValidationError) Reason() string { return e.reason }", "func (e RequirementRuleValidationError) Reason() string { return e.reason }", "func Reason(err error) string {\n\tif err == nil {\n\t\treturn \"\"\n\t}\n\tif reasoner, ok := err.(Reasoner); ok {\n\t\treturn reasoner.Reason()\n\t}\n\treturn 
\"\"\n}", "func (o MachineInstanceStatusConditionsOutput) Reason() pulumi.StringOutput {\n\treturn o.ApplyT(func(v MachineInstanceStatusConditions) string { return v.Reason }).(pulumi.StringOutput)\n}", "func (e NrtValidationError) Reason() string { return e.reason }", "func (o BuildStatusOutput) Reason() pulumi.StringPtrOutput {\n\treturn o.ApplyT(func(v BuildStatus) *string { return v.Reason }).(pulumi.StringPtrOutput)\n}", "func (e GetMessageResponseValidationError) Reason() string { return e.reason }", "func (o ApplicationStatusConditionsOutput) Reason() pulumi.StringOutput {\n\treturn o.ApplyT(func(v ApplicationStatusConditions) string { return v.Reason }).(pulumi.StringOutput)\n}", "func (e PassiveHealthCheckValidationError) Reason() string { return e.reason }", "func (e CardValidationError) Reason() string { return e.reason }", "func (e StatsdValidationError) Reason() string { return e.reason }", "func (e PciValidationError) Reason() string { return e.reason }", "func (o ApplicationStatusWorkflowStepsOutput) Reason() pulumi.StringPtrOutput {\n\treturn o.ApplyT(func(v ApplicationStatusWorkflowSteps) *string { return v.Reason }).(pulumi.StringPtrOutput)\n}", "func (o *AccessRequestData) GetReason() string {\n\tif o == nil || o.Reason == nil {\n\t\tvar ret string\n\t\treturn ret\n\t}\n\treturn *o.Reason\n}", "func (e LanguageValidationError) Reason() string { return e.reason }", "func (e CreditValidationError) Reason() string { return e.reason }", "func (e PaymentValidationError) Reason() string { return e.reason }", "func (e ResponseValidationError) Reason() string { return e.reason }", "func (e RdsValidationError) Reason() string { return e.reason }", "func (e CardHolderValidationError) Reason() string { return e.reason }", "func (e ActionValidationError) Reason() string { return e.reason }", "func (e SimpleResponseValidationError) Reason() string { return e.reason }", "func (e StatusResponseValidationError) Reason() string { return e.reason }", "func (o *V0037Node) GetReason() string {\n\tif o == nil || o.Reason == nil {\n\t\tvar ret string\n\t\treturn ret\n\t}\n\treturn *o.Reason\n}", "func (e ChannelPayRequestValidationError) Reason() string { return e.reason }", "func (e ChannelPayResponseValidationError) Reason() string { return e.reason }", "func (e RicControlMessagePriorityValidationError) Reason() string { return e.reason }", "func (e MaxPciValidationError) Reason() string { return e.reason }", "func (e LivenessResponseValidationError) Reason() string { return e.reason }", "func (e MaxPlmnValidationError) Reason() string { return e.reason }", "func (e SimpleRequestValidationError) Reason() string { return e.reason }", "func (e MessageCValidationError) Reason() string { return e.reason }", "func (se *StatusError) Reason() string {\n\treturn se.message\n}", "func (o *DeploymentsCondition) GetReason() string {\n\tif o == nil || o.Reason == nil {\n\t\tvar ret string\n\t\treturn ret\n\t}\n\treturn *o.Reason\n}", "func (e SkillValidationError) Reason() string { return e.reason }", "func (e GetDisscusRespValidationError) Reason() string { return e.reason }", "func (o BuildStatusPtrOutput) Reason() pulumi.StringPtrOutput {\n\treturn o.ApplyT(func(v *BuildStatus) *string {\n\t\tif v == nil {\n\t\t\treturn nil\n\t\t}\n\t\treturn v.Reason\n\t}).(pulumi.StringPtrOutput)\n}", "func (c *ContainerStatusResolver) Reason() *string {\n\treturn c.reason\n}", "func (e EarfcnValidationError) Reason() string { return e.reason }", "func (e CalculateComplianceRequestValidationError) Reason() string 
{ return e.reason }", "func (_this *CrashReportBody) Reason() *string {\n\tvar ret *string\n\tvalue := _this.Value_JS.Get(\"reason\")\n\tif value.Type() != js.TypeNull && value.Type() != js.TypeUndefined {\n\t\t__tmp := (value).String()\n\t\tret = &__tmp\n\t}\n\treturn ret\n}", "func (e HealthCheck_PayloadValidationError) Reason() string { return e.reason }", "func (e RetrieveMyCardsResponseValidationError) Reason() string { return e.reason }", "func (e CommonResponseValidationError) Reason() string { return e.reason }", "func (e GetMessageRequestValidationError) Reason() string { return e.reason }", "func (o StorageClusterStatusConditionsOutput) Reason() pulumi.StringPtrOutput {\n\treturn o.ApplyT(func(v StorageClusterStatusConditions) *string { return v.Reason }).(pulumi.StringPtrOutput)\n}", "func (e StateMachineResponseValidationError) Reason() string { return e.reason }", "func (e ArfcnValidationError) Reason() string { return e.reason }", "func (e NetworkPolicyValidationError) Reason() string { return e.reason }", "func (o *DataPlaneClusterUpdateStatusRequestConditions) GetReason() string {\n\tif o == nil || o.Reason == nil {\n\t\tvar ret string\n\t\treturn ret\n\t}\n\treturn *o.Reason\n}", "func (e MetricValidationError) Reason() string { return e.reason }", "func (o BuildRunStatusOutput) Reason() pulumi.StringPtrOutput {\n\treturn o.ApplyT(func(v BuildRunStatus) *string { return v.Reason }).(pulumi.StringPtrOutput)\n}", "func (e RecoverableError) Reason() string {\n\treturn e.reason\n}", "func (e MaxofMessageProtocolTestsValidationError) Reason() string { return e.reason }", "func (e ChannelNotifyResponseValidationError) Reason() string { return e.reason }", "func (e ResultValidationError) Reason() string { return e.reason }", "func (e TestSpecificationValidationError) Reason() string { return e.reason }", "func (e NonRecoverableError) Reason() string {\n\treturn e.reason\n}", "func (o JobStatusErrorOutput) Reason() pulumi.StringPtrOutput {\n\treturn o.ApplyT(func(v JobStatusError) *string { return v.Reason }).(pulumi.StringPtrOutput)\n}", "func (a Acknowledgement) Reason() error {\n\tswitch {\n\tcase a.State == ACK:\n\t\treturn nil\n\tcase a.State == NACK:\n\t\treturn errors.New(string(a.Message))\n\tdefault:\n\t\treturn errors.New(\"unknown acknowledgement status\")\n\t}\n}", "func (e UpdateMessageResponseValidationError) Reason() string { return e.reason }", "func (e WordValidationError) Reason() string { return e.reason }", "func (e GetDisscusReqValidationError) Reason() string { return e.reason }", "func (e CreatMessageResponseValidationError) Reason() string { return e.reason }", "func (o HorizontalPodAutoscalerConditionPatchOutput) Reason() pulumi.StringPtrOutput {\n\treturn o.ApplyT(func(v HorizontalPodAutoscalerConditionPatch) *string { return v.Reason }).(pulumi.StringPtrOutput)\n}", "func (o HorizontalPodAutoscalerConditionPatchOutput) Reason() pulumi.StringPtrOutput {\n\treturn o.ApplyT(func(v HorizontalPodAutoscalerConditionPatch) *string { return v.Reason }).(pulumi.StringPtrOutput)\n}", "func (e MetricImplementationValidationError) Reason() string { return e.reason }", "func (e CiliumCFValidationError) Reason() string { return e.reason }", "func (e FilterStateRuleValidationError) Reason() string { return e.reason }", "func (e CreateDisscusRespValidationError) Reason() string { return e.reason }", "func (e HealthCheckValidationError) Reason() string { return e.reason }", "func (e HealthCheckValidationError) Reason() string { return e.reason }", "func (e 
TwoOneofsValidationError) Reason() string { return e.reason }", "func (e AdminValidationError) Reason() string { return e.reason }", "func (o HorizontalPodAutoscalerConditionOutput) Reason() pulumi.StringPtrOutput {\n\treturn o.ApplyT(func(v HorizontalPodAutoscalerCondition) *string { return v.Reason }).(pulumi.StringPtrOutput)\n}", "func (o HorizontalPodAutoscalerConditionOutput) Reason() pulumi.StringPtrOutput {\n\treturn o.ApplyT(func(v HorizontalPodAutoscalerCondition) *string { return v.Reason }).(pulumi.StringPtrOutput)\n}", "func (e LivenessRequestValidationError) Reason() string { return e.reason }", "func (r *ReportStoryRequest) GetReason() (value ReportReasonClass) {\n\tif r == nil {\n\t\treturn\n\t}\n\treturn r.Reason\n}", "func (e AssessmentResultValidationError) Reason() string { return e.reason }", "func (e L7NetworkPolicyRuleValidationError) Reason() string { return e.reason }", "func (e NrarfcnValidationError) Reason() string { return e.reason }" ]
[ "0.78512263", "0.7759013", "0.7759013", "0.758723", "0.74332446", "0.74091107", "0.740494", "0.73673135", "0.73432285", "0.7330937", "0.7329657", "0.73138005", "0.72980094", "0.7293151", "0.72837216", "0.7275913", "0.7252345", "0.7230593", "0.72234565", "0.7222608", "0.7196587", "0.7186926", "0.7177811", "0.71720684", "0.71702856", "0.7168882", "0.7168033", "0.71623784", "0.7160162", "0.7157901", "0.7156796", "0.71499187", "0.71483266", "0.71435404", "0.7138927", "0.7134093", "0.7131485", "0.71212435", "0.7113703", "0.71134007", "0.7110416", "0.71102226", "0.71073544", "0.71044487", "0.7097571", "0.709562", "0.70931906", "0.7092116", "0.7085098", "0.70789874", "0.7077606", "0.707535", "0.7071573", "0.706842", "0.7067343", "0.70658314", "0.7065663", "0.70604813", "0.70554", "0.70413375", "0.7038985", "0.7036392", "0.70291436", "0.70268923", "0.7026706", "0.70261866", "0.7018986", "0.7011388", "0.70111495", "0.7009085", "0.7005406", "0.70025146", "0.7000965", "0.69991565", "0.6995616", "0.6992607", "0.6992276", "0.69910586", "0.6989737", "0.69873315", "0.6984515", "0.6983248", "0.6979003", "0.6976954", "0.69759", "0.69759", "0.6974406", "0.69741553", "0.6972589", "0.69723344", "0.69695055", "0.69695055", "0.69690573", "0.69686645", "0.69659555", "0.69659555", "0.69656986", "0.69630307", "0.69612694", "0.69515", "0.69511986" ]
0.0
-1
Cause function returns cause value.
func (e GetEventByIDResponse_SectionValidationError) Cause() error { return e.cause }
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "func Cause(err error) error {\n\tswitch err.(type) {\n\tcase Causable:\n\t\treturn err.(Causable).Cause()\n\t}\n\treturn nil\n}", "func (e errWithCause) Cause() error {\n\treturn e.cause\n}", "func Cause(e error) error {\n\tswitch e := e.(type) {\n\tcase *wrap:\n\t\treturn e.Cause()\n\tcase UserError:\n\t\treturn e.Cause()\n\tdefault:\n\t\treturn e\n\t}\n}", "func (e *Error) Cause() error {\n\treturn e.Unwrap()\n}", "func (e *wrap) Cause() error {\n\treturn e.cause\n}", "func Cause(err error) error {\n\treturn errors.Cause(err)\n}", "func Cause(err error) error {\n\treturn errors.Cause(err)\n}", "func Cause(err error) error {\n\treturn errors.Cause(err)\n}", "func Cause(err error) error {\n\treturn errors.Cause(err)\n}", "func Cause(err error) error {\n\treturn errors.Cause(err)\n}", "func Cause(err error) error {\n\treturn errors.Cause(err)\n}", "func Cause(err error) error {\n\tif err == nil {\n\t\treturn nil\n\t}\n\tif e, ok := err.(iCause); ok {\n\t\treturn e.Cause()\n\t}\n\tif e, ok := err.(iNext); ok {\n\t\treturn Cause(e.Next())\n\t}\n\tif e, ok := err.(iUnwrap); ok {\n\t\treturn Cause(e.Unwrap())\n\t}\n\treturn err\n}", "func (e *Error) Cause() error {\n\treturn e.err\n}", "func (e *errorT) Cause() error {\n\treturn e.err\n}", "func (s *Error) Cause() error {\n\treturn s.underlying\n}", "func (e *Error) Cause() error {\n\treturn e.Err\n}", "func (ec Error) Cause() error {\n\treturn ec.error\n}", "func Cause(err error) error {\n\tif err, ok := err.(*wrappedError); ok {\n\t\treturn err.Cause()\n\t}\n\treturn err\n}", "func (e *Err) Cause() error {\n\treturn e.cause\n}", "func Cause(err error) error {\n\ttype causer interface {\n\t\tCause() error\n\t}\n\n\tfor err != nil {\n\t\te, ok := err.(causer)\n\t\tif !ok {\n\t\t\treturn err\n\t\t}\n\t\terr = e.Cause()\n\t}\n\treturn nil\n}", "func (e Error) Cause() error {\n\treturn e.cause\n}", "func (e *RunError) Cause() error {\n\tif e.Inner != nil {\n\t\treturn e.Inner\n\t}\n\treturn e\n}", "func (e *wrappedError) Cause() error {\n\tif e.previous == nil {\n\t\treturn e\n\t}\n\tswitch err := e.previous.(type) {\n\tcase *wrappedError:\n\t\treturn err.Cause()\n\tdefault:\n\t\treturn err\n\t}\n}", "func Cause(err error) error {\n\tvar (\n\t\tcauser Causer\n\t\tok bool\n\t)\n\tfor err != nil {\n\t\tcauser, ok = err.(Causer)\n\t\tif !ok {\n\t\t\tbreak\n\t\t}\n\t\terr = causer.Cause()\n\t}\n\treturn err\n}", "func (e *OpError) Cause() error {\n\treturn e.Err\n}", "func (err *gooseError) Cause() error {\n\treturn err.cause\n}", "func (e *detailedError) Cause() error {\n\treturn e.cause\n}", "func (err *ExitError) Cause() error {\n\treturn err.Err\n}", "func (ce *ClientError) Cause() error {\n\treturn ce.err\n}", "func Cause(err error) error {\n\tif w, ok := err.(*Wrapped); ok {\n\t\t// if root level error\n\t\tif len(w.Errors) > 0 {\n\t\t\treturn w.Errors[0]\n\t\t}\n\t\t// already extracted error\n\t\treturn w\n\t}\n\treturn err\n}", "func Cause(err error) (error, bool) { // nolint: golint, staticcheck, stylecheck\n\terrWithContext, ok := err.(ContextError)\n\tif !ok {\n\t\treturn nil, false\n\t}\n\treturn errWithContext.Cause(), true\n}", "func Cause(err error) error {\n\tfor err != nil {\n\t\te, ok := err.(causer)\n\t\tif !ok {\n\t\t\tbreak\n\t\t}\n\t\terr = e.Cause()\n\t}\n\treturn err\n}", "func Cause(err error) error {\n\tfor err != nil {\n\t\tcause, ok := err.(causer)\n\t\tif !ok {\n\t\t\tbreak\n\t\t}\n\t\terr = cause.Cause()\n\t}\n\treturn err\n}", "func (e UnencodableValue) Cause() error {\n\treturn e.Err\n}", "func Cause(err error) 
error {\n\ttype wrapper interface {\n\t\tUnwrap() error\n\t}\n\tfor err != nil {\n\t\tcause, ok := err.(wrapper)\n\t\tif !ok {\n\t\t\tbreak\n\t\t}\n\t\terr = cause.Unwrap()\n\t}\n\treturn err\n}", "func (w *pipeError) Cause() error { return errors.Cause(w.error) }", "func (e *opaqueWrapper) Cause() error { return e.cause }", "func (e *opaqueWrapper) Cause() error { return e.cause }", "func (e ResolveRequestValidationError) Cause() error { return e.cause }", "func Cause(err error) error {\n\tfor {\n\t\tuerr := Unwrap(err)\n\t\tif uerr == nil {\n\t\t\treturn err\n\t\t}\n\t\terr = uerr\n\t}\n}", "func Cause(err error) error {\n\tfor {\n\t\tif e, ok := err.(errorCause); ok {\n\t\t\tif cause := e.Cause(); cause != nil {\n\t\t\t\terr = cause\n\t\t\t\tcontinue\n\t\t\t}\n\t\t}\n\t\treturn err\n\t}\n}", "func (e InternalUpstreamTransportValidationError) Cause() error { return e.cause }", "func (e EutracgiValidationError) Cause() error { return e.cause }", "func (w *withCode) Cause() error { return w.cause }", "func (e UpsertEventRequestValidationError) Cause() error { return e.cause }", "func (e PciValidationError) Cause() error { return e.cause }", "func (e NoOneofsValidationError) Cause() error { return e.cause }", "func (e SimpleRequestValidationError) Cause() error { return e.cause }", "func Cause(err error) error {\n\tfor err != nil {\n\t\tmrpErr, ok := err.(Error)\n\t\tif !ok {\n\t\t\tbreak\n\t\t}\n\t\terr = gErrors.Cause(mrpErr.originalError)\n\t}\n\treturn err\n}", "func (e *withDomain) Cause() error { return e.cause }", "func (e LoggingValidationError) Cause() error { return e.cause }", "func (e CiliumCFValidationError) Cause() error { return e.cause }", "func (e AssessmentResultValidationError) Cause() error { return e.cause }", "func (e LoggingCFValidationError) Cause() error { return e.cause }", "func Cause(err error) error {\n\tfor err != nil {\n\t\tunwraped := errors.Unwrap(err)\n\t\tif unwraped == nil {\n\t\t\treturn err\n\t\t}\n\t\terr = unwraped\n\t}\n\treturn err\n}", "func (e NrtValidationError) Cause() error { return e.cause }", "func (e ResolveResponseValidationError) Cause() error { return e.cause }", "func (e StateChangeValidationError) Cause() error { return e.cause }", "func (e SXGValidationError) Cause() error { return e.cause }", "func (e EutracellIdentityValidationError) Cause() error { return e.cause }", "func (e WorkflowComponentValidationError) Cause() error { return e.cause }", "func (e MessageFValidationError) Cause() error { return e.cause }", "func (e EarfcnValidationError) Cause() error { return e.cause }", "func (e ActiveHealthCheckValidationError) Cause() error { return e.cause }", "func Cause(e interface{}) ECode {\n\tif e == nil {\n\t\treturn &ecode{code: 0}\n\t}\n\tif str, ok := e.(string); ok {\n\t\treturn &ecode{code: 500, message: str}\n\t}\n\terr, ok := e.(error)\n\tif !ok {\n\t\treturn &ecode{code: 500, message: reflect.TypeOf(e).Name()}\n\t}\n\tec, ok := errors.Cause(err).(ECode)\n\tif ok {\n\t\treturn ec\n\t}\n\treturn &ecode{code: 500, message: err.Error()}\n}", "func (e UpstreamValidationError) Cause() error { return e.cause }", "func (e UpstreamValidationError) Cause() error { return e.cause }", "func (e TransactionValidationError) Cause() error { return e.cause }", "func (e MessageCValidationError) Cause() error { return e.cause }", "func WithCause(err, cause error) error {\n\treturn errWithCause{\n\t\terror: err,\n\t\tcause: cause,\n\t}\n}", "func (e ActionValidationError) Cause() error { return e.cause }", "func (e 
AssessEvidenceRequestValidationError) Cause() error { return e.cause }", "func (e Upstream_TimeoutValidationError) Cause() error { return e.cause }", "func (e BootstrapValidationError) Cause() error { return e.cause }", "func (e TwoValidOneofsValidationError) Cause() error { return e.cause }", "func (e RdsValidationError) Cause() error { return e.cause }", "func (e MaxPciValidationError) Cause() error { return e.cause }", "func (e AdminValidationError) Cause() error { return e.cause }", "func (e RequirementRuleValidationError) Cause() error { return e.cause }", "func (e ResultValidationError) Cause() error { return e.cause }", "func (e InternalUpstreamTransport_MetadataValueSourceValidationError) Cause() error { return e.cause }", "func (e MaintemplateComponentValidationError) Cause() error { return e.cause }", "func (e RedactedValidationError) Cause() error { return e.cause }", "func (e CreatMessageRequestValidationError) Cause() error { return e.cause }", "func (e NrcgiValidationError) Cause() error { return e.cause }", "func (e UpsertEventResponseValidationError) Cause() error { return e.cause }", "func (e NrarfcnValidationError) Cause() error { return e.cause }", "func (e TwoOneofsValidationError) Cause() error { return e.cause }", "func (e PassiveHealthCheckValidationError) Cause() error { return e.cause }", "func (e MessageEValidationError) Cause() error { return e.cause }", "func (e GetEventByIDRequestValidationError) Cause() error { return e.cause }", "func (e ArfcnValidationError) Cause() error { return e.cause }", "func (e TenantValidationError) Cause() error { return e.cause }", "func (e GetRequestValidationError) Cause() error { return e.cause }", "func (e GetRequestValidationError) Cause() error { return e.cause }", "func (e StateValidationError) Cause() error { return e.cause }", "func (e MinioComponentValidationError) Cause() error { return e.cause }", "func (e LatencyFaultValidationError) Cause() error { return e.cause }", "func (e GetDisscusReqValidationError) Cause() error { return e.cause }", "func (e UpdateTodoRequestValidationError) Cause() error { return e.cause }", "func (e ManifestProjectCFValidationError) Cause() error { return e.cause }" ]
[ "0.8261931", "0.79593104", "0.7896341", "0.7866004", "0.77969515", "0.7710267", "0.7710267", "0.7710267", "0.7710267", "0.7710267", "0.7710267", "0.7710245", "0.76848143", "0.7658625", "0.76571184", "0.7650075", "0.76476574", "0.7625474", "0.7623792", "0.7621357", "0.7582015", "0.74775916", "0.74656785", "0.7424877", "0.7423645", "0.7384076", "0.73215586", "0.7306271", "0.7286286", "0.72688353", "0.7258698", "0.7210708", "0.7192562", "0.7107885", "0.7104621", "0.7038758", "0.701369", "0.701369", "0.69629866", "0.6927608", "0.692207", "0.69208515", "0.68938124", "0.6858123", "0.684976", "0.6846449", "0.6830235", "0.6825922", "0.68016034", "0.6800864", "0.6791525", "0.6778742", "0.67324674", "0.673176", "0.67316306", "0.6729585", "0.67155087", "0.6714904", "0.67148", "0.66955864", "0.668878", "0.66879916", "0.66822165", "0.66821957", "0.66791916", "0.6673011", "0.6673011", "0.6668595", "0.66512465", "0.66507614", "0.66484874", "0.6636346", "0.6633876", "0.66313785", "0.66304046", "0.6622965", "0.66204447", "0.6618046", "0.6617173", "0.66125673", "0.66055393", "0.6603956", "0.66004616", "0.6600119", "0.6587435", "0.6580937", "0.6578089", "0.6569218", "0.656675", "0.65664583", "0.6565433", "0.6560722", "0.65606016", "0.6553194", "0.6553194", "0.65503496", "0.6549731", "0.6546909", "0.6544467", "0.65359867", "0.6531173" ]
0.0
-1
Key function returns key value.
func (e GetEventByIDResponse_SectionValidationError) Key() bool { return e.key }
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "func (m *KeyValue) GetKey()(*string) {\n return m.key\n}", "func (f binaryEqualsFunc) key() Key {\n\treturn f.k\n}", "func (m *KeyUint) Key() driver.Value { return driver.Value(m.ID) }", "func (m *OMap) Key(n int) string {\n\treturn m.keys[n]\n}", "func (t *Type) Key() *Type", "func (f nullFunc) key() Key {\n\treturn f.k\n}", "func (v Variable) Key() string {\n\treturn (string)(v)\n}", "func (i GinJwtSignAlgorithm) Key() string {\n\tif val, ok := _GinJwtSignAlgorithmValueToKeyMap[i]; ok {\n\t\t// found\n\t\treturn val\n\t} else {\n\t\t// not found\n\t\treturn \"\"\n\t}\n}", "func (g *Generator) GetKey(K string) interface{} {\n\treturn g.data[K]\n}", "func (m *SearchBucket) GetKey()(*string) {\n return m.key\n}", "func (f *Filter) getKey(key string) string {\n\tif f.HashKeys {\n\t\th := sha1.New()\n\t\ts := h.Sum([]byte(key))\n\t\treturn fmt.Sprintf(\"%x\", s)\n\t}\n\treturn key\n}", "func getKey(ing *extensions.Ingress, t *testing.T) string {\n\tkey, err := keyFunc(ing)\n\tif err != nil {\n\t\tt.Fatalf(\"Unexpected error getting key for Ingress %v: %v\", ing.Name, err)\n\t}\n\treturn key\n}", "func (f *field) Key() string {\n\treturn f.k\n}", "func (i GinBindType) Key() string {\n\tif val, ok := _GinBindTypeValueToKeyMap[i]; ok {\n\t\t// found\n\t\treturn val\n\t} else {\n\t\t// not found\n\t\treturn \"\"\n\t}\n}", "func (c Node) GetKey() string {\n\treturn c.key\n}", "func (m *RegistryKeyState) GetKey()(*string) {\n return m.key\n}", "func (akv StringKeyValue) Key() string {\n\treturn akv.orig.Key\n}", "func (a AddItem) Key() string { return string(a) }", "func (area *MineArea) GetKey() string {\n\treturn GetKey(area.X, area.Y)\n}", "func (d *Disk) getKey(p *DiskParams) []byte {\n\treturn []byte(time_util.TimeToName(time.Unix(p.ExicutionTime, 0), fmt.Sprintf(\"%x\", d.hasher.Sum(nil))))\n}", "func (e *OrderedMapElement[K, V]) Key() K {\n\treturn e.element.key\n}", "func getKey(cluster *clusteroperator.Cluster, t *testing.T) string {\n\tif key, err := controller.KeyFunc(cluster); err != nil {\n\t\tt.Errorf(\"Unexpected error getting key for Cluster %v: %v\", cluster.Name, err)\n\t\treturn \"\"\n\t} else {\n\t\treturn key\n\t}\n}", "func cacheKeyFunc(obj interface{}) (string, error) {\n\tkey := obj.(*cacheEntry).key\n\treturn key, nil\n}", "func (node *Node) Key() interface{} {\n\treturn fmt.Sprintf(\"%v\", node.contents)\n}", "func (s *Mem) Key(key interface{}) string {\n\treturn fmt.Sprintf(\"%v-%v\", s.prefix, key)\n}", "func (vrfs *VRFShare) GetKey() datastore.Key {\n\treturn datastore.ToKey(fmt.Sprintf(\"%v\", vrfs.Round))\n}", "func stringKeyFunc(obj interface{}) (string, error) {\n\tkey := obj.(*nodeidentity.Info).InstanceID\n\treturn key, nil\n}", "func (e Enum) GetKey(value any) string {\n\tfor k, v := range e {\n\t\tif reflect.DeepEqual(v, value) {\n\t\t\treturn k\n\t\t}\n\t}\n\treturn \"\"\n}", "func (m *Map) Key() Type { return m.key }", "func getKey(w http.ResponseWriter, ps httprouter.Params) (string, bool){\n\treturn ps.ByName(\"id\"), true\n}", "func (v *Value) GetKey() *string {\n\tret := C.zj_GetKey(v.V)\n\tif ret == nil {\n\t\treturn nil\n\t}\n\tretStr := C.GoString(ret)\n\treturn &retStr\n}", "func (f *Factor) Key() string { return f.ID }", "func (c *KeyValueChanger) Key() (string, error) {\n\tif c.err != nil {\n\t\treturn \"\", c.err\n\t}\n\treturn c.node.content.key().(string), nil\n}", "func (a DataNodeKV) Key() string {\n\treturn a.K\n}", "func GetKey(allkeys [][]byte, loc Where) []byte {\n\tif loc == Left {\n\t\treturn allkeys[0]\n\t}\n\tif loc == Right 
{\n\t\treturn allkeys[len(allkeys)-1]\n\t}\n\t// select a random index between 1 and allkeys-2\n\t// nolint:gosec\n\tidx := rand.Int()%(len(allkeys)-2) + 1\n\treturn allkeys[idx]\n}", "func KeyFunc(name, namespace string) string {\n\tif len(namespace) == 0 {\n\t\treturn name\n\t}\n\treturn namespace + \"/\" + name\n}", "func (it *Iterator) Key() string { return it.n.k }", "func (s *session) getKey() string {\n\treturn s.uuid\n}", "func (o SchedulingNodeAffinityOutput) Key() pulumi.StringPtrOutput {\n\treturn o.ApplyT(func(v SchedulingNodeAffinity) *string { return v.Key }).(pulumi.StringPtrOutput)\n}", "func (i SNSProtocol) Key() string {\n\tif val, ok := _SNSProtocolValueToKeyMap[i]; ok {\n\t\t// found\n\t\treturn val\n\t} else {\n\t\t// not found\n\t\treturn \"\"\n\t}\n}", "func (it *Iterator) Key() interface{} { return it.n.k }", "func getkey(key ...interface{}) interface{} {\n\tif len(key) > 0 {\n\t\treturn key[0]\n\t}\n\n\treturn nil\n}", "func (i SNSSubscribeAttribute) Key() string {\n\tif val, ok := _SNSSubscribeAttributeValueToKeyMap[i]; ok {\n\t\t// found\n\t\treturn val\n\t} else {\n\t\t// not found\n\t\treturn \"\"\n\t}\n}", "func (it *iterator) Key() []byte {\n\tif len(it.keys) > 0 {\n\t\treturn []byte(it.keys[0])\n\t}\n\treturn nil\n}", "func (this *DefaultHandler) GetKey(xesRedis redo.XesRedisBase) (ret string) {\n\tdefer func() {\n\t\tif xesRedis.GetCtx() == nil {\n\t\t\treturn\n\t\t}\n\t\tbench := xesRedis.GetCtx().Value(\"IS_BENCHMARK\")\n\t\tif cast.ToString(bench) == \"1\" {\n\t\t\tret = \"benchmark_\" + ret\n\t\t}\n\t}()\n\n\tkeyInfo := this.getKeyInfo(xesRedis)\n\tkey := cast.ToString(keyInfo[\"key\"])\n\tif key == \"\" {\n\t\tret = xesRedis.GetKeyName()\n\t\treturn\n\t}\n\tret = fmt.Sprintf(key, (xesRedis.GetKeyParams())...)\n\treturn\n}", "func (st *MemStorage) GetKey(gun, role string) (algorithm string, public []byte, err error) {\n\t// no need for lock. 
It's ok to return nil if an update\n\t// wasn't observed\n\tg, ok := st.keys[gun]\n\tif !ok {\n\t\treturn \"\", nil, &ErrNoKey{gun: gun}\n\t}\n\tk, ok := g[role]\n\tif !ok {\n\t\treturn \"\", nil, &ErrNoKey{gun: gun}\n\t}\n\n\treturn k.algorithm, k.public, nil\n}", "func (e *EntrySet) Get(key string) string {\n return e.keys[key]\n}", "func (v *V) Key() string {\n\treturn v.key\n}", "func (it *Iter) Key() byte { return it.top().key }", "func (s Stash) Key() string {\n\tvals := utils.MapValues(s.payload)\n\tif len(vals) < 1 {\n\t\treturn \"\"\n\t}\n\n\treturn fmt.Sprintf(\"$%s\", vals[0])\n}", "func (i SNSPlatformApplicationAttribute) Key() string {\n\tif val, ok := _SNSPlatformApplicationAttributeValueToKeyMap[i]; ok {\n\t\t// found\n\t\treturn val\n\t} else {\n\t\t// not found\n\t\treturn \"\"\n\t}\n}", "func (o Operator) Key() string {\n\treturn fmt.Sprintf(\"operator.%s\", o.Aid)\n}", "func (i *StringIterator) Key() Object {\n\treturn &Int{Value: int64(i.i - 1)}\n}", "func (mci *XMCacheIterator) Key() []byte {\n\tif mci.err != nil || mci.dir == dirReleased {\n\t\treturn nil\n\t}\n\tswitch mci.index {\n\tcase 0, 1:\n\t\treturn mci.iters[mci.index].Key()\n\tcase 2:\n\t\tif mci.mc.isPenetrate {\n\t\t\treturn mci.mIter.Key()\n\t\t}\n\t\treturn nil\n\t}\n\treturn nil\n}", "func (s *Arena) getKey(offset uint32, size uint16) []byte {\n\treturn s.data[offset : offset+uint32(size)]\n}", "func (o ReservationAffinityOutput) Key() pulumi.StringPtrOutput {\n\treturn o.ApplyT(func(v ReservationAffinity) *string { return v.Key }).(pulumi.StringPtrOutput)\n}", "func (f DefaultField) Key() string {\n\treturn f.K\n}", "func Key(v string) predicate.Blob {\n\treturn predicate.Blob(\n\t\tfunc(s *sql.Selector) {\n\t\t\ts.Where(sql.EQ(s.C(FieldKey), v))\n\t\t},\n\t)\n}", "func (m Match) Key() string {\n\treturn fmt.Sprintf(\"match:%s\", m.ID())\n}", "func (d *Activity) KeyVal() string {\n\treturn d.ExteralID\n}", "func (key twofishKey) Key() []byte {\n\treturn key[:]\n}", "func getKey(data string) string {\n\tsign := md5.Sum([]byte(data))\n\tsignStr := fmt.Sprintf(\"%x\", sign)\n\treturn signStr[:7]\n}", "func (l *LangPackStringPluralized) GetKey() (value string) {\n\tif l == nil {\n\t\treturn\n\t}\n\treturn l.Key\n}", "func (t Task) Key() string {\n\treturn fmt.Sprintf(\"%s:%s\", t.Name, t.ID)\n}", "func (k Keys) RangeKey() interface{} { return k[1] }", "func (d *DStarLite) keyFor(s *dStarLiteNode) key {\n\t/*\n\t procedure CalculateKey(s)\n\t {01”} return [min(g(s), rhs(s)) + h(s_start, s) + k_m; min(g(s), rhs(s))];\n\t*/\n\tk := key{1: math.Min(s.g, s.rhs)}\n\tk[0] = k[1] + d.heuristic(d.s.Node, s.Node) + d.keyModifier\n\treturn k\n}", "func (stateID StateID) Key() string {\n\treturn string(stateID.LastAppHash)\n}", "func (m *Metric) GetKey() string {\n\tif m == nil || m.Key == nil {\n\t\treturn \"\"\n\t}\n\treturn *m.Key\n}", "func (u User) Key() interface{} {\n\treturn u.ID\n}", "func (b *BitSet) Key() string {\n\tif b == nil {\n\t\treturn \"\"\n\t} else {\n\t\treturn string(b.Bits.Bytes())\n\t}\n}", "func (e EnumByte) Key() EnumByteKey {\n return EnumByteKey(e)\n}", "func (n *lnode) key() []byte {\n\tbuf := (*[maxAllocSize]byte)(unsafe.Pointer(n))\n\treturn buf[n.pos : n.pos+n.ksize]\n}", "func (p *pv) key() pvKey {\n\treturn newPVKey(p.Cluster, p.Name)\n}", "func (i *MapIterator) Key() Object {\n\tk := i.k[i.i-1]\n\treturn &String{Value: k}\n}", "func (k *KVItem) Key() (interface{}, error) {\n\tvar cKey unsafe.Pointer\n\tvar keySize C.uint64_t\n\tvar keyType C.tiledb_datatype_t\n\tret := 
C.tiledb_kv_item_get_key(k.context.tiledbContext, k.tiledbKVItem, &cKey, &keyType, &keySize)\n\n\tif ret != C.TILEDB_OK {\n\t\treturn nil, fmt.Errorf(\"Error getting key for KVItem: %s\", k.context.LastError())\n\t}\n\n\tswitch Datatype(keyType) {\n\tcase TILEDB_INT8:\n\t\t// If the key size is greater than the size of a single value in bytes it is an array\n\t\telements := int(keySize) / C.sizeof_int8_t\n\t\tif elements > 1 {\n\t\t\ttmpslice := (*[1 << 30]C.int8_t)(unsafe.Pointer(cKey))[:elements:elements]\n\t\t\tretSlice := make([]int8, elements)\n\t\t\tfor i, s := range tmpslice {\n\t\t\t\tretSlice[i] = int8(s)\n\t\t\t}\n\t\t\treturn retSlice, nil\n\t\t} else {\n\t\t\treturn int8(*(*C.int8_t)(cKey)), nil\n\t\t}\n\tcase TILEDB_INT16:\n\t\t// If the key size is greater than the size of a single value in bytes it is an array\n\t\telements := int(keySize) / C.sizeof_int16_t\n\t\tif elements > 1 {\n\t\t\ttmpslice := (*[1 << 30]C.int16_t)(unsafe.Pointer(cKey))[:elements:elements]\n\t\t\tretSlice := make([]int16, elements)\n\t\t\tfor i, s := range tmpslice {\n\t\t\t\tretSlice[i] = int16(s)\n\t\t\t}\n\t\t\treturn retSlice, nil\n\t\t} else {\n\t\t\treturn int16(*(*C.int16_t)(cKey)), nil\n\t\t}\n\tcase TILEDB_INT32:\n\t\t// If the key size is greater than the size of a single value in bytes it is an array\n\t\telements := int(keySize) / C.sizeof_int32_t\n\t\tif elements > 1 {\n\t\t\ttmpslice := (*[1 << 30]C.int32_t)(unsafe.Pointer(cKey))[:elements:elements]\n\t\t\tretSlice := make([]int32, elements)\n\t\t\tfor i, s := range tmpslice {\n\t\t\t\tretSlice[i] = int32(s)\n\t\t\t}\n\t\t\treturn retSlice, nil\n\t\t} else {\n\t\t\treturn int32(*(*C.int32_t)(cKey)), nil\n\t\t}\n\tcase TILEDB_INT64:\n\t\t// If the key size is greater than the size of a single value in bytes it is an array\n\t\telements := int(keySize) / C.sizeof_int64_t\n\t\tif elements > 1 {\n\t\t\ttmpslice := (*[1 << 30]C.int64_t)(unsafe.Pointer(cKey))[:elements:elements]\n\t\t\tretSlice := make([]int64, elements)\n\t\t\tfor i, s := range tmpslice {\n\t\t\t\tretSlice[i] = int64(s)\n\t\t\t}\n\t\t\treturn retSlice, nil\n\t\t} else {\n\t\t\treturn int64(*(*C.int64_t)(cKey)), nil\n\t\t}\n\tcase TILEDB_UINT8:\n\t\t// If the key size is greater than the size of a single value in bytes it is an array\n\t\telements := int(keySize) / C.sizeof_uint8_t\n\t\tif elements > 1 {\n\t\t\ttmpslice := (*[1 << 30]C.uint8_t)(unsafe.Pointer(cKey))[:elements:elements]\n\t\t\tretSlice := make([]uint8, elements)\n\t\t\tfor i, s := range tmpslice {\n\t\t\t\tretSlice[i] = uint8(s)\n\t\t\t}\n\t\t\treturn retSlice, nil\n\t\t} else {\n\t\t\treturn int8(*(*C.uint8_t)(cKey)), nil\n\t\t}\n\tcase TILEDB_UINT16:\n\t\t// If the key size is greater than the size of a single value in bytes it is an array\n\t\telements := int(keySize) / C.sizeof_uint16_t\n\t\tif elements > 1 {\n\t\t\ttmpslice := (*[1 << 30]C.uint16_t)(unsafe.Pointer(cKey))[:elements:elements]\n\t\t\tretSlice := make([]uint16, elements)\n\t\t\tfor i, s := range tmpslice {\n\t\t\t\tretSlice[i] = uint16(s)\n\t\t\t}\n\t\t\treturn retSlice, nil\n\t\t} else {\n\t\t\treturn int16(*(*C.uint16_t)(cKey)), nil\n\t\t}\n\tcase TILEDB_UINT32:\n\t\t// If the key size is greater than the size of a single value in bytes it is an array\n\t\telements := int(keySize) / C.sizeof_uint32_t\n\t\tif elements > 1 {\n\t\t\ttmpslice := (*[1 << 30]C.uint32_t)(unsafe.Pointer(cKey))[:elements:elements]\n\t\t\tretSlice := make([]uint32, elements)\n\t\t\tfor i, s := range tmpslice {\n\t\t\t\tretSlice[i] = uint32(s)\n\t\t\t}\n\t\t\treturn 
retSlice, nil\n\t\t} else {\n\t\t\treturn int32(*(*C.uint32_t)(cKey)), nil\n\t\t}\n\tcase TILEDB_UINT64:\n\t\t// If the key size is greater than the size of a single value in bytes it is an array\n\t\telements := int(keySize) / C.sizeof_uint64_t\n\t\tif elements > 1 {\n\t\t\ttmpslice := (*[1 << 30]C.uint64_t)(unsafe.Pointer(cKey))[:elements:elements]\n\t\t\tretSlice := make([]uint64, elements)\n\t\t\tfor i, s := range tmpslice {\n\t\t\t\tretSlice[i] = uint64(s)\n\t\t\t}\n\t\t\treturn retSlice, nil\n\t\t} else {\n\t\t\treturn int64(*(*C.uint64_t)(cKey)), nil\n\t\t}\n\tcase TILEDB_FLOAT32:\n\t\t// If the key size is greater than the size of a single value in bytes it is an array\n\t\telements := int(keySize) / C.sizeof_float\n\t\tif elements > 1 {\n\t\t\ttmpslice := (*[1 << 30]C.float)(unsafe.Pointer(cKey))[:elements:elements]\n\t\t\tretSlice := make([]float32, elements)\n\t\t\tfor i, s := range tmpslice {\n\t\t\t\tretSlice[i] = float32(s)\n\t\t\t}\n\t\t\treturn retSlice, nil\n\t\t} else {\n\t\t\treturn float32(*(*C.float)(cKey)), nil\n\t\t}\n\tcase TILEDB_FLOAT64:\n\t\t// If the key size is greater than the size of a single value in bytes it is an array\n\t\telements := int(keySize) / C.sizeof_double\n\t\tif elements > 1 {\n\t\t\ttmpslice := (*[1 << 30]C.double)(unsafe.Pointer(cKey))[:elements:elements]\n\t\t\tretSlice := make([]float64, elements)\n\t\t\tfor i, s := range tmpslice {\n\t\t\t\tretSlice[i] = float64(s)\n\t\t\t}\n\t\t\treturn retSlice, nil\n\t\t} else {\n\t\t\treturn float64(*(*C.double)(cKey)), nil\n\t\t}\n\tcase TILEDB_CHAR:\n\t\telements := int(keySize) / C.sizeof_char\n\t\treturn C.GoStringN((*C.char)(cKey), C.int(elements)), nil\n\n\tdefault:\n\t\treturn nil, fmt.Errorf(\"Unsupported tiledb key type: %v\", keyType)\n\t}\n\n\treturn nil, fmt.Errorf(\"Error getting key for KVItem\")\n}", "func (u Users) Key(luid *windows.LUID) (int64, error) {\r\n\tif luid == nil {\r\n\t\treturn 0, errors.New(\"got empty LUID pointer\")\r\n\t}\r\n\tkey := int64(int64(luid.HighPart<<32) + int64(luid.LowPart))\r\n\treturn key, nil\r\n}", "func (a *Anime) Key() string {\n\treturn fmt.Sprintf(\"anime:%d\", a.ID)\n}", "func (m MapEntry) Key() interface{} {\n\treturn m.key\n}", "func (f KeyMakerFunc) KeyFor(r *http.Request) string {\n\treturn f(r)\n}", "func (t *TimeSeries) GetKey() string {\n\treturn t.key\n}", "func (m *Map) Get(key string) string {\n\tif m.IsEmpty() {\n\t\treturn \"\"\n\t}\n\thash := m.hash([]byte(key))\n\tn := node{hash: hash, key: key}\n\titer := floor(&m.nodes.Tree, &n)\n\tif iter == m.nodes.End() {\n\t\titer = m.nodes.Begin()\n\t}\n\treturn iter.Node().Key.(*node).key\n}", "func (t *ScheduledTask) Key() string {\n\treturn fmt.Sprintf(taskKeyFormat, keyPrefixScheduled, t.ID, t.score)\n}", "func (it *iterator) Key() []byte {\n\treturn it.current.key\n}", "func (eln *EmptyLeafNode) GetKey() []byte {\n\treturn nil\n}", "func (h dataUsageHash) Key() string {\n\treturn string(h)\n}", "func (c *Container) Key() string {\n\tc.Lock()\n\tdefer c.Unlock()\n\treturn c.ID\n}", "func (c Repository) GetKey(key string) string {\n\tval, err := c.Client.Get(key).Result()\n\n\tif err != nil {\n\t\tlog.Println(err)\n\t}\n\n\treturn val\n}", "func (f Base) Key() string {\n\treturn f.key\n}", "func (o StudioComponentScriptParameterKeyValueOutput) Key() pulumi.StringPtrOutput {\n\treturn o.ApplyT(func(v StudioComponentScriptParameterKeyValue) *string { return v.Key }).(pulumi.StringPtrOutput)\n}", "func (o *ResourceDefinitionFilter) GetKey() string {\n\tif o == nil {\n\t\tvar ret 
string\n\t\treturn ret\n\t}\n\n\treturn o.Key\n}", "func (it *KeyAccess_Iterator) Key() interface{} {\n\treturn it.node.key\n}", "func (b Bucket) Key() interface{} {\n\treturn b[\"key\"]\n}", "func (m *Map) Get(key string) string {\n\tif m.IsEmpty() {\n\t\treturn \"\"\n\t}\n\n\thash := int(m.hash([]byte(key)))\n\n\t// Binary search for appropriate replica.\n\tidx := sort.Search(len(m.keys), func(i int) bool { return m.keys[i] >= hash })\n\n\t// Means we have cycled back to the first replica.\n\tif idx == len(m.keys) {\n\t\tidx = 0\n\t}\n\n\treturn m.hashMap[m.keys[idx]]\n}", "func (c *Counter) GetKey() string {\n\treturn c.key\n}", "func Key(id string, fallback string) Reference {\n\treturn key{id, fallback}\n}", "func (a *PositionalAttribute) Key() string {\n\treturn AttrPositionalIndex + strconv.Itoa(a.Index)\n}", "func (n *Node) Key() interface{} {\n\treturn n.key\n}", "func (e Timing) Key() string {\n\treturn e.Name\n}", "func Key(key string) query.Extractor {\n\treturn &keyExtractor{key}\n}", "func (i *Iterator) Key() []byte {\n\treturn i.iterator.Item().KeyCopy(nil)\n}", "func (m *Metric) Key() string {\n\treturn fmt.Sprintf(\"<%s%d%s>\", m.Name, m.Timestamp, m.Tags)\n}" ]
[ "0.7397974", "0.703695", "0.7026126", "0.69730234", "0.69701165", "0.69472975", "0.682121", "0.67752403", "0.6702173", "0.6691155", "0.66223186", "0.6602185", "0.66009104", "0.65937275", "0.65673846", "0.6555592", "0.65304273", "0.6521155", "0.6511681", "0.65062934", "0.64982766", "0.64867014", "0.6477575", "0.6462233", "0.6456774", "0.6456152", "0.6448241", "0.6435275", "0.6423325", "0.6412427", "0.64096636", "0.6403262", "0.6395327", "0.63929945", "0.6382585", "0.6378694", "0.63715774", "0.63671046", "0.635377", "0.63430053", "0.63418114", "0.6339266", "0.63258415", "0.6319039", "0.630293", "0.6300368", "0.6298253", "0.6296133", "0.6295445", "0.6281786", "0.6279424", "0.6277453", "0.6277033", "0.62735796", "0.6269087", "0.6262938", "0.62600297", "0.6259835", "0.6242855", "0.62427336", "0.6239893", "0.6226979", "0.62228185", "0.6216291", "0.62118614", "0.6209014", "0.62075627", "0.619765", "0.6197426", "0.61971486", "0.6196739", "0.6192416", "0.6191223", "0.6183839", "0.6179522", "0.6177141", "0.6172575", "0.61719537", "0.6170614", "0.6162783", "0.61570954", "0.6154456", "0.6152929", "0.615149", "0.61509156", "0.61395836", "0.6138672", "0.61365676", "0.613636", "0.61338246", "0.6133771", "0.6129422", "0.61284614", "0.612092", "0.6119081", "0.61121005", "0.611087", "0.6106958", "0.6106701", "0.61020154", "0.6100722" ]
0.0
-1
Validate checks the field values on GetEventByIDResponse_Question with the rules defined in the proto definition for this message. If any rules are violated, an error is returned.
func (m *GetEventByIDResponse_Question) Validate() error {
	if m == nil {
		return nil
	}

	// no validation rules for Id

	// no validation rules for Content

	// no validation rules for Position

	// no validation rules for Type

	// no validation rules for IsRequired

	// no validation rules for LimitedChoice

	for idx, item := range m.GetOptions() {
		_, _ = idx, item

		if v, ok := interface{}(item).(interface{ Validate() error }); ok {
			if err := v.Validate(); err != nil {
				return GetEventByIDResponse_QuestionValidationError{
					field:  fmt.Sprintf("Options[%v]", idx),
					reason: "embedded message failed validation",
					cause:  err,
				}
			}
		}
	}

	return nil
}
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "func (e GetEventByIDResponse_QuestionValidationError) Reason() string { return e.reason }", "func (m *GetEventByIDResponse) Validate() error {\n\tif m == nil {\n\t\treturn nil\n\t}\n\n\t// no validation rules for Id\n\n\t// no validation rules for Name\n\n\t// no validation rules for Participants\n\n\tfor idx, item := range m.GetSections() {\n\t\t_, _ = idx, item\n\n\t\tif v, ok := interface{}(item).(interface{ Validate() error }); ok {\n\t\t\tif err := v.Validate(); err != nil {\n\t\t\t\treturn GetEventByIDResponseValidationError{\n\t\t\t\t\tfield: fmt.Sprintf(\"Sections[%v]\", idx),\n\t\t\t\t\treason: \"embedded message failed validation\",\n\t\t\t\t\tcause: err,\n\t\t\t\t}\n\t\t\t}\n\t\t}\n\n\t}\n\n\tif v, ok := interface{}(m.GetUpdatedAt()).(interface{ Validate() error }); ok {\n\t\tif err := v.Validate(); err != nil {\n\t\t\treturn GetEventByIDResponseValidationError{\n\t\t\t\tfield: \"UpdatedAt\",\n\t\t\t\treason: \"embedded message failed validation\",\n\t\t\t\tcause: err,\n\t\t\t}\n\t\t}\n\t}\n\n\t// no validation rules for IsOpened\n\n\t// no validation rules for IsApproved\n\n\treturn nil\n}", "func (e GetEventByIDResponse_QuestionValidationError) Cause() error { return e.cause }", "func (m *GetEventByIDResponse_Section) Validate() error {\n\tif m == nil {\n\t\treturn nil\n\t}\n\n\t// no validation rules for Id\n\n\t// no validation rules for Name\n\n\t// no validation rules for Description\n\n\t// no validation rules for Position\n\n\tfor idx, item := range m.GetQuestions() {\n\t\t_, _ = idx, item\n\n\t\tif v, ok := interface{}(item).(interface{ Validate() error }); ok {\n\t\t\tif err := v.Validate(); err != nil {\n\t\t\t\treturn GetEventByIDResponse_SectionValidationError{\n\t\t\t\t\tfield: fmt.Sprintf(\"Questions[%v]\", idx),\n\t\t\t\t\treason: \"embedded message failed validation\",\n\t\t\t\t\tcause: err,\n\t\t\t\t}\n\t\t\t}\n\t\t}\n\n\t}\n\n\treturn nil\n}", "func (m *UpsertEventRequest_Question) Validate() error {\n\tif m == nil {\n\t\treturn nil\n\t}\n\n\tif utf8.RuneCountInString(m.GetContent()) < 1 {\n\t\treturn UpsertEventRequest_QuestionValidationError{\n\t\t\tfield: \"Content\",\n\t\t\treason: \"value length must be at least 1 runes\",\n\t\t}\n\t}\n\n\t// no validation rules for Position\n\n\t// no validation rules for Type\n\n\t// no validation rules for IsRequired\n\n\t// no validation rules for LimitedChoice\n\n\tfor idx, item := range m.GetOptions() {\n\t\t_, _ = idx, item\n\n\t\tif v, ok := interface{}(item).(interface{ Validate() error }); ok {\n\t\t\tif err := v.Validate(); err != nil {\n\t\t\t\treturn UpsertEventRequest_QuestionValidationError{\n\t\t\t\t\tfield: fmt.Sprintf(\"Options[%v]\", idx),\n\t\t\t\t\treason: \"embedded message failed validation\",\n\t\t\t\t\tcause: err,\n\t\t\t\t}\n\t\t\t}\n\t\t}\n\n\t}\n\n\treturn nil\n}", "func (e GetEventByIDResponse_QuestionValidationError) Field() string { return e.field }", "func (e GetEventByIDResponseValidationError) Reason() string { return e.reason }", "func (m *GetEventByIDResponse_Option) Validate() error {\n\tif m == nil {\n\t\treturn nil\n\t}\n\n\t// no validation rules for Id\n\n\tif utf8.RuneCountInString(m.GetContent()) < 1 {\n\t\treturn GetEventByIDResponse_OptionValidationError{\n\t\t\tfield: \"Content\",\n\t\t\treason: \"value length must be at least 1 runes\",\n\t\t}\n\t}\n\n\treturn nil\n}", "func (e GetEventByIDResponse_QuestionValidationError) Key() bool { return e.key }", "func (fc *FileCreate) SetSurveyQuestionID(id int) *FileCreate {\n\tfc.mutation.SetSurveyQuestionID(id)\n\treturn fc\n}", 
"func (scsu *SurveyCellScanUpdate) SetSurveyQuestionID(id string) *SurveyCellScanUpdate {\n\tif scsu.survey_question == nil {\n\t\tscsu.survey_question = make(map[string]struct{})\n\t}\n\tscsu.survey_question[id] = struct{}{}\n\treturn scsu\n}", "func (scsuo *SurveyCellScanUpdateOne) SetSurveyQuestionID(id string) *SurveyCellScanUpdateOne {\n\tif scsuo.survey_question == nil {\n\t\tscsuo.survey_question = make(map[string]struct{})\n\t}\n\tscsuo.survey_question[id] = struct{}{}\n\treturn scsuo\n}", "func (s *Server) AnswerQuestion(ctx context.Context, req *pb.AnswerQuestionRequest) (resp *pb.AnswerQuestionResponse, err error) {\n\tglog.Debug(\"req:%v\", req)\n\tresp = &pb.AnswerQuestionResponse{\n\t\tStatus: common.GetInitStatus(),\n\t}\n\tdefer func() {\n\t\tif err != nil {\n\t\t\tresp.Status.Message = err.Error()\n\t\t\terr = nil\n\t\t}\n\t\tglog.Debug(\"resp:%v\", resp)\n\t}()\n\n\tuserID, _ := strconv.ParseUint(req.GetHeader().GetUserId(), 10, 64)\n\tquestionID, _ := strconv.ParseUint(req.GetQuestionId(), 10, 64)\n\toptionID, _ := strconv.ParseUint(req.GetOptionId(), 10, 64)\n\ttoken := req.GetHeader().GetToken()\n\n\tvar valid bool\n\tif valid, err = util.CheckIDValid(userID, token); !valid {\n\t\tresp.Status.Code = pb.Status_USER_NOTLOGIN\n\t\tglog.Error(\"userid:%d, token:%s check not valid. err:%v\", userID, token, err)\n\t\treturn\n\t}\n\n\t// 1. Check if the question has been answered\n\tkey := fmt.Sprintf(\"%s%s\", common.USER_ACT_PREFIX, strconv.FormatUint(userID, 10))\n\tret, err := db.HGet(key, strconv.FormatUint(questionID, 10))\n\tif ret != \"\" {\n\t\tsqlString := fmt.Sprintf(\"select option_content, is_answer, answer_num from question_option where question_id=%d and option_id=%s\", questionID, ret)\n\t\tvar content string\n\t\tvar isAnswer uint32\n\t\tvar answerNum uint32\n\t\tvar right bool\n\t\ttempRow, _ := db.QueryRow(common.BUDAODB, sqlString)\n\t\terr = tempRow.Scan(&content, &isAnswer, &answerNum)\n\t\tif isAnswer == 1 {\n\t\t\tright = true\n\t\t}\n\t\toptionItem := &pb.OptionItem{\n\t\t\tOptionId: ret,\n\t\t\tQuestionId: strconv.FormatUint(questionID, 10),\n\t\t\tContent: content,\n\t\t\tRight: right,\n\t\t\tChooseCount: answerNum,\n\t\t}\n\t\tresp.Option = optionItem\n\t\tresp.Status.Message = \"question has been answered\"\n\t\tresp.Status.Code = pb.Status_OK\n\t\treturn\n\t}\n\n\t// 2. get the question answer\n\tsqlString := fmt.Sprintf(\"select is_answer from question_option where question_id = %d and option_id = %d\", questionID, optionID)\n\tvar answerFlag int\n\tfirstRow, err := db.QueryRow(common.BUDAODB, sqlString)\n\terr = firstRow.Scan(&answerFlag)\n\tif err != nil {\n\t\tglog.Error(\"query question answer failed. err:%v\", err)\n\t\treturn\n\t}\n\n\t// 3. get the question score\n\tsqlString = fmt.Sprintf(\"select score, vid from question where id = %d\", questionID)\n\tvar score uint32\n\tvar vid uint64\n\tsecondRow, err := db.QueryRow(common.BUDAODB, sqlString)\n\terr = secondRow.Scan(&score, &vid)\n\tif err != nil {\n\t\tglog.Error(\"query question score failed. 
err:%v\", err)\n\t\treturn\n\t}\n\n\tvar (\n\t\tuserResult int\n\t\tuserScore uint32\n\t)\n\tuserTableName, err := db.GetTableName(\"user_\", userID)\n\ttableNumber := userID >> 54\n\tuserQuestionTN := fmt.Sprintf(\"user_question_%d\", tableNumber)\n\tif answerFlag == 1 {\n\t\t// answer correct\n\t\tsqlString = fmt.Sprintf(\"update %s set right_answer_num = right_answer_num+1, get_score = get_score+%d where uid = %d\", userTableName, score, userID)\n\t\t_, err = db.Exec(common.BUDAODB, sqlString)\n\t\tif err != nil {\n\t\t\tglog.Error(\"update user table failed. err:%v\", err)\n\t\t\treturn\n\t\t}\n\n\t\tsqlString = fmt.Sprintf(\"update question set right_answer_num = right_answer_num+1 where id = %d\", questionID)\n\t\t_, err = db.Exec(common.BUDAODB, sqlString)\n\t\tif err != nil {\n\t\t\tglog.Error(\"update question table failed. err:%v\", err)\n\t\t\treturn\n\t\t}\n\n\t\tuserScore = score\n\t\tuserResult = 1\n\t} else {\n\t\t// answer error\n\t\tsqlString = fmt.Sprintf(\"update %s set wrong_answer_num = wrong_answer_num+1 where uid = %d\", userTableName, userID)\n\t\t_, err = db.Exec(common.BUDAODB, sqlString)\n\t\tif err != nil {\n\t\t\tglog.Error(\"update user table failed. err:%v\", err)\n\t\t\treturn\n\t\t}\n\n\t\tsqlString = fmt.Sprintf(\"update question set wrong_answer_num = wrong_answer_num+1 where id = %d\", questionID)\n\t\t_, err = db.Exec(common.BUDAODB, sqlString)\n\t\tif err != nil {\n\t\t\tglog.Error(\"update question table failed. err:%v\", err)\n\t\t\treturn\n\t\t}\n\n\t\tuserScore = 0\n\t\tuserResult = 2\n\t}\n\n\tsqlString = fmt.Sprintf(\"update question_option set answer_num = answer_num+1 where question_id = %d and option_id = %d\", questionID, optionID)\n\t_, err = db.Exec(common.BUDAODB, sqlString)\n\tif err != nil {\n\t\tglog.Error(\"update question_option table failed. err:%v\", err)\n\t\treturn\n\t}\n\n\tsqlString = fmt.Sprintf(\"insert into %s (uid, question_id, result, get_score, option_id) values (%d, %d, %d, %d, %d)\", userQuestionTN, userID, questionID, userResult, userScore, optionID)\n\t_, err = db.Exec(common.BUDAODB, sqlString)\n\tif err != nil {\n\t\tglog.Error(\"answer question insert user_question_ table faild. err:%v\", err)\n\t\treturn\n\t}\n\n\t// update question_dynamic\n\tfield := fmt.Sprintf(\"%s%s\", strconv.FormatUint(questionID, 10), strconv.FormatUint(optionID, 10))\n\t_, err = db.HIncrBy(common.QUESTION_DYNAMIC, field, 1)\n\tif err != nil {\n\t\tglog.Error(\"answer question insert hash question_dynamic faild. err:%v\", err)\n\t}\n\n\t// inster user_act_[uid]\n\t_, err = db.HSet(key, strconv.FormatUint(questionID, 10), strconv.FormatUint(optionID, 10))\n\tif err != nil {\n\t\tglog.Error(\"answer question insert hash user_act_ faild. err:%v\", err)\n\t}\n\n\t// update vid_dynamic\n\t_, err = db.SAdd(common.VID_DYNAMIC, strconv.FormatUint(vid, 10))\n\tif err != nil {\n\t\tglog.Error(\"insert vid_dynamic set failed. 
err:%v\", err)\n\t\treturn\n\t}\n\n\tresp.Status.Code = pb.Status_OK\n\n\treturn\n}", "func (e UpsertEventRequest_QuestionValidationError) Reason() string { return e.reason }", "func (m *UpsertEventResponse) Validate() error {\n\tif m == nil {\n\t\treturn nil\n\t}\n\n\t// no validation rules for Id\n\n\treturn nil\n}", "func (this *ResponseEvent) GetResponse() message.Response {\n\treturn this.m_response\n}", "func (m *GetEventByIDRequest) Validate() error {\n\tif m == nil {\n\t\treturn nil\n\t}\n\n\t// no validation rules for Id\n\n\treturn nil\n}", "func (ac *AnswerCreate) SetQuestionID(id uuid.UUID) *AnswerCreate {\n\tac.mutation.SetQuestionID(id)\n\treturn ac\n}", "func TestAnswerGetByIDCorrectData(t *testing.T) {\n\tcMock := getMock()\n\tcMock.On(\"GetAnswerByID\", 1).Return(createdAnswer, nil)\n\n\tdata, err := AnswerGET(\"1\")\n\tif assert.Nil(t, err) {\n\t\tcMock.AssertExpectations(t)\n\n\t\tassert.Equal(t, createdAnswer.ID, data.ID)\n\t\tassert.Equal(t, createdAnswer.QuestionID, data.QuestionID)\n\t\tassert.Equal(t, *createdAnswer.Content, *data.Content)\n\t\tassert.Equal(t, createdAnswer.AuthorID, data.AuthorID)\n\t\tassert.Equal(t, *createdAnswer.IsBest, *data.IsBest)\n\t\tassert.Equal(t, createdAnswer.Created, data.Created)\n\t}\n}", "func (e GetEventByIDRequestValidationError) Reason() string { return e.reason }", "func (e GetEventByIDResponse_OptionValidationError) Reason() string { return e.reason }", "func (h *GenericEventHandler) GetEventResponse(event *github.Event) *EventResponse {\n\treturn &EventResponse{Message: \"Request received. Doing nothing.\"}\n}", "func (r *Response) Validate(lastID int64) error {\n\tif !(r.StatusManager == ResponseStatusReview ||\n\t\tr.StatusManager == ResponseStatusDenied ||\n\t\tr.StatusManager == ResponseStatusAccepted) {\n\t\treturn errors.New(\"wrong manager response status\")\n\t}\n\tif !(r.StatusFreelancer == ResponseStatusReview ||\n\t\tr.StatusFreelancer == ResponseStatusDenied ||\n\t\tr.StatusFreelancer == ResponseStatusAccepted ||\n\t\tr.StatusFreelancer == ResponseStatusBlock) {\n\t\treturn errors.New(\"wrong freelancer response status\")\n\t}\n\tif r.Date.IsZero() {\n\t\treturn errors.New(\"wrong date\")\n\t}\n\tif r.ID != lastID {\n\t\treturn errors.New(\"current id does not match last id\")\n\t}\n\tif r.FreelancerId == 0 || r.JobId == 0 {\n\t\treturn errors.New(\"wrong relationships between tables\")\n\t}\n\treturn nil\n}", "func (me *XsdGoPkgHasElem_QuestionIdsequenceReviewResultDetailschema_QuestionId_XsdtString_) Walk() (err error) {\n\tif fn := WalkHandlers.XsdGoPkgHasElem_QuestionIdsequenceReviewResultDetailschema_QuestionId_XsdtString_; me != nil {\n\t\tif fn != nil {\n\t\t\tif err = fn(me, true); xsdt.OnWalkError(&err, &WalkErrors, WalkContinueOnError, WalkOnError) {\n\t\t\t\treturn\n\t\t\t}\n\t\t}\n\t\tif fn != nil {\n\t\t\tif err = fn(me, false); xsdt.OnWalkError(&err, &WalkErrors, WalkContinueOnError, WalkOnError) {\n\t\t\t\treturn\n\t\t\t}\n\t\t}\n\t}\n\treturn\n}", "func (*CMsgDOTASubmitTriviaQuestionAnswerResponse) Descriptor() ([]byte, []int) {\n\treturn file_dota_gcmessages_client_proto_rawDescGZIP(), []int{236}\n}", "func (me *XsdGoPkgHasElems_QuestionIdsequenceReviewResultDetailschema_QuestionId_XsdtString_) Walk() (err error) {\n\tif fn := WalkHandlers.XsdGoPkgHasElems_QuestionIdsequenceReviewResultDetailschema_QuestionId_XsdtString_; me != nil {\n\t\tif fn != nil {\n\t\t\tif err = fn(me, true); xsdt.OnWalkError(&err, &WalkErrors, WalkContinueOnError, WalkOnError) {\n\t\t\t\treturn\n\t\t\t}\n\t\t}\n\t\tif fn 
!= nil {\n\t\t\tif err = fn(me, false); xsdt.OnWalkError(&err, &WalkErrors, WalkContinueOnError, WalkOnError) {\n\t\t\t\treturn\n\t\t\t}\n\t\t}\n\t}\n\treturn\n}", "func (m *Response) Validate() error {\n\tif m == nil {\n\t\treturn nil\n\t}\n\n\tswitch m.Message.(type) {\n\n\tcase *Response_Scalar:\n\t\t// no validation rules for Scalar\n\n\tcase *Response_X1:\n\t\t// no validation rules for X1\n\n\tcase *Response_Data_:\n\n\t\tif v, ok := interface{}(m.GetData()).(interface{ Validate() error }); ok {\n\t\t\tif err := v.Validate(); err != nil {\n\t\t\t\treturn ResponseValidationError{\n\t\t\t\t\tfield: \"Data\",\n\t\t\t\t\treason: \"embedded message failed validation\",\n\t\t\t\t\tcause: err,\n\t\t\t\t}\n\t\t\t}\n\t\t}\n\n\tcase *Response_Last_:\n\n\t\tif v, ok := interface{}(m.GetLast()).(interface{ Validate() error }); ok {\n\t\t\tif err := v.Validate(); err != nil {\n\t\t\t\treturn ResponseValidationError{\n\t\t\t\t\tfield: \"Last\",\n\t\t\t\t\treason: \"embedded message failed validation\",\n\t\t\t\t\tcause: err,\n\t\t\t\t}\n\t\t\t}\n\t\t}\n\n\tdefault:\n\t\treturn ResponseValidationError{\n\t\t\tfield: \"Message\",\n\t\t\treason: \"value is required\",\n\t\t}\n\n\t}\n\n\treturn nil\n}", "func (o *PatchEventsEventIDReader) ReadResponse(response runtime.ClientResponse, consumer runtime.Consumer) (interface{}, error) {\n\tswitch response.Code() {\n\n\tcase 204:\n\t\tresult := NewPatchEventsEventIDNoContent()\n\t\tif err := result.readResponse(response, consumer, o.formats); err != nil {\n\t\t\treturn nil, err\n\t\t}\n\t\treturn result, nil\n\n\tcase 401:\n\t\tresult := NewPatchEventsEventIDUnauthorized()\n\t\tif err := result.readResponse(response, consumer, o.formats); err != nil {\n\t\t\treturn nil, err\n\t\t}\n\t\treturn nil, result\n\n\tcase 404:\n\t\tresult := NewPatchEventsEventIDNotFound()\n\t\tif err := result.readResponse(response, consumer, o.formats); err != nil {\n\t\t\treturn nil, err\n\t\t}\n\t\treturn nil, result\n\n\tcase 422:\n\t\tresult := NewPatchEventsEventIDUnprocessableEntity()\n\t\tif err := result.readResponse(response, consumer, o.formats); err != nil {\n\t\t\treturn nil, err\n\t\t}\n\t\treturn nil, result\n\n\tdefault:\n\t\treturn nil, runtime.NewAPIError(\"unknown error\", response, response.Code())\n\t}\n}", "func DeleteQuestionHandler(w http.ResponseWriter, r *http.Request) {\n\tquestionId := r.FormValue(\"id\")\n\tif questionId == \"\" {\n\t\tfmt.Fprintln(w, \"not found id\")\n\t\treturn\n\t}\n\n\tqId, err := strconv.Atoi(questionId)\n\tif err != nil {\n\t\thttp.Error(w, http.StatusText(http.StatusBadRequest), http.StatusBadRequest)\n\t\treturn\n\t}\n\n\tif err := db.DeleteQuestion(qId); err != nil {\n\t\thttp.Error(w, http.StatusText(http.StatusBadRequest), http.StatusBadRequest)\n\t\treturn\n\t}\n\n\tfmt.Fprintln(w, \"success\")\n}", "func (v *Value) Question() string { return v.msg.Question[0].Name }", "func (fc *FileCreate) SetPhotoSurveyQuestionID(id int) *FileCreate {\n\tfc.mutation.SetPhotoSurveyQuestionID(id)\n\treturn fc\n}", "func (e UpsertEventRequest_QuestionValidationError) Field() string { return e.field }", "func (e *EvaluationHandler) DeleteQuestion(c *gin.Context) {\n\tid := c.Params.ByName(\"id\")\n\ti, _ := strconv.Atoi(id)\n\n\te.EvaluationUsecase.DeleteQuestion(i)\n\n\t// Response\n\tmsg := \"Pertanyaan ini telah dihapus\"\n\tres := struct{}{}\n\tresponse.RespondSuccessJSON(c.Writer, res, msg)\n}", "func (e GetEventByIDResponseValidationError) Field() string { return e.field }", "func GetAnswersByQuestionId(c *gin.Context) {\n\t// TODO: 
splice in the question in response as well\n\tvar answers []models.Answer\n\tdb := c.MustGet(\"db\").(*gorm.DB)\n\tquestion_id, _ := strconv.ParseInt(c.Param(\"question_id\"), 10, 64)\n\tdb.Where(\"question_id = ?\", question_id).Find(&answers)\n\tc.JSON(http.StatusOK, gin.H{\"answers\": answers})\n}", "func (o GoogleCloudDatalabelingV1beta1HumanAnnotationConfigResponsePtrOutput) QuestionDuration() pulumi.StringPtrOutput {\n\treturn o.ApplyT(func(v *GoogleCloudDatalabelingV1beta1HumanAnnotationConfigResponse) *string {\n\t\tif v == nil {\n\t\t\treturn nil\n\t\t}\n\t\treturn &v.QuestionDuration\n\t}).(pulumi.StringPtrOutput)\n}", "func CreateModifyGlobalQuestionResponse() (response *ModifyGlobalQuestionResponse) {\n\tresponse = &ModifyGlobalQuestionResponse{\n\t\tBaseResponse: &responses.BaseResponse{},\n\t}\n\treturn\n}", "func (e GetEventByIDResponseValidationError) Cause() error { return e.cause }", "func UnmarshalQuestionnaireResponse(b []byte) (QuestionnaireResponse, error) {\n\tvar questionnaireResponse QuestionnaireResponse\n\tif err := json.Unmarshal(b, &questionnaireResponse); err != nil {\n\t\treturn questionnaireResponse, err\n\t}\n\treturn questionnaireResponse, nil\n}", "func (o *DeleteEventsEventIDReader) ReadResponse(response runtime.ClientResponse, consumer runtime.Consumer) (interface{}, error) {\n\tswitch response.Code() {\n\n\tcase 204:\n\t\tresult := NewDeleteEventsEventIDNoContent()\n\t\tif err := result.readResponse(response, consumer, o.formats); err != nil {\n\t\t\treturn nil, err\n\t\t}\n\t\treturn result, nil\n\n\tcase 401:\n\t\tresult := NewDeleteEventsEventIDUnauthorized()\n\t\tif err := result.readResponse(response, consumer, o.formats); err != nil {\n\t\t\treturn nil, err\n\t\t}\n\t\treturn nil, result\n\n\tcase 404:\n\t\tresult := NewDeleteEventsEventIDNotFound()\n\t\tif err := result.readResponse(response, consumer, o.formats); err != nil {\n\t\t\treturn nil, err\n\t\t}\n\t\treturn nil, result\n\n\tdefault:\n\t\treturn nil, runtime.NewAPIError(\"unknown error\", response, response.Code())\n\t}\n}", "func (*CMsgClientProvideSurveyResult_Response) Descriptor() ([]byte, []int) {\n\treturn file_dota_gcmessages_client_proto_rawDescGZIP(), []int{120, 0}\n}", "func (fuo *FlowUpdateOne) AddQuestionIDs(ids ...uuid.UUID) *FlowUpdateOne {\n\tfuo.mutation.AddQuestionIDs(ids...)\n\treturn fuo\n}", "func (a *Campaigns_ChallengesApiService) GetChallengeEvent(ctx context.Context, id int64) (ChallengeEventResource, *http.Response, error) {\n\tvar (\n\t\tlocalVarHttpMethod = strings.ToUpper(\"Get\")\n\t\tlocalVarPostBody interface{}\n\t\tlocalVarFileName string\n\t\tlocalVarFileBytes []byte\n\t \tsuccessPayload ChallengeEventResource\n\t)\n\n\t// create path and map variables\n\tlocalVarPath := a.client.cfg.BasePath + \"/challenges/events/{id}\"\n\tlocalVarPath = strings.Replace(localVarPath, \"{\"+\"id\"+\"}\", fmt.Sprintf(\"%v\", id), -1)\n\n\tlocalVarHeaderParams := make(map[string]string)\n\tlocalVarQueryParams := url.Values{}\n\tlocalVarFormParams := url.Values{}\n\n\n\t// to determine the Content-Type header\n\tlocalVarHttpContentTypes := []string{ }\n\n\t// set Content-Type header\n\tlocalVarHttpContentType := selectHeaderContentType(localVarHttpContentTypes)\n\tif localVarHttpContentType != \"\" {\n\t\tlocalVarHeaderParams[\"Content-Type\"] = localVarHttpContentType\n\t}\n\n\t// to determine the Accept header\n\tlocalVarHttpHeaderAccepts := []string{\n\t\t\"application/json\",\n\t\t}\n\n\t// set Accept header\n\tlocalVarHttpHeaderAccept := 
selectHeaderAccept(localVarHttpHeaderAccepts)\n\tif localVarHttpHeaderAccept != \"\" {\n\t\tlocalVarHeaderParams[\"Accept\"] = localVarHttpHeaderAccept\n\t}\n\tr, err := a.client.prepareRequest(ctx, localVarPath, localVarHttpMethod, localVarPostBody, localVarHeaderParams, localVarQueryParams, localVarFormParams, localVarFileName, localVarFileBytes)\n\tif err != nil {\n\t\treturn successPayload, nil, err\n\t}\n\n\t localVarHttpResponse, err := a.client.callAPI(r)\n\t if err != nil || localVarHttpResponse == nil {\n\t\t return successPayload, localVarHttpResponse, err\n\t }\n\t defer localVarHttpResponse.Body.Close()\n\t if localVarHttpResponse.StatusCode >= 300 {\n\t\treturn successPayload, localVarHttpResponse, reportError(localVarHttpResponse.Status)\n\t }\n\t\n\tif err = json.NewDecoder(localVarHttpResponse.Body).Decode(&successPayload); err != nil {\n\t \treturn successPayload, localVarHttpResponse, err\n\t}\n\n\n\treturn successPayload, localVarHttpResponse, err\n}", "func (o GoogleCloudDatalabelingV1beta1HumanAnnotationConfigResponseOutput) QuestionDuration() pulumi.StringOutput {\n\treturn o.ApplyT(func(v GoogleCloudDatalabelingV1beta1HumanAnnotationConfigResponse) string { return v.QuestionDuration }).(pulumi.StringOutput)\n}", "func (s *UtteranceSpecification) SetBotResponseAudioVoiceId(v string) *UtteranceSpecification {\n\ts.BotResponseAudioVoiceId = &v\n\treturn s\n}", "func (o *EventAttributes) GetRelatedEventIdOk() (*int64, bool) {\n\tif o == nil || o.RelatedEventId == nil {\n\t\treturn nil, false\n\t}\n\treturn o.RelatedEventId, true\n}", "func DeleteQuestion(no int) (err error) {\n\tentries := ReadQuestions()\n\tif len(entries) == 0 {\n\t\treturn ErrQLE\n\t}\n\n\tfor index, entry := range entries {\n\t\tsplEntry := strings.Split(entry[0], \"|\")\n\t\t_no, _ := strconv.Atoi(splEntry[0])\n\t\tif no == _no {\n\t\t\tentries[index][0] = \"\"\n\t\t\terr = nil\n\t\t\tbreak\n\t\t} else {\n\t\t\terr = errors.New(\"Error - Question does not exist\")\n\t\t}\n\t}\n\n\tif err != nil {\n\t\treturn err\n\t}\n\n\terr = ReGenerateCSVQuestion(entries)\n\tif err != nil {\n\t\treturn err\n\t}\n\n\treturn err\n}", "func (m *CreatMessageResponse) Validate() error {\n\tif m == nil {\n\t\treturn nil\n\t}\n\n\t// no validation rules for Id\n\n\treturn nil\n}", "func (ec *executionContext) _Answer_ID(ctx context.Context, field graphql.CollectedField, obj *Answer) (ret graphql.Marshaler) {\n\tdefer func() {\n\t\tif r := recover(); r != nil {\n\t\t\tec.Error(ctx, ec.Recover(ctx, r))\n\t\t\tret = graphql.Null\n\t\t}\n\t}()\n\tfc := &graphql.FieldContext{\n\t\tObject: \"Answer\",\n\t\tField: field,\n\t\tArgs: nil,\n\t\tIsMethod: false,\n\t}\n\n\tctx = graphql.WithFieldContext(ctx, fc)\n\tresTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) {\n\t\tctx = rctx // use context from middleware stack in children\n\t\treturn obj.ID, nil\n\t})\n\tif err != nil {\n\t\tec.Error(ctx, err)\n\t\treturn graphql.Null\n\t}\n\tif resTmp == nil {\n\t\tif !graphql.HasFieldError(ctx, fc) {\n\t\t\tec.Errorf(ctx, \"must not be null\")\n\t\t}\n\t\treturn graphql.Null\n\t}\n\tres := resTmp.(int)\n\tfc.Result = res\n\treturn ec.marshalNInt2int(ctx, field.Selections, res)\n}", "func NewGetQuestionAnswersFromAnswerBadRequest() *GetQuestionAnswersFromAnswerBadRequest {\n\n\treturn &GetQuestionAnswersFromAnswerBadRequest{}\n}", "func (m *ConfigureAssessmentResponse) Validate() error {\n\treturn m.validate(false)\n}", "func QuestionDeleteView(req helios.Request) {\n\tuser, ok := 
req.GetContextData(auth.UserContextKey).(auth.User)\n\tif !ok {\n\t\treq.SendJSON(helios.ErrInternalServerError.GetMessage(), helios.ErrInternalServerError.GetStatusCode())\n\t\treturn\n\t}\n\n\tvar eventSlug string = req.GetURLParam(\"eventSlug\")\n\tquestionNumber, errParseQuestionNumber := req.GetURLParamUint(\"questionNumber\")\n\tif errParseQuestionNumber != nil {\n\t\treq.SendJSON(errQuestionNotFound.GetMessage(), errQuestionNotFound.GetStatusCode())\n\t\treturn\n\t}\n\n\tvar question *Question\n\tvar err helios.Error\n\tquestion, err = DeleteQuestion(user, eventSlug, questionNumber)\n\tif err != nil {\n\t\treq.SendJSON(err.GetMessage(), err.GetStatusCode())\n\t\treturn\n\t}\n\tvar serializedQuestion QuestionData = SerializeQuestion(*question)\n\treq.SendJSON(serializedQuestion, http.StatusOK)\n}", "func (m *CommonResponse) Validate() error {\n\tif m == nil {\n\t\treturn nil\n\t}\n\n\tif _, ok := CommonResponse_ResponseStatus_name[int32(m.GetStatus())]; !ok {\n\t\treturn CommonResponseValidationError{\n\t\t\tfield: \"Status\",\n\t\t\treason: \"value must be one of the defined enum values\",\n\t\t}\n\t}\n\n\tif v, ok := interface{}(m.GetHeaderMutation()).(interface{ Validate() error }); ok {\n\t\tif err := v.Validate(); err != nil {\n\t\t\treturn CommonResponseValidationError{\n\t\t\t\tfield: \"HeaderMutation\",\n\t\t\t\treason: \"embedded message failed validation\",\n\t\t\t\tcause: err,\n\t\t\t}\n\t\t}\n\t}\n\n\tif v, ok := interface{}(m.GetBodyMutation()).(interface{ Validate() error }); ok {\n\t\tif err := v.Validate(); err != nil {\n\t\t\treturn CommonResponseValidationError{\n\t\t\t\tfield: \"BodyMutation\",\n\t\t\t\treason: \"embedded message failed validation\",\n\t\t\t\tcause: err,\n\t\t\t}\n\t\t}\n\t}\n\n\tif v, ok := interface{}(m.GetTrailers()).(interface{ Validate() error }); ok {\n\t\tif err := v.Validate(); err != nil {\n\t\t\treturn CommonResponseValidationError{\n\t\t\t\tfield: \"Trailers\",\n\t\t\t\treason: \"embedded message failed validation\",\n\t\t\t\tcause: err,\n\t\t\t}\n\t\t}\n\t}\n\n\t// no validation rules for ClearRouteCache\n\n\treturn nil\n}", "func (fuo *FlowUpdateOne) SetSurveyID(id uuid.UUID) *FlowUpdateOne {\n\tfuo.mutation.SetSurveyID(id)\n\treturn fuo\n}", "func (responses Responses) Validate(ctx context.Context, opts ...ValidationOption) error {\n\tctx = WithValidationOptions(ctx, opts...)\n\n\tif len(responses) == 0 {\n\t\treturn errors.New(\"the responses object MUST contain at least one response code\")\n\t}\n\n\tkeys := make([]string, 0, len(responses))\n\tfor key := range responses {\n\t\tkeys = append(keys, key)\n\t}\n\tsort.Strings(keys)\n\tfor _, key := range keys {\n\t\tv := responses[key]\n\t\tif err := v.Validate(ctx); err != nil {\n\t\t\treturn err\n\t\t}\n\t}\n\treturn nil\n}", "func Question(name string, qtype uint16) MsgOpt {\n\treturn func(m *dns.Msg) { m.SetQuestion(name, qtype) }\n}", "func (mw authMiddleware) DeleteQuestion(ctx context.Context, id uint64) error {\n\trole, _, err := getRoleAndID(ctx, nil)\n\tif err != nil {\n\t\treturn err\n\t}\n\tif *role != \"Admin\" {\n\t\treturn errAuth\n\t}\n\treturn mw.next.DeleteQuestion(ctx, id)\n}", "func (m Message) GetSecurityResponseID(f *field.SecurityResponseIDField) quickfix.MessageRejectError {\n\treturn m.Body.Get(f)\n}", "func (e StreamEventsRequest_IdentifierValidationError) Reason() string { return e.reason }", "func (o *GetCharactersCharacterIDCalendarEventIDReader) ReadResponse(response runtime.ClientResponse, consumer runtime.Consumer) (interface{}, error) {\n\tswitch 
response.Code() {\n\n\tcase 200:\n\t\tresult := NewGetCharactersCharacterIDCalendarEventIDOK()\n\t\tif err := result.readResponse(response, consumer, o.formats); err != nil {\n\t\t\treturn nil, err\n\t\t}\n\t\treturn result, nil\n\n\tcase 304:\n\t\tresult := NewGetCharactersCharacterIDCalendarEventIDNotModified()\n\t\tif err := result.readResponse(response, consumer, o.formats); err != nil {\n\t\t\treturn nil, err\n\t\t}\n\t\treturn nil, result\n\n\tcase 400:\n\t\tresult := NewGetCharactersCharacterIDCalendarEventIDBadRequest()\n\t\tif err := result.readResponse(response, consumer, o.formats); err != nil {\n\t\t\treturn nil, err\n\t\t}\n\t\treturn nil, result\n\n\tcase 401:\n\t\tresult := NewGetCharactersCharacterIDCalendarEventIDUnauthorized()\n\t\tif err := result.readResponse(response, consumer, o.formats); err != nil {\n\t\t\treturn nil, err\n\t\t}\n\t\treturn nil, result\n\n\tcase 403:\n\t\tresult := NewGetCharactersCharacterIDCalendarEventIDForbidden()\n\t\tif err := result.readResponse(response, consumer, o.formats); err != nil {\n\t\t\treturn nil, err\n\t\t}\n\t\treturn nil, result\n\n\tcase 404:\n\t\tresult := NewGetCharactersCharacterIDCalendarEventIDNotFound()\n\t\tif err := result.readResponse(response, consumer, o.formats); err != nil {\n\t\t\treturn nil, err\n\t\t}\n\t\treturn nil, result\n\n\tcase 420:\n\t\tresult := NewGetCharactersCharacterIDCalendarEventIDEnhanceYourCalm()\n\t\tif err := result.readResponse(response, consumer, o.formats); err != nil {\n\t\t\treturn nil, err\n\t\t}\n\t\treturn nil, result\n\n\tcase 500:\n\t\tresult := NewGetCharactersCharacterIDCalendarEventIDInternalServerError()\n\t\tif err := result.readResponse(response, consumer, o.formats); err != nil {\n\t\t\treturn nil, err\n\t\t}\n\t\treturn nil, result\n\n\tcase 503:\n\t\tresult := NewGetCharactersCharacterIDCalendarEventIDServiceUnavailable()\n\t\tif err := result.readResponse(response, consumer, o.formats); err != nil {\n\t\t\treturn nil, err\n\t\t}\n\t\treturn nil, result\n\n\tcase 504:\n\t\tresult := NewGetCharactersCharacterIDCalendarEventIDGatewayTimeout()\n\t\tif err := result.readResponse(response, consumer, o.formats); err != nil {\n\t\t\treturn nil, err\n\t\t}\n\t\treturn nil, result\n\n\tdefault:\n\t\treturn nil, runtime.NewAPIError(\"unknown error\", response, response.Code())\n\t}\n}", "func (e GetEventByIDResponse_SectionValidationError) Reason() string { return e.reason }", "func (m Message) SecurityResponseID() (*field.SecurityResponseIDField, quickfix.MessageRejectError) {\n\tf := &field.SecurityResponseIDField{}\n\terr := m.Body.Get(f)\n\treturn f, err\n}", "func (q *dnsQuestion) getQuestion(req []byte, offset uint16, head *dnsHeader) {\n\tost := offset\n\ttmp := ost\n\tost = q.getQName(req, ost)\n\tq.qType = binary.BigEndian.Uint16(req[ost : ost+twoByteSize])\n\tost += twoByteSize\n\tq.qClass = binary.BigEndian.Uint16(req[ost : ost+twoByteSize])\n\tost += twoByteSize\n\tq.head = head\n\tq.queByte = req[tmp:ost]\n}", "func (o *FormField) SetResponse(v string) {\n\to.Response = &v\n}", "func (v *MatchingPollForDecisionTaskResponse) GetNextEventID() (o int64) {\n\tif v != nil && v.NextEventID != nil {\n\t\treturn *v.NextEventID\n\t}\n\treturn\n}", "func (m *DeleteResponse) Validate() error {\n\tif m == nil {\n\t\treturn nil\n\t}\n\n\t// no validation rules for Message\n\n\treturn nil\n}", "func (a *Campaigns_ChallengesApiService) DeleteChallengeEvent(ctx context.Context, id int64) ( *http.Response, error) {\n\tvar (\n\t\tlocalVarHttpMethod = 
strings.ToUpper(\"Delete\")\n\t\tlocalVarPostBody interface{}\n\t\tlocalVarFileName string\n\t\tlocalVarFileBytes []byte\n\t)\n\n\t// create path and map variables\n\tlocalVarPath := a.client.cfg.BasePath + \"/challenges/events/{id}\"\n\tlocalVarPath = strings.Replace(localVarPath, \"{\"+\"id\"+\"}\", fmt.Sprintf(\"%v\", id), -1)\n\n\tlocalVarHeaderParams := make(map[string]string)\n\tlocalVarQueryParams := url.Values{}\n\tlocalVarFormParams := url.Values{}\n\n\n\t// to determine the Content-Type header\n\tlocalVarHttpContentTypes := []string{ }\n\n\t// set Content-Type header\n\tlocalVarHttpContentType := selectHeaderContentType(localVarHttpContentTypes)\n\tif localVarHttpContentType != \"\" {\n\t\tlocalVarHeaderParams[\"Content-Type\"] = localVarHttpContentType\n\t}\n\n\t// to determine the Accept header\n\tlocalVarHttpHeaderAccepts := []string{\n\t\t\"application/json\",\n\t\t}\n\n\t// set Accept header\n\tlocalVarHttpHeaderAccept := selectHeaderAccept(localVarHttpHeaderAccepts)\n\tif localVarHttpHeaderAccept != \"\" {\n\t\tlocalVarHeaderParams[\"Accept\"] = localVarHttpHeaderAccept\n\t}\n\tr, err := a.client.prepareRequest(ctx, localVarPath, localVarHttpMethod, localVarPostBody, localVarHeaderParams, localVarQueryParams, localVarFormParams, localVarFileName, localVarFileBytes)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\n\t localVarHttpResponse, err := a.client.callAPI(r)\n\t if err != nil || localVarHttpResponse == nil {\n\t\t return localVarHttpResponse, err\n\t }\n\t defer localVarHttpResponse.Body.Close()\n\t if localVarHttpResponse.StatusCode >= 300 {\n\t\treturn localVarHttpResponse, reportError(localVarHttpResponse.Status)\n\t }\n\n\treturn localVarHttpResponse, err\n}", "func (m *DeleteTodoResponse) Validate() error {\n\tif m == nil {\n\t\treturn nil\n\t}\n\n\t// no validation rules for Id\n\n\treturn nil\n}", "func (m *notification) ResponseCode() string {\n\treturn m.responseCodeField\n}", "func (c *ReplyController) QueryByQuestionID() {\n\tquestionID, err := c.GetInt(\"qid\", -1)\n\tif questionID != -1 && err == nil {\n\t\tc.Data[\"json\"] = models.QueryAllReplyByQuestionID(questionID)\n\t}\n\tc.ServeJSON()\n}", "func (ac *AnswerCreate) SetQuestion(q *Question) *AnswerCreate {\n\treturn ac.SetQuestionID(q.ID)\n}", "func (m *executionResponse) Validate(formats strfmt.Registry) error {\n\treturn nil\n}", "func (e GetEventByIDResponse_OptionValidationError) Cause() error { return e.cause }", "func (m *Response) GetID() uint64 {\n\treturn m.RequestID\n}", "func NewResponse(eventID string) *Response {\n\treturn &Response{\n\t\tEventID: eventID,\n\t\tStatus: shared.StatusOK,\n\t\tStarted: time.Now(),\n\t}\n}", "func (c *ClientWithResponses) GetEventWithResponse(\n\tctx context.Context,\n\tid string,\n) (*getEventResponse, error) {\n\trsp, err := c.GetEvent(ctx, id)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\treturn ParseGetEventResponse(rsp)\n}", "func (db *Database) QueryAnsweredQuestionsByID(questionID int) ([]models.Question, error) {\n\tvar qna []models.Question\n\treturn qna, errors.Wrap(db.Limit(queryLimit).Model(&models.Question{}).Related(&[]models.Answer{}, \"Answers\").Preload(\"Answers\").Order(\"id DESC\").First(&qna, questionID).Error, \"Unable to query database\")\n}", "func (e GetResponseValidationError) Reason() string { return e.reason }", "func QuestionDetailView(req helios.Request) {\n\tuser, ok := req.GetContextData(auth.UserContextKey).(auth.User)\n\tif !ok {\n\t\treq.SendJSON(helios.ErrInternalServerError.GetMessage(), 
helios.ErrInternalServerError.GetStatusCode())\n\t\treturn\n\t}\n\tvar eventSlug string = req.GetURLParam(\"eventSlug\")\n\tquestionNumber, errParseQuestionNumber := req.GetURLParamUint(\"questionNumber\")\n\tif errParseQuestionNumber != nil {\n\t\treq.SendJSON(errQuestionNotFound.GetMessage(), errQuestionNotFound.GetStatusCode())\n\t\treturn\n\t}\n\n\tvar question *Question\n\tvar err helios.Error\n\tquestion, err = GetQuestionOfEventAndUser(user, eventSlug, questionNumber)\n\tif err != nil {\n\t\treq.SendJSON(err.GetMessage(), err.GetStatusCode())\n\t\treturn\n\t}\n\tvar serializedQuestion QuestionData = SerializeQuestion(*question)\n\treq.SendJSON(serializedQuestion, http.StatusOK)\n}", "func (m *DurationV4Response) Validate(formats strfmt.Registry) error {\n\tvar res []error\n\n\tif err := m.validateDays(formats); err != nil {\n\t\tres = append(res, err)\n\t}\n\n\tif err := m.validateHours(formats); err != nil {\n\t\tres = append(res, err)\n\t}\n\n\tif err := m.validateMinutes(formats); err != nil {\n\t\tres = append(res, err)\n\t}\n\n\tif len(res) > 0 {\n\t\treturn errors.CompositeValidationError(res...)\n\t}\n\treturn nil\n}", "func Answer(q string) (int, bool) {\n\tparser, err := participle.Build(&question{})\n\tif err != nil {\n\t\treturn 0, false\n\t}\n\n\tquestion := &question{}\n\terr = parser.ParseString(q, question)\n\tif err != nil {\n\t\treturn 0, false\n\t}\n\n\ta := question.eval()\n\treturn a, true\n}", "func InitResponse() ValidatorResponse {\n\tvar ve = []ValidationError{}\n\tvr := ValidatorResponse{\n\t\tMessage: \"Valid\",\n\t\tValid: true,\n\t\tErrors: ve}\n\treturn vr\n}", "func (m Message) GetRFQReqID(f *field.RFQReqIDField) quickfix.MessageRejectError {\n\treturn m.Body.Get(f)\n}", "func (fu *FlowUpdate) AddQuestionIDs(ids ...uuid.UUID) *FlowUpdate {\n\tfu.mutation.AddQuestionIDs(ids...)\n\treturn fu\n}", "func (o *DeleteAPIV2EventsEventSubscriptionIDReader) ReadResponse(response runtime.ClientResponse, consumer runtime.Consumer) (interface{}, error) {\n\tswitch response.Code() {\n\tcase 204:\n\t\tresult := NewDeleteAPIV2EventsEventSubscriptionIDNoContent()\n\t\tif err := result.readResponse(response, consumer, o.formats); err != nil {\n\t\t\treturn nil, err\n\t\t}\n\t\treturn result, nil\n\tcase 403:\n\t\tresult := NewDeleteAPIV2EventsEventSubscriptionIDForbidden()\n\t\tif err := result.readResponse(response, consumer, o.formats); err != nil {\n\t\t\treturn nil, err\n\t\t}\n\t\treturn nil, result\n\tcase 404:\n\t\tresult := NewDeleteAPIV2EventsEventSubscriptionIDNotFound()\n\t\tif err := result.readResponse(response, consumer, o.formats); err != nil {\n\t\t\treturn nil, err\n\t\t}\n\t\treturn nil, result\n\n\tdefault:\n\t\treturn nil, runtime.NewAPIError(\"response status code does not match any response statuses defined for this endpoint in the swagger spec\", response, response.Code())\n\t}\n}", "func (m *DeleteExperimentsResponse) Validate() error {\n\tif m == nil {\n\t\treturn nil\n\t}\n\n\treturn nil\n}", "func (e UpsertEventResponseValidationError) Reason() string { return e.reason }", "func (m *DeleteEmployeeResponse) Validate() error {\n\tif m == nil {\n\t\treturn nil\n\t}\n\n\t// no validation rules for EmpNo\n\n\treturn nil\n}", "func (m *AssessEvidencesResponse) Validate() error {\n\treturn m.validate(false)\n}", "func (m *AssessEvidenceResponse) Validate() error {\n\treturn m.validate(false)\n}", "func (o DatasourceResponseOutput) ResourceID() pulumi.StringOutput {\n\treturn o.ApplyT(func(v DatasourceResponse) string { return v.ResourceID 
}).(pulumi.StringOutput)\n}", "func QuestionPacket(domain string) Packet {\n\treturn Packet{\n\t\tHeader: Header{QDCount: 2},\n\t\tQuestions: []Question{\n\t\t\t{\n\t\t\t\tDomain: domain,\n\t\t\t\tType: AAAA,\n\t\t\t\tClass: IN,\n\t\t\t\tUnicast: false,\n\t\t\t},\n\t\t\t{\n\t\t\t\tDomain: domain,\n\t\t\t\tType: A,\n\t\t\t\tClass: IN,\n\t\t\t\tUnicast: false,\n\t\t\t},\n\t\t},\n\t}\n}", "func QuestionPacket(domain string) Packet {\n\treturn Packet{\n\t\tHeader: Header{QDCount: 2},\n\t\tQuestions: []Question{\n\t\t\t{\n\t\t\t\tDomain: domain,\n\t\t\t\tType: AAAA,\n\t\t\t\tClass: IN,\n\t\t\t\tUnicast: false,\n\t\t\t},\n\t\t\t{\n\t\t\t\tDomain: domain,\n\t\t\t\tType: A,\n\t\t\t\tClass: IN,\n\t\t\t\tUnicast: false,\n\t\t\t},\n\t\t},\n\t}\n}", "func (fc *FileCreate) SetSurveyQuestion(s *SurveyQuestion) *FileCreate {\n\treturn fc.SetSurveyQuestionID(s.ID)\n}", "func (m *RegisterResponse) Validate() error {\n\tif m == nil {\n\t\treturn nil\n\t}\n\n\t// no validation rules for Id\n\n\t// no validation rules for Username\n\n\treturn nil\n}", "func ValidateCspaceEventResponse(body *CspaceEventResponse) (err error) {\n\tif body.ID == nil {\n\t\terr = goa.MergeErrors(err, goa.MissingFieldError(\"id\", \"body\"))\n\t}\n\tif body.Title == nil {\n\t\terr = goa.MergeErrors(err, goa.MissingFieldError(\"title\", \"body\"))\n\t}\n\tif body.Start == nil {\n\t\terr = goa.MergeErrors(err, goa.MissingFieldError(\"start\", \"body\"))\n\t}\n\tif body.Start != nil {\n\t\terr = goa.MergeErrors(err, goa.ValidateFormat(\"body.start\", *body.Start, goa.FormatDateTime))\n\t}\n\tif body.End != nil {\n\t\terr = goa.MergeErrors(err, goa.ValidateFormat(\"body.end\", *body.End, goa.FormatDateTime))\n\t}\n\treturn\n}", "func (r *Response) ID() string { return r.id }", "func (r *Response) ID() string { return r.id }", "func (e UpsertEventRequest_QuestionValidationError) Cause() error { return e.cause }", "func (m *StreamEventsResponse) Validate() error {\n\treturn m.validate(false)\n}" ]
[ "0.63928646", "0.60685307", "0.58770096", "0.5782947", "0.5741466", "0.56034374", "0.5359236", "0.5298844", "0.52514285", "0.52294534", "0.5211384", "0.5205384", "0.5102196", "0.5032585", "0.4945393", "0.4933553", "0.48972437", "0.48681366", "0.485097", "0.48070294", "0.47856054", "0.47665325", "0.47608766", "0.47493634", "0.47386858", "0.46999708", "0.46998054", "0.4684965", "0.467413", "0.467222", "0.46448678", "0.46408984", "0.4628192", "0.45643783", "0.45580018", "0.45555922", "0.45322415", "0.45222414", "0.45179394", "0.45163104", "0.44986653", "0.44742748", "0.44637302", "0.444038", "0.4431321", "0.44221005", "0.4403385", "0.43980336", "0.43958324", "0.43931285", "0.43858394", "0.4384453", "0.43669426", "0.43655798", "0.43617272", "0.43533117", "0.4351297", "0.43457407", "0.4342416", "0.43403625", "0.433155", "0.43280867", "0.4325907", "0.43198404", "0.43197075", "0.43196929", "0.43157342", "0.43125287", "0.43078285", "0.43070453", "0.43039143", "0.430069", "0.4300112", "0.42957056", "0.429438", "0.4288929", "0.42885804", "0.42774093", "0.4275502", "0.42714497", "0.4266772", "0.42618594", "0.42544723", "0.4252293", "0.42447367", "0.42442805", "0.4237812", "0.4234083", "0.42290697", "0.42214835", "0.4221192", "0.42210785", "0.42210785", "0.42187193", "0.42155364", "0.42086563", "0.42079586", "0.42079586", "0.42065448", "0.42021492" ]
0.7600668
0
Field function returns field value.
func (e GetEventByIDResponse_QuestionValidationError) Field() string { return e.field }
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "func GetFieldValue(v interface{}, field string) (r string) {\n\tvar immutable reflect.Value\n\timmutable = GetReflectValue(v)\n\tval := immutable.FieldByName(field)\n\tswitch val.Kind() {\n\tcase reflect.Int64, reflect.Int32, reflect.Int:\n\t\tr = fmt.Sprintf(\"%d\", val.Int())\n\tcase reflect.Float64, reflect.Float32:\n\t\tr = fmt.Sprintf(\"%.2f\", val.Float())\n\tdefault:\n\t\t// process time\n\t\tvi := val.Interface()\n\t\tif vc, ok := vi.(time.Time); ok {\n\t\t\tr = FormatTime(vc)\n\t\t\tbreak\n\t\t}\n\t\tr = fmt.Sprintf(\"%v\", val)\n\t}\n\treturn\n}", "func (f *field) Val() interface{} {\n\treturn f.v\n}", "func (f Fields) ValueForField(fieldName string) string {\n\treturn f.ValueForFieldOfType(fieldName, \"\")\n}", "func (v *ClassValue) field(s *scope, name string) Value {\n\tfield, ok := v.Fields[name]\n\tif !ok {\n\t\tpanic(fmt.Errorf(\"ClassValue %v did not contain field %v\", v.Type().Name(), name))\n\t}\n\treturn field\n}", "func (f *Field) Value() interface{} {\n\treturn f.value.Interface()\n}", "func (f *Fieldx) Value() interface{} {\n\treturn f.value.Interface()\n}", "func (i Item) GetField(name string) interface{} {\n\treturn getField(name, i.Payload)\n}", "func FieldValue(field *InputField) string {\n\treturn field.value\n}", "func (e RanparameterValueValidationError) Field() string { return e.field }", "func (i I)Field(r,c int, value string)string{\n return value\n}", "func (s *StructField) Field(name string) (*StructField, error) {\n\treturn Field(s.Value(), name)\n}", "func (entry *Entry) Field(name string) (value string, err error) {\n\tvalue, ok := entry.fields[name]\n\tif !ok {\n\t\terr = fmt.Errorf(\"field '%v' does not found in record %+v\", name, *entry)\n\t}\n\treturn\n}", "func (m *NodeMutation) Field(name string) (ent.Value, bool) {\n\tswitch name {\n\tcase node.FieldValue:\n\t\treturn m.Value()\n\t}\n\treturn nil, false\n}", "func (u *User) GetFieldValue(field *mapping.StructField) (interface{}, error) {\n\tswitch field.Index[0] {\n\tcase 0: // ID\n\t\treturn u.ID, nil\n\tcase 1: // Name\n\t\treturn u.Name, nil\n\tcase 2: // CreatedAt\n\t\treturn u.CreatedAt, nil\n\tcase 3: // CreatedAtIso\n\t\treturn u.CreatedAtIso, nil\n\tcase 5: // MotherID\n\t\treturn u.MotherID, nil\n\tcase 7: // FatherID\n\t\treturn u.FatherID, nil\n\t}\n\treturn nil, errors.Wrapf(mapping.ErrInvalidModelField, \"provided invalid field: '%s' for given model: User'\", field.Name())\n}", "func (m *NumberTokenMutation) Field(name string) (ent.Value, bool) {\n\tswitch name {\n\tcase numbertoken.FieldValue:\n\t\treturn m.Value()\n\t}\n\treturn nil, false\n}", "func (f *Field) Field(name string) *Field {\n\tfield, ok := f.FieldOk(name)\n\tif !ok {\n\t\tpanic(\"field not found\")\n\t}\n\n\treturn field\n}", "func (e GetInstanceRequestValidationError) Field() string { return e.field }", "func (e RanparameterItemValidationError) Field() string { return e.field }", "func (e ApplicationPubSubValidationError) Field() string { return e.field }", "func (res Result) GetField(fields ...string) interface{} {\n\tif len(fields) == 0 {\n\t\treturn res\n\t}\n\n\treturn res.get(fields)\n}", "func (t *Type) Field(i int) *Field", "func (m *CarRepairrecordMutation) Field(name string) (ent.Value, bool) {\n\tswitch name {\n\tcase carrepairrecord.FieldDatetime:\n\t\treturn m.Datetime()\n\tcase carrepairrecord.FieldRepairdetail:\n\t\treturn m.Repairdetail()\n\tcase carrepairrecord.FieldRepaircost:\n\t\treturn m.Repaircost()\n\tcase carrepairrecord.FieldCarmaintenance:\n\t\treturn m.Carmaintenance()\n\t}\n\treturn 
nil, false\n}", "func (b *box) getFieldValue(x, y int) int {\n\treturn b.values[x+y*3]\n}", "func (e GetEventByIDRequestValidationError) Field() string { return e.field }", "func (msg *Message) Field(fieldName string) *Field {\n\treturn msg.fieldByName[fieldName]\n}", "func (e GetRequestValidationError) Field() string { return e.field }", "func (e GetRequestValidationError) Field() string { return e.field }", "func (e GetInstanceResponseValidationError) Field() string { return e.field }", "func (e BitStringValidationError) Field() string { return e.field }", "func (e GetResponseValidationError) Field() string { return e.field }", "func (e GetApplicationPubSubRequestValidationError) Field() string { return e.field }", "func (e ResultValidationError) Field() string { return e.field }", "func (e GetEventByIDResponseValidationError) Field() string { return e.field }", "func (e RanparameterDefItemValidationError) Field() string { return e.field }", "func (e ArfcnValidationError) Field() string { return e.field }", "func (p *Pet) GetFieldValue(field *mapping.StructField) (interface{}, error) {\n\tswitch field.Index[0] {\n\tcase 0: // ID\n\t\treturn p.ID, nil\n\tcase 1: // Name\n\t\treturn p.Name, nil\n\tcase 3: // OwnerID\n\t\treturn p.OwnerID, nil\n\t}\n\treturn nil, errors.Wrapf(mapping.ErrInvalidModelField, \"provided invalid field: '%s' for given model: Pet'\", field.Name())\n}", "func (e RanparameterIdValidationError) Field() string { return e.field }", "func (e RetrieveResponseValidationError) Field() string { return e.field }", "func (m *RepairinvoiceMutation) Field(name string) (ent.Value, bool) {\n\tswitch name {\n\tcase repairinvoice.FieldBequipment:\n\t\treturn m.Bequipment()\n\tcase repairinvoice.FieldEmtell:\n\t\treturn m.Emtell()\n\tcase repairinvoice.FieldNum:\n\t\treturn m.Num()\n\t}\n\treturn nil, false\n}", "func (m *CleaningroomMutation) Field(name string) (ent.Value, bool) {\n\tswitch name {\n\tcase cleaningroom.FieldNote:\n\t\treturn m.Note()\n\tcase cleaningroom.FieldDateandstarttime:\n\t\treturn m.Dateandstarttime()\n\tcase cleaningroom.FieldPhonenumber:\n\t\treturn m.Phonenumber()\n\tcase cleaningroom.FieldNumofem:\n\t\treturn m.Numofem()\n\t}\n\treturn nil, false\n}", "func Field(name, from, reference string) (string, error) {\n\treturn makeRequest(\"field\", name, from, reference)\n}", "func (e GetMovableObjectRequestValidationError) Field() string { return e.field }", "func (e ResolveResponseValidationError) Field() string { return e.field }", "func (e PublishResponseValidationError) Field() string { return e.field }", "func (e GetMessageRequestValidationError) Field() string { return e.field }", "func (e GetMessageResponseValidationError) Field() string { return e.field }", "func (m *RepairinvoiceMutation) Field(name string) (ent.Value, bool) {\n\tswitch name {\n\tcase repairinvoice.FieldSymptomid:\n\t\treturn m.Symptomid()\n\tcase repairinvoice.FieldDeviceid:\n\t\treturn m.Deviceid()\n\tcase repairinvoice.FieldUserid:\n\t\treturn m.Userid()\n\tcase repairinvoice.FieldStatusrepairid:\n\t\treturn m.Statusrepairid()\n\t}\n\treturn nil, false\n}", "func (e SimpleRequestValidationError) Field() string { return e.field }", "func (e CacheValidationError) Field() string { return e.field }", "func (e PciValidationError) Field() string { return e.field }", "func (e ChannelPayRequestValidationError) Field() string { return e.field }", "func (e GetMovableObjectResponseValidationError) Field() string { return e.field }", "func (e RetrieveRequestValidationError) Field() string { 
return e.field }", "func (m *ExchangeMutation) Field(name string) (ent.Value, bool) {\n\tswitch name {\n\tcase exchange.FieldCode:\n\t\treturn m.Code()\n\tcase exchange.FieldName:\n\t\treturn m.Name()\n\t}\n\treturn nil, false\n}", "func (e PublishRequestValidationError) Field() string { return e.field }", "func (m *PetruleMutation) Field(name string) (ent.Value, bool) {\n\tswitch name {\n\tcase petrule.FieldPetrule:\n\t\treturn m.Petrule()\n\t}\n\treturn nil, false\n}", "func (e GitopsCFValidationError) Field() string { return e.field }", "func (e SimpleResponseValidationError) Field() string { return e.field }", "func (e ChannelPayResponseValidationError) Field() string { return e.field }", "func (f *Field) Get(l *Location) (string, error) {\n\tif l.Comp == -1 {\n\t\treturn string(f.Value), nil\n\t}\n\tcomp, err := f.Component(l.Comp)\n\tif err != nil {\n\t\treturn \"\", err\n\t}\n\treturn comp.Get(l)\n}", "func (m *RepairingMutation) Field(name string) (ent.Value, bool) {\n\tswitch name {\n\tcase repairing.FieldRepairpart:\n\t\treturn m.Repairpart()\n\t}\n\treturn nil, false\n}", "func (e RanfunctionNameValidationError) Field() string { return e.field }", "func (m *BillMutation) Field(name string) (ent.Value, bool) {\n\tswitch name {\n\tcase bill.FieldPrice:\n\t\treturn m.Price()\n\tcase bill.FieldTime:\n\t\treturn m.Time()\n\t}\n\treturn nil, false\n}", "func (m *EventRSVPMutation) Field(name string) (ent.Value, bool) {\n\treturn nil, false\n}", "func Field(v interface{}, name string) (*Fieldx, bool) {\n\treturn New(v).Field(name)\n}", "func (e GetStreamRequestValidationError) Field() string { return e.field }", "func (e RdsValidationError) Field() string { return e.field }", "func (f *TagField) Value() string {\n\treturn f.value\n}", "func (m *LeaseMutation) Field(name string) (ent.Value, bool) {\n\tswitch name {\n\tcase lease.FieldAddedtime:\n\t\treturn m.Addedtime()\n\tcase lease.FieldTenant:\n\t\treturn m.Tenant()\n\tcase lease.FieldNumbtenant:\n\t\treturn m.Numbtenant()\n\tcase lease.FieldIdtenant:\n\t\treturn m.Idtenant()\n\tcase lease.FieldAgetenant:\n\t\treturn m.Agetenant()\n\t}\n\treturn nil, false\n}", "func (e RetrieveCurrentRequestValidationError) Field() string { return e.field }", "func (fn AdapterFunc) Field(fieldpath []string) (string, bool) {\n\treturn fn(fieldpath)\n}", "func (e EarfcnValidationError) Field() string { return e.field }", "func (e Response_DataValidationError) Field() string { return e.field }", "func (e ScopedRdsValidationError) Field() string { return e.field }", "func (e ResolveRequestValidationError) Field() string { return e.field }", "func (e PaymentInputValidationError) Field() string { return e.field }", "func (m *PatientrecordMutation) Field(name string) (ent.Value, bool) {\n\tswitch name {\n\tcase patientrecord.FieldName:\n\t\treturn m.Name()\n\t}\n\treturn nil, false\n}", "func (e BatchGetResponseValidationError) Field() string { return e.field }", "func (i *Item) GetValue(field string) string {\n\tif i == nil || len(i.Fields) == 0 {\n\t\treturn \"\"\n\t}\n\n\tsectionFilter := false\n\tsectionLabel := \"\"\n\tfieldLabel := field\n\tif strings.Contains(field, \".\") {\n\t\tparts := strings.Split(field, \".\")\n\n\t\t// Test to make sure the . 
isn't the last character\n\t\tif len(parts) == 2 {\n\t\t\tsectionFilter = true\n\t\t\tsectionLabel = parts[0]\n\t\t\tfieldLabel = parts[1]\n\t\t}\n\t}\n\n\tfor _, f := range i.Fields {\n\t\tif sectionFilter {\n\t\t\tif f.Section != nil {\n\t\t\t\tif sectionLabel != i.SectionLabelForID(f.Section.ID) {\n\t\t\t\t\tcontinue\n\t\t\t\t}\n\t\t\t}\n\t\t}\n\n\t\tif fieldLabel == f.Label {\n\t\t\treturn f.Value\n\t\t}\n\t}\n\n\treturn \"\"\n}", "func (m *RoomInfoMutation) Field(name string) (ent.Value, bool) {\n\tswitch name {\n\tcase roominfo.FieldInfo:\n\t\treturn m.Info()\n\t}\n\treturn nil, false\n}", "func (m *TokenMutation) Field(name string) (ent.Value, bool) {\n\tswitch name {\n\tcase token.FieldCreatedAt:\n\t\treturn m.CreatedAt()\n\tcase token.FieldUpdatedAt:\n\t\treturn m.UpdatedAt()\n\tcase token.FieldValue:\n\t\treturn m.Value()\n\t}\n\treturn nil, false\n}", "func (m *ResourceMutation) Field(name string) (ent.Value, bool) {\n\tswitch name {\n\tcase resource.FieldName:\n\t\treturn m.Name()\n\tcase resource.FieldType:\n\t\treturn m.GetType()\n\t}\n\treturn nil, false\n}", "func (e MovableObjectValidationError) Field() string { return e.field }", "func (m *CardMutation) Field(name string) (ent.Value, bool) {\n\tswitch name {\n\tcase card.FieldCreateTime:\n\t\treturn m.CreateTime()\n\tcase card.FieldUpdateTime:\n\t\treturn m.UpdateTime()\n\tcase card.FieldSuit:\n\t\treturn m.Suit()\n\tcase card.FieldValue:\n\t\treturn m.Value()\n\t}\n\treturn nil, false\n}", "func (m *EventMutation) Field(name string) (ent.Value, bool) {\n\tswitch name {\n\tcase event.FieldName:\n\t\treturn m.Name()\n\t}\n\treturn nil, false\n}", "func (m *BillMutation) Field(name string) (ent.Value, bool) {\n\tswitch name {\n\tcase bill.FieldQuantity:\n\t\treturn m.Quantity()\n\tcase bill.FieldAddedTime:\n\t\treturn m.AddedTime()\n\t}\n\treturn nil, false\n}", "func (m *StreetMutation) Field(name string) (ent.Value, bool) {\n\tswitch name {\n\tcase street.FieldName:\n\t\treturn m.Name()\n\t}\n\treturn nil, false\n}", "func (m *LengthtimeMutation) Field(name string) (ent.Value, bool) {\n\tswitch name {\n\tcase lengthtime.FieldLengthtime:\n\t\treturn m.Lengthtime()\n\t}\n\treturn nil, false\n}", "func (e AssessmentResultValidationError) Field() string { return e.field }", "func (s UserSet) FieldGet(field models.FieldName) *models.FieldInfo {\n\tres := s.Collection().Call(\"FieldGet\", field)\n\tresTyped, _ := res.(*models.FieldInfo)\n\treturn resTyped\n}", "func (e GetUserResponseValidationError) Field() string { return e.field }", "func (m *PatientrightsMutation) Field(name string) (ent.Value, bool) {\n\tswitch name {\n\tcase patientrights.FieldPermissionDate:\n\t\treturn m.PermissionDate()\n\t}\n\treturn nil, false\n}", "func (e GetStreamResponseValidationError) Field() string { return e.field }", "func (m *EquipmentrentalMutation) Field(name string) (ent.Value, bool) {\n\tswitch name {\n\tcase equipmentrental.FieldRENTALAMOUNT:\n\t\treturn m.RENTALAMOUNT()\n\tcase equipmentrental.FieldRENTALDATE:\n\t\treturn m.RENTALDATE()\n\tcase equipmentrental.FieldRETURNDATE:\n\t\treturn m.RETURNDATE()\n\t}\n\treturn nil, false\n}", "func (f *FieldHandler) Value(initZero bool) reflect.Value {\n\treturn f.field.reflectValueGetter(f.expr.ptr, initZero)\n}", "func (m *PurposeMutation) Field(name string) (ent.Value, bool) {\n\tswitch name {\n\tcase purpose.FieldObjective:\n\t\treturn m.Objective()\n\t}\n\treturn nil, false\n}", "func (e ApplicationPubSubsValidationError) Field() string { return e.field }", "func (f Unstructured) 
Field(field string) Fragment {\n\tif f.fields != nil {\n\t\treturn f.fields[field]\n\t}\n\treturn nil\n}", "func (e BodyResponseValidationError) Field() string { return e.field }", "func (m *CarMutation) Field(name string) (ent.Value, bool) {\n\tswitch name {\n\tcase car.FieldCreateTime:\n\t\treturn m.CreateTime()\n\tcase car.FieldUpdateTime:\n\t\treturn m.UpdateTime()\n\tcase car.FieldModel:\n\t\treturn m.Model()\n\tcase car.FieldRegisteredAt:\n\t\treturn m.RegisteredAt()\n\t}\n\treturn nil, false\n}", "func (m *CardMutation) Field(name string) (ent.Value, bool) {\n\tswitch name {\n\tcase card.FieldNumber:\n\t\treturn m.Number()\n\tcase card.FieldName:\n\t\treturn m.Name()\n\tcase card.FieldOwnerID:\n\t\treturn m.OwnerID()\n\t}\n\treturn nil, false\n}" ]
[ "0.71079886", "0.705458", "0.70306563", "0.70252305", "0.6945119", "0.69039124", "0.689789", "0.68854237", "0.68611896", "0.68137765", "0.6811531", "0.67632294", "0.6716657", "0.67018616", "0.66822076", "0.6671346", "0.66659707", "0.6661343", "0.66608155", "0.6660421", "0.665608", "0.6647752", "0.66360617", "0.6625801", "0.6617159", "0.66153616", "0.66153616", "0.661111", "0.6608895", "0.66083837", "0.6604208", "0.66008335", "0.65927887", "0.6587402", "0.65803015", "0.65671533", "0.6567071", "0.6564914", "0.65632343", "0.65630984", "0.654184", "0.6536053", "0.6530546", "0.6530526", "0.6528864", "0.65260595", "0.65179527", "0.6516745", "0.6516154", "0.6510159", "0.6510078", "0.65042776", "0.6501439", "0.6499975", "0.64988506", "0.649665", "0.6496221", "0.64947623", "0.649354", "0.6489089", "0.6488793", "0.64882225", "0.64859617", "0.6483642", "0.6479889", "0.64790434", "0.6472379", "0.6465228", "0.6459204", "0.6457627", "0.6452723", "0.64507645", "0.64495903", "0.64487314", "0.6448028", "0.64479464", "0.64474", "0.64456683", "0.64455897", "0.6444573", "0.64437336", "0.6443306", "0.6441888", "0.6441613", "0.6441039", "0.6439085", "0.6438874", "0.6434375", "0.64315784", "0.6430702", "0.6429934", "0.64209116", "0.6417538", "0.64174324", "0.6417134", "0.6411201", "0.64086837", "0.6406251", "0.6405251", "0.6404929", "0.64009386" ]
0.0
-1
Reason function returns reason value.
func (e GetEventByIDResponse_QuestionValidationError) Reason() string { return e.reason }
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "func GetReason(from Getter, t string) string {\n\tif c := Get(from, t); c != nil {\n\t\treturn c.Reason\n\t}\n\treturn \"\"\n}", "func GetReason(message report.IMessage) int32 {\n\tswitch message.MessageType() {\n\tcase \"+RSP\", \"+BSP\":\n\t\treturn getLocationReason(message)\n\tcase \"+EVT\", \"+BVT\":\n\t\treturn getEventCode(message)\n\n\t}\n\treturn int32(6)\n}", "func GetReason(message report.IMessage) int32 {\n\tswitch message.MessageType() {\n\tcase \"+RSP\", \"+BSP\":\n\t\treturn getLocationReason(message)\n\tcase \"+EVT\", \"+BVT\":\n\t\treturn getEventCode(message)\n\n\t}\n\treturn int32(6)\n}", "func (b *Base) GetReason() string {\n\treturn b.Reason\n}", "func (o ValidationOutput) Reason() pulumi.StringPtrOutput {\n\treturn o.ApplyT(func(v Validation) *string { return v.Reason }).(pulumi.StringPtrOutput)\n}", "func (s *Subscription) GetReason() string {\n\tif s == nil || s.Reason == nil {\n\t\treturn \"\"\n\t}\n\treturn *s.Reason\n}", "func GetReason(message report.IMessage) int32 {\n\tidt, found := message.GetValue(fields.DeviceType)\n\tif !found {\n\t\treturn 6 //periodical\n\t}\n\n\tdeviceType, valid := idt.(byte)\n\tif !valid {\n\t\treturn 6 //periodical\n\t}\n\n\tswitch deviceType {\n\tcase devicetypes.GV320:\n\t\treturn gv300.GetReason(message)\n\n\tcase devicetypes.GV55, devicetypes.GV55N:\n\t\treturn gv55.GetReason(message)\n\n\tcase devicetypes.GV55Lite, devicetypes.GV55NLite:\n\t\treturn gv55.GetReasonLite(message)\n\n\tcase devicetypes.GV75, devicetypes.GV75W:\n\t\treturn gv75.GetReason(message)\n\n\tcase devicetypes.GV55W:\n\t\treturn gv55w.GetReason(message)\n\n\tcase devicetypes.GV600W:\n\t\treturn gv600.GetReason(message)\n\tcase devicetypes.GV300W:\n\t\treturn gv300w.GetReason(message)\n\tdefault:\n\t\treturn gv55.GetReason(message)\n\t}\n}", "func (e MessageDValidationError) Reason() string { return e.reason }", "func (o LienOutput) Reason() pulumi.StringOutput {\n\treturn o.ApplyT(func(v *Lien) pulumi.StringOutput { return v.Reason }).(pulumi.StringOutput)\n}", "func (e BitStringValidationError) Reason() string { return e.reason }", "func (o JobConditionOutput) Reason() pulumi.StringPtrOutput {\n\treturn o.ApplyT(func(v JobCondition) *string { return v.Reason }).(pulumi.StringPtrOutput)\n}", "func Reason(v string) predicate.ProfileUKM {\n\treturn predicate.ProfileUKM(func(s *sql.Selector) {\n\t\ts.Where(sql.EQ(s.C(FieldReason), v))\n\t})\n}", "func (e MessageFValidationError) Reason() string { return e.reason }", "func (o ValidationPatchOutput) Reason() pulumi.StringPtrOutput {\n\treturn o.ApplyT(func(v ValidationPatch) *string { return v.Reason }).(pulumi.StringPtrOutput)\n}", "func (e ActiveHealthCheckValidationError) Reason() string { return e.reason }", "func (o *SecurityProblemEvent) GetReason() string {\n\tif o == nil || o.Reason == nil {\n\t\tvar ret string\n\t\treturn ret\n\t}\n\treturn *o.Reason\n}", "func (e EutracgiValidationError) Reason() string { return e.reason }", "func (resp *Response) Reason() string {\n\treturn resp.Status\n}", "func (n *Notification) GetReason() string {\n\tif n == nil || n.Reason == nil {\n\t\treturn \"\"\n\t}\n\treturn *n.Reason\n}", "func (s *SessionTrackerV1) GetReason() string {\n\treturn s.Spec.Reason\n}", "func (e MessageEValidationError) Reason() string { return e.reason }", "func (e RequirementRuleValidationError) Reason() string { return e.reason }", "func Reason(err error) string {\n\tif err == nil {\n\t\treturn \"\"\n\t}\n\tif reasoner, ok := err.(Reasoner); ok {\n\t\treturn reasoner.Reason()\n\t}\n\treturn 
\"\"\n}", "func (o MachineInstanceStatusConditionsOutput) Reason() pulumi.StringOutput {\n\treturn o.ApplyT(func(v MachineInstanceStatusConditions) string { return v.Reason }).(pulumi.StringOutput)\n}", "func (e NrtValidationError) Reason() string { return e.reason }", "func (o BuildStatusOutput) Reason() pulumi.StringPtrOutput {\n\treturn o.ApplyT(func(v BuildStatus) *string { return v.Reason }).(pulumi.StringPtrOutput)\n}", "func (e GetMessageResponseValidationError) Reason() string { return e.reason }", "func (o ApplicationStatusConditionsOutput) Reason() pulumi.StringOutput {\n\treturn o.ApplyT(func(v ApplicationStatusConditions) string { return v.Reason }).(pulumi.StringOutput)\n}", "func (e PassiveHealthCheckValidationError) Reason() string { return e.reason }", "func (e CardValidationError) Reason() string { return e.reason }", "func (e StatsdValidationError) Reason() string { return e.reason }", "func (e PciValidationError) Reason() string { return e.reason }", "func (o ApplicationStatusWorkflowStepsOutput) Reason() pulumi.StringPtrOutput {\n\treturn o.ApplyT(func(v ApplicationStatusWorkflowSteps) *string { return v.Reason }).(pulumi.StringPtrOutput)\n}", "func (o *AccessRequestData) GetReason() string {\n\tif o == nil || o.Reason == nil {\n\t\tvar ret string\n\t\treturn ret\n\t}\n\treturn *o.Reason\n}", "func (e LanguageValidationError) Reason() string { return e.reason }", "func (e CreditValidationError) Reason() string { return e.reason }", "func (e PaymentValidationError) Reason() string { return e.reason }", "func (e ResponseValidationError) Reason() string { return e.reason }", "func (e RdsValidationError) Reason() string { return e.reason }", "func (e CardHolderValidationError) Reason() string { return e.reason }", "func (e ActionValidationError) Reason() string { return e.reason }", "func (e SimpleResponseValidationError) Reason() string { return e.reason }", "func (e StatusResponseValidationError) Reason() string { return e.reason }", "func (o *V0037Node) GetReason() string {\n\tif o == nil || o.Reason == nil {\n\t\tvar ret string\n\t\treturn ret\n\t}\n\treturn *o.Reason\n}", "func (e ChannelPayRequestValidationError) Reason() string { return e.reason }", "func (e ChannelPayResponseValidationError) Reason() string { return e.reason }", "func (e RicControlMessagePriorityValidationError) Reason() string { return e.reason }", "func (e MaxPciValidationError) Reason() string { return e.reason }", "func (e LivenessResponseValidationError) Reason() string { return e.reason }", "func (e MaxPlmnValidationError) Reason() string { return e.reason }", "func (e SimpleRequestValidationError) Reason() string { return e.reason }", "func (e MessageCValidationError) Reason() string { return e.reason }", "func (se *StatusError) Reason() string {\n\treturn se.message\n}", "func (o *DeploymentsCondition) GetReason() string {\n\tif o == nil || o.Reason == nil {\n\t\tvar ret string\n\t\treturn ret\n\t}\n\treturn *o.Reason\n}", "func (e SkillValidationError) Reason() string { return e.reason }", "func (e GetDisscusRespValidationError) Reason() string { return e.reason }", "func (o BuildStatusPtrOutput) Reason() pulumi.StringPtrOutput {\n\treturn o.ApplyT(func(v *BuildStatus) *string {\n\t\tif v == nil {\n\t\t\treturn nil\n\t\t}\n\t\treturn v.Reason\n\t}).(pulumi.StringPtrOutput)\n}", "func (c *ContainerStatusResolver) Reason() *string {\n\treturn c.reason\n}", "func (e EarfcnValidationError) Reason() string { return e.reason }", "func (e CalculateComplianceRequestValidationError) Reason() string 
{ return e.reason }", "func (_this *CrashReportBody) Reason() *string {\n\tvar ret *string\n\tvalue := _this.Value_JS.Get(\"reason\")\n\tif value.Type() != js.TypeNull && value.Type() != js.TypeUndefined {\n\t\t__tmp := (value).String()\n\t\tret = &__tmp\n\t}\n\treturn ret\n}", "func (e HealthCheck_PayloadValidationError) Reason() string { return e.reason }", "func (e RetrieveMyCardsResponseValidationError) Reason() string { return e.reason }", "func (e CommonResponseValidationError) Reason() string { return e.reason }", "func (e GetMessageRequestValidationError) Reason() string { return e.reason }", "func (o StorageClusterStatusConditionsOutput) Reason() pulumi.StringPtrOutput {\n\treturn o.ApplyT(func(v StorageClusterStatusConditions) *string { return v.Reason }).(pulumi.StringPtrOutput)\n}", "func (e StateMachineResponseValidationError) Reason() string { return e.reason }", "func (e ArfcnValidationError) Reason() string { return e.reason }", "func (e NetworkPolicyValidationError) Reason() string { return e.reason }", "func (o *DataPlaneClusterUpdateStatusRequestConditions) GetReason() string {\n\tif o == nil || o.Reason == nil {\n\t\tvar ret string\n\t\treturn ret\n\t}\n\treturn *o.Reason\n}", "func (e MetricValidationError) Reason() string { return e.reason }", "func (o BuildRunStatusOutput) Reason() pulumi.StringPtrOutput {\n\treturn o.ApplyT(func(v BuildRunStatus) *string { return v.Reason }).(pulumi.StringPtrOutput)\n}", "func (e RecoverableError) Reason() string {\n\treturn e.reason\n}", "func (e MaxofMessageProtocolTestsValidationError) Reason() string { return e.reason }", "func (e ChannelNotifyResponseValidationError) Reason() string { return e.reason }", "func (e ResultValidationError) Reason() string { return e.reason }", "func (e TestSpecificationValidationError) Reason() string { return e.reason }", "func (e NonRecoverableError) Reason() string {\n\treturn e.reason\n}", "func (o JobStatusErrorOutput) Reason() pulumi.StringPtrOutput {\n\treturn o.ApplyT(func(v JobStatusError) *string { return v.Reason }).(pulumi.StringPtrOutput)\n}", "func (a Acknowledgement) Reason() error {\n\tswitch {\n\tcase a.State == ACK:\n\t\treturn nil\n\tcase a.State == NACK:\n\t\treturn errors.New(string(a.Message))\n\tdefault:\n\t\treturn errors.New(\"unknown acknowledgement status\")\n\t}\n}", "func (e UpdateMessageResponseValidationError) Reason() string { return e.reason }", "func (e WordValidationError) Reason() string { return e.reason }", "func (e GetDisscusReqValidationError) Reason() string { return e.reason }", "func (e CreatMessageResponseValidationError) Reason() string { return e.reason }", "func (o HorizontalPodAutoscalerConditionPatchOutput) Reason() pulumi.StringPtrOutput {\n\treturn o.ApplyT(func(v HorizontalPodAutoscalerConditionPatch) *string { return v.Reason }).(pulumi.StringPtrOutput)\n}", "func (o HorizontalPodAutoscalerConditionPatchOutput) Reason() pulumi.StringPtrOutput {\n\treturn o.ApplyT(func(v HorizontalPodAutoscalerConditionPatch) *string { return v.Reason }).(pulumi.StringPtrOutput)\n}", "func (e MetricImplementationValidationError) Reason() string { return e.reason }", "func (e CiliumCFValidationError) Reason() string { return e.reason }", "func (e FilterStateRuleValidationError) Reason() string { return e.reason }", "func (e CreateDisscusRespValidationError) Reason() string { return e.reason }", "func (e HealthCheckValidationError) Reason() string { return e.reason }", "func (e HealthCheckValidationError) Reason() string { return e.reason }", "func (e 
TwoOneofsValidationError) Reason() string { return e.reason }", "func (e AdminValidationError) Reason() string { return e.reason }", "func (o HorizontalPodAutoscalerConditionOutput) Reason() pulumi.StringPtrOutput {\n\treturn o.ApplyT(func(v HorizontalPodAutoscalerCondition) *string { return v.Reason }).(pulumi.StringPtrOutput)\n}", "func (o HorizontalPodAutoscalerConditionOutput) Reason() pulumi.StringPtrOutput {\n\treturn o.ApplyT(func(v HorizontalPodAutoscalerCondition) *string { return v.Reason }).(pulumi.StringPtrOutput)\n}", "func (e LivenessRequestValidationError) Reason() string { return e.reason }", "func (r *ReportStoryRequest) GetReason() (value ReportReasonClass) {\n\tif r == nil {\n\t\treturn\n\t}\n\treturn r.Reason\n}", "func (e AssessmentResultValidationError) Reason() string { return e.reason }", "func (e L7NetworkPolicyRuleValidationError) Reason() string { return e.reason }", "func (e NrarfcnValidationError) Reason() string { return e.reason }" ]
[ "0.78512263", "0.7759013", "0.7759013", "0.758723", "0.74332446", "0.74091107", "0.740494", "0.73673135", "0.73432285", "0.7330937", "0.7329657", "0.73138005", "0.72980094", "0.7293151", "0.72837216", "0.7275913", "0.7252345", "0.7230593", "0.72234565", "0.7222608", "0.7196587", "0.7186926", "0.7177811", "0.71720684", "0.71702856", "0.7168882", "0.7168033", "0.71623784", "0.7160162", "0.7157901", "0.7156796", "0.71499187", "0.71483266", "0.71435404", "0.7138927", "0.7134093", "0.7131485", "0.71212435", "0.7113703", "0.71134007", "0.7110416", "0.71102226", "0.71073544", "0.71044487", "0.7097571", "0.709562", "0.70931906", "0.7092116", "0.7085098", "0.70789874", "0.7077606", "0.707535", "0.7071573", "0.706842", "0.7067343", "0.70658314", "0.7065663", "0.70604813", "0.70554", "0.70413375", "0.7038985", "0.7036392", "0.70291436", "0.70268923", "0.7026706", "0.70261866", "0.7018986", "0.7011388", "0.70111495", "0.7009085", "0.7005406", "0.70025146", "0.7000965", "0.69991565", "0.6995616", "0.6992607", "0.6992276", "0.69910586", "0.6989737", "0.69873315", "0.6984515", "0.6983248", "0.6979003", "0.6976954", "0.69759", "0.69759", "0.6974406", "0.69741553", "0.6972589", "0.69723344", "0.69695055", "0.69695055", "0.69690573", "0.69686645", "0.69659555", "0.69659555", "0.69656986", "0.69630307", "0.69612694", "0.69515", "0.69511986" ]
0.0
-1
Cause function returns cause value.
func (e GetEventByIDResponse_QuestionValidationError) Cause() error { return e.cause }
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "func Cause(err error) error {\n\tswitch err.(type) {\n\tcase Causable:\n\t\treturn err.(Causable).Cause()\n\t}\n\treturn nil\n}", "func (e errWithCause) Cause() error {\n\treturn e.cause\n}", "func Cause(e error) error {\n\tswitch e := e.(type) {\n\tcase *wrap:\n\t\treturn e.Cause()\n\tcase UserError:\n\t\treturn e.Cause()\n\tdefault:\n\t\treturn e\n\t}\n}", "func (e *Error) Cause() error {\n\treturn e.Unwrap()\n}", "func (e *wrap) Cause() error {\n\treturn e.cause\n}", "func Cause(err error) error {\n\treturn errors.Cause(err)\n}", "func Cause(err error) error {\n\treturn errors.Cause(err)\n}", "func Cause(err error) error {\n\treturn errors.Cause(err)\n}", "func Cause(err error) error {\n\treturn errors.Cause(err)\n}", "func Cause(err error) error {\n\treturn errors.Cause(err)\n}", "func Cause(err error) error {\n\treturn errors.Cause(err)\n}", "func Cause(err error) error {\n\tif err == nil {\n\t\treturn nil\n\t}\n\tif e, ok := err.(iCause); ok {\n\t\treturn e.Cause()\n\t}\n\tif e, ok := err.(iNext); ok {\n\t\treturn Cause(e.Next())\n\t}\n\tif e, ok := err.(iUnwrap); ok {\n\t\treturn Cause(e.Unwrap())\n\t}\n\treturn err\n}", "func (e *Error) Cause() error {\n\treturn e.err\n}", "func (e *errorT) Cause() error {\n\treturn e.err\n}", "func (s *Error) Cause() error {\n\treturn s.underlying\n}", "func (e *Error) Cause() error {\n\treturn e.Err\n}", "func (ec Error) Cause() error {\n\treturn ec.error\n}", "func Cause(err error) error {\n\tif err, ok := err.(*wrappedError); ok {\n\t\treturn err.Cause()\n\t}\n\treturn err\n}", "func (e *Err) Cause() error {\n\treturn e.cause\n}", "func Cause(err error) error {\n\ttype causer interface {\n\t\tCause() error\n\t}\n\n\tfor err != nil {\n\t\te, ok := err.(causer)\n\t\tif !ok {\n\t\t\treturn err\n\t\t}\n\t\terr = e.Cause()\n\t}\n\treturn nil\n}", "func (e Error) Cause() error {\n\treturn e.cause\n}", "func (e *RunError) Cause() error {\n\tif e.Inner != nil {\n\t\treturn e.Inner\n\t}\n\treturn e\n}", "func (e *wrappedError) Cause() error {\n\tif e.previous == nil {\n\t\treturn e\n\t}\n\tswitch err := e.previous.(type) {\n\tcase *wrappedError:\n\t\treturn err.Cause()\n\tdefault:\n\t\treturn err\n\t}\n}", "func Cause(err error) error {\n\tvar (\n\t\tcauser Causer\n\t\tok bool\n\t)\n\tfor err != nil {\n\t\tcauser, ok = err.(Causer)\n\t\tif !ok {\n\t\t\tbreak\n\t\t}\n\t\terr = causer.Cause()\n\t}\n\treturn err\n}", "func (e *OpError) Cause() error {\n\treturn e.Err\n}", "func (err *gooseError) Cause() error {\n\treturn err.cause\n}", "func (e *detailedError) Cause() error {\n\treturn e.cause\n}", "func (err *ExitError) Cause() error {\n\treturn err.Err\n}", "func (ce *ClientError) Cause() error {\n\treturn ce.err\n}", "func Cause(err error) error {\n\tif w, ok := err.(*Wrapped); ok {\n\t\t// if root level error\n\t\tif len(w.Errors) > 0 {\n\t\t\treturn w.Errors[0]\n\t\t}\n\t\t// already extracted error\n\t\treturn w\n\t}\n\treturn err\n}", "func Cause(err error) (error, bool) { // nolint: golint, staticcheck, stylecheck\n\terrWithContext, ok := err.(ContextError)\n\tif !ok {\n\t\treturn nil, false\n\t}\n\treturn errWithContext.Cause(), true\n}", "func Cause(err error) error {\n\tfor err != nil {\n\t\te, ok := err.(causer)\n\t\tif !ok {\n\t\t\tbreak\n\t\t}\n\t\terr = e.Cause()\n\t}\n\treturn err\n}", "func Cause(err error) error {\n\tfor err != nil {\n\t\tcause, ok := err.(causer)\n\t\tif !ok {\n\t\t\tbreak\n\t\t}\n\t\terr = cause.Cause()\n\t}\n\treturn err\n}", "func (e UnencodableValue) Cause() error {\n\treturn e.Err\n}", "func Cause(err error) 
error {\n\ttype wrapper interface {\n\t\tUnwrap() error\n\t}\n\tfor err != nil {\n\t\tcause, ok := err.(wrapper)\n\t\tif !ok {\n\t\t\tbreak\n\t\t}\n\t\terr = cause.Unwrap()\n\t}\n\treturn err\n}", "func (w *pipeError) Cause() error { return errors.Cause(w.error) }", "func (e *opaqueWrapper) Cause() error { return e.cause }", "func (e *opaqueWrapper) Cause() error { return e.cause }", "func (e ResolveRequestValidationError) Cause() error { return e.cause }", "func Cause(err error) error {\n\tfor {\n\t\tuerr := Unwrap(err)\n\t\tif uerr == nil {\n\t\t\treturn err\n\t\t}\n\t\terr = uerr\n\t}\n}", "func Cause(err error) error {\n\tfor {\n\t\tif e, ok := err.(errorCause); ok {\n\t\t\tif cause := e.Cause(); cause != nil {\n\t\t\t\terr = cause\n\t\t\t\tcontinue\n\t\t\t}\n\t\t}\n\t\treturn err\n\t}\n}", "func (e InternalUpstreamTransportValidationError) Cause() error { return e.cause }", "func (e EutracgiValidationError) Cause() error { return e.cause }", "func (w *withCode) Cause() error { return w.cause }", "func (e UpsertEventRequestValidationError) Cause() error { return e.cause }", "func (e PciValidationError) Cause() error { return e.cause }", "func (e NoOneofsValidationError) Cause() error { return e.cause }", "func (e SimpleRequestValidationError) Cause() error { return e.cause }", "func Cause(err error) error {\n\tfor err != nil {\n\t\tmrpErr, ok := err.(Error)\n\t\tif !ok {\n\t\t\tbreak\n\t\t}\n\t\terr = gErrors.Cause(mrpErr.originalError)\n\t}\n\treturn err\n}", "func (e *withDomain) Cause() error { return e.cause }", "func (e LoggingValidationError) Cause() error { return e.cause }", "func (e CiliumCFValidationError) Cause() error { return e.cause }", "func (e AssessmentResultValidationError) Cause() error { return e.cause }", "func (e LoggingCFValidationError) Cause() error { return e.cause }", "func Cause(err error) error {\n\tfor err != nil {\n\t\tunwraped := errors.Unwrap(err)\n\t\tif unwraped == nil {\n\t\t\treturn err\n\t\t}\n\t\terr = unwraped\n\t}\n\treturn err\n}", "func (e NrtValidationError) Cause() error { return e.cause }", "func (e ResolveResponseValidationError) Cause() error { return e.cause }", "func (e StateChangeValidationError) Cause() error { return e.cause }", "func (e SXGValidationError) Cause() error { return e.cause }", "func (e EutracellIdentityValidationError) Cause() error { return e.cause }", "func (e WorkflowComponentValidationError) Cause() error { return e.cause }", "func (e MessageFValidationError) Cause() error { return e.cause }", "func (e EarfcnValidationError) Cause() error { return e.cause }", "func (e ActiveHealthCheckValidationError) Cause() error { return e.cause }", "func Cause(e interface{}) ECode {\n\tif e == nil {\n\t\treturn &ecode{code: 0}\n\t}\n\tif str, ok := e.(string); ok {\n\t\treturn &ecode{code: 500, message: str}\n\t}\n\terr, ok := e.(error)\n\tif !ok {\n\t\treturn &ecode{code: 500, message: reflect.TypeOf(e).Name()}\n\t}\n\tec, ok := errors.Cause(err).(ECode)\n\tif ok {\n\t\treturn ec\n\t}\n\treturn &ecode{code: 500, message: err.Error()}\n}", "func (e UpstreamValidationError) Cause() error { return e.cause }", "func (e UpstreamValidationError) Cause() error { return e.cause }", "func (e TransactionValidationError) Cause() error { return e.cause }", "func (e MessageCValidationError) Cause() error { return e.cause }", "func WithCause(err, cause error) error {\n\treturn errWithCause{\n\t\terror: err,\n\t\tcause: cause,\n\t}\n}", "func (e ActionValidationError) Cause() error { return e.cause }", "func (e 
AssessEvidenceRequestValidationError) Cause() error { return e.cause }", "func (e Upstream_TimeoutValidationError) Cause() error { return e.cause }", "func (e BootstrapValidationError) Cause() error { return e.cause }", "func (e TwoValidOneofsValidationError) Cause() error { return e.cause }", "func (e RdsValidationError) Cause() error { return e.cause }", "func (e MaxPciValidationError) Cause() error { return e.cause }", "func (e AdminValidationError) Cause() error { return e.cause }", "func (e RequirementRuleValidationError) Cause() error { return e.cause }", "func (e ResultValidationError) Cause() error { return e.cause }", "func (e InternalUpstreamTransport_MetadataValueSourceValidationError) Cause() error { return e.cause }", "func (e MaintemplateComponentValidationError) Cause() error { return e.cause }", "func (e RedactedValidationError) Cause() error { return e.cause }", "func (e CreatMessageRequestValidationError) Cause() error { return e.cause }", "func (e NrcgiValidationError) Cause() error { return e.cause }", "func (e UpsertEventResponseValidationError) Cause() error { return e.cause }", "func (e NrarfcnValidationError) Cause() error { return e.cause }", "func (e TwoOneofsValidationError) Cause() error { return e.cause }", "func (e PassiveHealthCheckValidationError) Cause() error { return e.cause }", "func (e MessageEValidationError) Cause() error { return e.cause }", "func (e GetEventByIDRequestValidationError) Cause() error { return e.cause }", "func (e ArfcnValidationError) Cause() error { return e.cause }", "func (e TenantValidationError) Cause() error { return e.cause }", "func (e GetRequestValidationError) Cause() error { return e.cause }", "func (e GetRequestValidationError) Cause() error { return e.cause }", "func (e StateValidationError) Cause() error { return e.cause }", "func (e MinioComponentValidationError) Cause() error { return e.cause }", "func (e LatencyFaultValidationError) Cause() error { return e.cause }", "func (e GetDisscusReqValidationError) Cause() error { return e.cause }", "func (e UpdateTodoRequestValidationError) Cause() error { return e.cause }", "func (e ManifestProjectCFValidationError) Cause() error { return e.cause }" ]
[ "0.8261931", "0.79593104", "0.7896341", "0.7866004", "0.77969515", "0.7710267", "0.7710267", "0.7710267", "0.7710267", "0.7710267", "0.7710267", "0.7710245", "0.76848143", "0.7658625", "0.76571184", "0.7650075", "0.76476574", "0.7625474", "0.7623792", "0.7621357", "0.7582015", "0.74775916", "0.74656785", "0.7424877", "0.7423645", "0.7384076", "0.73215586", "0.7306271", "0.7286286", "0.72688353", "0.7258698", "0.7210708", "0.7192562", "0.7107885", "0.7104621", "0.7038758", "0.701369", "0.701369", "0.69629866", "0.6927608", "0.692207", "0.69208515", "0.68938124", "0.6858123", "0.684976", "0.6846449", "0.6830235", "0.6825922", "0.68016034", "0.6800864", "0.6791525", "0.6778742", "0.67324674", "0.673176", "0.67316306", "0.6729585", "0.67155087", "0.6714904", "0.67148", "0.66955864", "0.668878", "0.66879916", "0.66822165", "0.66821957", "0.66791916", "0.6673011", "0.6673011", "0.6668595", "0.66512465", "0.66507614", "0.66484874", "0.6636346", "0.6633876", "0.66313785", "0.66304046", "0.6622965", "0.66204447", "0.6618046", "0.6617173", "0.66125673", "0.66055393", "0.6603956", "0.66004616", "0.6600119", "0.6587435", "0.6580937", "0.6578089", "0.6569218", "0.656675", "0.65664583", "0.6565433", "0.6560722", "0.65606016", "0.6553194", "0.6553194", "0.65503496", "0.6549731", "0.6546909", "0.6544467", "0.65359867", "0.6531173" ]
0.0
-1
Key function returns key value.
func (e GetEventByIDResponse_QuestionValidationError) Key() bool { return e.key }
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "func (m *KeyValue) GetKey()(*string) {\n return m.key\n}", "func (f binaryEqualsFunc) key() Key {\n\treturn f.k\n}", "func (m *KeyUint) Key() driver.Value { return driver.Value(m.ID) }", "func (m *OMap) Key(n int) string {\n\treturn m.keys[n]\n}", "func (t *Type) Key() *Type", "func (f nullFunc) key() Key {\n\treturn f.k\n}", "func (v Variable) Key() string {\n\treturn (string)(v)\n}", "func (i GinJwtSignAlgorithm) Key() string {\n\tif val, ok := _GinJwtSignAlgorithmValueToKeyMap[i]; ok {\n\t\t// found\n\t\treturn val\n\t} else {\n\t\t// not found\n\t\treturn \"\"\n\t}\n}", "func (g *Generator) GetKey(K string) interface{} {\n\treturn g.data[K]\n}", "func (m *SearchBucket) GetKey()(*string) {\n return m.key\n}", "func (f *Filter) getKey(key string) string {\n\tif f.HashKeys {\n\t\th := sha1.New()\n\t\ts := h.Sum([]byte(key))\n\t\treturn fmt.Sprintf(\"%x\", s)\n\t}\n\treturn key\n}", "func getKey(ing *extensions.Ingress, t *testing.T) string {\n\tkey, err := keyFunc(ing)\n\tif err != nil {\n\t\tt.Fatalf(\"Unexpected error getting key for Ingress %v: %v\", ing.Name, err)\n\t}\n\treturn key\n}", "func (f *field) Key() string {\n\treturn f.k\n}", "func (i GinBindType) Key() string {\n\tif val, ok := _GinBindTypeValueToKeyMap[i]; ok {\n\t\t// found\n\t\treturn val\n\t} else {\n\t\t// not found\n\t\treturn \"\"\n\t}\n}", "func (c Node) GetKey() string {\n\treturn c.key\n}", "func (m *RegistryKeyState) GetKey()(*string) {\n return m.key\n}", "func (akv StringKeyValue) Key() string {\n\treturn akv.orig.Key\n}", "func (a AddItem) Key() string { return string(a) }", "func (area *MineArea) GetKey() string {\n\treturn GetKey(area.X, area.Y)\n}", "func (d *Disk) getKey(p *DiskParams) []byte {\n\treturn []byte(time_util.TimeToName(time.Unix(p.ExicutionTime, 0), fmt.Sprintf(\"%x\", d.hasher.Sum(nil))))\n}", "func (e *OrderedMapElement[K, V]) Key() K {\n\treturn e.element.key\n}", "func getKey(cluster *clusteroperator.Cluster, t *testing.T) string {\n\tif key, err := controller.KeyFunc(cluster); err != nil {\n\t\tt.Errorf(\"Unexpected error getting key for Cluster %v: %v\", cluster.Name, err)\n\t\treturn \"\"\n\t} else {\n\t\treturn key\n\t}\n}", "func cacheKeyFunc(obj interface{}) (string, error) {\n\tkey := obj.(*cacheEntry).key\n\treturn key, nil\n}", "func (node *Node) Key() interface{} {\n\treturn fmt.Sprintf(\"%v\", node.contents)\n}", "func (s *Mem) Key(key interface{}) string {\n\treturn fmt.Sprintf(\"%v-%v\", s.prefix, key)\n}", "func (vrfs *VRFShare) GetKey() datastore.Key {\n\treturn datastore.ToKey(fmt.Sprintf(\"%v\", vrfs.Round))\n}", "func stringKeyFunc(obj interface{}) (string, error) {\n\tkey := obj.(*nodeidentity.Info).InstanceID\n\treturn key, nil\n}", "func (e Enum) GetKey(value any) string {\n\tfor k, v := range e {\n\t\tif reflect.DeepEqual(v, value) {\n\t\t\treturn k\n\t\t}\n\t}\n\treturn \"\"\n}", "func (m *Map) Key() Type { return m.key }", "func getKey(w http.ResponseWriter, ps httprouter.Params) (string, bool){\n\treturn ps.ByName(\"id\"), true\n}", "func (v *Value) GetKey() *string {\n\tret := C.zj_GetKey(v.V)\n\tif ret == nil {\n\t\treturn nil\n\t}\n\tretStr := C.GoString(ret)\n\treturn &retStr\n}", "func (f *Factor) Key() string { return f.ID }", "func (c *KeyValueChanger) Key() (string, error) {\n\tif c.err != nil {\n\t\treturn \"\", c.err\n\t}\n\treturn c.node.content.key().(string), nil\n}", "func (a DataNodeKV) Key() string {\n\treturn a.K\n}", "func GetKey(allkeys [][]byte, loc Where) []byte {\n\tif loc == Left {\n\t\treturn allkeys[0]\n\t}\n\tif loc == Right 
{\n\t\treturn allkeys[len(allkeys)-1]\n\t}\n\t// select a random index between 1 and allkeys-2\n\t// nolint:gosec\n\tidx := rand.Int()%(len(allkeys)-2) + 1\n\treturn allkeys[idx]\n}", "func KeyFunc(name, namespace string) string {\n\tif len(namespace) == 0 {\n\t\treturn name\n\t}\n\treturn namespace + \"/\" + name\n}", "func (it *Iterator) Key() string { return it.n.k }", "func (s *session) getKey() string {\n\treturn s.uuid\n}", "func (o SchedulingNodeAffinityOutput) Key() pulumi.StringPtrOutput {\n\treturn o.ApplyT(func(v SchedulingNodeAffinity) *string { return v.Key }).(pulumi.StringPtrOutput)\n}", "func (i SNSProtocol) Key() string {\n\tif val, ok := _SNSProtocolValueToKeyMap[i]; ok {\n\t\t// found\n\t\treturn val\n\t} else {\n\t\t// not found\n\t\treturn \"\"\n\t}\n}", "func (it *Iterator) Key() interface{} { return it.n.k }", "func getkey(key ...interface{}) interface{} {\n\tif len(key) > 0 {\n\t\treturn key[0]\n\t}\n\n\treturn nil\n}", "func (i SNSSubscribeAttribute) Key() string {\n\tif val, ok := _SNSSubscribeAttributeValueToKeyMap[i]; ok {\n\t\t// found\n\t\treturn val\n\t} else {\n\t\t// not found\n\t\treturn \"\"\n\t}\n}", "func (it *iterator) Key() []byte {\n\tif len(it.keys) > 0 {\n\t\treturn []byte(it.keys[0])\n\t}\n\treturn nil\n}", "func (this *DefaultHandler) GetKey(xesRedis redo.XesRedisBase) (ret string) {\n\tdefer func() {\n\t\tif xesRedis.GetCtx() == nil {\n\t\t\treturn\n\t\t}\n\t\tbench := xesRedis.GetCtx().Value(\"IS_BENCHMARK\")\n\t\tif cast.ToString(bench) == \"1\" {\n\t\t\tret = \"benchmark_\" + ret\n\t\t}\n\t}()\n\n\tkeyInfo := this.getKeyInfo(xesRedis)\n\tkey := cast.ToString(keyInfo[\"key\"])\n\tif key == \"\" {\n\t\tret = xesRedis.GetKeyName()\n\t\treturn\n\t}\n\tret = fmt.Sprintf(key, (xesRedis.GetKeyParams())...)\n\treturn\n}", "func (st *MemStorage) GetKey(gun, role string) (algorithm string, public []byte, err error) {\n\t// no need for lock. 
It's ok to return nil if an update\n\t// wasn't observed\n\tg, ok := st.keys[gun]\n\tif !ok {\n\t\treturn \"\", nil, &ErrNoKey{gun: gun}\n\t}\n\tk, ok := g[role]\n\tif !ok {\n\t\treturn \"\", nil, &ErrNoKey{gun: gun}\n\t}\n\n\treturn k.algorithm, k.public, nil\n}", "func (e *EntrySet) Get(key string) string {\n return e.keys[key]\n}", "func (v *V) Key() string {\n\treturn v.key\n}", "func (it *Iter) Key() byte { return it.top().key }", "func (s Stash) Key() string {\n\tvals := utils.MapValues(s.payload)\n\tif len(vals) < 1 {\n\t\treturn \"\"\n\t}\n\n\treturn fmt.Sprintf(\"$%s\", vals[0])\n}", "func (i SNSPlatformApplicationAttribute) Key() string {\n\tif val, ok := _SNSPlatformApplicationAttributeValueToKeyMap[i]; ok {\n\t\t// found\n\t\treturn val\n\t} else {\n\t\t// not found\n\t\treturn \"\"\n\t}\n}", "func (o Operator) Key() string {\n\treturn fmt.Sprintf(\"operator.%s\", o.Aid)\n}", "func (i *StringIterator) Key() Object {\n\treturn &Int{Value: int64(i.i - 1)}\n}", "func (mci *XMCacheIterator) Key() []byte {\n\tif mci.err != nil || mci.dir == dirReleased {\n\t\treturn nil\n\t}\n\tswitch mci.index {\n\tcase 0, 1:\n\t\treturn mci.iters[mci.index].Key()\n\tcase 2:\n\t\tif mci.mc.isPenetrate {\n\t\t\treturn mci.mIter.Key()\n\t\t}\n\t\treturn nil\n\t}\n\treturn nil\n}", "func (s *Arena) getKey(offset uint32, size uint16) []byte {\n\treturn s.data[offset : offset+uint32(size)]\n}", "func (o ReservationAffinityOutput) Key() pulumi.StringPtrOutput {\n\treturn o.ApplyT(func(v ReservationAffinity) *string { return v.Key }).(pulumi.StringPtrOutput)\n}", "func (f DefaultField) Key() string {\n\treturn f.K\n}", "func Key(v string) predicate.Blob {\n\treturn predicate.Blob(\n\t\tfunc(s *sql.Selector) {\n\t\t\ts.Where(sql.EQ(s.C(FieldKey), v))\n\t\t},\n\t)\n}", "func (m Match) Key() string {\n\treturn fmt.Sprintf(\"match:%s\", m.ID())\n}", "func (d *Activity) KeyVal() string {\n\treturn d.ExteralID\n}", "func (key twofishKey) Key() []byte {\n\treturn key[:]\n}", "func getKey(data string) string {\n\tsign := md5.Sum([]byte(data))\n\tsignStr := fmt.Sprintf(\"%x\", sign)\n\treturn signStr[:7]\n}", "func (l *LangPackStringPluralized) GetKey() (value string) {\n\tif l == nil {\n\t\treturn\n\t}\n\treturn l.Key\n}", "func (t Task) Key() string {\n\treturn fmt.Sprintf(\"%s:%s\", t.Name, t.ID)\n}", "func (k Keys) RangeKey() interface{} { return k[1] }", "func (d *DStarLite) keyFor(s *dStarLiteNode) key {\n\t/*\n\t procedure CalculateKey(s)\n\t {01”} return [min(g(s), rhs(s)) + h(s_start, s) + k_m; min(g(s), rhs(s))];\n\t*/\n\tk := key{1: math.Min(s.g, s.rhs)}\n\tk[0] = k[1] + d.heuristic(d.s.Node, s.Node) + d.keyModifier\n\treturn k\n}", "func (stateID StateID) Key() string {\n\treturn string(stateID.LastAppHash)\n}", "func (m *Metric) GetKey() string {\n\tif m == nil || m.Key == nil {\n\t\treturn \"\"\n\t}\n\treturn *m.Key\n}", "func (u User) Key() interface{} {\n\treturn u.ID\n}", "func (b *BitSet) Key() string {\n\tif b == nil {\n\t\treturn \"\"\n\t} else {\n\t\treturn string(b.Bits.Bytes())\n\t}\n}", "func (e EnumByte) Key() EnumByteKey {\n return EnumByteKey(e)\n}", "func (n *lnode) key() []byte {\n\tbuf := (*[maxAllocSize]byte)(unsafe.Pointer(n))\n\treturn buf[n.pos : n.pos+n.ksize]\n}", "func (p *pv) key() pvKey {\n\treturn newPVKey(p.Cluster, p.Name)\n}", "func (i *MapIterator) Key() Object {\n\tk := i.k[i.i-1]\n\treturn &String{Value: k}\n}", "func (k *KVItem) Key() (interface{}, error) {\n\tvar cKey unsafe.Pointer\n\tvar keySize C.uint64_t\n\tvar keyType C.tiledb_datatype_t\n\tret := 
C.tiledb_kv_item_get_key(k.context.tiledbContext, k.tiledbKVItem, &cKey, &keyType, &keySize)\n\n\tif ret != C.TILEDB_OK {\n\t\treturn nil, fmt.Errorf(\"Error getting key for KVItem: %s\", k.context.LastError())\n\t}\n\n\tswitch Datatype(keyType) {\n\tcase TILEDB_INT8:\n\t\t// If the key size is greater than the size of a single value in bytes it is an array\n\t\telements := int(keySize) / C.sizeof_int8_t\n\t\tif elements > 1 {\n\t\t\ttmpslice := (*[1 << 30]C.int8_t)(unsafe.Pointer(cKey))[:elements:elements]\n\t\t\tretSlice := make([]int8, elements)\n\t\t\tfor i, s := range tmpslice {\n\t\t\t\tretSlice[i] = int8(s)\n\t\t\t}\n\t\t\treturn retSlice, nil\n\t\t} else {\n\t\t\treturn int8(*(*C.int8_t)(cKey)), nil\n\t\t}\n\tcase TILEDB_INT16:\n\t\t// If the key size is greater than the size of a single value in bytes it is an array\n\t\telements := int(keySize) / C.sizeof_int16_t\n\t\tif elements > 1 {\n\t\t\ttmpslice := (*[1 << 30]C.int16_t)(unsafe.Pointer(cKey))[:elements:elements]\n\t\t\tretSlice := make([]int16, elements)\n\t\t\tfor i, s := range tmpslice {\n\t\t\t\tretSlice[i] = int16(s)\n\t\t\t}\n\t\t\treturn retSlice, nil\n\t\t} else {\n\t\t\treturn int16(*(*C.int16_t)(cKey)), nil\n\t\t}\n\tcase TILEDB_INT32:\n\t\t// If the key size is greater than the size of a single value in bytes it is an array\n\t\telements := int(keySize) / C.sizeof_int32_t\n\t\tif elements > 1 {\n\t\t\ttmpslice := (*[1 << 30]C.int32_t)(unsafe.Pointer(cKey))[:elements:elements]\n\t\t\tretSlice := make([]int32, elements)\n\t\t\tfor i, s := range tmpslice {\n\t\t\t\tretSlice[i] = int32(s)\n\t\t\t}\n\t\t\treturn retSlice, nil\n\t\t} else {\n\t\t\treturn int32(*(*C.int32_t)(cKey)), nil\n\t\t}\n\tcase TILEDB_INT64:\n\t\t// If the key size is greater than the size of a single value in bytes it is an array\n\t\telements := int(keySize) / C.sizeof_int64_t\n\t\tif elements > 1 {\n\t\t\ttmpslice := (*[1 << 30]C.int64_t)(unsafe.Pointer(cKey))[:elements:elements]\n\t\t\tretSlice := make([]int64, elements)\n\t\t\tfor i, s := range tmpslice {\n\t\t\t\tretSlice[i] = int64(s)\n\t\t\t}\n\t\t\treturn retSlice, nil\n\t\t} else {\n\t\t\treturn int64(*(*C.int64_t)(cKey)), nil\n\t\t}\n\tcase TILEDB_UINT8:\n\t\t// If the key size is greater than the size of a single value in bytes it is an array\n\t\telements := int(keySize) / C.sizeof_uint8_t\n\t\tif elements > 1 {\n\t\t\ttmpslice := (*[1 << 30]C.uint8_t)(unsafe.Pointer(cKey))[:elements:elements]\n\t\t\tretSlice := make([]uint8, elements)\n\t\t\tfor i, s := range tmpslice {\n\t\t\t\tretSlice[i] = uint8(s)\n\t\t\t}\n\t\t\treturn retSlice, nil\n\t\t} else {\n\t\t\treturn int8(*(*C.uint8_t)(cKey)), nil\n\t\t}\n\tcase TILEDB_UINT16:\n\t\t// If the key size is greater than the size of a single value in bytes it is an array\n\t\telements := int(keySize) / C.sizeof_uint16_t\n\t\tif elements > 1 {\n\t\t\ttmpslice := (*[1 << 30]C.uint16_t)(unsafe.Pointer(cKey))[:elements:elements]\n\t\t\tretSlice := make([]uint16, elements)\n\t\t\tfor i, s := range tmpslice {\n\t\t\t\tretSlice[i] = uint16(s)\n\t\t\t}\n\t\t\treturn retSlice, nil\n\t\t} else {\n\t\t\treturn int16(*(*C.uint16_t)(cKey)), nil\n\t\t}\n\tcase TILEDB_UINT32:\n\t\t// If the key size is greater than the size of a single value in bytes it is an array\n\t\telements := int(keySize) / C.sizeof_uint32_t\n\t\tif elements > 1 {\n\t\t\ttmpslice := (*[1 << 30]C.uint32_t)(unsafe.Pointer(cKey))[:elements:elements]\n\t\t\tretSlice := make([]uint32, elements)\n\t\t\tfor i, s := range tmpslice {\n\t\t\t\tretSlice[i] = uint32(s)\n\t\t\t}\n\t\t\treturn 
retSlice, nil\n\t\t} else {\n\t\t\treturn int32(*(*C.uint32_t)(cKey)), nil\n\t\t}\n\tcase TILEDB_UINT64:\n\t\t// If the key size is greater than the size of a single value in bytes it is an array\n\t\telements := int(keySize) / C.sizeof_uint64_t\n\t\tif elements > 1 {\n\t\t\ttmpslice := (*[1 << 30]C.uint64_t)(unsafe.Pointer(cKey))[:elements:elements]\n\t\t\tretSlice := make([]uint64, elements)\n\t\t\tfor i, s := range tmpslice {\n\t\t\t\tretSlice[i] = uint64(s)\n\t\t\t}\n\t\t\treturn retSlice, nil\n\t\t} else {\n\t\t\treturn int64(*(*C.uint64_t)(cKey)), nil\n\t\t}\n\tcase TILEDB_FLOAT32:\n\t\t// If the key size is greater than the size of a single value in bytes it is an array\n\t\telements := int(keySize) / C.sizeof_float\n\t\tif elements > 1 {\n\t\t\ttmpslice := (*[1 << 30]C.float)(unsafe.Pointer(cKey))[:elements:elements]\n\t\t\tretSlice := make([]float32, elements)\n\t\t\tfor i, s := range tmpslice {\n\t\t\t\tretSlice[i] = float32(s)\n\t\t\t}\n\t\t\treturn retSlice, nil\n\t\t} else {\n\t\t\treturn float32(*(*C.float)(cKey)), nil\n\t\t}\n\tcase TILEDB_FLOAT64:\n\t\t// If the key size is greater than the size of a single value in bytes it is an array\n\t\telements := int(keySize) / C.sizeof_double\n\t\tif elements > 1 {\n\t\t\ttmpslice := (*[1 << 30]C.double)(unsafe.Pointer(cKey))[:elements:elements]\n\t\t\tretSlice := make([]float64, elements)\n\t\t\tfor i, s := range tmpslice {\n\t\t\t\tretSlice[i] = float64(s)\n\t\t\t}\n\t\t\treturn retSlice, nil\n\t\t} else {\n\t\t\treturn float64(*(*C.double)(cKey)), nil\n\t\t}\n\tcase TILEDB_CHAR:\n\t\telements := int(keySize) / C.sizeof_char\n\t\treturn C.GoStringN((*C.char)(cKey), C.int(elements)), nil\n\n\tdefault:\n\t\treturn nil, fmt.Errorf(\"Unsupported tiledb key type: %v\", keyType)\n\t}\n\n\treturn nil, fmt.Errorf(\"Error getting key for KVItem\")\n}", "func (u Users) Key(luid *windows.LUID) (int64, error) {\r\n\tif luid == nil {\r\n\t\treturn 0, errors.New(\"got empty LUID pointer\")\r\n\t}\r\n\tkey := int64(int64(luid.HighPart<<32) + int64(luid.LowPart))\r\n\treturn key, nil\r\n}", "func (a *Anime) Key() string {\n\treturn fmt.Sprintf(\"anime:%d\", a.ID)\n}", "func (m MapEntry) Key() interface{} {\n\treturn m.key\n}", "func (f KeyMakerFunc) KeyFor(r *http.Request) string {\n\treturn f(r)\n}", "func (t *TimeSeries) GetKey() string {\n\treturn t.key\n}", "func (m *Map) Get(key string) string {\n\tif m.IsEmpty() {\n\t\treturn \"\"\n\t}\n\thash := m.hash([]byte(key))\n\tn := node{hash: hash, key: key}\n\titer := floor(&m.nodes.Tree, &n)\n\tif iter == m.nodes.End() {\n\t\titer = m.nodes.Begin()\n\t}\n\treturn iter.Node().Key.(*node).key\n}", "func (t *ScheduledTask) Key() string {\n\treturn fmt.Sprintf(taskKeyFormat, keyPrefixScheduled, t.ID, t.score)\n}", "func (it *iterator) Key() []byte {\n\treturn it.current.key\n}", "func (eln *EmptyLeafNode) GetKey() []byte {\n\treturn nil\n}", "func (h dataUsageHash) Key() string {\n\treturn string(h)\n}", "func (c *Container) Key() string {\n\tc.Lock()\n\tdefer c.Unlock()\n\treturn c.ID\n}", "func (c Repository) GetKey(key string) string {\n\tval, err := c.Client.Get(key).Result()\n\n\tif err != nil {\n\t\tlog.Println(err)\n\t}\n\n\treturn val\n}", "func (f Base) Key() string {\n\treturn f.key\n}", "func (o StudioComponentScriptParameterKeyValueOutput) Key() pulumi.StringPtrOutput {\n\treturn o.ApplyT(func(v StudioComponentScriptParameterKeyValue) *string { return v.Key }).(pulumi.StringPtrOutput)\n}", "func (o *ResourceDefinitionFilter) GetKey() string {\n\tif o == nil {\n\t\tvar ret 
string\n\t\treturn ret\n\t}\n\n\treturn o.Key\n}", "func (it *KeyAccess_Iterator) Key() interface{} {\n\treturn it.node.key\n}", "func (b Bucket) Key() interface{} {\n\treturn b[\"key\"]\n}", "func (m *Map) Get(key string) string {\n\tif m.IsEmpty() {\n\t\treturn \"\"\n\t}\n\n\thash := int(m.hash([]byte(key)))\n\n\t// Binary search for appropriate replica.\n\tidx := sort.Search(len(m.keys), func(i int) bool { return m.keys[i] >= hash })\n\n\t// Means we have cycled back to the first replica.\n\tif idx == len(m.keys) {\n\t\tidx = 0\n\t}\n\n\treturn m.hashMap[m.keys[idx]]\n}", "func (c *Counter) GetKey() string {\n\treturn c.key\n}", "func Key(id string, fallback string) Reference {\n\treturn key{id, fallback}\n}", "func (a *PositionalAttribute) Key() string {\n\treturn AttrPositionalIndex + strconv.Itoa(a.Index)\n}", "func (n *Node) Key() interface{} {\n\treturn n.key\n}", "func (e Timing) Key() string {\n\treturn e.Name\n}", "func Key(key string) query.Extractor {\n\treturn &keyExtractor{key}\n}", "func (i *Iterator) Key() []byte {\n\treturn i.iterator.Item().KeyCopy(nil)\n}", "func (m *Metric) Key() string {\n\treturn fmt.Sprintf(\"<%s%d%s>\", m.Name, m.Timestamp, m.Tags)\n}" ]
[ "0.7397974", "0.703695", "0.7026126", "0.69730234", "0.69701165", "0.69472975", "0.682121", "0.67752403", "0.6702173", "0.6691155", "0.66223186", "0.6602185", "0.66009104", "0.65937275", "0.65673846", "0.6555592", "0.65304273", "0.6521155", "0.6511681", "0.65062934", "0.64982766", "0.64867014", "0.6477575", "0.6462233", "0.6456774", "0.6456152", "0.6448241", "0.6435275", "0.6423325", "0.6412427", "0.64096636", "0.6403262", "0.6395327", "0.63929945", "0.6382585", "0.6378694", "0.63715774", "0.63671046", "0.635377", "0.63430053", "0.63418114", "0.6339266", "0.63258415", "0.6319039", "0.630293", "0.6300368", "0.6298253", "0.6296133", "0.6295445", "0.6281786", "0.6279424", "0.6277453", "0.6277033", "0.62735796", "0.6269087", "0.6262938", "0.62600297", "0.6259835", "0.6242855", "0.62427336", "0.6239893", "0.6226979", "0.62228185", "0.6216291", "0.62118614", "0.6209014", "0.62075627", "0.619765", "0.6197426", "0.61971486", "0.6196739", "0.6192416", "0.6191223", "0.6183839", "0.6179522", "0.6177141", "0.6172575", "0.61719537", "0.6170614", "0.6162783", "0.61570954", "0.6154456", "0.6152929", "0.615149", "0.61509156", "0.61395836", "0.6138672", "0.61365676", "0.613636", "0.61338246", "0.6133771", "0.6129422", "0.61284614", "0.612092", "0.6119081", "0.61121005", "0.611087", "0.6106958", "0.6106701", "0.61020154", "0.6100722" ]
0.0
-1
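Editorial note (not part of the dataset record above): the record's document converts a raw C pointer plus a byte size into a Go slice with the classic (*[1 << 30]C.T)(unsafe.Pointer(p))[:n:n] idiom. The pure-Go sketch below is an assumption-laden illustration only: it uses an ordinary Go buffer as a stand-in for cgo memory and assumes a 64-bit platform and Go 1.17+, so it can show the same conversion next to the newer unsafe.Slice equivalent.

// Illustrative sketch, not taken from the record: "backing" stands in for
// memory that cgo would own, and keySize plays the role of the record's
// keySize-in-bytes value. Assumes 64-bit Go 1.17+.
package main

import (
	"fmt"
	"unsafe"
)

func main() {
	backing := []uint64{1, 2, 3, 4}          // stand-in for the C allocation
	p := unsafe.Pointer(&backing[0])         // what cgo would hand back as cKey
	keySize := uintptr(len(backing)) * unsafe.Sizeof(backing[0])

	// Same arithmetic as the record: element count = byte size / element size.
	elements := int(keySize / unsafe.Sizeof(uint64(0)))

	// Older idiom, mirroring the record's document (valid because the huge
	// array type is never allocated, only used to reinterpret the pointer).
	oldStyle := (*[1 << 30]uint64)(p)[:elements:elements]

	// Go 1.17+ equivalent of the same conversion.
	newStyle := unsafe.Slice((*uint64)(p), elements)

	fmt.Println(oldStyle, newStyle)
}

Both forms produce a slice that aliases the original memory, so in the cgo setting the caller must keep the underlying C allocation alive for as long as the slice is used.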
Validate checks the field values on GetEventByIDResponse_Option with the rules defined in the proto definition for this message. If any rules are violated, an error is returned.
func (m *GetEventByIDResponse_Option) Validate() error { if m == nil { return nil } // no validation rules for Id if utf8.RuneCountInString(m.GetContent()) < 1 { return GetEventByIDResponse_OptionValidationError{ field: "Content", reason: "value length must be at least 1 runes", } } return nil }
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "func (e GetEventByIDResponse_OptionValidationError) Reason() string { return e.reason }", "func (m *GetEventByIDResponse) Validate() error {\n\tif m == nil {\n\t\treturn nil\n\t}\n\n\t// no validation rules for Id\n\n\t// no validation rules for Name\n\n\t// no validation rules for Participants\n\n\tfor idx, item := range m.GetSections() {\n\t\t_, _ = idx, item\n\n\t\tif v, ok := interface{}(item).(interface{ Validate() error }); ok {\n\t\t\tif err := v.Validate(); err != nil {\n\t\t\t\treturn GetEventByIDResponseValidationError{\n\t\t\t\t\tfield: fmt.Sprintf(\"Sections[%v]\", idx),\n\t\t\t\t\treason: \"embedded message failed validation\",\n\t\t\t\t\tcause: err,\n\t\t\t\t}\n\t\t\t}\n\t\t}\n\n\t}\n\n\tif v, ok := interface{}(m.GetUpdatedAt()).(interface{ Validate() error }); ok {\n\t\tif err := v.Validate(); err != nil {\n\t\t\treturn GetEventByIDResponseValidationError{\n\t\t\t\tfield: \"UpdatedAt\",\n\t\t\t\treason: \"embedded message failed validation\",\n\t\t\t\tcause: err,\n\t\t\t}\n\t\t}\n\t}\n\n\t// no validation rules for IsOpened\n\n\t// no validation rules for IsApproved\n\n\treturn nil\n}", "func (e GetEventByIDResponse_OptionValidationError) Cause() error { return e.cause }", "func (o *EventAttributes) GetRelatedEventIdOk() (*int64, bool) {\n\tif o == nil || o.RelatedEventId == nil {\n\t\treturn nil, false\n\t}\n\treturn o.RelatedEventId, true\n}", "func (e GetEventByIDResponse_OptionValidationError) Field() string { return e.field }", "func (d *DHCPv4) GetOneOption(code OptionCode) []byte {\n\treturn d.Options.Get(code)\n}", "func (o *SubmitReplayRequestEntity) GetEventIdOk() (*int64, bool) {\n\tif o == nil || o.EventId == nil {\n\t\treturn nil, false\n\t}\n\treturn o.EventId, true\n}", "func (e GetEventByIDResponse_OptionValidationError) Key() bool { return e.key }", "func (p *PollAnswerVoters) GetOption() (value []byte) {\n\tif p == nil {\n\t\treturn\n\t}\n\treturn p.Option\n}", "func (o *EventoDTO) GetIdOk() (*int64, bool) {\n\tif o == nil || o.Id == nil {\n\t\treturn nil, false\n\t}\n\treturn o.Id, true\n}", "func (dialer *DialerWS) GetOption(name string) (\n\tvalue interface{},\n\terr error) {\n\treturn nil, mangos.ErrBadOption\n}", "func (h *GenericEventHandler) GetEventResponse(event *github.Event) *EventResponse {\n\treturn &EventResponse{Message: \"Request received. 
Doing nothing.\"}\n}", "func (c *ClientWithResponses) GetaspecificEventWithResponse(ctx context.Context, id string) (*GetaspecificEventResponse, error) {\n\trsp, err := c.GetaspecificEvent(ctx, id)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\treturn ParseGetaspecificEventResponse(rsp)\n}", "func (o *BundleProductOptionRepositoryV1DeleteByIDDeleteParams) SetOptionID(optionID int64) {\n\to.OptionID = optionID\n}", "func (c *ClientWithResponses) GetEventWithResponse(\n\tctx context.Context,\n\tid string,\n) (*getEventResponse, error) {\n\trsp, err := c.GetEvent(ctx, id)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\treturn ParseGetEventResponse(rsp)\n}", "func (o *DhcpRangeDataData) GetServerIdOk() (*string, bool) {\n\tif o == nil || o.ServerId == nil {\n\t\treturn nil, false\n\t}\n\treturn o.ServerId, true\n}", "func (c *ClientWithResponses) GetEmployeesIdWithResponse(ctx context.Context, id string) (*GetEmployeesIdResponse, error) {\n\trsp, err := c.GetEmployeesId(ctx, id)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\treturn ParseGetEmployeesIdResponse(rsp)\n}", "func (service *Service) GetOption(key string) interface{} {\n\treturn service.Options[key]\n}", "func OptionByTestID(testID int) []Option {\n\tdb := db.Connect()\n\tdefer db.Close()\n\trows, err := db.Query(\n\t\t\"Select id, optionName, trueOption, testID, questionID, createdAt, updatedAt from \"+\n\t\t\toptionTable+\" where testID = ? \",\n\t\ttestID,\n\t)\n\tif err != nil {\n\t\thelper.LogError(err)\n\t\tlog.Fatalf(err.Error())\n\t}\n\tvar Options []Option\n\tvar Opt Option\n\tfor rows.Next() {\n\t\trows.Scan(\n\t\t\t&Opt.ID,\n\t\t\t&Opt.OptionName,\n\t\t\t&Opt.TrueOption,\n\t\t\t&Opt.TestID,\n\t\t\t&Opt.QuestionID,\n\t\t\t&Opt.CreatedAt,\n\t\t\t&Opt.UpdatedAt,\n\t\t)\n\t\tOptions = append(Options, Opt)\n\t}\n\n\treturn Options\n}", "func (o FieldResponseOutput) Options() OptionResponseArrayOutput {\n\treturn o.ApplyT(func(v FieldResponse) []OptionResponse { return v.Options }).(OptionResponseArrayOutput)\n}", "func (o *UserInvitationResponseData) GetIdOk() (*string, bool) {\n\tif o == nil || o.Id == nil {\n\t\treturn nil, false\n\t}\n\treturn o.Id, true\n}", "func (m *UpsertEventRequest_Option) Validate() error {\n\tif m == nil {\n\t\treturn nil\n\t}\n\n\tif utf8.RuneCountInString(m.GetContent()) < 1 {\n\t\treturn UpsertEventRequest_OptionValidationError{\n\t\t\tfield: \"Content\",\n\t\t\treason: \"value length must be at least 1 runes\",\n\t\t}\n\t}\n\n\treturn nil\n}", "func (e GetEventByIDResponseValidationError) Reason() string { return e.reason }", "func (d *dialer) GetOption(n string) (interface{}, error) {\n\td.lock.Lock()\n\tdefer d.lock.Unlock()\n\n\tswitch n {\n\tcase mangos.OptionMaxRecvSize:\n\t\treturn d.recvMaxSize, nil\n\t}\n\treturn nil, mangos.ErrBadOption\n}", "func (pipe *PipeWS) GetOption(name string) (\n\tvalue interface{},\n\terr error) {\n\treturn nil, mangos.ErrBadOption\n}", "func (this *ResponseEvent) GetResponse() message.Response {\n\treturn this.m_response\n}", "func (o *EventAttributes) HasRelatedEventId() bool {\n\treturn o != nil && o.RelatedEventId != nil\n}", "func (o *Option) Id() int {\n if o == nil || o.Info == nil { return -1 }\n return o.Info.Id\n}", "func (app *KVStoreApplication) SetOption(req abcitypes.RequestSetOption) abcitypes.ResponseSetOption {\n\treturn abcitypes.ResponseSetOption{}\n}", "func (o *SubmitReplayRequestEntity) HasEventId() bool {\n\tif o != nil && o.EventId != nil {\n\t\treturn true\n\t}\n\n\treturn false\n}", "func (opt OptReturnKey) Option(d *bson.Document) 
error {\n\td.Append(bson.EC.Boolean(\"returnKey\", bool(opt)))\n\treturn nil\n}", "func (e EventHandler) GetEventbyID(ctx context.Context, params api.GetEventbyIDParams) middleware.Responder {\n\n\teventResponse := &models.EventResponse{}\n\tevent, err := e.eventService.GetEventByID(params.ID)\n\tif err != nil {\n\t\tpostErr := fmt.Errorf(\"failed to get the event: %w\", err)\n\t\tlogrus.Warnf(postErr.Error())\n\t\tgetEventErr := &models.Response{\n\t\t\tStatus: \"Failed\",\n\t\t\tCode: 400,\n\t\t\tMessage: postErr.Error(),\n\t\t}\n\t\treturn api.NewGetEventbyIDBadRequest().WithPayload(getEventErr)\n\n\t}\n\teventResponse.Response = &models.Response{\n\t\tCode: 200,\n\t\tStatus: \"Success\",\n\t\tMessage: \"Event has been fetched Successfully\",\n\t}\n\teventResponse.Event = event\n\n\tlogrus.Infoln(\"The event has been fetched:\", event)\n\n\treturn api.NewGetEventbyIDOK().WithPayload(eventResponse)\n}", "func (r *PollOption) GetID() kallax.Identifier {\n\treturn (*kallax.ULID)(&r.ID)\n}", "func (m *GetEventByIDResponse_Question) Validate() error {\n\tif m == nil {\n\t\treturn nil\n\t}\n\n\t// no validation rules for Id\n\n\t// no validation rules for Content\n\n\t// no validation rules for Position\n\n\t// no validation rules for Type\n\n\t// no validation rules for IsRequired\n\n\t// no validation rules for LimitedChoice\n\n\tfor idx, item := range m.GetOptions() {\n\t\t_, _ = idx, item\n\n\t\tif v, ok := interface{}(item).(interface{ Validate() error }); ok {\n\t\t\tif err := v.Validate(); err != nil {\n\t\t\t\treturn GetEventByIDResponse_QuestionValidationError{\n\t\t\t\t\tfield: fmt.Sprintf(\"Options[%v]\", idx),\n\t\t\t\t\treason: \"embedded message failed validation\",\n\t\t\t\t\tcause: err,\n\t\t\t\t}\n\t\t\t}\n\t\t}\n\n\t}\n\n\treturn nil\n}", "func (l *listener) GetOption(name string) (interface{}, error) {\n\tl.lock.Lock()\n\tdefer l.lock.Unlock()\n\tswitch name {\n\tcase mangos.OptionMaxRecvSize:\n\t\treturn l.recvMaxSize, nil\n\tcase OptionInputBufferSize:\n\t\treturn l.inputBufferSize, nil\n\tcase OptionOutputBufferSize:\n\t\treturn l.outputBufferSize, nil\n\tcase OptionSecurityDescriptor:\n\t\treturn l.securityDesc, nil\n\t}\n\treturn nil, mangos.ErrBadOption\n}", "func (s *SingleSelectQuestionRuleCategoryAutomation) SetOptionRefId(v string) *SingleSelectQuestionRuleCategoryAutomation {\n\ts.OptionRefId = &v\n\treturn s\n}", "func (m *GetEventByIDResponse_Section) Validate() error {\n\tif m == nil {\n\t\treturn nil\n\t}\n\n\t// no validation rules for Id\n\n\t// no validation rules for Name\n\n\t// no validation rules for Description\n\n\t// no validation rules for Position\n\n\tfor idx, item := range m.GetQuestions() {\n\t\t_, _ = idx, item\n\n\t\tif v, ok := interface{}(item).(interface{ Validate() error }); ok {\n\t\t\tif err := v.Validate(); err != nil {\n\t\t\t\treturn GetEventByIDResponse_SectionValidationError{\n\t\t\t\t\tfield: fmt.Sprintf(\"Questions[%v]\", idx),\n\t\t\t\t\treason: \"embedded message failed validation\",\n\t\t\t\t\tcause: err,\n\t\t\t\t}\n\t\t\t}\n\t\t}\n\n\t}\n\n\treturn nil\n}", "func (e *EDNS0_LOCAL) Option() uint16 { return e.Code }", "func (m Message) GetSecurityResponseID(f *field.SecurityResponseIDField) quickfix.MessageRejectError {\n\treturn m.Body.Get(f)\n}", "func (e *OptionListService) Get(id int) (*OptionList, *Response, error) {\n\tendpoint := fmt.Sprintf(\"/assets/optionList/%d?depth=complete\", id)\n\toptionList := &OptionList{}\n\tresp, err := e.client.getRequestDecode(endpoint, optionList)\n\treturn optionList, resp, err\n}", 
"func OptionsResponse(w http.ResponseWriter, r *http.Request, argv map[string]string) error {\n\tw.Header().Set(\"Allow\", \"GET,PUT,POST,DELETE,OPTIONS\")\n\n\treturn nil\n}", "func (opt OptShowRecordID) Option(d *bson.Document) error {\n\td.Append(bson.EC.Boolean(\"showRecordId\", bool(opt)))\n\treturn nil\n}", "func (o *UpdateNetRequest) GetDhcpOptionsSetIdOk() (*string, bool) {\n\tif o == nil {\n\t\treturn nil, false\n\t}\n\treturn &o.DhcpOptionsSetId, true\n}", "func (o *GetEventLogsUsingGETParams) SetEventID(eventID strfmt.UUID) {\n\to.EventID = eventID\n}", "func (o *PatchEventsEventIDReader) ReadResponse(response runtime.ClientResponse, consumer runtime.Consumer) (interface{}, error) {\n\tswitch response.Code() {\n\n\tcase 204:\n\t\tresult := NewPatchEventsEventIDNoContent()\n\t\tif err := result.readResponse(response, consumer, o.formats); err != nil {\n\t\t\treturn nil, err\n\t\t}\n\t\treturn result, nil\n\n\tcase 401:\n\t\tresult := NewPatchEventsEventIDUnauthorized()\n\t\tif err := result.readResponse(response, consumer, o.formats); err != nil {\n\t\t\treturn nil, err\n\t\t}\n\t\treturn nil, result\n\n\tcase 404:\n\t\tresult := NewPatchEventsEventIDNotFound()\n\t\tif err := result.readResponse(response, consumer, o.formats); err != nil {\n\t\t\treturn nil, err\n\t\t}\n\t\treturn nil, result\n\n\tcase 422:\n\t\tresult := NewPatchEventsEventIDUnprocessableEntity()\n\t\tif err := result.readResponse(response, consumer, o.formats); err != nil {\n\t\t\treturn nil, err\n\t\t}\n\t\treturn nil, result\n\n\tdefault:\n\t\treturn nil, runtime.NewAPIError(\"unknown error\", response, response.Code())\n\t}\n}", "func (s *BasePCREListener) ExitOption_flag(ctx *Option_flagContext) {}", "func (o *UserInvitationResponseData) HasId() bool {\n\treturn o != nil && o.Id != nil\n}", "func (o *GetProductsByIDProductOptionsByIDValuesReader) ReadResponse(response runtime.ClientResponse, consumer runtime.Consumer) (interface{}, error) {\n\tswitch response.Code() {\n\n\tcase 404:\n\t\tresult := NewGetProductsByIDProductOptionsByIDValuesNotFound()\n\t\tif err := result.readResponse(response, consumer, o.formats); err != nil {\n\t\t\treturn nil, err\n\t\t}\n\t\treturn nil, result\n\n\tdefault:\n\t\tresult := NewGetProductsByIDProductOptionsByIDValuesDefault(response.Code())\n\t\tif err := result.readResponse(response, consumer, o.formats); err != nil {\n\t\t\treturn nil, err\n\t\t}\n\t\tif response.Code()/100 == 2 {\n\t\t\treturn result, nil\n\t\t}\n\t\treturn nil, result\n\t}\n}", "func (o *EventAttributes) GetRelatedEventId() int64 {\n\tif o == nil || o.RelatedEventId == nil {\n\t\tvar ret int64\n\t\treturn ret\n\t}\n\treturn *o.RelatedEventId\n}", "func withEventID(id int) eventOption {\n\treturn func(m *EventMutation) {\n\t\tvar (\n\t\t\terr error\n\t\t\tonce sync.Once\n\t\t\tvalue *Event\n\t\t)\n\t\tm.oldValue = func(ctx context.Context) (*Event, error) {\n\t\t\tonce.Do(func() {\n\t\t\t\tif m.done {\n\t\t\t\t\terr = fmt.Errorf(\"querying old values post mutation is not allowed\")\n\t\t\t\t} else {\n\t\t\t\t\tvalue, err = m.Client().Event.Get(ctx, id)\n\t\t\t\t}\n\t\t\t})\n\t\t\treturn value, err\n\t\t}\n\t\tm.id = &id\n\t}\n}", "func (o *MemberResponse) GetIdOk() (*string, bool) {\n\tif o == nil {\n\t\treturn nil, false\n\t}\n\treturn &o.Id, true\n}", "func (o *UserInvitationResponseData) GetId() string {\n\tif o == nil || o.Id == nil {\n\t\tvar ret string\n\t\treturn ret\n\t}\n\treturn *o.Id\n}", "func (client *RoleDefinitionsClient) getByIDHandleResponse(resp *http.Response) 
(RoleDefinitionsGetByIDResponse, error) {\n\tresult := RoleDefinitionsGetByIDResponse{RawResponse: resp}\n\tif err := runtime.UnmarshalAsJSON(resp, &result.RoleDefinition); err != nil {\n\t\treturn RoleDefinitionsGetByIDResponse{}, runtime.NewResponseError(err, resp)\n\t}\n\treturn result, nil\n}", "func (o *Domain) GetIdOk() (*string, bool) {\n\tif o == nil || o.Id == nil {\n\t\treturn nil, false\n\t}\n\treturn o.Id, true\n}", "func (o *Invitation) GetIdOk() (*string, bool) {\n\tif o == nil || o.Id == nil {\n\t\treturn nil, false\n\t}\n\treturn o.Id, true\n}", "func (o *AuthenticationResponse) GetIdOk() (*string, bool) {\n\tif o == nil {\n\t\treturn nil, false\n\t}\n\treturn &o.Id, true\n}", "func (h *Handle) optionGetStr(f func(*C.alpm_handle_t) *C.char) (string, error) {\n\tcStr := f(h.ptr)\n\tstr := C.GoString(cStr)\n\n\tdefer C.free(unsafe.Pointer(cStr))\n\n\tif cStr == nil {\n\t\treturn str, h.LastError()\n\t}\n\n\treturn str, nil\n}", "func (o *VerifiableAddress) GetIdOk() (*string, bool) {\n\tif o == nil {\n\t\treturn nil, false\n\t}\n\treturn &o.Id, true\n}", "func (o *EventAttributes) SetRelatedEventId(v int64) {\n\to.RelatedEventId = &v\n}", "func ValidateOutputOpt(opt string) error {\n\tswitch {\n\tcase opt == tableOutput, opt == jsonOutput:\n\t\treturn nil\n\tcase strings.HasPrefix(opt, customOutput):\n\t\treturn nil\n\t}\n\treturn fmt.Errorf(\"invalid value for output: %s\", opt)\n}", "func (o *GetProductsByIDProductOptionsByIDReader) ReadResponse(response runtime.ClientResponse, consumer runtime.Consumer) (interface{}, error) {\n\tswitch response.Code() {\n\n\tcase 404:\n\t\tresult := NewGetProductsByIDProductOptionsByIDNotFound()\n\t\tif err := result.readResponse(response, consumer, o.formats); err != nil {\n\t\t\treturn nil, err\n\t\t}\n\t\treturn nil, result\n\n\tdefault:\n\t\tresult := NewGetProductsByIDProductOptionsByIDDefault(response.Code())\n\t\tif err := result.readResponse(response, consumer, o.formats); err != nil {\n\t\t\treturn nil, err\n\t\t}\n\t\tif response.Code()/100 == 2 {\n\t\t\treturn result, nil\n\t\t}\n\t\treturn nil, result\n\t}\n}", "func (o *SingleSelectFieldField) GetIdOk() (*int32, bool) {\n\tif o == nil {\n\t\treturn nil, false\n\t}\n\treturn &o.Id, true\n}", "func (o *PatchProductsByIDProductOptionsByIDReader) ReadResponse(response runtime.ClientResponse, consumer runtime.Consumer) (interface{}, error) {\n\tswitch response.Code() {\n\n\tcase 400:\n\t\tresult := NewPatchProductsByIDProductOptionsByIDBadRequest()\n\t\tif err := result.readResponse(response, consumer, o.formats); err != nil {\n\t\t\treturn nil, err\n\t\t}\n\t\treturn nil, result\n\n\tcase 404:\n\t\tresult := NewPatchProductsByIDProductOptionsByIDNotFound()\n\t\tif err := result.readResponse(response, consumer, o.formats); err != nil {\n\t\t\treturn nil, err\n\t\t}\n\t\treturn nil, result\n\n\tdefault:\n\t\tresult := NewPatchProductsByIDProductOptionsByIDDefault(response.Code())\n\t\tif err := result.readResponse(response, consumer, o.formats); err != nil {\n\t\t\treturn nil, err\n\t\t}\n\t\tif response.Code()/100 == 2 {\n\t\t\treturn result, nil\n\t\t}\n\t\treturn nil, result\n\t}\n}", "func (c *ClientWithResponses) GetSectionWithResponse(\n\tctx context.Context,\n\tid string,\n) (*getSectionResponse, error) {\n\trsp, err := c.GetSection(ctx, id)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\treturn ParseGetSectionResponse(rsp)\n}", "func (o ApiResponseOutput) Options() OptionResponseArrayOutput {\n\treturn o.ApplyT(func(v ApiResponse) []OptionResponse { return v.Options 
}).(OptionResponseArrayOutput)\n}", "func (o *CredentialsResponseElement) GetIdOk() (*string, bool) {\n\tif o == nil || o.Id == nil {\n\t\treturn nil, false\n\t}\n\treturn o.Id, true\n}", "func NewGetEventsEventIDOK() *GetEventsEventIDOK {\n\n\treturn &GetEventsEventIDOK{}\n}", "func HandleResponse(f ResponseHandler) Opt {\n\treturn func(c *Client) Opt {\n\t\told := c.handleResponse\n\t\tc.handleResponse = f\n\t\treturn HandleResponse(old)\n\t}\n}", "func (o *PaymentInitiationPaymentGetResponse) GetConsentIdOk() (*string, bool) {\n\tif o == nil {\n\t\treturn nil, false\n\t}\n\treturn o.ConsentId.Get(), o.ConsentId.IsSet()\n}", "func (e GetEventByIDResponseValidationError) Cause() error { return e.cause }", "func (o *GetCharactersCharacterIDCalendarEventIDReader) ReadResponse(response runtime.ClientResponse, consumer runtime.Consumer) (interface{}, error) {\n\tswitch response.Code() {\n\n\tcase 200:\n\t\tresult := NewGetCharactersCharacterIDCalendarEventIDOK()\n\t\tif err := result.readResponse(response, consumer, o.formats); err != nil {\n\t\t\treturn nil, err\n\t\t}\n\t\treturn result, nil\n\n\tcase 304:\n\t\tresult := NewGetCharactersCharacterIDCalendarEventIDNotModified()\n\t\tif err := result.readResponse(response, consumer, o.formats); err != nil {\n\t\t\treturn nil, err\n\t\t}\n\t\treturn nil, result\n\n\tcase 400:\n\t\tresult := NewGetCharactersCharacterIDCalendarEventIDBadRequest()\n\t\tif err := result.readResponse(response, consumer, o.formats); err != nil {\n\t\t\treturn nil, err\n\t\t}\n\t\treturn nil, result\n\n\tcase 401:\n\t\tresult := NewGetCharactersCharacterIDCalendarEventIDUnauthorized()\n\t\tif err := result.readResponse(response, consumer, o.formats); err != nil {\n\t\t\treturn nil, err\n\t\t}\n\t\treturn nil, result\n\n\tcase 403:\n\t\tresult := NewGetCharactersCharacterIDCalendarEventIDForbidden()\n\t\tif err := result.readResponse(response, consumer, o.formats); err != nil {\n\t\t\treturn nil, err\n\t\t}\n\t\treturn nil, result\n\n\tcase 404:\n\t\tresult := NewGetCharactersCharacterIDCalendarEventIDNotFound()\n\t\tif err := result.readResponse(response, consumer, o.formats); err != nil {\n\t\t\treturn nil, err\n\t\t}\n\t\treturn nil, result\n\n\tcase 420:\n\t\tresult := NewGetCharactersCharacterIDCalendarEventIDEnhanceYourCalm()\n\t\tif err := result.readResponse(response, consumer, o.formats); err != nil {\n\t\t\treturn nil, err\n\t\t}\n\t\treturn nil, result\n\n\tcase 500:\n\t\tresult := NewGetCharactersCharacterIDCalendarEventIDInternalServerError()\n\t\tif err := result.readResponse(response, consumer, o.formats); err != nil {\n\t\t\treturn nil, err\n\t\t}\n\t\treturn nil, result\n\n\tcase 503:\n\t\tresult := NewGetCharactersCharacterIDCalendarEventIDServiceUnavailable()\n\t\tif err := result.readResponse(response, consumer, o.formats); err != nil {\n\t\t\treturn nil, err\n\t\t}\n\t\treturn nil, result\n\n\tcase 504:\n\t\tresult := NewGetCharactersCharacterIDCalendarEventIDGatewayTimeout()\n\t\tif err := result.readResponse(response, consumer, o.formats); err != nil {\n\t\t\treturn nil, err\n\t\t}\n\t\treturn nil, result\n\n\tdefault:\n\t\treturn nil, runtime.NewAPIError(\"unknown error\", response, response.Code())\n\t}\n}", "func UseRequestIDOption(f bool) RequestIDOption {\n\treturn func(o *RequestIDOptions) *RequestIDOptions {\n\t\to.requestIDHeader = \"X-Request-Id\"\n\t\to.useRequestID = f\n\t\treturn o\n\t}\n}", "func (blockchain *Blockchain) SetOption(_ abciTypes.RequestSetOption) abciTypes.ResponseSetOption {\n\treturn abciTypes.ResponseSetOption{}\n}", 
"func (e UpsertEventRequest_OptionValidationError) Reason() string { return e.reason }", "func (o *SubmitReplayRequestEntity) GetEventId() int64 {\n\tif o == nil || o.EventId == nil {\n\t\tvar ret int64\n\t\treturn ret\n\t}\n\treturn *o.EventId\n}", "func (o *Venda) GetIdOk() (*int64, bool) {\n\tif o == nil || o.Id == nil {\n\t\treturn nil, false\n\t}\n\treturn o.Id, true\n}", "func (m *GetEventByIDRequest) Validate() error {\n\tif m == nil {\n\t\treturn nil\n\t}\n\n\t// no validation rules for Id\n\n\treturn nil\n}", "func (client BaseClient) GetAPIConfigsIDResponder(resp *http.Response) (result SetObject, err error) {\n\terr = autorest.Respond(\n\t\tresp,\n\t\tclient.ByInspecting(),\n\t\tazure.WithErrorUnlessStatusCode(http.StatusOK),\n\t\tautorest.ByUnmarshallingJSON(&result.Value),\n\t\tautorest.ByClosing())\n\tresult.Response = autorest.Response{Response: resp}\n\treturn\n}", "func (s *BasePCREListener) ExitOption(ctx *OptionContext) {}", "func OptionByQuestionID(qID int) []Option {\n\tdb := db.Connect()\n\tdefer db.Close()\n\trows, err := db.Query(\n\t\t\"Select id, optionName, trueOption, testID, questionID, createdAt, updatedAt from \"+\n\t\t\toptionTable+\" where questionID = ? \",\n\t\tqID,\n\t)\n\tif err != nil {\n\t\thelper.LogError(err)\n\t\tlog.Fatalf(err.Error())\n\t}\n\tvar Options []Option\n\tvar Opt Option\n\tfor rows.Next() {\n\t\trows.Scan(\n\t\t\t&Opt.ID,\n\t\t\t&Opt.OptionName,\n\t\t\t&Opt.TrueOption,\n\t\t\t&Opt.TestID,\n\t\t\t&Opt.QuestionID,\n\t\t\t&Opt.CreatedAt,\n\t\t\t&Opt.UpdatedAt,\n\t\t)\n\t\tOptions = append(Options, Opt)\n\t}\n\n\treturn Options\n}", "func (o *GuardianPolicyDataData) GetServerIdOk() (*string, bool) {\n\tif o == nil || o.ServerId == nil {\n\t\treturn nil, false\n\t}\n\treturn o.ServerId, true\n}", "func (o *GetEventByUserEventIDAndEventTypeHandlerReader) ReadResponse(response runtime.ClientResponse, consumer runtime.Consumer) (interface{}, error) {\n\tswitch response.Code() {\n\tcase 200:\n\t\tresult := NewGetEventByUserEventIDAndEventTypeHandlerOK()\n\t\tif err := result.readResponse(response, consumer, o.formats); err != nil {\n\t\t\treturn nil, err\n\t\t}\n\t\treturn result, nil\n\tcase 400:\n\t\tresult := NewGetEventByUserEventIDAndEventTypeHandlerBadRequest()\n\t\tif err := result.readResponse(response, consumer, o.formats); err != nil {\n\t\t\treturn nil, err\n\t\t}\n\t\treturn result, nil\n\tcase 401:\n\t\tresult := NewGetEventByUserEventIDAndEventTypeHandlerUnauthorized()\n\t\tif err := result.readResponse(response, consumer, o.formats); err != nil {\n\t\t\treturn nil, err\n\t\t}\n\t\treturn result, nil\n\tcase 403:\n\t\tresult := NewGetEventByUserEventIDAndEventTypeHandlerForbidden()\n\t\tif err := result.readResponse(response, consumer, o.formats); err != nil {\n\t\t\treturn nil, err\n\t\t}\n\t\treturn result, nil\n\tcase 404:\n\t\tresult := NewGetEventByUserEventIDAndEventTypeHandlerNotFound()\n\t\tif err := result.readResponse(response, consumer, o.formats); err != nil {\n\t\t\treturn nil, err\n\t\t}\n\t\treturn result, nil\n\tcase 500:\n\t\tresult := NewGetEventByUserEventIDAndEventTypeHandlerInternalServerError()\n\t\tif err := result.readResponse(response, consumer, o.formats); err != nil {\n\t\t\treturn nil, err\n\t\t}\n\t\treturn result, nil\n\n\tdefault:\n\t\tdata, err := ioutil.ReadAll(response.Body())\n\t\tif err != nil {\n\t\t\treturn nil, err\n\t\t}\n\n\t\treturn nil, fmt.Errorf(\"Requested GET /event/namespaces/{namespace}/users/{userId}/eventType/{eventType}/eventId/{eventId} returns an error %d: %s\", 
response.Code(), string(data))\n\t}\n}", "func (o *GetMenuItemInformation200Response) GetIdOk() (*int32, bool) {\n\tif o == nil {\n\t\treturn nil, false\n\t}\n\treturn &o.Id, true\n}", "func (c *ServerNotifyRegisterWithIdCommand) GetResponseType() interface{} {\n\treturn nil\n}", "func (o *WebhooksJsonWebhook) GetEventOk() (*string, bool) {\n\tif o == nil || o.Event == nil {\n\t\treturn nil, false\n\t}\n\treturn o.Event, true\n}", "func (f *Fifo) GetOption(opt FifoOption) ([]byte, error) {\n\tif opt == RWFifoNoBlock {\n\t\treturn nil, fmt.Errorf(\"Option %s not implemented\", opt)\n\t}\n\n\tvar data unsafe.Pointer\n\tvar dataLen C.uint\n\n\ts := C.ncs_FifoGetOption(f.handle, C.int(opt), data, &dataLen)\n\n\tif Status(s) == StatusInvalidDataLength {\n\t\treturn f.GetOptionWithByteSize(opt, fifoOptSize[opt]*uint(dataLen))\n\t}\n\n\treturn nil, fmt.Errorf(\"Failed to read %s option: %s\", opt, Status(s))\n}", "func (_m *ISession) InteractionResponseDelete(interaction *discordgo.Interaction, options ...discordgo.RequestOption) error {\n\t_va := make([]interface{}, len(options))\n\tfor _i := range options {\n\t\t_va[_i] = options[_i]\n\t}\n\tvar _ca []interface{}\n\t_ca = append(_ca, interaction)\n\t_ca = append(_ca, _va...)\n\tret := _m.Called(_ca...)\n\n\tvar r0 error\n\tif rf, ok := ret.Get(0).(func(*discordgo.Interaction, ...discordgo.RequestOption) error); ok {\n\t\tr0 = rf(interaction, options...)\n\t} else {\n\t\tr0 = ret.Error(0)\n\t}\n\n\treturn r0\n}", "func (e *EDNS0_DHU) Option() uint16 { return EDNS0DHU }", "func (res *respondent) SetOption(name string, value interface{}) (err error) {\n\treturn nil\n}", "func vendParseOption(code OptionCode, data []byte) (Option, error) {\n\treturn &OptionGeneric{OptionCode: code, OptionData: data}, nil\n}", "func OptionNoneResponse(resp IOutProtocol) RouterOptionFunc {\n\treturn func(r *Router) {\n\t\tr.noneResp = resp\n\t}\n}", "func (o *InlineResponse2004People) GetIdOk() (*string, bool) {\n\tif o == nil || o.Id == nil {\n\t\treturn nil, false\n\t}\n\treturn o.Id, true\n}", "func (e *EDNS0_DAU) Option() uint16 { return EDNS0DAU }", "func (d *DHCPv4) UpdateOption(opt Option) {\n\tif d.Options == nil {\n\t\td.Options = make(Options)\n\t}\n\td.Options.Update(opt)\n}", "func ParseGetEmployeesIdResponse(rsp *http.Response) (*GetEmployeesIdResponse, error) {\n\tbodyBytes, err := ioutil.ReadAll(rsp.Body)\n\tdefer rsp.Body.Close()\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\n\tresponse := &GetEmployeesIdResponse{\n\t\tBody: bodyBytes,\n\t\tHTTPResponse: rsp,\n\t}\n\n\tswitch {\n\tcase strings.Contains(rsp.Header.Get(\"Content-Type\"), \"json\") && rsp.StatusCode == 200:\n\t\tvar dest Party\n\t\tif err := json.Unmarshal(bodyBytes, &dest); err != nil {\n\t\t\treturn nil, err\n\t\t}\n\t\tresponse.JSON200 = &dest\n\n\t}\n\n\treturn response, nil\n}", "func getOptionValue(options []QueryOption, key QueryOptionKey) (interface{}, bool) {\n\t//Iterate through the options to try to find the key\n\tfor _, option := range options {\n\t\tif option.Key == key {\n\t\t\treturn option.Value, true\n\t\t}\n\t}\n\n\t//Option was not found\n\treturn \"\", false\n}", "func (o *DeleteAPIV2EventsEventSubscriptionIDReader) ReadResponse(response runtime.ClientResponse, consumer runtime.Consumer) (interface{}, error) {\n\tswitch response.Code() {\n\tcase 204:\n\t\tresult := NewDeleteAPIV2EventsEventSubscriptionIDNoContent()\n\t\tif err := result.readResponse(response, consumer, o.formats); err != nil {\n\t\t\treturn nil, err\n\t\t}\n\t\treturn result, nil\n\tcase 
403:\n\t\tresult := NewDeleteAPIV2EventsEventSubscriptionIDForbidden()\n\t\tif err := result.readResponse(response, consumer, o.formats); err != nil {\n\t\t\treturn nil, err\n\t\t}\n\t\treturn nil, result\n\tcase 404:\n\t\tresult := NewDeleteAPIV2EventsEventSubscriptionIDNotFound()\n\t\tif err := result.readResponse(response, consumer, o.formats); err != nil {\n\t\t\treturn nil, err\n\t\t}\n\t\treturn nil, result\n\n\tdefault:\n\t\treturn nil, runtime.NewAPIError(\"response status code does not match any response statuses defined for this endpoint in the swagger spec\", response, response.Code())\n\t}\n}", "func (o *ProformaArray) GetIdOk() (*string, bool) {\n\tif o == nil || o.Id == nil {\n\t\treturn nil, false\n\t}\n\treturn o.Id, true\n}", "func (o MethodResponseOutput) Options() OptionResponseArrayOutput {\n\treturn o.ApplyT(func(v MethodResponse) []OptionResponse { return v.Options }).(OptionResponseArrayOutput)\n}", "func (e GetEventByIDResponseValidationError) Field() string { return e.field }" ]
[ "0.607393", "0.59341", "0.57795984", "0.5383562", "0.53796434", "0.5337862", "0.53004616", "0.5217472", "0.51999176", "0.4909181", "0.48369288", "0.48236707", "0.48180208", "0.48076534", "0.48015097", "0.47713637", "0.47495365", "0.47237822", "0.4723019", "0.4714776", "0.4698976", "0.46976444", "0.46903166", "0.46827665", "0.4667934", "0.46641746", "0.46637282", "0.46633458", "0.4646999", "0.4640792", "0.46242163", "0.46214673", "0.46143517", "0.4599592", "0.45941305", "0.45896906", "0.45742434", "0.4563039", "0.45590812", "0.45507783", "0.45245788", "0.45162728", "0.45157683", "0.45132273", "0.45015472", "0.4489527", "0.44892964", "0.44887662", "0.44818312", "0.44813403", "0.4480998", "0.44488138", "0.44458905", "0.4443451", "0.44433892", "0.44262233", "0.44261923", "0.44247356", "0.44212234", "0.44128096", "0.43885335", "0.43882427", "0.43848717", "0.43631944", "0.43592775", "0.43582454", "0.43569073", "0.43504095", "0.43459293", "0.43388417", "0.4337183", "0.43365973", "0.43311965", "0.43255916", "0.4314008", "0.43129075", "0.43108007", "0.4310355", "0.43027997", "0.42971793", "0.42944005", "0.4293146", "0.4290921", "0.42862305", "0.4280786", "0.42784256", "0.42772666", "0.42688262", "0.42666814", "0.42652416", "0.42641193", "0.42601842", "0.4256711", "0.42551127", "0.4251036", "0.42483068", "0.4246588", "0.4245626", "0.4236991", "0.42356357" ]
0.7368818
0
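Editorial note (not part of the dataset record above): the record pairs a protoc-gen-validate style docstring with the generated Validate method for GetEventByIDResponse_Option, whose only rule is a minimum rune count on Content. The sketch below is a hypothetical, self-contained stand-in; the Option type and error text are assumptions rather than the generated code, and only the rune-count rule is taken from the record.

// Illustrative sketch: Option and its error message are assumed stand-ins
// that mirror the record's "value length must be at least 1 runes" rule.
package main

import (
	"fmt"
	"unicode/utf8"
)

type Option struct {
	Id      int64
	Content string
}

// Validate mirrors the generated rule: Content must be at least 1 rune long.
func (o *Option) Validate() error {
	if o == nil {
		return nil
	}
	if utf8.RuneCountInString(o.Content) < 1 {
		return fmt.Errorf("field %q: value length must be at least 1 runes", "Content")
	}
	return nil
}

func main() {
	fmt.Println((&Option{Content: ""}).Validate())    // length error
	fmt.Println((&Option{Content: "yes"}).Validate()) // <nil>
}

Running it prints the length error for the empty Content and <nil> for the valid one, matching the behavior the record's generated code would show.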
Field function returns field value.
func (e GetEventByIDResponse_OptionValidationError) Field() string { return e.field }
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "func GetFieldValue(v interface{}, field string) (r string) {\n\tvar immutable reflect.Value\n\timmutable = GetReflectValue(v)\n\tval := immutable.FieldByName(field)\n\tswitch val.Kind() {\n\tcase reflect.Int64, reflect.Int32, reflect.Int:\n\t\tr = fmt.Sprintf(\"%d\", val.Int())\n\tcase reflect.Float64, reflect.Float32:\n\t\tr = fmt.Sprintf(\"%.2f\", val.Float())\n\tdefault:\n\t\t// process time\n\t\tvi := val.Interface()\n\t\tif vc, ok := vi.(time.Time); ok {\n\t\t\tr = FormatTime(vc)\n\t\t\tbreak\n\t\t}\n\t\tr = fmt.Sprintf(\"%v\", val)\n\t}\n\treturn\n}", "func (f *field) Val() interface{} {\n\treturn f.v\n}", "func (f Fields) ValueForField(fieldName string) string {\n\treturn f.ValueForFieldOfType(fieldName, \"\")\n}", "func (v *ClassValue) field(s *scope, name string) Value {\n\tfield, ok := v.Fields[name]\n\tif !ok {\n\t\tpanic(fmt.Errorf(\"ClassValue %v did not contain field %v\", v.Type().Name(), name))\n\t}\n\treturn field\n}", "func (f *Field) Value() interface{} {\n\treturn f.value.Interface()\n}", "func (f *Fieldx) Value() interface{} {\n\treturn f.value.Interface()\n}", "func (i Item) GetField(name string) interface{} {\n\treturn getField(name, i.Payload)\n}", "func FieldValue(field *InputField) string {\n\treturn field.value\n}", "func (e RanparameterValueValidationError) Field() string { return e.field }", "func (i I)Field(r,c int, value string)string{\n return value\n}", "func (s *StructField) Field(name string) (*StructField, error) {\n\treturn Field(s.Value(), name)\n}", "func (entry *Entry) Field(name string) (value string, err error) {\n\tvalue, ok := entry.fields[name]\n\tif !ok {\n\t\terr = fmt.Errorf(\"field '%v' does not found in record %+v\", name, *entry)\n\t}\n\treturn\n}", "func (m *NodeMutation) Field(name string) (ent.Value, bool) {\n\tswitch name {\n\tcase node.FieldValue:\n\t\treturn m.Value()\n\t}\n\treturn nil, false\n}", "func (u *User) GetFieldValue(field *mapping.StructField) (interface{}, error) {\n\tswitch field.Index[0] {\n\tcase 0: // ID\n\t\treturn u.ID, nil\n\tcase 1: // Name\n\t\treturn u.Name, nil\n\tcase 2: // CreatedAt\n\t\treturn u.CreatedAt, nil\n\tcase 3: // CreatedAtIso\n\t\treturn u.CreatedAtIso, nil\n\tcase 5: // MotherID\n\t\treturn u.MotherID, nil\n\tcase 7: // FatherID\n\t\treturn u.FatherID, nil\n\t}\n\treturn nil, errors.Wrapf(mapping.ErrInvalidModelField, \"provided invalid field: '%s' for given model: User'\", field.Name())\n}", "func (m *NumberTokenMutation) Field(name string) (ent.Value, bool) {\n\tswitch name {\n\tcase numbertoken.FieldValue:\n\t\treturn m.Value()\n\t}\n\treturn nil, false\n}", "func (f *Field) Field(name string) *Field {\n\tfield, ok := f.FieldOk(name)\n\tif !ok {\n\t\tpanic(\"field not found\")\n\t}\n\n\treturn field\n}", "func (e GetInstanceRequestValidationError) Field() string { return e.field }", "func (e RanparameterItemValidationError) Field() string { return e.field }", "func (e ApplicationPubSubValidationError) Field() string { return e.field }", "func (res Result) GetField(fields ...string) interface{} {\n\tif len(fields) == 0 {\n\t\treturn res\n\t}\n\n\treturn res.get(fields)\n}", "func (t *Type) Field(i int) *Field", "func (m *CarRepairrecordMutation) Field(name string) (ent.Value, bool) {\n\tswitch name {\n\tcase carrepairrecord.FieldDatetime:\n\t\treturn m.Datetime()\n\tcase carrepairrecord.FieldRepairdetail:\n\t\treturn m.Repairdetail()\n\tcase carrepairrecord.FieldRepaircost:\n\t\treturn m.Repaircost()\n\tcase carrepairrecord.FieldCarmaintenance:\n\t\treturn m.Carmaintenance()\n\t}\n\treturn 
nil, false\n}", "func (b *box) getFieldValue(x, y int) int {\n\treturn b.values[x+y*3]\n}", "func (e GetEventByIDRequestValidationError) Field() string { return e.field }", "func (msg *Message) Field(fieldName string) *Field {\n\treturn msg.fieldByName[fieldName]\n}", "func (e GetRequestValidationError) Field() string { return e.field }", "func (e GetRequestValidationError) Field() string { return e.field }", "func (e GetInstanceResponseValidationError) Field() string { return e.field }", "func (e BitStringValidationError) Field() string { return e.field }", "func (e GetResponseValidationError) Field() string { return e.field }", "func (e GetApplicationPubSubRequestValidationError) Field() string { return e.field }", "func (e ResultValidationError) Field() string { return e.field }", "func (e GetEventByIDResponseValidationError) Field() string { return e.field }", "func (e RanparameterDefItemValidationError) Field() string { return e.field }", "func (e ArfcnValidationError) Field() string { return e.field }", "func (p *Pet) GetFieldValue(field *mapping.StructField) (interface{}, error) {\n\tswitch field.Index[0] {\n\tcase 0: // ID\n\t\treturn p.ID, nil\n\tcase 1: // Name\n\t\treturn p.Name, nil\n\tcase 3: // OwnerID\n\t\treturn p.OwnerID, nil\n\t}\n\treturn nil, errors.Wrapf(mapping.ErrInvalidModelField, \"provided invalid field: '%s' for given model: Pet'\", field.Name())\n}", "func (e RanparameterIdValidationError) Field() string { return e.field }", "func (e RetrieveResponseValidationError) Field() string { return e.field }", "func (m *RepairinvoiceMutation) Field(name string) (ent.Value, bool) {\n\tswitch name {\n\tcase repairinvoice.FieldBequipment:\n\t\treturn m.Bequipment()\n\tcase repairinvoice.FieldEmtell:\n\t\treturn m.Emtell()\n\tcase repairinvoice.FieldNum:\n\t\treturn m.Num()\n\t}\n\treturn nil, false\n}", "func (m *CleaningroomMutation) Field(name string) (ent.Value, bool) {\n\tswitch name {\n\tcase cleaningroom.FieldNote:\n\t\treturn m.Note()\n\tcase cleaningroom.FieldDateandstarttime:\n\t\treturn m.Dateandstarttime()\n\tcase cleaningroom.FieldPhonenumber:\n\t\treturn m.Phonenumber()\n\tcase cleaningroom.FieldNumofem:\n\t\treturn m.Numofem()\n\t}\n\treturn nil, false\n}", "func Field(name, from, reference string) (string, error) {\n\treturn makeRequest(\"field\", name, from, reference)\n}", "func (e GetMovableObjectRequestValidationError) Field() string { return e.field }", "func (e ResolveResponseValidationError) Field() string { return e.field }", "func (e PublishResponseValidationError) Field() string { return e.field }", "func (e GetMessageRequestValidationError) Field() string { return e.field }", "func (e GetMessageResponseValidationError) Field() string { return e.field }", "func (m *RepairinvoiceMutation) Field(name string) (ent.Value, bool) {\n\tswitch name {\n\tcase repairinvoice.FieldSymptomid:\n\t\treturn m.Symptomid()\n\tcase repairinvoice.FieldDeviceid:\n\t\treturn m.Deviceid()\n\tcase repairinvoice.FieldUserid:\n\t\treturn m.Userid()\n\tcase repairinvoice.FieldStatusrepairid:\n\t\treturn m.Statusrepairid()\n\t}\n\treturn nil, false\n}", "func (e SimpleRequestValidationError) Field() string { return e.field }", "func (e CacheValidationError) Field() string { return e.field }", "func (e PciValidationError) Field() string { return e.field }", "func (e ChannelPayRequestValidationError) Field() string { return e.field }", "func (e GetMovableObjectResponseValidationError) Field() string { return e.field }", "func (e RetrieveRequestValidationError) Field() string { 
return e.field }", "func (m *ExchangeMutation) Field(name string) (ent.Value, bool) {\n\tswitch name {\n\tcase exchange.FieldCode:\n\t\treturn m.Code()\n\tcase exchange.FieldName:\n\t\treturn m.Name()\n\t}\n\treturn nil, false\n}", "func (e PublishRequestValidationError) Field() string { return e.field }", "func (m *PetruleMutation) Field(name string) (ent.Value, bool) {\n\tswitch name {\n\tcase petrule.FieldPetrule:\n\t\treturn m.Petrule()\n\t}\n\treturn nil, false\n}", "func (e GitopsCFValidationError) Field() string { return e.field }", "func (e SimpleResponseValidationError) Field() string { return e.field }", "func (e ChannelPayResponseValidationError) Field() string { return e.field }", "func (f *Field) Get(l *Location) (string, error) {\n\tif l.Comp == -1 {\n\t\treturn string(f.Value), nil\n\t}\n\tcomp, err := f.Component(l.Comp)\n\tif err != nil {\n\t\treturn \"\", err\n\t}\n\treturn comp.Get(l)\n}", "func (m *RepairingMutation) Field(name string) (ent.Value, bool) {\n\tswitch name {\n\tcase repairing.FieldRepairpart:\n\t\treturn m.Repairpart()\n\t}\n\treturn nil, false\n}", "func (e RanfunctionNameValidationError) Field() string { return e.field }", "func (m *BillMutation) Field(name string) (ent.Value, bool) {\n\tswitch name {\n\tcase bill.FieldPrice:\n\t\treturn m.Price()\n\tcase bill.FieldTime:\n\t\treturn m.Time()\n\t}\n\treturn nil, false\n}", "func (m *EventRSVPMutation) Field(name string) (ent.Value, bool) {\n\treturn nil, false\n}", "func Field(v interface{}, name string) (*Fieldx, bool) {\n\treturn New(v).Field(name)\n}", "func (e GetStreamRequestValidationError) Field() string { return e.field }", "func (e RdsValidationError) Field() string { return e.field }", "func (f *TagField) Value() string {\n\treturn f.value\n}", "func (m *LeaseMutation) Field(name string) (ent.Value, bool) {\n\tswitch name {\n\tcase lease.FieldAddedtime:\n\t\treturn m.Addedtime()\n\tcase lease.FieldTenant:\n\t\treturn m.Tenant()\n\tcase lease.FieldNumbtenant:\n\t\treturn m.Numbtenant()\n\tcase lease.FieldIdtenant:\n\t\treturn m.Idtenant()\n\tcase lease.FieldAgetenant:\n\t\treturn m.Agetenant()\n\t}\n\treturn nil, false\n}", "func (e RetrieveCurrentRequestValidationError) Field() string { return e.field }", "func (fn AdapterFunc) Field(fieldpath []string) (string, bool) {\n\treturn fn(fieldpath)\n}", "func (e EarfcnValidationError) Field() string { return e.field }", "func (e Response_DataValidationError) Field() string { return e.field }", "func (e ScopedRdsValidationError) Field() string { return e.field }", "func (e ResolveRequestValidationError) Field() string { return e.field }", "func (e PaymentInputValidationError) Field() string { return e.field }", "func (m *PatientrecordMutation) Field(name string) (ent.Value, bool) {\n\tswitch name {\n\tcase patientrecord.FieldName:\n\t\treturn m.Name()\n\t}\n\treturn nil, false\n}", "func (e BatchGetResponseValidationError) Field() string { return e.field }", "func (i *Item) GetValue(field string) string {\n\tif i == nil || len(i.Fields) == 0 {\n\t\treturn \"\"\n\t}\n\n\tsectionFilter := false\n\tsectionLabel := \"\"\n\tfieldLabel := field\n\tif strings.Contains(field, \".\") {\n\t\tparts := strings.Split(field, \".\")\n\n\t\t// Test to make sure the . 
isn't the last character\n\t\tif len(parts) == 2 {\n\t\t\tsectionFilter = true\n\t\t\tsectionLabel = parts[0]\n\t\t\tfieldLabel = parts[1]\n\t\t}\n\t}\n\n\tfor _, f := range i.Fields {\n\t\tif sectionFilter {\n\t\t\tif f.Section != nil {\n\t\t\t\tif sectionLabel != i.SectionLabelForID(f.Section.ID) {\n\t\t\t\t\tcontinue\n\t\t\t\t}\n\t\t\t}\n\t\t}\n\n\t\tif fieldLabel == f.Label {\n\t\t\treturn f.Value\n\t\t}\n\t}\n\n\treturn \"\"\n}", "func (m *RoomInfoMutation) Field(name string) (ent.Value, bool) {\n\tswitch name {\n\tcase roominfo.FieldInfo:\n\t\treturn m.Info()\n\t}\n\treturn nil, false\n}", "func (m *TokenMutation) Field(name string) (ent.Value, bool) {\n\tswitch name {\n\tcase token.FieldCreatedAt:\n\t\treturn m.CreatedAt()\n\tcase token.FieldUpdatedAt:\n\t\treturn m.UpdatedAt()\n\tcase token.FieldValue:\n\t\treturn m.Value()\n\t}\n\treturn nil, false\n}", "func (m *ResourceMutation) Field(name string) (ent.Value, bool) {\n\tswitch name {\n\tcase resource.FieldName:\n\t\treturn m.Name()\n\tcase resource.FieldType:\n\t\treturn m.GetType()\n\t}\n\treturn nil, false\n}", "func (e MovableObjectValidationError) Field() string { return e.field }", "func (m *CardMutation) Field(name string) (ent.Value, bool) {\n\tswitch name {\n\tcase card.FieldCreateTime:\n\t\treturn m.CreateTime()\n\tcase card.FieldUpdateTime:\n\t\treturn m.UpdateTime()\n\tcase card.FieldSuit:\n\t\treturn m.Suit()\n\tcase card.FieldValue:\n\t\treturn m.Value()\n\t}\n\treturn nil, false\n}", "func (m *EventMutation) Field(name string) (ent.Value, bool) {\n\tswitch name {\n\tcase event.FieldName:\n\t\treturn m.Name()\n\t}\n\treturn nil, false\n}", "func (m *BillMutation) Field(name string) (ent.Value, bool) {\n\tswitch name {\n\tcase bill.FieldQuantity:\n\t\treturn m.Quantity()\n\tcase bill.FieldAddedTime:\n\t\treturn m.AddedTime()\n\t}\n\treturn nil, false\n}", "func (m *StreetMutation) Field(name string) (ent.Value, bool) {\n\tswitch name {\n\tcase street.FieldName:\n\t\treturn m.Name()\n\t}\n\treturn nil, false\n}", "func (m *LengthtimeMutation) Field(name string) (ent.Value, bool) {\n\tswitch name {\n\tcase lengthtime.FieldLengthtime:\n\t\treturn m.Lengthtime()\n\t}\n\treturn nil, false\n}", "func (e AssessmentResultValidationError) Field() string { return e.field }", "func (s UserSet) FieldGet(field models.FieldName) *models.FieldInfo {\n\tres := s.Collection().Call(\"FieldGet\", field)\n\tresTyped, _ := res.(*models.FieldInfo)\n\treturn resTyped\n}", "func (e GetUserResponseValidationError) Field() string { return e.field }", "func (m *PatientrightsMutation) Field(name string) (ent.Value, bool) {\n\tswitch name {\n\tcase patientrights.FieldPermissionDate:\n\t\treturn m.PermissionDate()\n\t}\n\treturn nil, false\n}", "func (e GetStreamResponseValidationError) Field() string { return e.field }", "func (m *EquipmentrentalMutation) Field(name string) (ent.Value, bool) {\n\tswitch name {\n\tcase equipmentrental.FieldRENTALAMOUNT:\n\t\treturn m.RENTALAMOUNT()\n\tcase equipmentrental.FieldRENTALDATE:\n\t\treturn m.RENTALDATE()\n\tcase equipmentrental.FieldRETURNDATE:\n\t\treturn m.RETURNDATE()\n\t}\n\treturn nil, false\n}", "func (f *FieldHandler) Value(initZero bool) reflect.Value {\n\treturn f.field.reflectValueGetter(f.expr.ptr, initZero)\n}", "func (m *PurposeMutation) Field(name string) (ent.Value, bool) {\n\tswitch name {\n\tcase purpose.FieldObjective:\n\t\treturn m.Objective()\n\t}\n\treturn nil, false\n}", "func (e ApplicationPubSubsValidationError) Field() string { return e.field }", "func (f Unstructured) 
Field(field string) Fragment {\n\tif f.fields != nil {\n\t\treturn f.fields[field]\n\t}\n\treturn nil\n}", "func (e BodyResponseValidationError) Field() string { return e.field }", "func (m *CarMutation) Field(name string) (ent.Value, bool) {\n\tswitch name {\n\tcase car.FieldCreateTime:\n\t\treturn m.CreateTime()\n\tcase car.FieldUpdateTime:\n\t\treturn m.UpdateTime()\n\tcase car.FieldModel:\n\t\treturn m.Model()\n\tcase car.FieldRegisteredAt:\n\t\treturn m.RegisteredAt()\n\t}\n\treturn nil, false\n}", "func (m *CardMutation) Field(name string) (ent.Value, bool) {\n\tswitch name {\n\tcase card.FieldNumber:\n\t\treturn m.Number()\n\tcase card.FieldName:\n\t\treturn m.Name()\n\tcase card.FieldOwnerID:\n\t\treturn m.OwnerID()\n\t}\n\treturn nil, false\n}" ]
[ "0.71079886", "0.705458", "0.70306563", "0.70252305", "0.6945119", "0.69039124", "0.689789", "0.68854237", "0.68611896", "0.68137765", "0.6811531", "0.67632294", "0.6716657", "0.67018616", "0.66822076", "0.6671346", "0.66659707", "0.6661343", "0.66608155", "0.6660421", "0.665608", "0.6647752", "0.66360617", "0.6625801", "0.6617159", "0.66153616", "0.66153616", "0.661111", "0.6608895", "0.66083837", "0.6604208", "0.66008335", "0.65927887", "0.6587402", "0.65803015", "0.65671533", "0.6567071", "0.6564914", "0.65632343", "0.65630984", "0.654184", "0.6536053", "0.6530546", "0.6530526", "0.6528864", "0.65260595", "0.65179527", "0.6516745", "0.6516154", "0.6510159", "0.6510078", "0.65042776", "0.6501439", "0.6499975", "0.64988506", "0.649665", "0.6496221", "0.64947623", "0.649354", "0.6489089", "0.6488793", "0.64882225", "0.64859617", "0.6483642", "0.6479889", "0.64790434", "0.6472379", "0.6465228", "0.6459204", "0.6457627", "0.6452723", "0.64507645", "0.64495903", "0.64487314", "0.6448028", "0.64479464", "0.64474", "0.64456683", "0.64455897", "0.6444573", "0.64437336", "0.6443306", "0.6441888", "0.6441613", "0.6441039", "0.6439085", "0.6438874", "0.6434375", "0.64315784", "0.6430702", "0.6429934", "0.64209116", "0.6417538", "0.64174324", "0.6417134", "0.6411201", "0.64086837", "0.6406251", "0.6405251", "0.6404929", "0.64009386" ]
0.0
-1
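Editorial note (not part of the dataset records): this record and the two that follow all describe protoc-gen-validate's generated ValidationError accessors (Field, Reason, Cause, Key), which simply expose unexported struct fields. The sketch below is an assumed minimal equivalent written for illustration; the type name, field set, and Error formatting are assumptions, and only the accessor pattern is taken from the records.

// Illustrative sketch of the accessor pattern shown in these records.
package main

import "fmt"

type ValidationError struct {
	field  string
	reason string
	cause  error
	key    bool
}

// Accessors in the style of the generated code: each returns one field.
func (e ValidationError) Field() string  { return e.field }
func (e ValidationError) Reason() string { return e.reason }
func (e ValidationError) Cause() error   { return e.cause }
func (e ValidationError) Key() bool      { return e.key }

// Error makes the type usable as an ordinary Go error value.
func (e ValidationError) Error() string {
	return fmt.Sprintf("invalid field %s: %s", e.field, e.reason)
}

func main() {
	err := ValidationError{field: "Content", reason: "value length must be at least 1 runes"}
	fmt.Println(err.Error(), err.Field(), err.Reason(), err.Cause(), err.Key())
}

The same pattern covers the Reason and Cause records that follow: each generated accessor is a one-line getter over the error's unexported state.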
Reason function returns reason value.
func (e GetEventByIDResponse_OptionValidationError) Reason() string { return e.reason }
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "func GetReason(from Getter, t string) string {\n\tif c := Get(from, t); c != nil {\n\t\treturn c.Reason\n\t}\n\treturn \"\"\n}", "func GetReason(message report.IMessage) int32 {\n\tswitch message.MessageType() {\n\tcase \"+RSP\", \"+BSP\":\n\t\treturn getLocationReason(message)\n\tcase \"+EVT\", \"+BVT\":\n\t\treturn getEventCode(message)\n\n\t}\n\treturn int32(6)\n}", "func GetReason(message report.IMessage) int32 {\n\tswitch message.MessageType() {\n\tcase \"+RSP\", \"+BSP\":\n\t\treturn getLocationReason(message)\n\tcase \"+EVT\", \"+BVT\":\n\t\treturn getEventCode(message)\n\n\t}\n\treturn int32(6)\n}", "func (b *Base) GetReason() string {\n\treturn b.Reason\n}", "func (o ValidationOutput) Reason() pulumi.StringPtrOutput {\n\treturn o.ApplyT(func(v Validation) *string { return v.Reason }).(pulumi.StringPtrOutput)\n}", "func (s *Subscription) GetReason() string {\n\tif s == nil || s.Reason == nil {\n\t\treturn \"\"\n\t}\n\treturn *s.Reason\n}", "func GetReason(message report.IMessage) int32 {\n\tidt, found := message.GetValue(fields.DeviceType)\n\tif !found {\n\t\treturn 6 //periodical\n\t}\n\n\tdeviceType, valid := idt.(byte)\n\tif !valid {\n\t\treturn 6 //periodical\n\t}\n\n\tswitch deviceType {\n\tcase devicetypes.GV320:\n\t\treturn gv300.GetReason(message)\n\n\tcase devicetypes.GV55, devicetypes.GV55N:\n\t\treturn gv55.GetReason(message)\n\n\tcase devicetypes.GV55Lite, devicetypes.GV55NLite:\n\t\treturn gv55.GetReasonLite(message)\n\n\tcase devicetypes.GV75, devicetypes.GV75W:\n\t\treturn gv75.GetReason(message)\n\n\tcase devicetypes.GV55W:\n\t\treturn gv55w.GetReason(message)\n\n\tcase devicetypes.GV600W:\n\t\treturn gv600.GetReason(message)\n\tcase devicetypes.GV300W:\n\t\treturn gv300w.GetReason(message)\n\tdefault:\n\t\treturn gv55.GetReason(message)\n\t}\n}", "func (e MessageDValidationError) Reason() string { return e.reason }", "func (o LienOutput) Reason() pulumi.StringOutput {\n\treturn o.ApplyT(func(v *Lien) pulumi.StringOutput { return v.Reason }).(pulumi.StringOutput)\n}", "func (e BitStringValidationError) Reason() string { return e.reason }", "func (o JobConditionOutput) Reason() pulumi.StringPtrOutput {\n\treturn o.ApplyT(func(v JobCondition) *string { return v.Reason }).(pulumi.StringPtrOutput)\n}", "func Reason(v string) predicate.ProfileUKM {\n\treturn predicate.ProfileUKM(func(s *sql.Selector) {\n\t\ts.Where(sql.EQ(s.C(FieldReason), v))\n\t})\n}", "func (e MessageFValidationError) Reason() string { return e.reason }", "func (o ValidationPatchOutput) Reason() pulumi.StringPtrOutput {\n\treturn o.ApplyT(func(v ValidationPatch) *string { return v.Reason }).(pulumi.StringPtrOutput)\n}", "func (e ActiveHealthCheckValidationError) Reason() string { return e.reason }", "func (o *SecurityProblemEvent) GetReason() string {\n\tif o == nil || o.Reason == nil {\n\t\tvar ret string\n\t\treturn ret\n\t}\n\treturn *o.Reason\n}", "func (e EutracgiValidationError) Reason() string { return e.reason }", "func (resp *Response) Reason() string {\n\treturn resp.Status\n}", "func (n *Notification) GetReason() string {\n\tif n == nil || n.Reason == nil {\n\t\treturn \"\"\n\t}\n\treturn *n.Reason\n}", "func (s *SessionTrackerV1) GetReason() string {\n\treturn s.Spec.Reason\n}", "func (e MessageEValidationError) Reason() string { return e.reason }", "func (e RequirementRuleValidationError) Reason() string { return e.reason }", "func Reason(err error) string {\n\tif err == nil {\n\t\treturn \"\"\n\t}\n\tif reasoner, ok := err.(Reasoner); ok {\n\t\treturn reasoner.Reason()\n\t}\n\treturn 
\"\"\n}", "func (o MachineInstanceStatusConditionsOutput) Reason() pulumi.StringOutput {\n\treturn o.ApplyT(func(v MachineInstanceStatusConditions) string { return v.Reason }).(pulumi.StringOutput)\n}", "func (e NrtValidationError) Reason() string { return e.reason }", "func (o BuildStatusOutput) Reason() pulumi.StringPtrOutput {\n\treturn o.ApplyT(func(v BuildStatus) *string { return v.Reason }).(pulumi.StringPtrOutput)\n}", "func (e GetMessageResponseValidationError) Reason() string { return e.reason }", "func (o ApplicationStatusConditionsOutput) Reason() pulumi.StringOutput {\n\treturn o.ApplyT(func(v ApplicationStatusConditions) string { return v.Reason }).(pulumi.StringOutput)\n}", "func (e PassiveHealthCheckValidationError) Reason() string { return e.reason }", "func (e CardValidationError) Reason() string { return e.reason }", "func (e StatsdValidationError) Reason() string { return e.reason }", "func (e PciValidationError) Reason() string { return e.reason }", "func (o ApplicationStatusWorkflowStepsOutput) Reason() pulumi.StringPtrOutput {\n\treturn o.ApplyT(func(v ApplicationStatusWorkflowSteps) *string { return v.Reason }).(pulumi.StringPtrOutput)\n}", "func (o *AccessRequestData) GetReason() string {\n\tif o == nil || o.Reason == nil {\n\t\tvar ret string\n\t\treturn ret\n\t}\n\treturn *o.Reason\n}", "func (e LanguageValidationError) Reason() string { return e.reason }", "func (e CreditValidationError) Reason() string { return e.reason }", "func (e PaymentValidationError) Reason() string { return e.reason }", "func (e ResponseValidationError) Reason() string { return e.reason }", "func (e RdsValidationError) Reason() string { return e.reason }", "func (e CardHolderValidationError) Reason() string { return e.reason }", "func (e ActionValidationError) Reason() string { return e.reason }", "func (e SimpleResponseValidationError) Reason() string { return e.reason }", "func (e StatusResponseValidationError) Reason() string { return e.reason }", "func (o *V0037Node) GetReason() string {\n\tif o == nil || o.Reason == nil {\n\t\tvar ret string\n\t\treturn ret\n\t}\n\treturn *o.Reason\n}", "func (e ChannelPayRequestValidationError) Reason() string { return e.reason }", "func (e ChannelPayResponseValidationError) Reason() string { return e.reason }", "func (e RicControlMessagePriorityValidationError) Reason() string { return e.reason }", "func (e MaxPciValidationError) Reason() string { return e.reason }", "func (e LivenessResponseValidationError) Reason() string { return e.reason }", "func (e MaxPlmnValidationError) Reason() string { return e.reason }", "func (e SimpleRequestValidationError) Reason() string { return e.reason }", "func (e MessageCValidationError) Reason() string { return e.reason }", "func (se *StatusError) Reason() string {\n\treturn se.message\n}", "func (o *DeploymentsCondition) GetReason() string {\n\tif o == nil || o.Reason == nil {\n\t\tvar ret string\n\t\treturn ret\n\t}\n\treturn *o.Reason\n}", "func (e SkillValidationError) Reason() string { return e.reason }", "func (e GetDisscusRespValidationError) Reason() string { return e.reason }", "func (o BuildStatusPtrOutput) Reason() pulumi.StringPtrOutput {\n\treturn o.ApplyT(func(v *BuildStatus) *string {\n\t\tif v == nil {\n\t\t\treturn nil\n\t\t}\n\t\treturn v.Reason\n\t}).(pulumi.StringPtrOutput)\n}", "func (c *ContainerStatusResolver) Reason() *string {\n\treturn c.reason\n}", "func (e EarfcnValidationError) Reason() string { return e.reason }", "func (e CalculateComplianceRequestValidationError) Reason() string 
{ return e.reason }", "func (_this *CrashReportBody) Reason() *string {\n\tvar ret *string\n\tvalue := _this.Value_JS.Get(\"reason\")\n\tif value.Type() != js.TypeNull && value.Type() != js.TypeUndefined {\n\t\t__tmp := (value).String()\n\t\tret = &__tmp\n\t}\n\treturn ret\n}", "func (e HealthCheck_PayloadValidationError) Reason() string { return e.reason }", "func (e RetrieveMyCardsResponseValidationError) Reason() string { return e.reason }", "func (e CommonResponseValidationError) Reason() string { return e.reason }", "func (e GetMessageRequestValidationError) Reason() string { return e.reason }", "func (o StorageClusterStatusConditionsOutput) Reason() pulumi.StringPtrOutput {\n\treturn o.ApplyT(func(v StorageClusterStatusConditions) *string { return v.Reason }).(pulumi.StringPtrOutput)\n}", "func (e StateMachineResponseValidationError) Reason() string { return e.reason }", "func (e ArfcnValidationError) Reason() string { return e.reason }", "func (e NetworkPolicyValidationError) Reason() string { return e.reason }", "func (o *DataPlaneClusterUpdateStatusRequestConditions) GetReason() string {\n\tif o == nil || o.Reason == nil {\n\t\tvar ret string\n\t\treturn ret\n\t}\n\treturn *o.Reason\n}", "func (e MetricValidationError) Reason() string { return e.reason }", "func (o BuildRunStatusOutput) Reason() pulumi.StringPtrOutput {\n\treturn o.ApplyT(func(v BuildRunStatus) *string { return v.Reason }).(pulumi.StringPtrOutput)\n}", "func (e RecoverableError) Reason() string {\n\treturn e.reason\n}", "func (e MaxofMessageProtocolTestsValidationError) Reason() string { return e.reason }", "func (e ChannelNotifyResponseValidationError) Reason() string { return e.reason }", "func (e ResultValidationError) Reason() string { return e.reason }", "func (e TestSpecificationValidationError) Reason() string { return e.reason }", "func (e NonRecoverableError) Reason() string {\n\treturn e.reason\n}", "func (o JobStatusErrorOutput) Reason() pulumi.StringPtrOutput {\n\treturn o.ApplyT(func(v JobStatusError) *string { return v.Reason }).(pulumi.StringPtrOutput)\n}", "func (a Acknowledgement) Reason() error {\n\tswitch {\n\tcase a.State == ACK:\n\t\treturn nil\n\tcase a.State == NACK:\n\t\treturn errors.New(string(a.Message))\n\tdefault:\n\t\treturn errors.New(\"unknown acknowledgement status\")\n\t}\n}", "func (e UpdateMessageResponseValidationError) Reason() string { return e.reason }", "func (e WordValidationError) Reason() string { return e.reason }", "func (e GetDisscusReqValidationError) Reason() string { return e.reason }", "func (e CreatMessageResponseValidationError) Reason() string { return e.reason }", "func (o HorizontalPodAutoscalerConditionPatchOutput) Reason() pulumi.StringPtrOutput {\n\treturn o.ApplyT(func(v HorizontalPodAutoscalerConditionPatch) *string { return v.Reason }).(pulumi.StringPtrOutput)\n}", "func (o HorizontalPodAutoscalerConditionPatchOutput) Reason() pulumi.StringPtrOutput {\n\treturn o.ApplyT(func(v HorizontalPodAutoscalerConditionPatch) *string { return v.Reason }).(pulumi.StringPtrOutput)\n}", "func (e MetricImplementationValidationError) Reason() string { return e.reason }", "func (e CiliumCFValidationError) Reason() string { return e.reason }", "func (e FilterStateRuleValidationError) Reason() string { return e.reason }", "func (e CreateDisscusRespValidationError) Reason() string { return e.reason }", "func (e HealthCheckValidationError) Reason() string { return e.reason }", "func (e HealthCheckValidationError) Reason() string { return e.reason }", "func (e 
TwoOneofsValidationError) Reason() string { return e.reason }", "func (e AdminValidationError) Reason() string { return e.reason }", "func (o HorizontalPodAutoscalerConditionOutput) Reason() pulumi.StringPtrOutput {\n\treturn o.ApplyT(func(v HorizontalPodAutoscalerCondition) *string { return v.Reason }).(pulumi.StringPtrOutput)\n}", "func (o HorizontalPodAutoscalerConditionOutput) Reason() pulumi.StringPtrOutput {\n\treturn o.ApplyT(func(v HorizontalPodAutoscalerCondition) *string { return v.Reason }).(pulumi.StringPtrOutput)\n}", "func (e LivenessRequestValidationError) Reason() string { return e.reason }", "func (r *ReportStoryRequest) GetReason() (value ReportReasonClass) {\n\tif r == nil {\n\t\treturn\n\t}\n\treturn r.Reason\n}", "func (e AssessmentResultValidationError) Reason() string { return e.reason }", "func (e L7NetworkPolicyRuleValidationError) Reason() string { return e.reason }", "func (e NrarfcnValidationError) Reason() string { return e.reason }" ]
[ "0.78512263", "0.7759013", "0.7759013", "0.758723", "0.74332446", "0.74091107", "0.740494", "0.73673135", "0.73432285", "0.7330937", "0.7329657", "0.73138005", "0.72980094", "0.7293151", "0.72837216", "0.7275913", "0.7252345", "0.7230593", "0.72234565", "0.7222608", "0.7196587", "0.7186926", "0.7177811", "0.71720684", "0.71702856", "0.7168882", "0.7168033", "0.71623784", "0.7160162", "0.7157901", "0.7156796", "0.71499187", "0.71483266", "0.71435404", "0.7138927", "0.7134093", "0.7131485", "0.71212435", "0.7113703", "0.71134007", "0.7110416", "0.71102226", "0.71073544", "0.71044487", "0.7097571", "0.709562", "0.70931906", "0.7092116", "0.7085098", "0.70789874", "0.7077606", "0.707535", "0.7071573", "0.706842", "0.7067343", "0.70658314", "0.7065663", "0.70604813", "0.70554", "0.70413375", "0.7038985", "0.7036392", "0.70291436", "0.70268923", "0.7026706", "0.70261866", "0.7018986", "0.7011388", "0.70111495", "0.7009085", "0.7005406", "0.70025146", "0.7000965", "0.69991565", "0.6995616", "0.6992607", "0.6992276", "0.69910586", "0.6989737", "0.69873315", "0.6984515", "0.6983248", "0.6979003", "0.6976954", "0.69759", "0.69759", "0.6974406", "0.69741553", "0.6972589", "0.69723344", "0.69695055", "0.69695055", "0.69690573", "0.69686645", "0.69659555", "0.69659555", "0.69656986", "0.69630307", "0.69612694", "0.69515", "0.69511986" ]
0.0
-1
Cause function returns cause value.
func (e GetEventByIDResponse_OptionValidationError) Cause() error { return e.cause }
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "func Cause(err error) error {\n\tswitch err.(type) {\n\tcase Causable:\n\t\treturn err.(Causable).Cause()\n\t}\n\treturn nil\n}", "func (e errWithCause) Cause() error {\n\treturn e.cause\n}", "func Cause(e error) error {\n\tswitch e := e.(type) {\n\tcase *wrap:\n\t\treturn e.Cause()\n\tcase UserError:\n\t\treturn e.Cause()\n\tdefault:\n\t\treturn e\n\t}\n}", "func (e *Error) Cause() error {\n\treturn e.Unwrap()\n}", "func (e *wrap) Cause() error {\n\treturn e.cause\n}", "func Cause(err error) error {\n\treturn errors.Cause(err)\n}", "func Cause(err error) error {\n\treturn errors.Cause(err)\n}", "func Cause(err error) error {\n\treturn errors.Cause(err)\n}", "func Cause(err error) error {\n\treturn errors.Cause(err)\n}", "func Cause(err error) error {\n\treturn errors.Cause(err)\n}", "func Cause(err error) error {\n\treturn errors.Cause(err)\n}", "func Cause(err error) error {\n\tif err == nil {\n\t\treturn nil\n\t}\n\tif e, ok := err.(iCause); ok {\n\t\treturn e.Cause()\n\t}\n\tif e, ok := err.(iNext); ok {\n\t\treturn Cause(e.Next())\n\t}\n\tif e, ok := err.(iUnwrap); ok {\n\t\treturn Cause(e.Unwrap())\n\t}\n\treturn err\n}", "func (e *Error) Cause() error {\n\treturn e.err\n}", "func (e *errorT) Cause() error {\n\treturn e.err\n}", "func (s *Error) Cause() error {\n\treturn s.underlying\n}", "func (e *Error) Cause() error {\n\treturn e.Err\n}", "func (ec Error) Cause() error {\n\treturn ec.error\n}", "func Cause(err error) error {\n\tif err, ok := err.(*wrappedError); ok {\n\t\treturn err.Cause()\n\t}\n\treturn err\n}", "func (e *Err) Cause() error {\n\treturn e.cause\n}", "func Cause(err error) error {\n\ttype causer interface {\n\t\tCause() error\n\t}\n\n\tfor err != nil {\n\t\te, ok := err.(causer)\n\t\tif !ok {\n\t\t\treturn err\n\t\t}\n\t\terr = e.Cause()\n\t}\n\treturn nil\n}", "func (e Error) Cause() error {\n\treturn e.cause\n}", "func (e *RunError) Cause() error {\n\tif e.Inner != nil {\n\t\treturn e.Inner\n\t}\n\treturn e\n}", "func (e *wrappedError) Cause() error {\n\tif e.previous == nil {\n\t\treturn e\n\t}\n\tswitch err := e.previous.(type) {\n\tcase *wrappedError:\n\t\treturn err.Cause()\n\tdefault:\n\t\treturn err\n\t}\n}", "func Cause(err error) error {\n\tvar (\n\t\tcauser Causer\n\t\tok bool\n\t)\n\tfor err != nil {\n\t\tcauser, ok = err.(Causer)\n\t\tif !ok {\n\t\t\tbreak\n\t\t}\n\t\terr = causer.Cause()\n\t}\n\treturn err\n}", "func (e *OpError) Cause() error {\n\treturn e.Err\n}", "func (err *gooseError) Cause() error {\n\treturn err.cause\n}", "func (e *detailedError) Cause() error {\n\treturn e.cause\n}", "func (err *ExitError) Cause() error {\n\treturn err.Err\n}", "func (ce *ClientError) Cause() error {\n\treturn ce.err\n}", "func Cause(err error) error {\n\tif w, ok := err.(*Wrapped); ok {\n\t\t// if root level error\n\t\tif len(w.Errors) > 0 {\n\t\t\treturn w.Errors[0]\n\t\t}\n\t\t// already extracted error\n\t\treturn w\n\t}\n\treturn err\n}", "func Cause(err error) (error, bool) { // nolint: golint, staticcheck, stylecheck\n\terrWithContext, ok := err.(ContextError)\n\tif !ok {\n\t\treturn nil, false\n\t}\n\treturn errWithContext.Cause(), true\n}", "func Cause(err error) error {\n\tfor err != nil {\n\t\te, ok := err.(causer)\n\t\tif !ok {\n\t\t\tbreak\n\t\t}\n\t\terr = e.Cause()\n\t}\n\treturn err\n}", "func Cause(err error) error {\n\tfor err != nil {\n\t\tcause, ok := err.(causer)\n\t\tif !ok {\n\t\t\tbreak\n\t\t}\n\t\terr = cause.Cause()\n\t}\n\treturn err\n}", "func (e UnencodableValue) Cause() error {\n\treturn e.Err\n}", "func Cause(err error) 
error {\n\ttype wrapper interface {\n\t\tUnwrap() error\n\t}\n\tfor err != nil {\n\t\tcause, ok := err.(wrapper)\n\t\tif !ok {\n\t\t\tbreak\n\t\t}\n\t\terr = cause.Unwrap()\n\t}\n\treturn err\n}", "func (w *pipeError) Cause() error { return errors.Cause(w.error) }", "func (e *opaqueWrapper) Cause() error { return e.cause }", "func (e *opaqueWrapper) Cause() error { return e.cause }", "func (e ResolveRequestValidationError) Cause() error { return e.cause }", "func Cause(err error) error {\n\tfor {\n\t\tuerr := Unwrap(err)\n\t\tif uerr == nil {\n\t\t\treturn err\n\t\t}\n\t\terr = uerr\n\t}\n}", "func Cause(err error) error {\n\tfor {\n\t\tif e, ok := err.(errorCause); ok {\n\t\t\tif cause := e.Cause(); cause != nil {\n\t\t\t\terr = cause\n\t\t\t\tcontinue\n\t\t\t}\n\t\t}\n\t\treturn err\n\t}\n}", "func (e InternalUpstreamTransportValidationError) Cause() error { return e.cause }", "func (e EutracgiValidationError) Cause() error { return e.cause }", "func (w *withCode) Cause() error { return w.cause }", "func (e UpsertEventRequestValidationError) Cause() error { return e.cause }", "func (e PciValidationError) Cause() error { return e.cause }", "func (e NoOneofsValidationError) Cause() error { return e.cause }", "func (e SimpleRequestValidationError) Cause() error { return e.cause }", "func Cause(err error) error {\n\tfor err != nil {\n\t\tmrpErr, ok := err.(Error)\n\t\tif !ok {\n\t\t\tbreak\n\t\t}\n\t\terr = gErrors.Cause(mrpErr.originalError)\n\t}\n\treturn err\n}", "func (e *withDomain) Cause() error { return e.cause }", "func (e LoggingValidationError) Cause() error { return e.cause }", "func (e CiliumCFValidationError) Cause() error { return e.cause }", "func (e AssessmentResultValidationError) Cause() error { return e.cause }", "func (e LoggingCFValidationError) Cause() error { return e.cause }", "func Cause(err error) error {\n\tfor err != nil {\n\t\tunwraped := errors.Unwrap(err)\n\t\tif unwraped == nil {\n\t\t\treturn err\n\t\t}\n\t\terr = unwraped\n\t}\n\treturn err\n}", "func (e NrtValidationError) Cause() error { return e.cause }", "func (e ResolveResponseValidationError) Cause() error { return e.cause }", "func (e StateChangeValidationError) Cause() error { return e.cause }", "func (e SXGValidationError) Cause() error { return e.cause }", "func (e EutracellIdentityValidationError) Cause() error { return e.cause }", "func (e WorkflowComponentValidationError) Cause() error { return e.cause }", "func (e MessageFValidationError) Cause() error { return e.cause }", "func (e EarfcnValidationError) Cause() error { return e.cause }", "func (e ActiveHealthCheckValidationError) Cause() error { return e.cause }", "func Cause(e interface{}) ECode {\n\tif e == nil {\n\t\treturn &ecode{code: 0}\n\t}\n\tif str, ok := e.(string); ok {\n\t\treturn &ecode{code: 500, message: str}\n\t}\n\terr, ok := e.(error)\n\tif !ok {\n\t\treturn &ecode{code: 500, message: reflect.TypeOf(e).Name()}\n\t}\n\tec, ok := errors.Cause(err).(ECode)\n\tif ok {\n\t\treturn ec\n\t}\n\treturn &ecode{code: 500, message: err.Error()}\n}", "func (e UpstreamValidationError) Cause() error { return e.cause }", "func (e UpstreamValidationError) Cause() error { return e.cause }", "func (e TransactionValidationError) Cause() error { return e.cause }", "func (e MessageCValidationError) Cause() error { return e.cause }", "func WithCause(err, cause error) error {\n\treturn errWithCause{\n\t\terror: err,\n\t\tcause: cause,\n\t}\n}", "func (e ActionValidationError) Cause() error { return e.cause }", "func (e 
AssessEvidenceRequestValidationError) Cause() error { return e.cause }", "func (e Upstream_TimeoutValidationError) Cause() error { return e.cause }", "func (e BootstrapValidationError) Cause() error { return e.cause }", "func (e TwoValidOneofsValidationError) Cause() error { return e.cause }", "func (e RdsValidationError) Cause() error { return e.cause }", "func (e MaxPciValidationError) Cause() error { return e.cause }", "func (e AdminValidationError) Cause() error { return e.cause }", "func (e RequirementRuleValidationError) Cause() error { return e.cause }", "func (e ResultValidationError) Cause() error { return e.cause }", "func (e InternalUpstreamTransport_MetadataValueSourceValidationError) Cause() error { return e.cause }", "func (e MaintemplateComponentValidationError) Cause() error { return e.cause }", "func (e RedactedValidationError) Cause() error { return e.cause }", "func (e CreatMessageRequestValidationError) Cause() error { return e.cause }", "func (e NrcgiValidationError) Cause() error { return e.cause }", "func (e UpsertEventResponseValidationError) Cause() error { return e.cause }", "func (e NrarfcnValidationError) Cause() error { return e.cause }", "func (e TwoOneofsValidationError) Cause() error { return e.cause }", "func (e PassiveHealthCheckValidationError) Cause() error { return e.cause }", "func (e MessageEValidationError) Cause() error { return e.cause }", "func (e GetEventByIDRequestValidationError) Cause() error { return e.cause }", "func (e ArfcnValidationError) Cause() error { return e.cause }", "func (e TenantValidationError) Cause() error { return e.cause }", "func (e GetRequestValidationError) Cause() error { return e.cause }", "func (e GetRequestValidationError) Cause() error { return e.cause }", "func (e StateValidationError) Cause() error { return e.cause }", "func (e MinioComponentValidationError) Cause() error { return e.cause }", "func (e LatencyFaultValidationError) Cause() error { return e.cause }", "func (e GetDisscusReqValidationError) Cause() error { return e.cause }", "func (e UpdateTodoRequestValidationError) Cause() error { return e.cause }", "func (e ManifestProjectCFValidationError) Cause() error { return e.cause }" ]
[ "0.8261931", "0.79593104", "0.7896341", "0.7866004", "0.77969515", "0.7710267", "0.7710267", "0.7710267", "0.7710267", "0.7710267", "0.7710267", "0.7710245", "0.76848143", "0.7658625", "0.76571184", "0.7650075", "0.76476574", "0.7625474", "0.7623792", "0.7621357", "0.7582015", "0.74775916", "0.74656785", "0.7424877", "0.7423645", "0.7384076", "0.73215586", "0.7306271", "0.7286286", "0.72688353", "0.7258698", "0.7210708", "0.7192562", "0.7107885", "0.7104621", "0.7038758", "0.701369", "0.701369", "0.69629866", "0.6927608", "0.692207", "0.69208515", "0.68938124", "0.6858123", "0.684976", "0.6846449", "0.6830235", "0.6825922", "0.68016034", "0.6800864", "0.6791525", "0.6778742", "0.67324674", "0.673176", "0.67316306", "0.6729585", "0.67155087", "0.6714904", "0.67148", "0.66955864", "0.668878", "0.66879916", "0.66822165", "0.66821957", "0.66791916", "0.6673011", "0.6673011", "0.6668595", "0.66512465", "0.66507614", "0.66484874", "0.6636346", "0.6633876", "0.66313785", "0.66304046", "0.6622965", "0.66204447", "0.6618046", "0.6617173", "0.66125673", "0.66055393", "0.6603956", "0.66004616", "0.6600119", "0.6587435", "0.6580937", "0.6578089", "0.6569218", "0.656675", "0.65664583", "0.6565433", "0.6560722", "0.65606016", "0.6553194", "0.6553194", "0.65503496", "0.6549731", "0.6546909", "0.6544467", "0.65359867", "0.6531173" ]
0.0
-1
Key function returns key value.
func (e GetEventByIDResponse_OptionValidationError) Key() bool { return e.key }
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "func (m *KeyValue) GetKey()(*string) {\n return m.key\n}", "func (f binaryEqualsFunc) key() Key {\n\treturn f.k\n}", "func (m *KeyUint) Key() driver.Value { return driver.Value(m.ID) }", "func (m *OMap) Key(n int) string {\n\treturn m.keys[n]\n}", "func (t *Type) Key() *Type", "func (f nullFunc) key() Key {\n\treturn f.k\n}", "func (v Variable) Key() string {\n\treturn (string)(v)\n}", "func (i GinJwtSignAlgorithm) Key() string {\n\tif val, ok := _GinJwtSignAlgorithmValueToKeyMap[i]; ok {\n\t\t// found\n\t\treturn val\n\t} else {\n\t\t// not found\n\t\treturn \"\"\n\t}\n}", "func (g *Generator) GetKey(K string) interface{} {\n\treturn g.data[K]\n}", "func (m *SearchBucket) GetKey()(*string) {\n return m.key\n}", "func (f *Filter) getKey(key string) string {\n\tif f.HashKeys {\n\t\th := sha1.New()\n\t\ts := h.Sum([]byte(key))\n\t\treturn fmt.Sprintf(\"%x\", s)\n\t}\n\treturn key\n}", "func getKey(ing *extensions.Ingress, t *testing.T) string {\n\tkey, err := keyFunc(ing)\n\tif err != nil {\n\t\tt.Fatalf(\"Unexpected error getting key for Ingress %v: %v\", ing.Name, err)\n\t}\n\treturn key\n}", "func (f *field) Key() string {\n\treturn f.k\n}", "func (i GinBindType) Key() string {\n\tif val, ok := _GinBindTypeValueToKeyMap[i]; ok {\n\t\t// found\n\t\treturn val\n\t} else {\n\t\t// not found\n\t\treturn \"\"\n\t}\n}", "func (c Node) GetKey() string {\n\treturn c.key\n}", "func (m *RegistryKeyState) GetKey()(*string) {\n return m.key\n}", "func (akv StringKeyValue) Key() string {\n\treturn akv.orig.Key\n}", "func (a AddItem) Key() string { return string(a) }", "func (area *MineArea) GetKey() string {\n\treturn GetKey(area.X, area.Y)\n}", "func (d *Disk) getKey(p *DiskParams) []byte {\n\treturn []byte(time_util.TimeToName(time.Unix(p.ExicutionTime, 0), fmt.Sprintf(\"%x\", d.hasher.Sum(nil))))\n}", "func (e *OrderedMapElement[K, V]) Key() K {\n\treturn e.element.key\n}", "func getKey(cluster *clusteroperator.Cluster, t *testing.T) string {\n\tif key, err := controller.KeyFunc(cluster); err != nil {\n\t\tt.Errorf(\"Unexpected error getting key for Cluster %v: %v\", cluster.Name, err)\n\t\treturn \"\"\n\t} else {\n\t\treturn key\n\t}\n}", "func cacheKeyFunc(obj interface{}) (string, error) {\n\tkey := obj.(*cacheEntry).key\n\treturn key, nil\n}", "func (node *Node) Key() interface{} {\n\treturn fmt.Sprintf(\"%v\", node.contents)\n}", "func (s *Mem) Key(key interface{}) string {\n\treturn fmt.Sprintf(\"%v-%v\", s.prefix, key)\n}", "func (vrfs *VRFShare) GetKey() datastore.Key {\n\treturn datastore.ToKey(fmt.Sprintf(\"%v\", vrfs.Round))\n}", "func stringKeyFunc(obj interface{}) (string, error) {\n\tkey := obj.(*nodeidentity.Info).InstanceID\n\treturn key, nil\n}", "func (e Enum) GetKey(value any) string {\n\tfor k, v := range e {\n\t\tif reflect.DeepEqual(v, value) {\n\t\t\treturn k\n\t\t}\n\t}\n\treturn \"\"\n}", "func (m *Map) Key() Type { return m.key }", "func getKey(w http.ResponseWriter, ps httprouter.Params) (string, bool){\n\treturn ps.ByName(\"id\"), true\n}", "func (v *Value) GetKey() *string {\n\tret := C.zj_GetKey(v.V)\n\tif ret == nil {\n\t\treturn nil\n\t}\n\tretStr := C.GoString(ret)\n\treturn &retStr\n}", "func (f *Factor) Key() string { return f.ID }", "func (c *KeyValueChanger) Key() (string, error) {\n\tif c.err != nil {\n\t\treturn \"\", c.err\n\t}\n\treturn c.node.content.key().(string), nil\n}", "func (a DataNodeKV) Key() string {\n\treturn a.K\n}", "func GetKey(allkeys [][]byte, loc Where) []byte {\n\tif loc == Left {\n\t\treturn allkeys[0]\n\t}\n\tif loc == Right 
{\n\t\treturn allkeys[len(allkeys)-1]\n\t}\n\t// select a random index between 1 and allkeys-2\n\t// nolint:gosec\n\tidx := rand.Int()%(len(allkeys)-2) + 1\n\treturn allkeys[idx]\n}", "func KeyFunc(name, namespace string) string {\n\tif len(namespace) == 0 {\n\t\treturn name\n\t}\n\treturn namespace + \"/\" + name\n}", "func (it *Iterator) Key() string { return it.n.k }", "func (s *session) getKey() string {\n\treturn s.uuid\n}", "func (o SchedulingNodeAffinityOutput) Key() pulumi.StringPtrOutput {\n\treturn o.ApplyT(func(v SchedulingNodeAffinity) *string { return v.Key }).(pulumi.StringPtrOutput)\n}", "func (i SNSProtocol) Key() string {\n\tif val, ok := _SNSProtocolValueToKeyMap[i]; ok {\n\t\t// found\n\t\treturn val\n\t} else {\n\t\t// not found\n\t\treturn \"\"\n\t}\n}", "func (it *Iterator) Key() interface{} { return it.n.k }", "func getkey(key ...interface{}) interface{} {\n\tif len(key) > 0 {\n\t\treturn key[0]\n\t}\n\n\treturn nil\n}", "func (i SNSSubscribeAttribute) Key() string {\n\tif val, ok := _SNSSubscribeAttributeValueToKeyMap[i]; ok {\n\t\t// found\n\t\treturn val\n\t} else {\n\t\t// not found\n\t\treturn \"\"\n\t}\n}", "func (it *iterator) Key() []byte {\n\tif len(it.keys) > 0 {\n\t\treturn []byte(it.keys[0])\n\t}\n\treturn nil\n}", "func (this *DefaultHandler) GetKey(xesRedis redo.XesRedisBase) (ret string) {\n\tdefer func() {\n\t\tif xesRedis.GetCtx() == nil {\n\t\t\treturn\n\t\t}\n\t\tbench := xesRedis.GetCtx().Value(\"IS_BENCHMARK\")\n\t\tif cast.ToString(bench) == \"1\" {\n\t\t\tret = \"benchmark_\" + ret\n\t\t}\n\t}()\n\n\tkeyInfo := this.getKeyInfo(xesRedis)\n\tkey := cast.ToString(keyInfo[\"key\"])\n\tif key == \"\" {\n\t\tret = xesRedis.GetKeyName()\n\t\treturn\n\t}\n\tret = fmt.Sprintf(key, (xesRedis.GetKeyParams())...)\n\treturn\n}", "func (st *MemStorage) GetKey(gun, role string) (algorithm string, public []byte, err error) {\n\t// no need for lock. 
It's ok to return nil if an update\n\t// wasn't observed\n\tg, ok := st.keys[gun]\n\tif !ok {\n\t\treturn \"\", nil, &ErrNoKey{gun: gun}\n\t}\n\tk, ok := g[role]\n\tif !ok {\n\t\treturn \"\", nil, &ErrNoKey{gun: gun}\n\t}\n\n\treturn k.algorithm, k.public, nil\n}", "func (e *EntrySet) Get(key string) string {\n return e.keys[key]\n}", "func (v *V) Key() string {\n\treturn v.key\n}", "func (it *Iter) Key() byte { return it.top().key }", "func (s Stash) Key() string {\n\tvals := utils.MapValues(s.payload)\n\tif len(vals) < 1 {\n\t\treturn \"\"\n\t}\n\n\treturn fmt.Sprintf(\"$%s\", vals[0])\n}", "func (i SNSPlatformApplicationAttribute) Key() string {\n\tif val, ok := _SNSPlatformApplicationAttributeValueToKeyMap[i]; ok {\n\t\t// found\n\t\treturn val\n\t} else {\n\t\t// not found\n\t\treturn \"\"\n\t}\n}", "func (o Operator) Key() string {\n\treturn fmt.Sprintf(\"operator.%s\", o.Aid)\n}", "func (i *StringIterator) Key() Object {\n\treturn &Int{Value: int64(i.i - 1)}\n}", "func (mci *XMCacheIterator) Key() []byte {\n\tif mci.err != nil || mci.dir == dirReleased {\n\t\treturn nil\n\t}\n\tswitch mci.index {\n\tcase 0, 1:\n\t\treturn mci.iters[mci.index].Key()\n\tcase 2:\n\t\tif mci.mc.isPenetrate {\n\t\t\treturn mci.mIter.Key()\n\t\t}\n\t\treturn nil\n\t}\n\treturn nil\n}", "func (s *Arena) getKey(offset uint32, size uint16) []byte {\n\treturn s.data[offset : offset+uint32(size)]\n}", "func (o ReservationAffinityOutput) Key() pulumi.StringPtrOutput {\n\treturn o.ApplyT(func(v ReservationAffinity) *string { return v.Key }).(pulumi.StringPtrOutput)\n}", "func (f DefaultField) Key() string {\n\treturn f.K\n}", "func Key(v string) predicate.Blob {\n\treturn predicate.Blob(\n\t\tfunc(s *sql.Selector) {\n\t\t\ts.Where(sql.EQ(s.C(FieldKey), v))\n\t\t},\n\t)\n}", "func (m Match) Key() string {\n\treturn fmt.Sprintf(\"match:%s\", m.ID())\n}", "func (d *Activity) KeyVal() string {\n\treturn d.ExteralID\n}", "func (key twofishKey) Key() []byte {\n\treturn key[:]\n}", "func getKey(data string) string {\n\tsign := md5.Sum([]byte(data))\n\tsignStr := fmt.Sprintf(\"%x\", sign)\n\treturn signStr[:7]\n}", "func (l *LangPackStringPluralized) GetKey() (value string) {\n\tif l == nil {\n\t\treturn\n\t}\n\treturn l.Key\n}", "func (t Task) Key() string {\n\treturn fmt.Sprintf(\"%s:%s\", t.Name, t.ID)\n}", "func (k Keys) RangeKey() interface{} { return k[1] }", "func (d *DStarLite) keyFor(s *dStarLiteNode) key {\n\t/*\n\t procedure CalculateKey(s)\n\t {01”} return [min(g(s), rhs(s)) + h(s_start, s) + k_m; min(g(s), rhs(s))];\n\t*/\n\tk := key{1: math.Min(s.g, s.rhs)}\n\tk[0] = k[1] + d.heuristic(d.s.Node, s.Node) + d.keyModifier\n\treturn k\n}", "func (stateID StateID) Key() string {\n\treturn string(stateID.LastAppHash)\n}", "func (m *Metric) GetKey() string {\n\tif m == nil || m.Key == nil {\n\t\treturn \"\"\n\t}\n\treturn *m.Key\n}", "func (u User) Key() interface{} {\n\treturn u.ID\n}", "func (b *BitSet) Key() string {\n\tif b == nil {\n\t\treturn \"\"\n\t} else {\n\t\treturn string(b.Bits.Bytes())\n\t}\n}", "func (e EnumByte) Key() EnumByteKey {\n return EnumByteKey(e)\n}", "func (n *lnode) key() []byte {\n\tbuf := (*[maxAllocSize]byte)(unsafe.Pointer(n))\n\treturn buf[n.pos : n.pos+n.ksize]\n}", "func (p *pv) key() pvKey {\n\treturn newPVKey(p.Cluster, p.Name)\n}", "func (i *MapIterator) Key() Object {\n\tk := i.k[i.i-1]\n\treturn &String{Value: k}\n}", "func (k *KVItem) Key() (interface{}, error) {\n\tvar cKey unsafe.Pointer\n\tvar keySize C.uint64_t\n\tvar keyType C.tiledb_datatype_t\n\tret := 
C.tiledb_kv_item_get_key(k.context.tiledbContext, k.tiledbKVItem, &cKey, &keyType, &keySize)\n\n\tif ret != C.TILEDB_OK {\n\t\treturn nil, fmt.Errorf(\"Error getting key for KVItem: %s\", k.context.LastError())\n\t}\n\n\tswitch Datatype(keyType) {\n\tcase TILEDB_INT8:\n\t\t// If the key size is greater than the size of a single value in bytes it is an array\n\t\telements := int(keySize) / C.sizeof_int8_t\n\t\tif elements > 1 {\n\t\t\ttmpslice := (*[1 << 30]C.int8_t)(unsafe.Pointer(cKey))[:elements:elements]\n\t\t\tretSlice := make([]int8, elements)\n\t\t\tfor i, s := range tmpslice {\n\t\t\t\tretSlice[i] = int8(s)\n\t\t\t}\n\t\t\treturn retSlice, nil\n\t\t} else {\n\t\t\treturn int8(*(*C.int8_t)(cKey)), nil\n\t\t}\n\tcase TILEDB_INT16:\n\t\t// If the key size is greater than the size of a single value in bytes it is an array\n\t\telements := int(keySize) / C.sizeof_int16_t\n\t\tif elements > 1 {\n\t\t\ttmpslice := (*[1 << 30]C.int16_t)(unsafe.Pointer(cKey))[:elements:elements]\n\t\t\tretSlice := make([]int16, elements)\n\t\t\tfor i, s := range tmpslice {\n\t\t\t\tretSlice[i] = int16(s)\n\t\t\t}\n\t\t\treturn retSlice, nil\n\t\t} else {\n\t\t\treturn int16(*(*C.int16_t)(cKey)), nil\n\t\t}\n\tcase TILEDB_INT32:\n\t\t// If the key size is greater than the size of a single value in bytes it is an array\n\t\telements := int(keySize) / C.sizeof_int32_t\n\t\tif elements > 1 {\n\t\t\ttmpslice := (*[1 << 30]C.int32_t)(unsafe.Pointer(cKey))[:elements:elements]\n\t\t\tretSlice := make([]int32, elements)\n\t\t\tfor i, s := range tmpslice {\n\t\t\t\tretSlice[i] = int32(s)\n\t\t\t}\n\t\t\treturn retSlice, nil\n\t\t} else {\n\t\t\treturn int32(*(*C.int32_t)(cKey)), nil\n\t\t}\n\tcase TILEDB_INT64:\n\t\t// If the key size is greater than the size of a single value in bytes it is an array\n\t\telements := int(keySize) / C.sizeof_int64_t\n\t\tif elements > 1 {\n\t\t\ttmpslice := (*[1 << 30]C.int64_t)(unsafe.Pointer(cKey))[:elements:elements]\n\t\t\tretSlice := make([]int64, elements)\n\t\t\tfor i, s := range tmpslice {\n\t\t\t\tretSlice[i] = int64(s)\n\t\t\t}\n\t\t\treturn retSlice, nil\n\t\t} else {\n\t\t\treturn int64(*(*C.int64_t)(cKey)), nil\n\t\t}\n\tcase TILEDB_UINT8:\n\t\t// If the key size is greater than the size of a single value in bytes it is an array\n\t\telements := int(keySize) / C.sizeof_uint8_t\n\t\tif elements > 1 {\n\t\t\ttmpslice := (*[1 << 30]C.uint8_t)(unsafe.Pointer(cKey))[:elements:elements]\n\t\t\tretSlice := make([]uint8, elements)\n\t\t\tfor i, s := range tmpslice {\n\t\t\t\tretSlice[i] = uint8(s)\n\t\t\t}\n\t\t\treturn retSlice, nil\n\t\t} else {\n\t\t\treturn int8(*(*C.uint8_t)(cKey)), nil\n\t\t}\n\tcase TILEDB_UINT16:\n\t\t// If the key size is greater than the size of a single value in bytes it is an array\n\t\telements := int(keySize) / C.sizeof_uint16_t\n\t\tif elements > 1 {\n\t\t\ttmpslice := (*[1 << 30]C.uint16_t)(unsafe.Pointer(cKey))[:elements:elements]\n\t\t\tretSlice := make([]uint16, elements)\n\t\t\tfor i, s := range tmpslice {\n\t\t\t\tretSlice[i] = uint16(s)\n\t\t\t}\n\t\t\treturn retSlice, nil\n\t\t} else {\n\t\t\treturn int16(*(*C.uint16_t)(cKey)), nil\n\t\t}\n\tcase TILEDB_UINT32:\n\t\t// If the key size is greater than the size of a single value in bytes it is an array\n\t\telements := int(keySize) / C.sizeof_uint32_t\n\t\tif elements > 1 {\n\t\t\ttmpslice := (*[1 << 30]C.uint32_t)(unsafe.Pointer(cKey))[:elements:elements]\n\t\t\tretSlice := make([]uint32, elements)\n\t\t\tfor i, s := range tmpslice {\n\t\t\t\tretSlice[i] = uint32(s)\n\t\t\t}\n\t\t\treturn 
retSlice, nil\n\t\t} else {\n\t\t\treturn int32(*(*C.uint32_t)(cKey)), nil\n\t\t}\n\tcase TILEDB_UINT64:\n\t\t// If the key size is greater than the size of a single value in bytes it is an array\n\t\telements := int(keySize) / C.sizeof_uint64_t\n\t\tif elements > 1 {\n\t\t\ttmpslice := (*[1 << 30]C.uint64_t)(unsafe.Pointer(cKey))[:elements:elements]\n\t\t\tretSlice := make([]uint64, elements)\n\t\t\tfor i, s := range tmpslice {\n\t\t\t\tretSlice[i] = uint64(s)\n\t\t\t}\n\t\t\treturn retSlice, nil\n\t\t} else {\n\t\t\treturn int64(*(*C.uint64_t)(cKey)), nil\n\t\t}\n\tcase TILEDB_FLOAT32:\n\t\t// If the key size is greater than the size of a single value in bytes it is an array\n\t\telements := int(keySize) / C.sizeof_float\n\t\tif elements > 1 {\n\t\t\ttmpslice := (*[1 << 30]C.float)(unsafe.Pointer(cKey))[:elements:elements]\n\t\t\tretSlice := make([]float32, elements)\n\t\t\tfor i, s := range tmpslice {\n\t\t\t\tretSlice[i] = float32(s)\n\t\t\t}\n\t\t\treturn retSlice, nil\n\t\t} else {\n\t\t\treturn float32(*(*C.float)(cKey)), nil\n\t\t}\n\tcase TILEDB_FLOAT64:\n\t\t// If the key size is greater than the size of a single value in bytes it is an array\n\t\telements := int(keySize) / C.sizeof_double\n\t\tif elements > 1 {\n\t\t\ttmpslice := (*[1 << 30]C.double)(unsafe.Pointer(cKey))[:elements:elements]\n\t\t\tretSlice := make([]float64, elements)\n\t\t\tfor i, s := range tmpslice {\n\t\t\t\tretSlice[i] = float64(s)\n\t\t\t}\n\t\t\treturn retSlice, nil\n\t\t} else {\n\t\t\treturn float64(*(*C.double)(cKey)), nil\n\t\t}\n\tcase TILEDB_CHAR:\n\t\telements := int(keySize) / C.sizeof_char\n\t\treturn C.GoStringN((*C.char)(cKey), C.int(elements)), nil\n\n\tdefault:\n\t\treturn nil, fmt.Errorf(\"Unsupported tiledb key type: %v\", keyType)\n\t}\n\n\treturn nil, fmt.Errorf(\"Error getting key for KVItem\")\n}", "func (u Users) Key(luid *windows.LUID) (int64, error) {\r\n\tif luid == nil {\r\n\t\treturn 0, errors.New(\"got empty LUID pointer\")\r\n\t}\r\n\tkey := int64(int64(luid.HighPart<<32) + int64(luid.LowPart))\r\n\treturn key, nil\r\n}", "func (a *Anime) Key() string {\n\treturn fmt.Sprintf(\"anime:%d\", a.ID)\n}", "func (m MapEntry) Key() interface{} {\n\treturn m.key\n}", "func (f KeyMakerFunc) KeyFor(r *http.Request) string {\n\treturn f(r)\n}", "func (t *TimeSeries) GetKey() string {\n\treturn t.key\n}", "func (m *Map) Get(key string) string {\n\tif m.IsEmpty() {\n\t\treturn \"\"\n\t}\n\thash := m.hash([]byte(key))\n\tn := node{hash: hash, key: key}\n\titer := floor(&m.nodes.Tree, &n)\n\tif iter == m.nodes.End() {\n\t\titer = m.nodes.Begin()\n\t}\n\treturn iter.Node().Key.(*node).key\n}", "func (t *ScheduledTask) Key() string {\n\treturn fmt.Sprintf(taskKeyFormat, keyPrefixScheduled, t.ID, t.score)\n}", "func (it *iterator) Key() []byte {\n\treturn it.current.key\n}", "func (eln *EmptyLeafNode) GetKey() []byte {\n\treturn nil\n}", "func (h dataUsageHash) Key() string {\n\treturn string(h)\n}", "func (c *Container) Key() string {\n\tc.Lock()\n\tdefer c.Unlock()\n\treturn c.ID\n}", "func (c Repository) GetKey(key string) string {\n\tval, err := c.Client.Get(key).Result()\n\n\tif err != nil {\n\t\tlog.Println(err)\n\t}\n\n\treturn val\n}", "func (f Base) Key() string {\n\treturn f.key\n}", "func (o StudioComponentScriptParameterKeyValueOutput) Key() pulumi.StringPtrOutput {\n\treturn o.ApplyT(func(v StudioComponentScriptParameterKeyValue) *string { return v.Key }).(pulumi.StringPtrOutput)\n}", "func (o *ResourceDefinitionFilter) GetKey() string {\n\tif o == nil {\n\t\tvar ret 
string\n\t\treturn ret\n\t}\n\n\treturn o.Key\n}", "func (it *KeyAccess_Iterator) Key() interface{} {\n\treturn it.node.key\n}", "func (b Bucket) Key() interface{} {\n\treturn b[\"key\"]\n}", "func (m *Map) Get(key string) string {\n\tif m.IsEmpty() {\n\t\treturn \"\"\n\t}\n\n\thash := int(m.hash([]byte(key)))\n\n\t// Binary search for appropriate replica.\n\tidx := sort.Search(len(m.keys), func(i int) bool { return m.keys[i] >= hash })\n\n\t// Means we have cycled back to the first replica.\n\tif idx == len(m.keys) {\n\t\tidx = 0\n\t}\n\n\treturn m.hashMap[m.keys[idx]]\n}", "func (c *Counter) GetKey() string {\n\treturn c.key\n}", "func Key(id string, fallback string) Reference {\n\treturn key{id, fallback}\n}", "func (a *PositionalAttribute) Key() string {\n\treturn AttrPositionalIndex + strconv.Itoa(a.Index)\n}", "func (n *Node) Key() interface{} {\n\treturn n.key\n}", "func (e Timing) Key() string {\n\treturn e.Name\n}", "func Key(key string) query.Extractor {\n\treturn &keyExtractor{key}\n}", "func (i *Iterator) Key() []byte {\n\treturn i.iterator.Item().KeyCopy(nil)\n}", "func (m *Metric) Key() string {\n\treturn fmt.Sprintf(\"<%s%d%s>\", m.Name, m.Timestamp, m.Tags)\n}" ]
[ "0.7397974", "0.703695", "0.7026126", "0.69730234", "0.69701165", "0.69472975", "0.682121", "0.67752403", "0.6702173", "0.6691155", "0.66223186", "0.6602185", "0.66009104", "0.65937275", "0.65673846", "0.6555592", "0.65304273", "0.6521155", "0.6511681", "0.65062934", "0.64982766", "0.64867014", "0.6477575", "0.6462233", "0.6456774", "0.6456152", "0.6448241", "0.6435275", "0.6423325", "0.6412427", "0.64096636", "0.6403262", "0.6395327", "0.63929945", "0.6382585", "0.6378694", "0.63715774", "0.63671046", "0.635377", "0.63430053", "0.63418114", "0.6339266", "0.63258415", "0.6319039", "0.630293", "0.6300368", "0.6298253", "0.6296133", "0.6295445", "0.6281786", "0.6279424", "0.6277453", "0.6277033", "0.62735796", "0.6269087", "0.6262938", "0.62600297", "0.6259835", "0.6242855", "0.62427336", "0.6239893", "0.6226979", "0.62228185", "0.6216291", "0.62118614", "0.6209014", "0.62075627", "0.619765", "0.6197426", "0.61971486", "0.6196739", "0.6192416", "0.6191223", "0.6183839", "0.6179522", "0.6177141", "0.6172575", "0.61719537", "0.6170614", "0.6162783", "0.61570954", "0.6154456", "0.6152929", "0.615149", "0.61509156", "0.61395836", "0.6138672", "0.61365676", "0.613636", "0.61338246", "0.6133771", "0.6129422", "0.61284614", "0.612092", "0.6119081", "0.61121005", "0.611087", "0.6106958", "0.6106701", "0.61020154", "0.6100722" ]
0.0
-1
ReadResponse reads a server response into the received o.
func (o *PatchHyperflexServerModelsMoidReader) ReadResponse(response runtime.ClientResponse, consumer runtime.Consumer) (interface{}, error) { switch response.Code() { case 201: result := NewPatchHyperflexServerModelsMoidCreated() if err := result.readResponse(response, consumer, o.formats); err != nil { return nil, err } return result, nil default: result := NewPatchHyperflexServerModelsMoidDefault(response.Code()) if err := result.readResponse(response, consumer, o.formats); err != nil { return nil, err } if response.Code()/100 == 2 { return result, nil } return nil, result } }
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "func (d *ResourceHandler) ReadResponse(dataOut unsafe.Pointer, bytesToRead int32, bytesRead *int32, callback *Callback) int32 {\n\treturn lookupResourceHandlerProxy(d.Base()).ReadResponse(d, dataOut, bytesToRead, bytesRead, callback)\n}", "func (o *GetServerReader) ReadResponse(response runtime.ClientResponse, consumer runtime.Consumer) (interface{}, error) {\n\tswitch response.Code() {\n\tcase 200:\n\t\tresult := NewGetServerOK()\n\t\tif err := result.readResponse(response, consumer, o.formats); err != nil {\n\t\t\treturn nil, err\n\t\t}\n\t\treturn result, nil\n\tdefault:\n\t\treturn nil, runtime.NewAPIError(\"response status code does not match any response statuses defined for this endpoint in the swagger spec\", response, response.Code())\n\t}\n}", "func (o *InteractionBindReader) ReadResponse(response runtime.ClientResponse, consumer runtime.Consumer) (interface{}, error) {\n\tswitch response.Code() {\n\n\tcase 200:\n\t\tresult := NewInteractionBindOK()\n\t\tif err := result.readResponse(response, consumer, o.formats); err != nil {\n\t\t\treturn nil, err\n\t\t}\n\t\treturn result, nil\n\n\tcase 404:\n\t\tresult := NewInteractionBindNotFound()\n\t\tif err := result.readResponse(response, consumer, o.formats); err != nil {\n\t\t\treturn nil, err\n\t\t}\n\t\treturn nil, result\n\n\tcase 500:\n\t\tresult := NewInteractionBindInternalServerError()\n\t\tif err := result.readResponse(response, consumer, o.formats); err != nil {\n\t\t\treturn nil, err\n\t\t}\n\t\treturn nil, result\n\n\tdefault:\n\t\treturn nil, runtime.NewAPIError(\"unknown error\", response, response.Code())\n\t}\n}", "func (o *InteractionUnbindReader) ReadResponse(response runtime.ClientResponse, consumer runtime.Consumer) (interface{}, error) {\n\tswitch response.Code() {\n\n\tcase 200:\n\t\tresult := NewInteractionUnbindOK()\n\t\tif err := result.readResponse(response, consumer, o.formats); err != nil {\n\t\t\treturn nil, err\n\t\t}\n\t\treturn result, nil\n\n\tcase 404:\n\t\tresult := NewInteractionUnbindNotFound()\n\t\tif err := result.readResponse(response, consumer, o.formats); err != nil {\n\t\t\treturn nil, err\n\t\t}\n\t\treturn nil, result\n\n\tcase 500:\n\t\tresult := NewInteractionUnbindInternalServerError()\n\t\tif err := result.readResponse(response, consumer, o.formats); err != nil {\n\t\t\treturn nil, err\n\t\t}\n\t\treturn nil, result\n\n\tdefault:\n\t\treturn nil, runtime.NewAPIError(\"unknown error\", response, response.Code())\n\t}\n}", "func (r *ResponseReader) ReadResponse(req *Request) (res *Response, err error) {\n\tres = CreateEmptyResponse(req)\n\t_, err = readFirstLine(r, res)\n\tif err != nil {\n\t\treturn\n\t}\n\n\terr = readHeaders(r, res)\n\tif err != nil {\n\t\treturn\n\t}\n\n\t_, err = readBodyContent(r, res)\n\tif err != nil {\n\t\treturn\n\t}\n\treturn res, nil\n}", "func (c *Conn) ReadResponse(rmsg *Response) error {\n\tdata, err := c.ReadDataUnit()\n\tif err != nil {\n\t\treturn err\n\t}\n\tcolor.Printf(\"@{c}<!-- RESPONSE -->\\n%s\\n\\n\", string(data))\n\terr = xml.Unmarshal(data, rmsg)\n\tif err != nil {\n\t\treturn err\n\t}\n\t// color.Fprintf(os.Stderr, \"@{y}%s\\n\", spew.Sprintf(\"%+v\", msg))\n\tif len(rmsg.Results) != 0 {\n\t\tr := rmsg.Results[0]\n\t\tif r.IsError() {\n\t\t\treturn r\n\t\t}\n\t}\n\treturn nil\n}", "func (o *VerifyConnectionReader) ReadResponse(response runtime.ClientResponse, consumer runtime.Consumer) (interface{}, error) {\n\tswitch response.Code() {\n\tcase 200:\n\t\tresult := NewVerifyConnectionOK()\n\t\tif err := result.readResponse(response, 
consumer, o.formats); err != nil {\n\t\t\treturn nil, err\n\t\t}\n\t\treturn result, nil\n\tdefault:\n\t\treturn nil, runtime.NewAPIError(\"response status code does not match any response statuses defined for this endpoint in the swagger spec\", response, response.Code())\n\t}\n}", "func (o *GetAvailableReader) ReadResponse(response runtime.ClientResponse, consumer runtime.Consumer) (interface{}, error) {\n\tswitch response.Code() {\n\n\tcase 200:\n\t\tresult := NewGetAvailableOK()\n\t\tif err := result.readResponse(response, consumer, o.formats); err != nil {\n\t\t\treturn nil, err\n\t\t}\n\t\treturn result, nil\n\n\tdefault:\n\t\treturn nil, runtime.NewAPIError(\"unknown error\", response, response.Code())\n\t}\n}", "func (o *ClosePositionReader) ReadResponse(response runtime.ClientResponse, consumer runtime.Consumer) (interface{}, error) {\n\tswitch response.Code() {\n\n\tcase 200:\n\t\tresult := NewClosePositionOK()\n\t\tif err := result.readResponse(response, consumer, o.formats); err != nil {\n\t\t\treturn nil, err\n\t\t}\n\t\treturn result, nil\n\n\tcase 400:\n\t\tresult := NewClosePositionBadRequest()\n\t\tif err := result.readResponse(response, consumer, o.formats); err != nil {\n\t\t\treturn nil, err\n\t\t}\n\t\treturn nil, result\n\n\tcase 401:\n\t\tresult := NewClosePositionUnauthorized()\n\t\tif err := result.readResponse(response, consumer, o.formats); err != nil {\n\t\t\treturn nil, err\n\t\t}\n\t\treturn nil, result\n\n\tcase 404:\n\t\tresult := NewClosePositionNotFound()\n\t\tif err := result.readResponse(response, consumer, o.formats); err != nil {\n\t\t\treturn nil, err\n\t\t}\n\t\treturn nil, result\n\n\tcase 405:\n\t\tresult := NewClosePositionMethodNotAllowed()\n\t\tif err := result.readResponse(response, consumer, o.formats); err != nil {\n\t\t\treturn nil, err\n\t\t}\n\t\treturn nil, result\n\n\tdefault:\n\t\treturn nil, runtime.NewAPIError(\"unknown error\", response, response.Code())\n\t}\n}", "func (o *DescribeServerReader) ReadResponse(response runtime.ClientResponse, consumer runtime.Consumer) (interface{}, error) {\n\tswitch response.Code() {\n\tcase 200:\n\t\tresult := NewDescribeServerOK()\n\t\tif err := result.readResponse(response, consumer, o.formats); err != nil {\n\t\t\treturn nil, err\n\t\t}\n\t\treturn result, nil\n\tcase 400:\n\t\tresult := NewDescribeServerBadRequest()\n\t\tif err := result.readResponse(response, consumer, o.formats); err != nil {\n\t\t\treturn nil, err\n\t\t}\n\t\treturn nil, result\n\tcase 404:\n\t\tresult := NewDescribeServerNotFound()\n\t\tif err := result.readResponse(response, consumer, o.formats); err != nil {\n\t\t\treturn nil, err\n\t\t}\n\t\treturn nil, result\n\tcase 409:\n\t\tresult := NewDescribeServerConflict()\n\t\tif err := result.readResponse(response, consumer, o.formats); err != nil {\n\t\t\treturn nil, err\n\t\t}\n\t\treturn nil, result\n\tcase 500:\n\t\tresult := NewDescribeServerInternalServerError()\n\t\tif err := result.readResponse(response, consumer, o.formats); err != nil {\n\t\t\treturn nil, err\n\t\t}\n\t\treturn nil, result\n\tdefault:\n\t\treturn nil, runtime.NewAPIError(\"response status code does not match any response statuses defined for this endpoint in the swagger spec\", response, response.Code())\n\t}\n}", "func (o *GetServerSessionReader) ReadResponse(response runtime.ClientResponse, consumer runtime.Consumer) (interface{}, error) {\n\tswitch response.Code() {\n\tcase 200:\n\t\tresult := NewGetServerSessionOK()\n\t\tif err := result.readResponse(response, consumer, o.formats); err != nil 
{\n\t\t\treturn nil, err\n\t\t}\n\t\treturn result, nil\n\tcase 400:\n\t\tresult := NewGetServerSessionBadRequest()\n\t\tif err := result.readResponse(response, consumer, o.formats); err != nil {\n\t\t\treturn nil, err\n\t\t}\n\t\treturn result, nil\n\tcase 401:\n\t\tresult := NewGetServerSessionUnauthorized()\n\t\tif err := result.readResponse(response, consumer, o.formats); err != nil {\n\t\t\treturn nil, err\n\t\t}\n\t\treturn result, nil\n\tcase 404:\n\t\tresult := NewGetServerSessionNotFound()\n\t\tif err := result.readResponse(response, consumer, o.formats); err != nil {\n\t\t\treturn nil, err\n\t\t}\n\t\treturn result, nil\n\tcase 500:\n\t\tresult := NewGetServerSessionInternalServerError()\n\t\tif err := result.readResponse(response, consumer, o.formats); err != nil {\n\t\t\treturn nil, err\n\t\t}\n\t\treturn result, nil\n\n\tdefault:\n\t\tdata, err := ioutil.ReadAll(response.Body())\n\t\tif err != nil {\n\t\t\treturn nil, err\n\t\t}\n\n\t\treturn nil, fmt.Errorf(\"Requested GET /dsmcontroller/namespaces/{namespace}/servers/{podName}/session returns an error %d: %s\", response.Code(), string(data))\n\t}\n}", "func (o *StartReader) ReadResponse(response runtime.ClientResponse, consumer runtime.Consumer) (interface{}, error) {\n\tswitch response.Code() {\n\n\tcase 200:\n\t\tresult := NewStartOK()\n\t\tif err := result.readResponse(response, consumer, o.formats); err != nil {\n\t\t\treturn nil, err\n\t\t}\n\t\treturn result, nil\n\n\tdefault:\n\t\treturn nil, runtime.NewAPIError(\"unknown error\", response, response.Code())\n\t}\n}", "func (resp *PharosResponse) readResponse() {\n\tif !resp.hasBeenRead && resp.Response != nil && resp.Response.Body != nil {\n\t\tresp.data, resp.Error = ioutil.ReadAll(resp.Response.Body)\n\t\tresp.Response.Body.Close()\n\t\tresp.hasBeenRead = true\n\t}\n}", "func (o *HelloWorldReader) ReadResponse(response runtime.ClientResponse, consumer runtime.Consumer) (interface{}, error) {\n\tswitch response.Code() {\n\tcase 200:\n\t\tresult := NewHelloWorldOK()\n\t\tif err := result.readResponse(response, consumer, o.formats); err != nil {\n\t\t\treturn nil, err\n\t\t}\n\t\treturn result, nil\n\tcase 400:\n\t\tresult := NewHelloWorldBadRequest()\n\t\tif err := result.readResponse(response, consumer, o.formats); err != nil {\n\t\t\treturn nil, err\n\t\t}\n\t\treturn nil, result\n\tcase 500:\n\t\tresult := NewHelloWorldInternalServerError()\n\t\tif err := result.readResponse(response, consumer, o.formats); err != nil {\n\t\t\treturn nil, err\n\t\t}\n\t\treturn nil, result\n\n\tdefault:\n\t\treturn nil, runtime.NewAPIError(\"unknown error\", response, response.Code())\n\t}\n}", "func (reader *BasicRpcReader) ReadResponse(r io.Reader, method string, requestID int32, resp proto.Message) error {\n\trrh := &hadoop.RpcResponseHeaderProto{}\n\terr := readRPCPacket(r, rrh, resp)\n\tif err != nil {\n\t\treturn err\n\t} else if int32(rrh.GetCallId()) != requestID {\n\t\treturn errors.New(\"unexpected sequence number\")\n\t} else if rrh.GetStatus() != hadoop.RpcResponseHeaderProto_SUCCESS {\n\t\treturn &NamenodeError{\n\t\t\tmethod: method,\n\t\t\tmessage: rrh.GetErrorMsg(),\n\t\t\tcode: int(rrh.GetErrorDetail()),\n\t\t\texception: rrh.GetExceptionClassName(),\n\t\t}\n\t}\n\n\treturn nil\n}", "func (o *UpdateAntivirusServerReader) ReadResponse(response runtime.ClientResponse, consumer runtime.Consumer) (interface{}, error) {\n\tswitch response.Code() {\n\n\tcase 204:\n\t\tresult := NewUpdateAntivirusServerNoContent()\n\t\tif err := result.readResponse(response, consumer, 
o.formats); err != nil {\n\t\t\treturn nil, err\n\t\t}\n\t\treturn result, nil\n\n\tdefault:\n\t\tresult := NewUpdateAntivirusServerDefault(response.Code())\n\t\tif err := result.readResponse(response, consumer, o.formats); err != nil {\n\t\t\treturn nil, err\n\t\t}\n\t\tif response.Code()/100 == 2 {\n\t\t\treturn result, nil\n\t\t}\n\t\treturn nil, result\n\t}\n}", "func (o *HasEventsReader) ReadResponse(response runtime.ClientResponse, consumer runtime.Consumer) (interface{}, error) {\n\tswitch response.Code() {\n\tcase 200:\n\t\tresult := NewHasEventsOK()\n\t\tif err := result.readResponse(response, consumer, o.formats); err != nil {\n\t\t\treturn nil, err\n\t\t}\n\t\treturn result, nil\n\tcase 401:\n\t\tresult := NewHasEventsUnauthorized()\n\t\tif err := result.readResponse(response, consumer, o.formats); err != nil {\n\t\t\treturn nil, err\n\t\t}\n\t\treturn nil, result\n\tcase 403:\n\t\tresult := NewHasEventsForbidden()\n\t\tif err := result.readResponse(response, consumer, o.formats); err != nil {\n\t\t\treturn nil, err\n\t\t}\n\t\treturn nil, result\n\n\tdefault:\n\t\treturn nil, runtime.NewAPIError(\"unknown error\", response, response.Code())\n\t}\n}", "func (o *GetV2Reader) ReadResponse(response runtime.ClientResponse, consumer runtime.Consumer) (interface{}, error) {\n\tswitch response.Code() {\n\tcase 200:\n\t\tresult := NewGetV2OK()\n\t\tif err := result.readResponse(response, consumer, o.formats); err != nil {\n\t\t\treturn nil, err\n\t\t}\n\t\treturn result, nil\n\tcase 500:\n\t\tresult := NewGetV2InternalServerError()\n\t\tif err := result.readResponse(response, consumer, o.formats); err != nil {\n\t\t\treturn nil, err\n\t\t}\n\t\treturn nil, result\n\n\tdefault:\n\t\treturn nil, runtime.NewAPIError(\"unknown error\", response, response.Code())\n\t}\n}", "func (o *SaveReader) ReadResponse(response runtime.ClientResponse, consumer runtime.Consumer) (interface{}, error) {\n\tswitch response.Code() {\n\n\tcase 204:\n\t\tresult := NewSaveNoContent()\n\t\tif err := result.readResponse(response, consumer, o.formats); err != nil {\n\t\t\treturn nil, err\n\t\t}\n\t\treturn result, nil\n\n\tcase 500:\n\t\tresult := NewSaveInternalServerError()\n\t\tif err := result.readResponse(response, consumer, o.formats); err != nil {\n\t\t\treturn nil, err\n\t\t}\n\t\treturn nil, result\n\n\tdefault:\n\t\treturn nil, runtime.NewAPIError(\"unknown error\", response, response.Code())\n\t}\n}", "func (o *TestWriteReader) ReadResponse(response runtime.ClientResponse, consumer runtime.Consumer) (interface{}, error) {\n\tswitch response.Code() {\n\tcase 200:\n\t\tresult := NewTestWriteOK()\n\t\tif err := result.readResponse(response, consumer, o.formats); err != nil {\n\t\t\treturn nil, err\n\t\t}\n\t\treturn result, nil\n\tcase 401:\n\t\tresult := NewTestWriteUnauthorized()\n\t\tif err := result.readResponse(response, consumer, o.formats); err != nil {\n\t\t\treturn nil, err\n\t\t}\n\t\treturn nil, result\n\n\tdefault:\n\t\treturn nil, runtime.NewAPIError(\"response status code does not match any response statuses defined for this endpoint in the swagger spec\", response, response.Code())\n\t}\n}", "func (o *AllConnectionsReader) ReadResponse(response runtime.ClientResponse, consumer runtime.Consumer) (interface{}, error) {\n\tswitch response.Code() {\n\tcase 200:\n\t\tresult := NewAllConnectionsOK()\n\t\tif err := result.readResponse(response, consumer, o.formats); err != nil {\n\t\t\treturn nil, err\n\t\t}\n\t\treturn result, nil\n\tcase 400:\n\t\tresult := NewAllConnectionsBadRequest()\n\t\tif 
err := result.readResponse(response, consumer, o.formats); err != nil {\n\t\t\treturn nil, err\n\t\t}\n\t\treturn nil, result\n\tcase 404:\n\t\tresult := NewAllConnectionsNotFound()\n\t\tif err := result.readResponse(response, consumer, o.formats); err != nil {\n\t\t\treturn nil, err\n\t\t}\n\t\treturn nil, result\n\n\tdefault:\n\t\treturn nil, runtime.NewAPIError(\"unknown error\", response, response.Code())\n\t}\n}", "func (o *SendDataToDeviceReader) ReadResponse(response runtime.ClientResponse, consumer runtime.Consumer) (interface{}, error) {\n\tswitch response.Code() {\n\tcase 200:\n\t\tresult := NewSendDataToDeviceOK()\n\t\tif err := result.readResponse(response, consumer, o.formats); err != nil {\n\t\t\treturn nil, err\n\t\t}\n\t\treturn result, nil\n\tcase 400:\n\t\tresult := NewSendDataToDeviceBadRequest()\n\t\tif err := result.readResponse(response, consumer, o.formats); err != nil {\n\t\t\treturn nil, err\n\t\t}\n\t\treturn nil, result\n\tcase 500:\n\t\tresult := NewSendDataToDeviceInternalServerError()\n\t\tif err := result.readResponse(response, consumer, o.formats); err != nil {\n\t\t\treturn nil, err\n\t\t}\n\t\treturn nil, result\n\n\tdefault:\n\t\treturn nil, runtime.NewAPIError(\"unknown error\", response, response.Code())\n\t}\n}", "func (o *HealthNoopReader) ReadResponse(response runtime.ClientResponse, consumer runtime.Consumer) (interface{}, error) {\n\tswitch response.Code() {\n\tcase 200:\n\t\tresult := NewHealthNoopOK()\n\t\tif err := result.readResponse(response, consumer, o.formats); err != nil {\n\t\t\treturn nil, err\n\t\t}\n\t\treturn result, nil\n\n\tdefault:\n\t\treturn nil, runtime.NewAPIError(\"unknown error\", response, response.Code())\n\t}\n}", "func (o *PutOutOfRotationReader) ReadResponse(response runtime.ClientResponse, consumer runtime.Consumer) (interface{}, error) {\n\tswitch response.Code() {\n\n\tcase 204:\n\t\tresult := NewPutOutOfRotationNoContent()\n\t\tresult.HttpResponse = response\n\t\tif err := result.readResponse(response, consumer, o.formats); err != nil {\n\t\t\treturn nil, err\n\t\t}\n\t\treturn result, nil\n\tdefault:\n\t\terrorResult := kbcommon.NewKillbillError(response.Code())\n\t\tif err := consumer.Consume(response.Body(), &errorResult); err != nil && err != io.EOF {\n\t\t\treturn nil, err\n\t\t}\n\t\treturn nil, errorResult\n\t}\n}", "func (o *GetReader) ReadResponse(response runtime.ClientResponse, consumer runtime.Consumer) (interface{}, error) {\n\tswitch response.Code() {\n\tcase 200:\n\t\tresult := NewGetOK()\n\t\tif err := result.readResponse(response, consumer, o.formats); err != nil {\n\t\t\treturn nil, err\n\t\t}\n\t\treturn result, nil\n\n\tdefault:\n\t\treturn nil, runtime.NewAPIError(\"unknown error\", response, response.Code())\n\t}\n}", "func (o *StatusReader) ReadResponse(response runtime.ClientResponse, consumer runtime.Consumer) (interface{}, error) {\n\tswitch response.Code() {\n\tcase 200:\n\t\tresult := NewStatusOK()\n\t\tif err := result.readResponse(response, consumer, o.formats); err != nil {\n\t\t\treturn nil, err\n\t\t}\n\t\treturn result, nil\n\tcase 401:\n\t\tresult := NewStatusUnauthorized()\n\t\tif err := result.readResponse(response, consumer, o.formats); err != nil {\n\t\t\treturn nil, err\n\t\t}\n\t\treturn nil, result\n\tcase 403:\n\t\tresult := NewStatusForbidden()\n\t\tif err := result.readResponse(response, consumer, o.formats); err != nil {\n\t\t\treturn nil, err\n\t\t}\n\t\treturn nil, result\n\n\tdefault:\n\t\treturn nil, runtime.NewAPIError(\"unknown error\", response, 
response.Code())\n\t}\n}", "func (o *ReplaceServerReader) ReadResponse(response runtime.ClientResponse, consumer runtime.Consumer) (interface{}, error) {\n\tswitch response.Code() {\n\tcase 200:\n\t\tresult := NewReplaceServerOK()\n\t\tif err := result.readResponse(response, consumer, o.formats); err != nil {\n\t\t\treturn nil, err\n\t\t}\n\t\treturn result, nil\n\tcase 202:\n\t\tresult := NewReplaceServerAccepted()\n\t\tif err := result.readResponse(response, consumer, o.formats); err != nil {\n\t\t\treturn nil, err\n\t\t}\n\t\treturn result, nil\n\tcase 400:\n\t\tresult := NewReplaceServerBadRequest()\n\t\tif err := result.readResponse(response, consumer, o.formats); err != nil {\n\t\t\treturn nil, err\n\t\t}\n\t\treturn nil, result\n\tcase 404:\n\t\tresult := NewReplaceServerNotFound()\n\t\tif err := result.readResponse(response, consumer, o.formats); err != nil {\n\t\t\treturn nil, err\n\t\t}\n\t\treturn nil, result\n\tdefault:\n\t\tresult := NewReplaceServerDefault(response.Code())\n\t\tif err := result.readResponse(response, consumer, o.formats); err != nil {\n\t\t\treturn nil, err\n\t\t}\n\t\tif response.Code()/100 == 2 {\n\t\t\treturn result, nil\n\t\t}\n\t\treturn nil, result\n\t}\n}", "func ReadResponse(r *bfe_bufio.Reader, req *Request) (*Response, error) {\n\ttp := textproto.NewReader(r)\n\tresp := &Response{\n\t\tRequest: req,\n\t}\n\n\t// Parse the first line of the response.\n\tline, err := tp.ReadLine()\n\tif err != nil {\n\t\tif err == io.EOF {\n\t\t\terr = io.ErrUnexpectedEOF\n\t\t}\n\t\treturn nil, err\n\t}\n\tf := strings.SplitN(line, \" \", 3)\n\tif len(f) < 2 {\n\t\treturn nil, &badStringError{\"malformed HTTP response\", line}\n\t}\n\treasonPhrase := \"\"\n\tif len(f) > 2 {\n\t\treasonPhrase = f[2]\n\t}\n\tresp.Status = f[1] + \" \" + reasonPhrase\n\tresp.StatusCode, err = strconv.Atoi(f[1])\n\tif err != nil {\n\t\treturn nil, &badStringError{\"malformed HTTP status code\", f[1]}\n\t}\n\n\tresp.Proto = f[0]\n\tvar ok bool\n\tif resp.ProtoMajor, resp.ProtoMinor, ok = ParseHTTPVersion(resp.Proto); !ok {\n\t\treturn nil, &badStringError{\"malformed HTTP version\", resp.Proto}\n\t}\n\n\t// Parse the response headers.\n\tmimeHeader, err := tp.ReadMIMEHeader()\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\tresp.Header = Header(mimeHeader)\n\n\tfixPragmaCacheControl(resp.Header)\n\n\terr = readTransfer(resp, r)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\n\treturn resp, nil\n}", "func (o *PostChatroomsChannelHashReadReader) ReadResponse(response runtime.ClientResponse, consumer runtime.Consumer) (interface{}, error) {\n\tswitch response.Code() {\n\n\tcase 200:\n\t\tresult := NewPostChatroomsChannelHashReadOK()\n\t\tif err := result.readResponse(response, consumer, o.formats); err != nil {\n\t\t\treturn nil, err\n\t\t}\n\t\treturn result, nil\n\n\tcase 403:\n\t\tresult := NewPostChatroomsChannelHashReadForbidden()\n\t\tif err := result.readResponse(response, consumer, o.formats); err != nil {\n\t\t\treturn nil, err\n\t\t}\n\t\treturn nil, result\n\n\tdefault:\n\t\treturn nil, runtime.NewAPIError(\"unknown error\", response, response.Code())\n\t}\n}", "func (o *TogglePacketGeneratorsReader) ReadResponse(response runtime.ClientResponse, consumer runtime.Consumer) (interface{}, error) {\n\tswitch response.Code() {\n\tcase 201:\n\t\tresult := NewTogglePacketGeneratorsCreated()\n\t\tif err := result.readResponse(response, consumer, o.formats); err != nil {\n\t\t\treturn nil, err\n\t\t}\n\t\treturn result, nil\n\tdefault:\n\t\treturn nil, runtime.NewAPIError(\"response status 
code does not match any response statuses defined for this endpoint in the swagger spec\", response, response.Code())\n\t}\n}", "func (o *FrontPutBinaryReader) ReadResponse(response runtime.ClientResponse, consumer runtime.Consumer) (interface{}, error) {\n\tswitch response.Code() {\n\n\tcase 200:\n\t\tresult := NewFrontPutBinaryOK()\n\t\tif err := result.readResponse(response, consumer, o.formats); err != nil {\n\t\t\treturn nil, err\n\t\t}\n\t\treturn result, nil\n\n\tdefault:\n\t\treturn nil, runtime.NewAPIError(\"unknown error\", response, response.Code())\n\t}\n}", "func (o *SystemPingReader) ReadResponse(response runtime.ClientResponse, consumer runtime.Consumer) (interface{}, error) {\n\tswitch response.Code() {\n\n\tcase 200:\n\t\tresult := NewSystemPingOK()\n\t\tif err := result.readResponse(response, consumer, o.formats); err != nil {\n\t\t\treturn nil, err\n\t\t}\n\t\treturn result, nil\n\n\tcase 500:\n\t\tresult := NewSystemPingInternalServerError()\n\t\tif err := result.readResponse(response, consumer, o.formats); err != nil {\n\t\t\treturn nil, err\n\t\t}\n\t\treturn nil, result\n\n\tdefault:\n\t\treturn nil, runtime.NewAPIError(\"unknown error\", response, response.Code())\n\t}\n}", "func (o *SendDummyAlertReader) ReadResponse(response runtime.ClientResponse, consumer runtime.Consumer) (interface{}, error) {\n\tswitch response.Code() {\n\n\tcase 200:\n\t\tresult := NewSendDummyAlertOK()\n\t\tif err := result.readResponse(response, consumer, o.formats); err != nil {\n\t\t\treturn nil, err\n\t\t}\n\t\treturn result, nil\n\n\tcase 400:\n\t\tresult := NewSendDummyAlertBadRequest()\n\t\tif err := result.readResponse(response, consumer, o.formats); err != nil {\n\t\t\treturn nil, err\n\t\t}\n\t\treturn nil, result\n\n\tcase 404:\n\t\tresult := NewSendDummyAlertNotFound()\n\t\tif err := result.readResponse(response, consumer, o.formats); err != nil {\n\t\t\treturn nil, err\n\t\t}\n\t\treturn nil, result\n\n\tdefault:\n\t\treturn nil, runtime.NewAPIError(\"unknown error\", response, response.Code())\n\t}\n}", "func (o *GetViewsConnectionReader) ReadResponse(response runtime.ClientResponse, consumer runtime.Consumer) (interface{}, error) {\n\tswitch response.Code() {\n\tcase 200:\n\t\tresult := NewGetViewsConnectionOK()\n\t\tif err := result.readResponse(response, consumer, o.formats); err != nil {\n\t\t\treturn nil, err\n\t\t}\n\t\treturn result, nil\n\tcase 400:\n\t\tresult := NewGetViewsConnectionBadRequest()\n\t\tif err := result.readResponse(response, consumer, o.formats); err != nil {\n\t\t\treturn nil, err\n\t\t}\n\t\treturn nil, result\n\tdefault:\n\t\treturn nil, runtime.NewAPIError(\"response status code does not match any response statuses defined for this endpoint in the swagger spec\", response, response.Code())\n\t}\n}", "func (o *SyncCopyReader) ReadResponse(response runtime.ClientResponse, consumer runtime.Consumer) (interface{}, error) {\n\tswitch response.Code() {\n\tcase 200:\n\t\tresult := NewSyncCopyOK()\n\t\tif err := result.readResponse(response, consumer, o.formats); err != nil {\n\t\t\treturn nil, err\n\t\t}\n\t\treturn result, nil\n\tdefault:\n\t\tresult := NewSyncCopyDefault(response.Code())\n\t\tif err := result.readResponse(response, consumer, o.formats); err != nil {\n\t\t\treturn nil, err\n\t\t}\n\t\tif response.Code()/100 == 2 {\n\t\t\treturn result, nil\n\t\t}\n\t\treturn nil, result\n\t}\n}", "func (o *PostPatientsReader) ReadResponse(response runtime.ClientResponse, consumer runtime.Consumer) (interface{}, error) {\n\tswitch response.Code() 
{\n\n\tcase 200:\n\t\tresult := NewPostPatientsOK()\n\t\tif err := result.readResponse(response, consumer, o.formats); err != nil {\n\t\t\treturn nil, err\n\t\t}\n\t\treturn result, nil\n\n\tcase 400:\n\t\tresult := NewPostPatientsBadRequest()\n\t\tif err := result.readResponse(response, consumer, o.formats); err != nil {\n\t\t\treturn nil, err\n\t\t}\n\t\treturn nil, result\n\n\tcase 405:\n\t\tresult := NewPostPatientsMethodNotAllowed()\n\t\tif err := result.readResponse(response, consumer, o.formats); err != nil {\n\t\t\treturn nil, err\n\t\t}\n\t\treturn nil, result\n\n\tdefault:\n\t\treturn nil, runtime.NewAPIError(\"unknown error\", response, response.Code())\n\t}\n}", "func (c *Conn) readResponse(res *response_) error {\n\terr := c.readDataUnit()\n\tif err != nil {\n\t\treturn err\n\t}\n\terr = IgnoreEOF(scanResponse.Scan(c.decoder, res))\n\tif err != nil {\n\t\treturn err\n\t}\n\tif res.Result.IsError() {\n\t\treturn res.Result\n\t}\n\treturn nil\n}", "func (o *AllConnectionsReader) ReadResponse(response runtime.ClientResponse, consumer runtime.Consumer) (interface{}, error) {\n switch response.Code() {\n \n case 200:\n result := NewAllConnectionsOK()\n if err := result.readResponse(response, consumer, o.formats); err != nil {\n return nil, err\n }\n return result, nil\n \n case 400:\n result := NewAllConnectionsBadRequest()\n if err := result.readResponse(response, consumer, o.formats); err != nil {\n return nil, err\n }\n return nil, result\n \n case 404:\n result := NewAllConnectionsNotFound()\n if err := result.readResponse(response, consumer, o.formats); err != nil {\n return nil, err\n }\n return nil, result\n \n default:\n return nil, runtime.NewAPIError(\"unknown error\", response, response.Code())\n }\n}", "func (o *GetMsgVpnReader) ReadResponse(response runtime.ClientResponse, consumer runtime.Consumer) (interface{}, error) {\n\tswitch response.Code() {\n\n\tcase 200:\n\t\tresult := NewGetMsgVpnOK()\n\t\tif err := result.readResponse(response, consumer, o.formats); err != nil {\n\t\t\treturn nil, err\n\t\t}\n\t\treturn result, nil\n\n\tdefault:\n\t\tresult := NewGetMsgVpnDefault(response.Code())\n\t\tif err := result.readResponse(response, consumer, o.formats); err != nil {\n\t\t\treturn nil, err\n\t\t}\n\t\tif response.Code()/100 == 2 {\n\t\t\treturn result, nil\n\t\t}\n\t\treturn nil, result\n\t}\n}", "func (r *Response) Read(p []byte) (n int, err error) {\n\n\tif r.Error != nil {\n\t\treturn -1, r.Error\n\t}\n\n\treturn r.RawResponse.Body.Read(p)\n}", "func (o *PostPciLinksMoidReader) ReadResponse(response runtime.ClientResponse, consumer runtime.Consumer) (interface{}, error) {\n\tswitch response.Code() {\n\tcase 201:\n\t\tresult := NewPostPciLinksMoidCreated()\n\t\tif err := result.readResponse(response, consumer, o.formats); err != nil {\n\t\t\treturn nil, err\n\t\t}\n\t\treturn result, nil\n\tdefault:\n\t\tresult := NewPostPciLinksMoidDefault(response.Code())\n\t\tif err := result.readResponse(response, consumer, o.formats); err != nil {\n\t\t\treturn nil, err\n\t\t}\n\t\tif response.Code()/100 == 2 {\n\t\t\treturn result, nil\n\t\t}\n\t\treturn nil, result\n\t}\n}", "func (o *THSRAPIODFare2121Reader) ReadResponse(response runtime.ClientResponse, consumer runtime.Consumer) (interface{}, error) {\n\tswitch response.Code() {\n\tcase 200:\n\t\tresult := NewTHSRAPIODFare2121OK()\n\t\tif err := result.readResponse(response, consumer, o.formats); err != nil {\n\t\t\treturn nil, err\n\t\t}\n\t\treturn result, nil\n\tcase 299:\n\t\tresult := 
NewTHSRAPIODFare2121Status299()\n\t\tif err := result.readResponse(response, consumer, o.formats); err != nil {\n\t\t\treturn nil, err\n\t\t}\n\t\treturn result, nil\n\tcase 304:\n\t\tresult := NewTHSRAPIODFare2121NotModified()\n\t\tif err := result.readResponse(response, consumer, o.formats); err != nil {\n\t\t\treturn nil, err\n\t\t}\n\t\treturn nil, result\n\tdefault:\n\t\treturn nil, runtime.NewAPIError(\"response status code does not match any response statuses defined for this endpoint in the swagger spec\", response, response.Code())\n\t}\n}", "func (o *PostGatewayConnectNetaddressReader) ReadResponse(response runtime.ClientResponse, consumer runtime.Consumer) (interface{}, error) {\n\tswitch response.Code() {\n\tcase 204:\n\t\tresult := NewPostGatewayConnectNetaddressNoContent()\n\t\tif err := result.readResponse(response, consumer, o.formats); err != nil {\n\t\t\treturn nil, err\n\t\t}\n\t\treturn result, nil\n\tdefault:\n\t\tresult := NewPostGatewayConnectNetaddressDefault(response.Code())\n\t\tif err := result.readResponse(response, consumer, o.formats); err != nil {\n\t\t\treturn nil, err\n\t\t}\n\t\tif response.Code()/100 == 2 {\n\t\t\treturn result, nil\n\t\t}\n\t\treturn nil, result\n\t}\n}", "func (o *DNSGetReader) ReadResponse(response runtime.ClientResponse, consumer runtime.Consumer) (interface{}, error) {\n\tswitch response.Code() {\n\tcase 200:\n\t\tresult := NewDNSGetOK()\n\t\tif err := result.readResponse(response, consumer, o.formats); err != nil {\n\t\t\treturn nil, err\n\t\t}\n\t\treturn result, nil\n\tdefault:\n\t\tresult := NewDNSGetDefault(response.Code())\n\t\tif err := result.readResponse(response, consumer, o.formats); err != nil {\n\t\t\treturn nil, err\n\t\t}\n\t\tif response.Code()/100 == 2 {\n\t\t\treturn result, nil\n\t\t}\n\t\treturn nil, result\n\t}\n}", "func (o *GetGreetStatusReader) ReadResponse(response runtime.ClientResponse, consumer runtime.Consumer) (interface{}, error) {\n\tswitch response.Code() {\n\tcase 200:\n\t\tresult := NewGetGreetStatusOK()\n\t\tif err := result.readResponse(response, consumer, o.formats); err != nil {\n\t\t\treturn nil, err\n\t\t}\n\t\treturn result, nil\n\n\tdefault:\n\t\treturn nil, runtime.NewAPIError(\"unknown error\", response, response.Code())\n\t}\n}", "func (o *PostAPIV2EventsReader) ReadResponse(response runtime.ClientResponse, consumer runtime.Consumer) (interface{}, error) {\n\tswitch response.Code() {\n\tcase 201:\n\t\tresult := NewPostAPIV2EventsNoContent()\n\t\tif err := result.readResponse(response, consumer, o.formats); err != nil {\n\t\t\treturn nil, err\n\t\t}\n\t\treturn result, nil\n\tcase 400:\n\t\tresult := NewPostAPIV2EventsBadRequest()\n\t\tif err := result.readResponse(response, consumer, o.formats); err != nil {\n\t\t\treturn nil, err\n\t\t}\n\t\treturn nil, result\n\tcase 403:\n\t\tresult := NewPostAPIV2EventsForbidden()\n\t\tif err := result.readResponse(response, consumer, o.formats); err != nil {\n\t\t\treturn nil, err\n\t\t}\n\t\treturn nil, result\n\n\tdefault:\n\t\treturn nil, runtime.NewAPIError(\"response status code does not match any response statuses defined for this endpoint in the swagger spec\", response, response.Code())\n\t}\n}", "func (o *CreateAntivirusServerReader) ReadResponse(response runtime.ClientResponse, consumer runtime.Consumer) (interface{}, error) {\n\tswitch response.Code() {\n\n\tcase 200:\n\t\tresult := NewCreateAntivirusServerOK()\n\t\tif err := result.readResponse(response, consumer, o.formats); err != nil {\n\t\t\treturn nil, err\n\t\t}\n\t\treturn result, 
nil\n\n\tdefault:\n\t\tresult := NewCreateAntivirusServerDefault(response.Code())\n\t\tif err := result.readResponse(response, consumer, o.formats); err != nil {\n\t\t\treturn nil, err\n\t\t}\n\t\tif response.Code()/100 == 2 {\n\t\t\treturn result, nil\n\t\t}\n\t\treturn nil, result\n\t}\n}", "func (o *PostCarsReader) ReadResponse(response runtime.ClientResponse, consumer runtime.Consumer) (interface{}, error) {\n\tswitch response.Code() {\n\n\tcase 200:\n\t\tresult := NewPostCarsOK()\n\t\tif err := result.readResponse(response, consumer, o.formats); err != nil {\n\t\t\treturn nil, err\n\t\t}\n\t\treturn result, nil\n\n\tcase 405:\n\t\tresult := NewPostCarsMethodNotAllowed()\n\t\tif err := result.readResponse(response, consumer, o.formats); err != nil {\n\t\t\treturn nil, err\n\t\t}\n\t\treturn nil, result\n\n\tdefault:\n\t\treturn nil, runtime.NewAPIError(\"unknown error\", response, response.Code())\n\t}\n}", "func (o *LogReader) ReadResponse(response runtime.ClientResponse, consumer runtime.Consumer) (interface{}, error) {\n\tswitch response.Code() {\n\tcase 200:\n\t\tresult := NewLogOK()\n\t\tif err := result.readResponse(response, consumer, o.formats); err != nil {\n\t\t\treturn nil, err\n\t\t}\n\t\treturn result, nil\n\tcase 404:\n\t\tresult := NewLogNotFound()\n\t\tif err := result.readResponse(response, consumer, o.formats); err != nil {\n\t\t\treturn nil, err\n\t\t}\n\t\treturn nil, result\n\tdefault:\n\t\treturn nil, runtime.NewAPIError(\"response status code does not match any response statuses defined for this endpoint in the swagger spec\", response, response.Code())\n\t}\n}", "func (o *ChatGetConnectedReader) ReadResponse(response runtime.ClientResponse, consumer runtime.Consumer) (interface{}, error) {\n\tswitch response.Code() {\n\n\tcase 200:\n\t\tresult := NewChatGetConnectedOK()\n\t\tif err := result.readResponse(response, consumer, o.formats); err != nil {\n\t\t\treturn nil, err\n\t\t}\n\t\treturn result, nil\n\n\tcase 400:\n\t\tresult := NewChatGetConnectedBadRequest()\n\t\tif err := result.readResponse(response, consumer, o.formats); err != nil {\n\t\t\treturn nil, err\n\t\t}\n\t\treturn nil, result\n\n\tcase 401:\n\t\tresult := NewChatGetConnectedUnauthorized()\n\t\tif err := result.readResponse(response, consumer, o.formats); err != nil {\n\t\t\treturn nil, err\n\t\t}\n\t\treturn nil, result\n\n\tcase 404:\n\t\tresult := NewChatGetConnectedNotFound()\n\t\tif err := result.readResponse(response, consumer, o.formats); err != nil {\n\t\t\treturn nil, err\n\t\t}\n\t\treturn nil, result\n\n\tdefault:\n\t\treturn nil, runtime.NewAPIError(\"unknown error\", response, response.Code())\n\t}\n}", "func (o *WebModifyReader) ReadResponse(response runtime.ClientResponse, consumer runtime.Consumer) (interface{}, error) {\n\tswitch response.Code() {\n\tcase 200:\n\t\tresult := NewWebModifyOK()\n\t\tif err := result.readResponse(response, consumer, o.formats); err != nil {\n\t\t\treturn nil, err\n\t\t}\n\t\treturn result, nil\n\tcase 202:\n\t\tresult := NewWebModifyAccepted()\n\t\tif err := result.readResponse(response, consumer, o.formats); err != nil {\n\t\t\treturn nil, err\n\t\t}\n\t\treturn result, nil\n\tdefault:\n\t\tresult := NewWebModifyDefault(response.Code())\n\t\tif err := result.readResponse(response, consumer, o.formats); err != nil {\n\t\t\treturn nil, err\n\t\t}\n\t\tif response.Code()/100 == 2 {\n\t\t\treturn result, nil\n\t\t}\n\t\treturn nil, result\n\t}\n}", "func (o *GetHyperflexServerModelsMoidReader) ReadResponse(response runtime.ClientResponse, consumer 
runtime.Consumer) (interface{}, error) {\n\tswitch response.Code() {\n\tcase 200:\n\t\tresult := NewGetHyperflexServerModelsMoidOK()\n\t\tif err := result.readResponse(response, consumer, o.formats); err != nil {\n\t\t\treturn nil, err\n\t\t}\n\t\treturn result, nil\n\tcase 404:\n\t\tresult := NewGetHyperflexServerModelsMoidNotFound()\n\t\tif err := result.readResponse(response, consumer, o.formats); err != nil {\n\t\t\treturn nil, err\n\t\t}\n\t\treturn nil, result\n\tdefault:\n\t\tresult := NewGetHyperflexServerModelsMoidDefault(response.Code())\n\t\tif err := result.readResponse(response, consumer, o.formats); err != nil {\n\t\t\treturn nil, err\n\t\t}\n\t\tif response.Code()/100 == 2 {\n\t\t\treturn result, nil\n\t\t}\n\t\treturn nil, result\n\t}\n}", "func (o *KillQueryReader) ReadResponse(response runtime.ClientResponse, consumer runtime.Consumer) (interface{}, error) {\n\tswitch response.Code() {\n\tcase 204:\n\t\tresult := NewKillQueryNoContent()\n\t\tif err := result.readResponse(response, consumer, o.formats); err != nil {\n\t\t\treturn nil, err\n\t\t}\n\t\treturn result, nil\n\tcase 400:\n\t\tresult := NewKillQueryBadRequest()\n\t\tif err := result.readResponse(response, consumer, o.formats); err != nil {\n\t\t\treturn nil, err\n\t\t}\n\t\treturn nil, result\n\tcase 404:\n\t\tresult := NewKillQueryNotFound()\n\t\tif err := result.readResponse(response, consumer, o.formats); err != nil {\n\t\t\treturn nil, err\n\t\t}\n\t\treturn nil, result\n\tcase 422:\n\t\tresult := NewKillQueryUnprocessableEntity()\n\t\tif err := result.readResponse(response, consumer, o.formats); err != nil {\n\t\t\treturn nil, err\n\t\t}\n\t\treturn nil, result\n\n\tdefault:\n\t\treturn nil, runtime.NewAPIError(\"unknown error\", response, response.Code())\n\t}\n}", "func (o *GetProgressionViewReader) ReadResponse(response runtime.ClientResponse, consumer runtime.Consumer) (interface{}, error) {\n\tswitch response.Code() {\n\tcase 200:\n\t\tresult := NewGetProgressionViewOK()\n\t\tif err := result.readResponse(response, consumer, o.formats); err != nil {\n\t\t\treturn nil, err\n\t\t}\n\t\treturn result, nil\n\tcase 400:\n\t\tresult := NewGetProgressionViewBadRequest()\n\t\tif err := result.readResponse(response, consumer, o.formats); err != nil {\n\t\t\treturn nil, err\n\t\t}\n\t\treturn nil, result\n\n\tdefault:\n\t\treturn nil, runtime.NewAPIError(\"response status code does not match any response statuses defined for this endpoint in the swagger spec\", response, response.Code())\n\t}\n}", "func (o *UpdateRackTopoReader) ReadResponse(response runtime.ClientResponse, consumer runtime.Consumer) (interface{}, error) {\n\tswitch response.Code() {\n\tcase 200:\n\t\tresult := NewUpdateRackTopoOK()\n\t\tif err := result.readResponse(response, consumer, o.formats); err != nil {\n\t\t\treturn nil, err\n\t\t}\n\t\treturn result, nil\n\tcase 400:\n\t\tresult := NewUpdateRackTopoBadRequest()\n\t\tif err := result.readResponse(response, consumer, o.formats); err != nil {\n\t\t\treturn nil, err\n\t\t}\n\t\treturn nil, result\n\tdefault:\n\t\treturn nil, runtime.NewAPIError(\"response status code does not match any response statuses defined for this endpoint in the swagger spec\", response, response.Code())\n\t}\n}", "func (o *UtilTestReader) ReadResponse(response runtime.ClientResponse, consumer runtime.Consumer) (interface{}, error) {\n\tswitch response.Code() {\n\tcase 200:\n\t\tresult := NewUtilTestOK()\n\t\tif err := result.readResponse(response, consumer, o.formats); err != nil {\n\t\t\treturn nil, 
err\n\t\t}\n\t\treturn result, nil\n\tdefault:\n\t\treturn nil, runtime.NewAPIError(\"response status code does not match any response statuses defined for this endpoint in the swagger spec\", response, response.Code())\n\t}\n}", "func (o *GetByUIDReader) ReadResponse(response runtime.ClientResponse, consumer runtime.Consumer) (interface{}, error) {\n\tswitch response.Code() {\n\tcase 200:\n\t\tresult := NewGetByUIDOK()\n\t\tif err := result.readResponse(response, consumer, o.formats); err != nil {\n\t\t\treturn nil, err\n\t\t}\n\t\treturn result, nil\n\tcase 404:\n\t\tresult := NewGetByUIDNotFound()\n\t\tif err := result.readResponse(response, consumer, o.formats); err != nil {\n\t\t\treturn nil, err\n\t\t}\n\t\treturn nil, result\n\n\tdefault:\n\t\treturn nil, runtime.NewAPIError(\"unknown error\", response, response.Code())\n\t}\n}", "func (o *GetMeReader) ReadResponse(response runtime.ClientResponse, consumer runtime.Consumer) (interface{}, error) {\n\tswitch response.Code() {\n\tcase 200:\n\t\tresult := NewGetMeOK()\n\t\tif err := result.readResponse(response, consumer, o.formats); err != nil {\n\t\t\treturn nil, err\n\t\t}\n\t\treturn result, nil\n\tdefault:\n\t\tresult := NewGetMeDefault(response.Code())\n\t\tif err := result.readResponse(response, consumer, o.formats); err != nil {\n\t\t\treturn nil, err\n\t\t}\n\t\tif response.Code()/100 == 2 {\n\t\t\treturn result, nil\n\t\t}\n\t\treturn nil, result\n\t}\n}", "func (o *Delete1Reader) ReadResponse(response runtime.ClientResponse, consumer runtime.Consumer) (interface{}, error) {\n\tswitch response.Code() {\n\tcase 204:\n\t\tresult := NewDelete1NoContent()\n\t\tif err := result.readResponse(response, consumer, o.formats); err != nil {\n\t\t\treturn nil, err\n\t\t}\n\t\treturn result, nil\n\tcase 404:\n\t\tresult := NewDelete1NotFound()\n\t\tif err := result.readResponse(response, consumer, o.formats); err != nil {\n\t\t\treturn nil, err\n\t\t}\n\t\treturn nil, result\n\tdefault:\n\t\treturn nil, runtime.NewAPIError(\"response status code does not match any response statuses defined for this endpoint in the swagger spec\", response, response.Code())\n\t}\n}", "func (o *RevokeReader) ReadResponse(response runtime.ClientResponse, consumer runtime.Consumer) (interface{}, error) {\n\tswitch response.Code() {\n\tcase 200:\n\t\tresult := NewRevokeOK()\n\t\tif err := result.readResponse(response, consumer, o.formats); err != nil {\n\t\t\treturn nil, err\n\t\t}\n\t\treturn result, nil\n\tcase 401:\n\t\tresult := NewRevokeUnauthorized()\n\t\tif err := result.readResponse(response, consumer, o.formats); err != nil {\n\t\t\treturn nil, err\n\t\t}\n\t\treturn nil, result\n\tcase 404:\n\t\tresult := NewRevokeNotFound()\n\t\tif err := result.readResponse(response, consumer, o.formats); err != nil {\n\t\t\treturn nil, err\n\t\t}\n\t\treturn nil, result\n\n\tdefault:\n\t\treturn nil, runtime.NewAPIError(\"unknown error\", response, response.Code())\n\t}\n}", "func (o *PostGatewayDisconnectNetaddressReader) ReadResponse(response runtime.ClientResponse, consumer runtime.Consumer) (interface{}, error) {\n\tswitch response.Code() {\n\tcase 204:\n\t\tresult := NewPostGatewayDisconnectNetaddressNoContent()\n\t\tif err := result.readResponse(response, consumer, o.formats); err != nil {\n\t\t\treturn nil, err\n\t\t}\n\t\treturn result, nil\n\tdefault:\n\t\tresult := NewPostGatewayDisconnectNetaddressDefault(response.Code())\n\t\tif err := result.readResponse(response, consumer, o.formats); err != nil {\n\t\t\treturn nil, err\n\t\t}\n\t\tif 
response.Code()/100 == 2 {\n\t\t\treturn result, nil\n\t\t}\n\t\treturn nil, result\n\t}\n}", "func (o *GetProtocolsUsingGETReader) ReadResponse(response runtime.ClientResponse, consumer runtime.Consumer) (interface{}, error) {\n\tswitch response.Code() {\n\tcase 200:\n\t\tresult := NewGetProtocolsUsingGETOK()\n\t\tif err := result.readResponse(response, consumer, o.formats); err != nil {\n\t\t\treturn nil, err\n\t\t}\n\t\treturn result, nil\n\n\tdefault:\n\t\treturn nil, runtime.NewAPIError(\"response status code does not match any response statuses defined for this endpoint in the swagger spec\", response, response.Code())\n\t}\n}", "func (o *DestroySessionUsingPOSTReader) ReadResponse(response runtime.ClientResponse, consumer runtime.Consumer) (interface{}, error) {\n\tswitch response.Code() {\n\tcase 200:\n\t\tresult := NewDestroySessionUsingPOSTOK()\n\t\tif err := result.readResponse(response, consumer, o.formats); err != nil {\n\t\t\treturn nil, err\n\t\t}\n\t\treturn result, nil\n\n\tdefault:\n\t\treturn nil, runtime.NewAPIError(\"response status code does not match any response statuses defined for this endpoint in the swagger spec\", response, response.Code())\n\t}\n}", "func (o *CompleteTransactionReader) ReadResponse(response runtime.ClientResponse, consumer runtime.Consumer) (interface{}, error) {\n\tswitch response.Code() {\n\n\tcase 204:\n\t\tresult := NewCompleteTransactionNoContent()\n\t\tresult.HttpResponse = response\n\t\tif err := result.readResponse(response, consumer, o.formats); err != nil {\n\t\t\treturn nil, err\n\t\t}\n\t\treturn result, nil\n\n\tdefault:\n\t\terrorResult := kbcommon.NewKillbillError(response.Code())\n\t\tif err := consumer.Consume(response.Body(), &errorResult); err != nil && err != io.EOF {\n\t\t\treturn nil, err\n\t\t}\n\t\treturn nil, errorResult\n\t}\n}", "func (o *GetMapNameEventsReader) ReadResponse(response runtime.ClientResponse, consumer runtime.Consumer) (interface{}, error) {\n\tswitch response.Code() {\n\tcase 200:\n\t\tresult := NewGetMapNameEventsOK(o.writer)\n\t\tif err := result.readResponse(response, consumer, o.formats); err != nil {\n\t\t\treturn nil, err\n\t\t}\n\t\treturn result, nil\n\tcase 404:\n\t\tresult := NewGetMapNameEventsNotFound()\n\t\tif err := result.readResponse(response, consumer, o.formats); err != nil {\n\t\t\treturn nil, err\n\t\t}\n\t\treturn nil, result\n\tdefault:\n\t\treturn nil, runtime.NewAPIError(\"response status code does not match any response statuses defined for this endpoint in the swagger spec\", response, response.Code())\n\t}\n}", "func (o *RecoveryReader) ReadResponse(response runtime.ClientResponse, consumer runtime.Consumer) (interface{}, error) {\n\tswitch response.Code() {\n\tcase 200:\n\t\tresult := NewRecoveryOK()\n\t\tif err := result.readResponse(response, consumer, o.formats); err != nil {\n\t\t\treturn nil, err\n\t\t}\n\t\treturn result, nil\n\tcase 500:\n\t\tresult := NewRecoveryInternalServerError()\n\t\tif err := result.readResponse(response, consumer, o.formats); err != nil {\n\t\t\treturn nil, err\n\t\t}\n\t\treturn nil, result\n\n\tdefault:\n\t\treturn nil, runtime.NewAPIError(\"unknown error\", response, response.Code())\n\t}\n}", "func (o *GetPeersReader) ReadResponse(response runtime.ClientResponse, consumer runtime.Consumer) (interface{}, error) {\n\tswitch response.Code() {\n\n\tcase 200:\n\t\tresult := NewGetPeersOK()\n\t\tif err := result.readResponse(response, consumer, o.formats); err != nil {\n\t\t\treturn nil, err\n\t\t}\n\t\treturn result, nil\n\n\tcase 
403:\n\t\tresult := NewGetPeersForbidden()\n\t\tif err := result.readResponse(response, consumer, o.formats); err != nil {\n\t\t\treturn nil, err\n\t\t}\n\t\treturn nil, result\n\n\tdefault:\n\t\treturn nil, runtime.NewAPIError(\"unknown error\", response, response.Code())\n\t}\n}", "func (o *InstallEventsReader) ReadResponse(response runtime.ClientResponse, consumer runtime.Consumer) (interface{}, error) {\n\tswitch response.Code() {\n\n\tcase 200:\n\t\tresult := NewInstallEventsOK()\n\t\tif err := result.readResponse(response, consumer, o.formats); err != nil {\n\t\t\treturn nil, err\n\t\t}\n\t\treturn result, nil\n\n\tdefault:\n\t\treturn nil, runtime.NewAPIError(\"unknown error\", response, response.Code())\n\t}\n}", "func (o *SetMemoRequiredReader) ReadResponse(response runtime.ClientResponse, consumer runtime.Consumer) (interface{}, error) {\n\tswitch response.Code() {\n\tcase 200:\n\t\tresult := NewSetMemoRequiredOK()\n\t\tif err := result.readResponse(response, consumer, o.formats); err != nil {\n\t\t\treturn nil, err\n\t\t}\n\t\treturn result, nil\n\tcase 400:\n\t\tresult := NewSetMemoRequiredBadRequest()\n\t\tif err := result.readResponse(response, consumer, o.formats); err != nil {\n\t\t\treturn nil, err\n\t\t}\n\t\treturn nil, result\n\tcase 500:\n\t\tresult := NewSetMemoRequiredInternalServerError()\n\t\tif err := result.readResponse(response, consumer, o.formats); err != nil {\n\t\t\treturn nil, err\n\t\t}\n\t\treturn nil, result\n\n\tdefault:\n\t\treturn nil, runtime.NewAPIError(\"unknown error\", response, response.Code())\n\t}\n}", "func (o *UpdateRackTopoReader) ReadResponse(response runtime.ClientResponse, consumer runtime.Consumer) (interface{}, error) {\n\tswitch response.Code() {\n\tcase 200:\n\t\tresult := NewUpdateRackTopoOK()\n\t\tif err := result.readResponse(response, consumer, o.formats); err != nil {\n\t\t\treturn nil, err\n\t\t}\n\t\treturn result, nil\n\tcase 400:\n\t\tresult := NewUpdateRackTopoBadRequest()\n\t\tif err := result.readResponse(response, consumer, o.formats); err != nil {\n\t\t\treturn nil, err\n\t\t}\n\t\treturn nil, result\n\tcase 404:\n\t\tresult := NewUpdateRackTopoNotFound()\n\t\tif err := result.readResponse(response, consumer, o.formats); err != nil {\n\t\t\treturn nil, err\n\t\t}\n\t\treturn nil, result\n\tcase 500:\n\t\tresult := NewUpdateRackTopoInternalServerError()\n\t\tif err := result.readResponse(response, consumer, o.formats); err != nil {\n\t\t\treturn nil, err\n\t\t}\n\t\treturn nil, result\n\tdefault:\n\t\treturn nil, runtime.NewAPIError(\"response status code does not match any response statuses defined for this endpoint in the swagger spec\", response, response.Code())\n\t}\n}", "func (o *GetVoicesReader) ReadResponse(response runtime.ClientResponse, consumer runtime.Consumer) (interface{}, error) {\n\tswitch response.Code() {\n\tcase 200:\n\t\tresult := NewGetVoicesOK()\n\t\tif err := result.readResponse(response, consumer, o.formats); err != nil {\n\t\t\treturn nil, err\n\t\t}\n\t\treturn result, nil\n\n\tdefault:\n\t\treturn nil, runtime.NewAPIError(\"unknown error\", response, response.Code())\n\t}\n}", "func (o *BounceReader) ReadResponse(response runtime.ClientResponse, consumer runtime.Consumer) (interface{}, error) {\n\tresult := NewBounceDefault(response.Code())\n\tif err := result.readResponse(response, consumer, o.formats); err != nil {\n\t\treturn nil, err\n\t}\n\tif response.Code()/100 == 2 {\n\t\treturn result, nil\n\t}\n\treturn nil, result\n}", "func (o *PostHyperflexHxdpVersionsMoidReader) 
ReadResponse(response runtime.ClientResponse, consumer runtime.Consumer) (interface{}, error) {\n\tswitch response.Code() {\n\tcase 201:\n\t\tresult := NewPostHyperflexHxdpVersionsMoidCreated()\n\t\tif err := result.readResponse(response, consumer, o.formats); err != nil {\n\t\t\treturn nil, err\n\t\t}\n\t\treturn result, nil\n\tdefault:\n\t\tresult := NewPostHyperflexHxdpVersionsMoidDefault(response.Code())\n\t\tif err := result.readResponse(response, consumer, o.formats); err != nil {\n\t\t\treturn nil, err\n\t\t}\n\t\tif response.Code()/100 == 2 {\n\t\t\treturn result, nil\n\t\t}\n\t\treturn nil, result\n\t}\n}", "func (o *GetObmsLibraryIdentifierReader) ReadResponse(response runtime.ClientResponse, consumer runtime.Consumer) (interface{}, error) {\n\tswitch response.Code() {\n\n\tcase 200:\n\t\tresult := NewGetObmsLibraryIdentifierOK()\n\t\tif err := result.readResponse(response, consumer, o.formats); err != nil {\n\t\t\treturn nil, err\n\t\t}\n\t\treturn result, nil\n\n\tcase 404:\n\t\tresult := NewGetObmsLibraryIdentifierNotFound()\n\t\tif err := result.readResponse(response, consumer, o.formats); err != nil {\n\t\t\treturn nil, err\n\t\t}\n\t\treturn nil, result\n\n\tdefault:\n\t\tresult := NewGetObmsLibraryIdentifierDefault(response.Code())\n\t\tif err := result.readResponse(response, consumer, o.formats); err != nil {\n\t\t\treturn nil, err\n\t\t}\n\t\treturn nil, result\n\t}\n}", "func (o *DeleteApplianceRestoresMoidReader) ReadResponse(response runtime.ClientResponse, consumer runtime.Consumer) (interface{}, error) {\n\tswitch response.Code() {\n\tcase 200:\n\t\tresult := NewDeleteApplianceRestoresMoidOK()\n\t\tif err := result.readResponse(response, consumer, o.formats); err != nil {\n\t\t\treturn nil, err\n\t\t}\n\t\treturn result, nil\n\tcase 404:\n\t\tresult := NewDeleteApplianceRestoresMoidNotFound()\n\t\tif err := result.readResponse(response, consumer, o.formats); err != nil {\n\t\t\treturn nil, err\n\t\t}\n\t\treturn nil, result\n\tdefault:\n\t\tresult := NewDeleteApplianceRestoresMoidDefault(response.Code())\n\t\tif err := result.readResponse(response, consumer, o.formats); err != nil {\n\t\t\treturn nil, err\n\t\t}\n\t\tif response.Code()/100 == 2 {\n\t\t\treturn result, nil\n\t\t}\n\t\treturn nil, result\n\t}\n}", "func (o *UserQuerySessionReader) ReadResponse(response runtime.ClientResponse, consumer runtime.Consumer) (interface{}, error) {\n\tswitch response.Code() {\n\tcase 200:\n\t\tresult := NewUserQuerySessionOK()\n\t\tif err := result.readResponse(response, consumer, o.formats); err != nil {\n\t\t\treturn nil, err\n\t\t}\n\t\treturn result, nil\n\tcase 400:\n\t\tresult := NewUserQuerySessionBadRequest()\n\t\tif err := result.readResponse(response, consumer, o.formats); err != nil {\n\t\t\treturn nil, err\n\t\t}\n\t\treturn result, nil\n\tcase 500:\n\t\tresult := NewUserQuerySessionInternalServerError()\n\t\tif err := result.readResponse(response, consumer, o.formats); err != nil {\n\t\t\treturn nil, err\n\t\t}\n\t\treturn result, nil\n\n\tdefault:\n\t\tdata, err := ioutil.ReadAll(response.Body())\n\t\tif err != nil {\n\t\t\treturn nil, err\n\t\t}\n\n\t\treturn nil, fmt.Errorf(\"Requested GET /sessionbrowser/namespaces/{namespace}/gamesession returns an error %d: %s\", response.Code(), string(data))\n\t}\n}", "func (o *GetDiscoverReader) ReadResponse(response runtime.ClientResponse, consumer runtime.Consumer) (interface{}, error) {\n\tswitch response.Code() {\n\n\tcase 200:\n\t\tresult := NewGetDiscoverOK()\n\t\tif err := result.readResponse(response, consumer, 
o.formats); err != nil {\n\t\t\treturn nil, err\n\t\t}\n\t\treturn result, nil\n\n\tdefault:\n\t\treturn nil, runtime.NewAPIError(\"unknown error\", response, response.Code())\n\t}\n}", "func (r *overwriteConsumerReader) ReadResponse(resp runtime.ClientResponse, consumer runtime.Consumer) (interface{}, error) {\n\tif r.forStatusCode == ForAllStatusCodes || resp.Code() == r.forStatusCode {\n\t\treturn r.requestReader.ReadResponse(resp, r.consumer)\n\t}\n\n\treturn r.requestReader.ReadResponse(resp, consumer)\n}", "func (o *UnclaimTrafficFilterLinkIDReader) ReadResponse(response runtime.ClientResponse, consumer runtime.Consumer) (interface{}, error) {\n\tswitch response.Code() {\n\tcase 200:\n\t\tresult := NewUnclaimTrafficFilterLinkIDOK()\n\t\tif err := result.readResponse(response, consumer, o.formats); err != nil {\n\t\t\treturn nil, err\n\t\t}\n\t\treturn result, nil\n\tcase 400:\n\t\tresult := NewUnclaimTrafficFilterLinkIDBadRequest()\n\t\tif err := result.readResponse(response, consumer, o.formats); err != nil {\n\t\t\treturn nil, err\n\t\t}\n\t\treturn nil, result\n\tcase 500:\n\t\tresult := NewUnclaimTrafficFilterLinkIDInternalServerError()\n\t\tif err := result.readResponse(response, consumer, o.formats); err != nil {\n\t\t\treturn nil, err\n\t\t}\n\t\treturn nil, result\n\tdefault:\n\t\treturn nil, runtime.NewAPIError(\"response status code does not match any response statuses defined for this endpoint in the swagger spec\", response, response.Code())\n\t}\n}", "func (o *GetDebugRequestReader) ReadResponse(response runtime.ClientResponse, consumer runtime.Consumer) (interface{}, error) {\n\tswitch response.Code() {\n\n\tcase 200:\n\t\tresult := NewGetDebugRequestOK()\n\t\tif err := result.readResponse(response, consumer, o.formats); err != nil {\n\t\t\treturn nil, err\n\t\t}\n\t\treturn result, nil\n\n\tcase 404:\n\t\tresult := NewGetDebugRequestNotFound()\n\t\tif err := result.readResponse(response, consumer, o.formats); err != nil {\n\t\t\treturn nil, err\n\t\t}\n\t\treturn nil, result\n\n\tdefault:\n\t\treturn nil, runtime.NewAPIError(\"unknown error\", response, response.Code())\n\t}\n}", "func (o *ChangeaspecificSpeedDialReader) ReadResponse(response runtime.ClientResponse, consumer runtime.Consumer) (interface{}, error) {\n\tswitch response.Code() {\n\tcase 204:\n\t\tresult := NewChangeaspecificSpeedDialNoContent()\n\t\tif err := result.readResponse(response, consumer, o.formats); err != nil {\n\t\t\treturn nil, err\n\t\t}\n\t\treturn result, nil\n\n\tdefault:\n\t\treturn nil, runtime.NewAPIError(\"unknown error\", response, response.Code())\n\t}\n}", "func (o *PostMemoryArraysMoidReader) ReadResponse(response runtime.ClientResponse, consumer runtime.Consumer) (interface{}, error) {\n\tswitch response.Code() {\n\tcase 201:\n\t\tresult := NewPostMemoryArraysMoidCreated()\n\t\tif err := result.readResponse(response, consumer, o.formats); err != nil {\n\t\t\treturn nil, err\n\t\t}\n\t\treturn result, nil\n\tdefault:\n\t\tresult := NewPostMemoryArraysMoidDefault(response.Code())\n\t\tif err := result.readResponse(response, consumer, o.formats); err != nil {\n\t\t\treturn nil, err\n\t\t}\n\t\tif response.Code()/100 == 2 {\n\t\t\treturn result, nil\n\t\t}\n\t\treturn nil, result\n\t}\n}", "func (c *Client) readResponse(conn net.Conn) ([]byte, error) {\n\tif c.Timeout > 0 {\n\t\t_ = conn.SetReadDeadline(time.Now().Add(c.Timeout))\n\t}\n\n\tproto := \"udp\"\n\tif _, ok := conn.(*net.TCPConn); ok {\n\t\tproto = \"tcp\"\n\t}\n\n\tif proto == \"udp\" {\n\t\tbufSize := c.UDPSize\n\t\tif 
bufSize == 0 {\n\t\t\tbufSize = dns.MinMsgSize\n\t\t}\n\t\tresponse := make([]byte, bufSize)\n\t\tn, err := conn.Read(response)\n\t\tif err != nil {\n\t\t\treturn nil, err\n\t\t}\n\t\treturn response[:n], nil\n\t}\n\n\t// If we got here, this is a TCP connection\n\t// so we should read a 2-byte prefix first\n\treturn readPrefixed(conn)\n}", "func (o *PayReader) ReadResponse(response runtime.ClientResponse, consumer runtime.Consumer) (interface{}, error) {\n\tswitch response.Code() {\n\tcase 200:\n\t\tresult := NewPayOK()\n\t\tif err := result.readResponse(response, consumer, o.formats); err != nil {\n\t\t\treturn nil, err\n\t\t}\n\t\treturn result, nil\n\tcase 400:\n\t\tresult := NewPayBadRequest()\n\t\tif err := result.readResponse(response, consumer, o.formats); err != nil {\n\t\t\treturn nil, err\n\t\t}\n\t\treturn result, nil\n\tcase 404:\n\t\tresult := NewPayNotFound()\n\t\tif err := result.readResponse(response, consumer, o.formats); err != nil {\n\t\t\treturn nil, err\n\t\t}\n\t\treturn result, nil\n\tcase 409:\n\t\tresult := NewPayConflict()\n\t\tif err := result.readResponse(response, consumer, o.formats); err != nil {\n\t\t\treturn nil, err\n\t\t}\n\t\treturn result, nil\n\n\tdefault:\n\t\tdata, err := ioutil.ReadAll(response.Body())\n\t\tif err != nil {\n\t\t\treturn nil, err\n\t\t}\n\n\t\treturn nil, fmt.Errorf(\"Requested POST /platform/public/namespaces/{namespace}/payment/orders/{paymentOrderNo}/pay returns an error %d: %s\", response.Code(), string(data))\n\t}\n}", "func (o *CountReader) ReadResponse(response runtime.ClientResponse, consumer runtime.Consumer) (interface{}, error) {\n\tswitch response.Code() {\n\n\tcase 200:\n\t\tresult := NewCountOK()\n\t\tif err := result.readResponse(response, consumer, o.formats); err != nil {\n\t\t\treturn nil, err\n\t\t}\n\t\treturn result, nil\n\n\tcase 400:\n\t\tresult := NewCountBadRequest()\n\t\tif err := result.readResponse(response, consumer, o.formats); err != nil {\n\t\t\treturn nil, err\n\t\t}\n\t\treturn nil, result\n\n\tdefault:\n\t\treturn nil, runtime.NewAPIError(\"unknown error\", response, response.Code())\n\t}\n}", "func (o *PostNodesIdentifierObmIdentifyReader) ReadResponse(response runtime.ClientResponse, consumer runtime.Consumer) (interface{}, error) {\n\tswitch response.Code() {\n\n\tcase 201:\n\t\tresult := NewPostNodesIdentifierObmIdentifyCreated()\n\t\tif err := result.readResponse(response, consumer, o.formats); err != nil {\n\t\t\treturn nil, err\n\t\t}\n\t\treturn result, nil\n\n\tcase 404:\n\t\tresult := NewPostNodesIdentifierObmIdentifyNotFound()\n\t\tif err := result.readResponse(response, consumer, o.formats); err != nil {\n\t\t\treturn nil, err\n\t\t}\n\t\treturn nil, result\n\n\tdefault:\n\t\tresult := NewPostNodesIdentifierObmIdentifyDefault(response.Code())\n\t\tif err := result.readResponse(response, consumer, o.formats); err != nil {\n\t\t\treturn nil, err\n\t\t}\n\t\treturn nil, result\n\t}\n}", "func (o *GetInterpreterReader) ReadResponse(response runtime.ClientResponse, consumer runtime.Consumer) (interface{}, error) {\n\tswitch response.Code() {\n\tcase 200:\n\t\tresult := NewGetInterpreterOK()\n\t\tif err := result.readResponse(response, consumer, o.formats); err != nil {\n\t\t\treturn nil, err\n\t\t}\n\t\treturn result, nil\n\tcase 404:\n\t\tresult := NewGetInterpreterNotFound()\n\t\tif err := result.readResponse(response, consumer, o.formats); err != nil {\n\t\t\treturn nil, err\n\t\t}\n\t\treturn nil, result\n\n\tdefault:\n\t\treturn nil, runtime.NewAPIError(\"unknown error\", response, 
response.Code())\n\t}\n}", "func (o *DeleteEventsEventIDReader) ReadResponse(response runtime.ClientResponse, consumer runtime.Consumer) (interface{}, error) {\n\tswitch response.Code() {\n\n\tcase 204:\n\t\tresult := NewDeleteEventsEventIDNoContent()\n\t\tif err := result.readResponse(response, consumer, o.formats); err != nil {\n\t\t\treturn nil, err\n\t\t}\n\t\treturn result, nil\n\n\tcase 401:\n\t\tresult := NewDeleteEventsEventIDUnauthorized()\n\t\tif err := result.readResponse(response, consumer, o.formats); err != nil {\n\t\t\treturn nil, err\n\t\t}\n\t\treturn nil, result\n\n\tcase 404:\n\t\tresult := NewDeleteEventsEventIDNotFound()\n\t\tif err := result.readResponse(response, consumer, o.formats); err != nil {\n\t\t\treturn nil, err\n\t\t}\n\t\treturn nil, result\n\n\tdefault:\n\t\treturn nil, runtime.NewAPIError(\"unknown error\", response, response.Code())\n\t}\n}", "func (o *UtilityServiceReadyReader) ReadResponse(response runtime.ClientResponse, consumer runtime.Consumer) (interface{}, error) {\n\tswitch response.Code() {\n\tcase 200:\n\t\tresult := NewUtilityServiceReadyOK()\n\t\tif err := result.readResponse(response, consumer, o.formats); err != nil {\n\t\t\treturn nil, err\n\t\t}\n\t\treturn result, nil\n\tdefault:\n\t\tresult := NewUtilityServiceReadyDefault(response.Code())\n\t\tif err := result.readResponse(response, consumer, o.formats); err != nil {\n\t\t\treturn nil, err\n\t\t}\n\t\tif response.Code()/100 == 2 {\n\t\t\treturn result, nil\n\t\t}\n\t\treturn nil, result\n\t}\n}", "func (o *HTTPGetPersistenceItemDataReader) ReadResponse(response runtime.ClientResponse, consumer runtime.Consumer) (interface{}, error) {\n\tswitch response.Code() {\n\tcase 200:\n\t\tresult := NewHTTPGetPersistenceItemDataOK()\n\t\tif err := result.readResponse(response, consumer, o.formats); err != nil {\n\t\t\treturn nil, err\n\t\t}\n\t\treturn result, nil\n\tcase 404:\n\t\tresult := NewHTTPGetPersistenceItemDataNotFound()\n\t\tif err := result.readResponse(response, consumer, o.formats); err != nil {\n\t\t\treturn nil, err\n\t\t}\n\t\treturn nil, result\n\n\tdefault:\n\t\treturn nil, runtime.NewAPIError(\"unknown error\", response, response.Code())\n\t}\n}", "func (o *SubscriptionReader) ReadResponse(response runtime.ClientResponse, consumer runtime.Consumer) (interface{}, error) {\n\tswitch response.Code() {\n\tcase 200:\n\t\tresult := NewSubscriptionOK()\n\t\tif err := result.readResponse(response, consumer, o.formats); err != nil {\n\t\t\treturn nil, err\n\t\t}\n\t\treturn result, nil\n\n\tdefault:\n\t\treturn nil, runtime.NewAPIError(\"response status code does not match any response statuses defined for this endpoint in the swagger spec\", response, response.Code())\n\t}\n}", "func (o *PostEquipmentIoExpandersMoidReader) ReadResponse(response runtime.ClientResponse, consumer runtime.Consumer) (interface{}, error) {\n\tswitch response.Code() {\n\tcase 201:\n\t\tresult := NewPostEquipmentIoExpandersMoidCreated()\n\t\tif err := result.readResponse(response, consumer, o.formats); err != nil {\n\t\t\treturn nil, err\n\t\t}\n\t\treturn result, nil\n\tdefault:\n\t\tresult := NewPostEquipmentIoExpandersMoidDefault(response.Code())\n\t\tif err := result.readResponse(response, consumer, o.formats); err != nil {\n\t\t\treturn nil, err\n\t\t}\n\t\tif response.Code()/100 == 2 {\n\t\t\treturn result, nil\n\t\t}\n\t\treturn nil, result\n\t}\n}", "func (o *FrontSessionReader) ReadResponse(response runtime.ClientResponse, consumer runtime.Consumer) (interface{}, error) {\n\tswitch response.Code() 
{\n\n\tcase 200:\n\t\tresult := NewFrontSessionOK()\n\t\tif err := result.readResponse(response, consumer, o.formats); err != nil {\n\t\t\treturn nil, err\n\t\t}\n\t\treturn result, nil\n\n\tdefault:\n\t\treturn nil, runtime.NewAPIError(\"unknown error\", response, response.Code())\n\t}\n}", "func (pr *PushedRequest) ReadResponse(ctx context.Context) (*http.Response, error) {\n\tselect {\n\tcase <-ctx.Done():\n\t\tpr.Cancel()\n\t\tpr.pushedStream.bufPipe.CloseWithError(ctx.Err())\n\t\treturn nil, ctx.Err()\n\tcase <-pr.pushedStream.peerReset:\n\t\treturn nil, pr.pushedStream.resetErr\n\tcase resErr := <-pr.pushedStream.resc:\n\t\tif resErr.err != nil {\n\t\t\tfmt.Println(resErr.err.Error())\n\t\t\tpr.Cancel()\n\t\t\tpr.pushedStream.bufPipe.CloseWithError(resErr.err)\n\t\t\treturn nil, resErr.err\n\t\t}\n\t\tresErr.res.Request = pr.Promise\n\t\tresErr.res.TLS = pr.pushedStream.cc.tlsState\n\t\treturn resErr.res, resErr.err\n\t}\n}", "func (o *GetZippedReader) ReadResponse(response runtime.ClientResponse, consumer runtime.Consumer) (interface{}, error) {\n\tresult := NewGetZippedDefault(response.Code())\n\tif err := result.readResponse(response, consumer, o.formats); err != nil {\n\t\treturn nil, err\n\t}\n\tif response.Code()/100 == 2 {\n\t\treturn result, nil\n\t}\n\treturn nil, result\n}", "func (o *DeleteFirmwareUpgradesMoidReader) ReadResponse(response runtime.ClientResponse, consumer runtime.Consumer) (interface{}, error) {\n\tswitch response.Code() {\n\tcase 200:\n\t\tresult := NewDeleteFirmwareUpgradesMoidOK()\n\t\tif err := result.readResponse(response, consumer, o.formats); err != nil {\n\t\t\treturn nil, err\n\t\t}\n\t\treturn result, nil\n\tcase 404:\n\t\tresult := NewDeleteFirmwareUpgradesMoidNotFound()\n\t\tif err := result.readResponse(response, consumer, o.formats); err != nil {\n\t\t\treturn nil, err\n\t\t}\n\t\treturn nil, result\n\tdefault:\n\t\tresult := NewDeleteFirmwareUpgradesMoidDefault(response.Code())\n\t\tif err := result.readResponse(response, consumer, o.formats); err != nil {\n\t\t\treturn nil, err\n\t\t}\n\t\tif response.Code()/100 == 2 {\n\t\t\treturn result, nil\n\t\t}\n\t\treturn nil, result\n\t}\n}", "func (o *GetEtherPhysicalPortsReader) ReadResponse(response runtime.ClientResponse, consumer runtime.Consumer) (interface{}, error) {\n\tswitch response.Code() {\n\tcase 200:\n\t\tresult := NewGetEtherPhysicalPortsOK()\n\t\tif err := result.readResponse(response, consumer, o.formats); err != nil {\n\t\t\treturn nil, err\n\t\t}\n\t\treturn result, nil\n\tdefault:\n\t\tresult := NewGetEtherPhysicalPortsDefault(response.Code())\n\t\tif err := result.readResponse(response, consumer, o.formats); err != nil {\n\t\t\treturn nil, err\n\t\t}\n\t\tif response.Code()/100 == 2 {\n\t\t\treturn result, nil\n\t\t}\n\t\treturn nil, result\n\t}\n}", "func (o *ZoneStreamReader) ReadResponse(response runtime.ClientResponse, consumer runtime.Consumer) (interface{}, error) {\n\tswitch response.Code() {\n\tcase 200:\n\t\tresult := NewZoneStreamOK()\n\t\tif err := result.readResponse(response, consumer, o.formats); err != nil {\n\t\t\treturn nil, err\n\t\t}\n\t\treturn result, nil\n\n\tdefault:\n\t\treturn nil, runtime.NewAPIError(\"response status code does not match any response statuses defined for this endpoint in the swagger spec\", response, response.Code())\n\t}\n}", "func (o *ByNamespaceReader) ReadResponse(response runtime.ClientResponse, consumer runtime.Consumer) (interface{}, error) {\n\tswitch response.Code() {\n\n\tcase 200:\n\t\tresult := NewByNamespaceOK()\n\t\tif 
err := result.readResponse(response, consumer, o.formats); err != nil {\n\t\t\treturn nil, err\n\t\t}\n\t\treturn result, nil\n\n\tcase 404:\n\t\tresult := NewByNamespaceNotFound()\n\t\tif err := result.readResponse(response, consumer, o.formats); err != nil {\n\t\t\treturn nil, err\n\t\t}\n\t\treturn nil, result\n\n\tdefault:\n\t\treturn nil, runtime.NewAPIError(\"unknown error\", response, response.Code())\n\t}\n}", "func (o *GetRequestTrackerReader) ReadResponse(response runtime.ClientResponse, consumer runtime.Consumer) (interface{}, error) {\n\tswitch response.Code() {\n\tcase 200:\n\t\tresult := NewGetRequestTrackerOK()\n\t\tif err := result.readResponse(response, consumer, o.formats); err != nil {\n\t\t\treturn nil, err\n\t\t}\n\t\treturn result, nil\n\tcase 403:\n\t\tresult := NewGetRequestTrackerForbidden()\n\t\tif err := result.readResponse(response, consumer, o.formats); err != nil {\n\t\t\treturn nil, err\n\t\t}\n\t\treturn nil, result\n\tcase 404:\n\t\tresult := NewGetRequestTrackerNotFound()\n\t\tif err := result.readResponse(response, consumer, o.formats); err != nil {\n\t\t\treturn nil, err\n\t\t}\n\t\treturn nil, result\n\tdefault:\n\t\treturn nil, runtime.NewAPIError(\"response status code does not match any response statuses defined for this endpoint in the swagger spec\", response, response.Code())\n\t}\n}" ]
[ "0.7640225", "0.7607834", "0.75210214", "0.7509121", "0.74803215", "0.74724656", "0.7433606", "0.74244606", "0.7375357", "0.7367311", "0.73589337", "0.73551553", "0.7350114", "0.7347274", "0.7346054", "0.733966", "0.7336042", "0.73239547", "0.7315819", "0.73155594", "0.7310195", "0.730769", "0.72904205", "0.7287086", "0.72826135", "0.72742283", "0.7274111", "0.72655845", "0.726384", "0.7262403", "0.7255057", "0.72496617", "0.72492456", "0.72479755", "0.72409135", "0.7224629", "0.722366", "0.7219326", "0.7216009", "0.72122216", "0.72110355", "0.72099286", "0.7209348", "0.72004783", "0.71978456", "0.719778", "0.71926844", "0.7177653", "0.71745974", "0.71737057", "0.716626", "0.7155474", "0.71500206", "0.7149536", "0.7148374", "0.7143972", "0.7143686", "0.7141745", "0.71397567", "0.713703", "0.7136677", "0.7136661", "0.7135863", "0.7135147", "0.71337897", "0.71312535", "0.7124361", "0.7123878", "0.71200526", "0.7120036", "0.7119569", "0.7104891", "0.7100936", "0.70989054", "0.70989", "0.70984536", "0.70977753", "0.709657", "0.70961034", "0.70941985", "0.70932794", "0.70886916", "0.70850074", "0.7083912", "0.7080819", "0.7078785", "0.70775825", "0.70765215", "0.7076268", "0.7070042", "0.70699906", "0.7068155", "0.7068122", "0.7066828", "0.70625323", "0.70621973", "0.70599294", "0.70577264", "0.7054454", "0.70509636" ]
0.71148854
71
NewPatchHyperflexServerModelsMoidCreated creates a PatchHyperflexServerModelsMoidCreated with default headers values
func NewPatchHyperflexServerModelsMoidCreated() *PatchHyperflexServerModelsMoidCreated {
	return &PatchHyperflexServerModelsMoidCreated{}
}
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "func NewPatchHyperflexServerModelsMoidDefault(code int) *PatchHyperflexServerModelsMoidDefault {\n\treturn &PatchHyperflexServerModelsMoidDefault{\n\t\t_statusCode: code,\n\t}\n}", "func (a *HyperflexApiService) PatchHyperflexServerModel(ctx context.Context, moid string) ApiPatchHyperflexServerModelRequest {\n\treturn ApiPatchHyperflexServerModelRequest{\n\t\tApiService: a,\n\t\tctx: ctx,\n\t\tmoid: moid,\n\t}\n}", "func (a *HyperflexApiService) CreateHyperflexServerModel(ctx context.Context) ApiCreateHyperflexServerModelRequest {\n\treturn ApiCreateHyperflexServerModelRequest{\n\t\tApiService: a,\n\t\tctx: ctx,\n\t}\n}", "func (a *Client) PatchHyperflexCapabilityInfosMoid(params *PatchHyperflexCapabilityInfosMoidParams) (*PatchHyperflexCapabilityInfosMoidCreated, error) {\n\t// TODO: Validate the params before sending\n\tif params == nil {\n\t\tparams = NewPatchHyperflexCapabilityInfosMoidParams()\n\t}\n\n\tresult, err := a.transport.Submit(&runtime.ClientOperation{\n\t\tID: \"PatchHyperflexCapabilityInfosMoid\",\n\t\tMethod: \"PATCH\",\n\t\tPathPattern: \"/hyperflex/CapabilityInfos/{moid}\",\n\t\tProducesMediaTypes: []string{\"application/json\"},\n\t\tConsumesMediaTypes: []string{\"application/json\"},\n\t\tSchemes: []string{\"https\"},\n\t\tParams: params,\n\t\tReader: &PatchHyperflexCapabilityInfosMoidReader{formats: a.formats},\n\t\tContext: params.Context,\n\t\tClient: params.HTTPClient,\n\t})\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\tsuccess, ok := result.(*PatchHyperflexCapabilityInfosMoidCreated)\n\tif ok {\n\t\treturn success, nil\n\t}\n\t// unexpected success response\n\tunexpectedSuccess := result.(*PatchHyperflexCapabilityInfosMoidDefault)\n\treturn nil, runtime.NewAPIError(\"unexpected success response: content available as default response in error\", unexpectedSuccess, unexpectedSuccess.Code())\n}", "func NewPatchHyperflexSoftwareVersionPoliciesMoidCreated() *PatchHyperflexSoftwareVersionPoliciesMoidCreated {\n\treturn &PatchHyperflexSoftwareVersionPoliciesMoidCreated{}\n}", "func NewPatchHyperflexSoftwareVersionPoliciesMoidDefault(code int) *PatchHyperflexSoftwareVersionPoliciesMoidDefault {\n\treturn &PatchHyperflexSoftwareVersionPoliciesMoidDefault{\n\t\t_statusCode: code,\n\t}\n}", "func (a *HyperflexApiService) PatchHyperflexServerModelExecute(r ApiPatchHyperflexServerModelRequest) (*HyperflexServerModel, *http.Response, error) {\n\tvar (\n\t\tlocalVarHTTPMethod = http.MethodPatch\n\t\tlocalVarPostBody interface{}\n\t\tformFiles []formFile\n\t\tlocalVarReturnValue *HyperflexServerModel\n\t)\n\n\tlocalBasePath, err := a.client.cfg.ServerURLWithContext(r.ctx, \"HyperflexApiService.PatchHyperflexServerModel\")\n\tif err != nil {\n\t\treturn localVarReturnValue, nil, &GenericOpenAPIError{error: err.Error()}\n\t}\n\n\tlocalVarPath := localBasePath + \"/api/v1/hyperflex/ServerModels/{Moid}\"\n\tlocalVarPath = strings.Replace(localVarPath, \"{\"+\"Moid\"+\"}\", url.PathEscape(parameterToString(r.moid, \"\")), -1)\n\n\tlocalVarHeaderParams := make(map[string]string)\n\tlocalVarQueryParams := url.Values{}\n\tlocalVarFormParams := url.Values{}\n\tif r.hyperflexServerModel == nil {\n\t\treturn localVarReturnValue, nil, reportError(\"hyperflexServerModel is required and must be specified\")\n\t}\n\n\t// to determine the Content-Type header\n\tlocalVarHTTPContentTypes := []string{\"application/json\", \"application/json-patch+json\"}\n\n\t// set Content-Type header\n\tlocalVarHTTPContentType := selectHeaderContentType(localVarHTTPContentTypes)\n\tif 
localVarHTTPContentType != \"\" {\n\t\tlocalVarHeaderParams[\"Content-Type\"] = localVarHTTPContentType\n\t}\n\n\t// to determine the Accept header\n\tlocalVarHTTPHeaderAccepts := []string{\"application/json\"}\n\n\t// set Accept header\n\tlocalVarHTTPHeaderAccept := selectHeaderAccept(localVarHTTPHeaderAccepts)\n\tif localVarHTTPHeaderAccept != \"\" {\n\t\tlocalVarHeaderParams[\"Accept\"] = localVarHTTPHeaderAccept\n\t}\n\tif r.ifMatch != nil {\n\t\tlocalVarHeaderParams[\"If-Match\"] = parameterToString(*r.ifMatch, \"\")\n\t}\n\t// body params\n\tlocalVarPostBody = r.hyperflexServerModel\n\treq, err := a.client.prepareRequest(r.ctx, localVarPath, localVarHTTPMethod, localVarPostBody, localVarHeaderParams, localVarQueryParams, localVarFormParams, formFiles)\n\tif err != nil {\n\t\treturn localVarReturnValue, nil, err\n\t}\n\n\tlocalVarHTTPResponse, err := a.client.callAPI(req)\n\tif err != nil || localVarHTTPResponse == nil {\n\t\treturn localVarReturnValue, localVarHTTPResponse, err\n\t}\n\n\tlocalVarBody, err := ioutil.ReadAll(localVarHTTPResponse.Body)\n\tlocalVarHTTPResponse.Body.Close()\n\tlocalVarHTTPResponse.Body = ioutil.NopCloser(bytes.NewBuffer(localVarBody))\n\tif err != nil {\n\t\treturn localVarReturnValue, localVarHTTPResponse, err\n\t}\n\n\tif localVarHTTPResponse.StatusCode >= 300 {\n\t\tnewErr := &GenericOpenAPIError{\n\t\t\tbody: localVarBody,\n\t\t\terror: localVarHTTPResponse.Status,\n\t\t}\n\t\tif localVarHTTPResponse.StatusCode == 400 {\n\t\t\tvar v Error\n\t\t\terr = a.client.decode(&v, localVarBody, localVarHTTPResponse.Header.Get(\"Content-Type\"))\n\t\t\tif err != nil {\n\t\t\t\tnewErr.error = err.Error()\n\t\t\t\treturn localVarReturnValue, localVarHTTPResponse, newErr\n\t\t\t}\n\t\t\tnewErr.model = v\n\t\t\treturn localVarReturnValue, localVarHTTPResponse, newErr\n\t\t}\n\t\tif localVarHTTPResponse.StatusCode == 401 {\n\t\t\tvar v Error\n\t\t\terr = a.client.decode(&v, localVarBody, localVarHTTPResponse.Header.Get(\"Content-Type\"))\n\t\t\tif err != nil {\n\t\t\t\tnewErr.error = err.Error()\n\t\t\t\treturn localVarReturnValue, localVarHTTPResponse, newErr\n\t\t\t}\n\t\t\tnewErr.model = v\n\t\t\treturn localVarReturnValue, localVarHTTPResponse, newErr\n\t\t}\n\t\tif localVarHTTPResponse.StatusCode == 403 {\n\t\t\tvar v Error\n\t\t\terr = a.client.decode(&v, localVarBody, localVarHTTPResponse.Header.Get(\"Content-Type\"))\n\t\t\tif err != nil {\n\t\t\t\tnewErr.error = err.Error()\n\t\t\t\treturn localVarReturnValue, localVarHTTPResponse, newErr\n\t\t\t}\n\t\t\tnewErr.model = v\n\t\t\treturn localVarReturnValue, localVarHTTPResponse, newErr\n\t\t}\n\t\tif localVarHTTPResponse.StatusCode == 404 {\n\t\t\tvar v Error\n\t\t\terr = a.client.decode(&v, localVarBody, localVarHTTPResponse.Header.Get(\"Content-Type\"))\n\t\t\tif err != nil {\n\t\t\t\tnewErr.error = err.Error()\n\t\t\t\treturn localVarReturnValue, localVarHTTPResponse, newErr\n\t\t\t}\n\t\t\tnewErr.model = v\n\t\t\treturn localVarReturnValue, localVarHTTPResponse, newErr\n\t\t}\n\t\tvar v Error\n\t\terr = a.client.decode(&v, localVarBody, localVarHTTPResponse.Header.Get(\"Content-Type\"))\n\t\tif err != nil {\n\t\t\tnewErr.error = err.Error()\n\t\t\treturn localVarReturnValue, localVarHTTPResponse, newErr\n\t\t}\n\t\tnewErr.model = v\n\t\treturn localVarReturnValue, localVarHTTPResponse, newErr\n\t}\n\n\terr = a.client.decode(&localVarReturnValue, localVarBody, localVarHTTPResponse.Header.Get(\"Content-Type\"))\n\tif err != nil {\n\t\tnewErr := &GenericOpenAPIError{\n\t\t\tbody: localVarBody,\n\t\t\terror: 
err.Error(),\n\t\t}\n\t\treturn localVarReturnValue, localVarHTTPResponse, newErr\n\t}\n\n\treturn localVarReturnValue, localVarHTTPResponse, nil\n}", "func (a *HyperflexApiService) PatchHyperflexServerFirmwareVersion(ctx context.Context, moid string) ApiPatchHyperflexServerFirmwareVersionRequest {\n\treturn ApiPatchHyperflexServerFirmwareVersionRequest{\n\t\tApiService: a,\n\t\tctx: ctx,\n\t\tmoid: moid,\n\t}\n}", "func (a *Client) PostHyperflexCapabilityInfosMoid(params *PostHyperflexCapabilityInfosMoidParams) (*PostHyperflexCapabilityInfosMoidCreated, error) {\n\t// TODO: Validate the params before sending\n\tif params == nil {\n\t\tparams = NewPostHyperflexCapabilityInfosMoidParams()\n\t}\n\n\tresult, err := a.transport.Submit(&runtime.ClientOperation{\n\t\tID: \"PostHyperflexCapabilityInfosMoid\",\n\t\tMethod: \"POST\",\n\t\tPathPattern: \"/hyperflex/CapabilityInfos/{moid}\",\n\t\tProducesMediaTypes: []string{\"application/json\"},\n\t\tConsumesMediaTypes: []string{\"application/json\"},\n\t\tSchemes: []string{\"https\"},\n\t\tParams: params,\n\t\tReader: &PostHyperflexCapabilityInfosMoidReader{formats: a.formats},\n\t\tContext: params.Context,\n\t\tClient: params.HTTPClient,\n\t})\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\tsuccess, ok := result.(*PostHyperflexCapabilityInfosMoidCreated)\n\tif ok {\n\t\treturn success, nil\n\t}\n\t// unexpected success response\n\tunexpectedSuccess := result.(*PostHyperflexCapabilityInfosMoidDefault)\n\treturn nil, runtime.NewAPIError(\"unexpected success response: content available as default response in error\", unexpectedSuccess, unexpectedSuccess.Code())\n}", "func (a *Client) PatchHyperflexClusterStoragePoliciesMoid(params *PatchHyperflexClusterStoragePoliciesMoidParams) (*PatchHyperflexClusterStoragePoliciesMoidCreated, error) {\n\t// TODO: Validate the params before sending\n\tif params == nil {\n\t\tparams = NewPatchHyperflexClusterStoragePoliciesMoidParams()\n\t}\n\n\tresult, err := a.transport.Submit(&runtime.ClientOperation{\n\t\tID: \"PatchHyperflexClusterStoragePoliciesMoid\",\n\t\tMethod: \"PATCH\",\n\t\tPathPattern: \"/hyperflex/ClusterStoragePolicies/{moid}\",\n\t\tProducesMediaTypes: []string{\"application/json\"},\n\t\tConsumesMediaTypes: []string{\"application/json\"},\n\t\tSchemes: []string{\"https\"},\n\t\tParams: params,\n\t\tReader: &PatchHyperflexClusterStoragePoliciesMoidReader{formats: a.formats},\n\t\tContext: params.Context,\n\t\tClient: params.HTTPClient,\n\t})\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\tsuccess, ok := result.(*PatchHyperflexClusterStoragePoliciesMoidCreated)\n\tif ok {\n\t\treturn success, nil\n\t}\n\t// unexpected success response\n\tunexpectedSuccess := result.(*PatchHyperflexClusterStoragePoliciesMoidDefault)\n\treturn nil, runtime.NewAPIError(\"unexpected success response: content available as default response in error\", unexpectedSuccess, unexpectedSuccess.Code())\n}", "func (a *HyperflexApiService) CreateHyperflexServerModelExecute(r ApiCreateHyperflexServerModelRequest) (*HyperflexServerModel, *http.Response, error) {\n\tvar (\n\t\tlocalVarHTTPMethod = http.MethodPost\n\t\tlocalVarPostBody interface{}\n\t\tformFiles []formFile\n\t\tlocalVarReturnValue *HyperflexServerModel\n\t)\n\n\tlocalBasePath, err := a.client.cfg.ServerURLWithContext(r.ctx, \"HyperflexApiService.CreateHyperflexServerModel\")\n\tif err != nil {\n\t\treturn localVarReturnValue, nil, &GenericOpenAPIError{error: err.Error()}\n\t}\n\n\tlocalVarPath := localBasePath + 
\"/api/v1/hyperflex/ServerModels\"\n\n\tlocalVarHeaderParams := make(map[string]string)\n\tlocalVarQueryParams := url.Values{}\n\tlocalVarFormParams := url.Values{}\n\tif r.hyperflexServerModel == nil {\n\t\treturn localVarReturnValue, nil, reportError(\"hyperflexServerModel is required and must be specified\")\n\t}\n\n\t// to determine the Content-Type header\n\tlocalVarHTTPContentTypes := []string{\"application/json\"}\n\n\t// set Content-Type header\n\tlocalVarHTTPContentType := selectHeaderContentType(localVarHTTPContentTypes)\n\tif localVarHTTPContentType != \"\" {\n\t\tlocalVarHeaderParams[\"Content-Type\"] = localVarHTTPContentType\n\t}\n\n\t// to determine the Accept header\n\tlocalVarHTTPHeaderAccepts := []string{\"application/json\"}\n\n\t// set Accept header\n\tlocalVarHTTPHeaderAccept := selectHeaderAccept(localVarHTTPHeaderAccepts)\n\tif localVarHTTPHeaderAccept != \"\" {\n\t\tlocalVarHeaderParams[\"Accept\"] = localVarHTTPHeaderAccept\n\t}\n\tif r.ifMatch != nil {\n\t\tlocalVarHeaderParams[\"If-Match\"] = parameterToString(*r.ifMatch, \"\")\n\t}\n\tif r.ifNoneMatch != nil {\n\t\tlocalVarHeaderParams[\"If-None-Match\"] = parameterToString(*r.ifNoneMatch, \"\")\n\t}\n\t// body params\n\tlocalVarPostBody = r.hyperflexServerModel\n\treq, err := a.client.prepareRequest(r.ctx, localVarPath, localVarHTTPMethod, localVarPostBody, localVarHeaderParams, localVarQueryParams, localVarFormParams, formFiles)\n\tif err != nil {\n\t\treturn localVarReturnValue, nil, err\n\t}\n\n\tlocalVarHTTPResponse, err := a.client.callAPI(req)\n\tif err != nil || localVarHTTPResponse == nil {\n\t\treturn localVarReturnValue, localVarHTTPResponse, err\n\t}\n\n\tlocalVarBody, err := ioutil.ReadAll(localVarHTTPResponse.Body)\n\tlocalVarHTTPResponse.Body.Close()\n\tlocalVarHTTPResponse.Body = ioutil.NopCloser(bytes.NewBuffer(localVarBody))\n\tif err != nil {\n\t\treturn localVarReturnValue, localVarHTTPResponse, err\n\t}\n\n\tif localVarHTTPResponse.StatusCode >= 300 {\n\t\tnewErr := &GenericOpenAPIError{\n\t\t\tbody: localVarBody,\n\t\t\terror: localVarHTTPResponse.Status,\n\t\t}\n\t\tif localVarHTTPResponse.StatusCode == 400 {\n\t\t\tvar v Error\n\t\t\terr = a.client.decode(&v, localVarBody, localVarHTTPResponse.Header.Get(\"Content-Type\"))\n\t\t\tif err != nil {\n\t\t\t\tnewErr.error = err.Error()\n\t\t\t\treturn localVarReturnValue, localVarHTTPResponse, newErr\n\t\t\t}\n\t\t\tnewErr.model = v\n\t\t\treturn localVarReturnValue, localVarHTTPResponse, newErr\n\t\t}\n\t\tif localVarHTTPResponse.StatusCode == 401 {\n\t\t\tvar v Error\n\t\t\terr = a.client.decode(&v, localVarBody, localVarHTTPResponse.Header.Get(\"Content-Type\"))\n\t\t\tif err != nil {\n\t\t\t\tnewErr.error = err.Error()\n\t\t\t\treturn localVarReturnValue, localVarHTTPResponse, newErr\n\t\t\t}\n\t\t\tnewErr.model = v\n\t\t\treturn localVarReturnValue, localVarHTTPResponse, newErr\n\t\t}\n\t\tif localVarHTTPResponse.StatusCode == 403 {\n\t\t\tvar v Error\n\t\t\terr = a.client.decode(&v, localVarBody, localVarHTTPResponse.Header.Get(\"Content-Type\"))\n\t\t\tif err != nil {\n\t\t\t\tnewErr.error = err.Error()\n\t\t\t\treturn localVarReturnValue, localVarHTTPResponse, newErr\n\t\t\t}\n\t\t\tnewErr.model = v\n\t\t\treturn localVarReturnValue, localVarHTTPResponse, newErr\n\t\t}\n\t\tif localVarHTTPResponse.StatusCode == 404 {\n\t\t\tvar v Error\n\t\t\terr = a.client.decode(&v, localVarBody, localVarHTTPResponse.Header.Get(\"Content-Type\"))\n\t\t\tif err != nil {\n\t\t\t\tnewErr.error = err.Error()\n\t\t\t\treturn localVarReturnValue, 
localVarHTTPResponse, newErr\n\t\t\t}\n\t\t\tnewErr.model = v\n\t\t\treturn localVarReturnValue, localVarHTTPResponse, newErr\n\t\t}\n\t\tvar v Error\n\t\terr = a.client.decode(&v, localVarBody, localVarHTTPResponse.Header.Get(\"Content-Type\"))\n\t\tif err != nil {\n\t\t\tnewErr.error = err.Error()\n\t\t\treturn localVarReturnValue, localVarHTTPResponse, newErr\n\t\t}\n\t\tnewErr.model = v\n\t\treturn localVarReturnValue, localVarHTTPResponse, newErr\n\t}\n\n\terr = a.client.decode(&localVarReturnValue, localVarBody, localVarHTTPResponse.Header.Get(\"Content-Type\"))\n\tif err != nil {\n\t\tnewErr := &GenericOpenAPIError{\n\t\t\tbody: localVarBody,\n\t\t\terror: err.Error(),\n\t\t}\n\t\treturn localVarReturnValue, localVarHTTPResponse, newErr\n\t}\n\n\treturn localVarReturnValue, localVarHTTPResponse, nil\n}", "func (a *Client) PatchHyperflexClusterProfilesMoid(params *PatchHyperflexClusterProfilesMoidParams) (*PatchHyperflexClusterProfilesMoidCreated, error) {\n\t// TODO: Validate the params before sending\n\tif params == nil {\n\t\tparams = NewPatchHyperflexClusterProfilesMoidParams()\n\t}\n\n\tresult, err := a.transport.Submit(&runtime.ClientOperation{\n\t\tID: \"PatchHyperflexClusterProfilesMoid\",\n\t\tMethod: \"PATCH\",\n\t\tPathPattern: \"/hyperflex/ClusterProfiles/{moid}\",\n\t\tProducesMediaTypes: []string{\"application/json\"},\n\t\tConsumesMediaTypes: []string{\"application/json\"},\n\t\tSchemes: []string{\"https\"},\n\t\tParams: params,\n\t\tReader: &PatchHyperflexClusterProfilesMoidReader{formats: a.formats},\n\t\tContext: params.Context,\n\t\tClient: params.HTTPClient,\n\t})\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\tsuccess, ok := result.(*PatchHyperflexClusterProfilesMoidCreated)\n\tif ok {\n\t\treturn success, nil\n\t}\n\t// unexpected success response\n\tunexpectedSuccess := result.(*PatchHyperflexClusterProfilesMoidDefault)\n\treturn nil, runtime.NewAPIError(\"unexpected success response: content available as default response in error\", unexpectedSuccess, unexpectedSuccess.Code())\n}", "func createServer(w http.ResponseWriter, r *http.Request) {\n w.Header().Set(\"Content-Type\", \"application/json\")\n\n\tvar newServer server\n\treqBody, err := ioutil.ReadAll(r.Body)\n\tif err != nil {\n\t fmt.Println(err)\n\t\tfmt.Fprintf(w, \"Kindly enter data with the server's address, MSA and MTA network addresses only in order to create new server\")\n\t w.WriteHeader(http.StatusInternalServerError)\n\t return\n\t}\n\tnewServer.ID = strconv.Itoa(len(servers)+1)\n\n\tjson.Unmarshal(reqBody, &newServer)\n\tservers = append(servers, newServer)\n\tw.WriteHeader(http.StatusCreated)\n\n\tjson.NewEncoder(w).Encode(newServer)\n}", "func (a *HyperflexApiService) CreateHyperflexServerFirmwareVersion(ctx context.Context) ApiCreateHyperflexServerFirmwareVersionRequest {\n\treturn ApiCreateHyperflexServerFirmwareVersionRequest{\n\t\tApiService: a,\n\t\tctx: ctx,\n\t}\n}", "func (a *HyperflexApiService) PatchHyperflexServerFirmwareVersionEntry(ctx context.Context, moid string) ApiPatchHyperflexServerFirmwareVersionEntryRequest {\n\treturn ApiPatchHyperflexServerFirmwareVersionEntryRequest{\n\t\tApiService: a,\n\t\tctx: ctx,\n\t\tmoid: moid,\n\t}\n}", "func (a *HyperflexApiService) PatchHyperflexHxdpVersion(ctx context.Context, moid string) ApiPatchHyperflexHxdpVersionRequest {\n\treturn ApiPatchHyperflexHxdpVersionRequest{\n\t\tApiService: a,\n\t\tctx: ctx,\n\t\tmoid: moid,\n\t}\n}", "func (a *Client) PostHyperflexClusterProfilesMoid(params *PostHyperflexClusterProfilesMoidParams) 
(*PostHyperflexClusterProfilesMoidCreated, error) {\n\t// TODO: Validate the params before sending\n\tif params == nil {\n\t\tparams = NewPostHyperflexClusterProfilesMoidParams()\n\t}\n\n\tresult, err := a.transport.Submit(&runtime.ClientOperation{\n\t\tID: \"PostHyperflexClusterProfilesMoid\",\n\t\tMethod: \"POST\",\n\t\tPathPattern: \"/hyperflex/ClusterProfiles/{moid}\",\n\t\tProducesMediaTypes: []string{\"application/json\"},\n\t\tConsumesMediaTypes: []string{\"application/json\"},\n\t\tSchemes: []string{\"https\"},\n\t\tParams: params,\n\t\tReader: &PostHyperflexClusterProfilesMoidReader{formats: a.formats},\n\t\tContext: params.Context,\n\t\tClient: params.HTTPClient,\n\t})\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\tsuccess, ok := result.(*PostHyperflexClusterProfilesMoidCreated)\n\tif ok {\n\t\treturn success, nil\n\t}\n\t// unexpected success response\n\tunexpectedSuccess := result.(*PostHyperflexClusterProfilesMoidDefault)\n\treturn nil, runtime.NewAPIError(\"unexpected success response: content available as default response in error\", unexpectedSuccess, unexpectedSuccess.Code())\n}", "func (a *HyperflexApiService) CreateHyperflexServerFirmwareVersionEntry(ctx context.Context) ApiCreateHyperflexServerFirmwareVersionEntryRequest {\n\treturn ApiCreateHyperflexServerFirmwareVersionEntryRequest{\n\t\tApiService: a,\n\t\tctx: ctx,\n\t}\n}", "func (m *TermStoreRequestBuilder) CreatePatchRequestInformation(ctx context.Context, body ia3c27b33aa3d3ed80f9de797c48fbb8ed73f13887e301daf51f08450e9a634a3.Storeable, requestConfiguration *TermStoreRequestBuilderPatchRequestConfiguration)(*i2ae4187f7daee263371cb1c977df639813ab50ffa529013b7437480d1ec0158f.RequestInformation, error) {\n requestInfo := i2ae4187f7daee263371cb1c977df639813ab50ffa529013b7437480d1ec0158f.NewRequestInformation()\n requestInfo.UrlTemplate = m.urlTemplate\n requestInfo.PathParameters = m.pathParameters\n requestInfo.Method = i2ae4187f7daee263371cb1c977df639813ab50ffa529013b7437480d1ec0158f.PATCH\n requestInfo.Headers[\"Accept\"] = \"application/json\"\n requestInfo.SetContentFromParsable(ctx, m.requestAdapter, \"application/json\", body)\n if requestConfiguration != nil {\n requestInfo.AddRequestHeaders(requestConfiguration.Headers)\n requestInfo.AddRequestOptions(requestConfiguration.Options)\n }\n return requestInfo, nil\n}", "func (a *HyperflexApiService) PatchHyperflexInitiatorGroup(ctx context.Context, moid string) ApiPatchHyperflexInitiatorGroupRequest {\n\treturn ApiPatchHyperflexInitiatorGroupRequest{\n\t\tApiService: a,\n\t\tctx: ctx,\n\t\tmoid: moid,\n\t}\n}", "func NewGetHyperflexServerModelsMoidDefault(code int) *GetHyperflexServerModelsMoidDefault {\n\treturn &GetHyperflexServerModelsMoidDefault{\n\t\t_statusCode: code,\n\t}\n}", "func (a *Client) PostHyperflexClusterStoragePoliciesMoid(params *PostHyperflexClusterStoragePoliciesMoidParams) (*PostHyperflexClusterStoragePoliciesMoidCreated, error) {\n\t// TODO: Validate the params before sending\n\tif params == nil {\n\t\tparams = NewPostHyperflexClusterStoragePoliciesMoidParams()\n\t}\n\n\tresult, err := a.transport.Submit(&runtime.ClientOperation{\n\t\tID: \"PostHyperflexClusterStoragePoliciesMoid\",\n\t\tMethod: \"POST\",\n\t\tPathPattern: \"/hyperflex/ClusterStoragePolicies/{moid}\",\n\t\tProducesMediaTypes: []string{\"application/json\"},\n\t\tConsumesMediaTypes: []string{\"application/json\"},\n\t\tSchemes: []string{\"https\"},\n\t\tParams: params,\n\t\tReader: &PostHyperflexClusterStoragePoliciesMoidReader{formats: a.formats},\n\t\tContext: 
params.Context,\n\t\tClient: params.HTTPClient,\n\t})\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\tsuccess, ok := result.(*PostHyperflexClusterStoragePoliciesMoidCreated)\n\tif ok {\n\t\treturn success, nil\n\t}\n\t// unexpected success response\n\tunexpectedSuccess := result.(*PostHyperflexClusterStoragePoliciesMoidDefault)\n\treturn nil, runtime.NewAPIError(\"unexpected success response: content available as default response in error\", unexpectedSuccess, unexpectedSuccess.Code())\n}", "func (m *ConnectedOrganizationItemRequestBuilder) CreatePatchRequestInformation(ctx context.Context, body iadcd81124412c61e647227ecfc4449d8bba17de0380ddda76f641a29edf2b242.ConnectedOrganizationable, requestConfiguration *ConnectedOrganizationItemRequestBuilderPatchRequestConfiguration)(*i2ae4187f7daee263371cb1c977df639813ab50ffa529013b7437480d1ec0158f.RequestInformation, error) {\n requestInfo := i2ae4187f7daee263371cb1c977df639813ab50ffa529013b7437480d1ec0158f.NewRequestInformation()\n requestInfo.UrlTemplate = m.urlTemplate\n requestInfo.PathParameters = m.pathParameters\n requestInfo.Method = i2ae4187f7daee263371cb1c977df639813ab50ffa529013b7437480d1ec0158f.PATCH\n requestInfo.Headers[\"Accept\"] = \"application/json\"\n requestInfo.SetContentFromParsable(ctx, m.requestAdapter, \"application/json\", body)\n if requestConfiguration != nil {\n requestInfo.AddRequestHeaders(requestConfiguration.Headers)\n requestInfo.AddRequestOptions(requestConfiguration.Options)\n }\n return requestInfo, nil\n}", "func (m *AssignedToTaskBoardFormatRequestBuilder) CreatePatchRequestInformation(ctx context.Context, body iadcd81124412c61e647227ecfc4449d8bba17de0380ddda76f641a29edf2b242.PlannerAssignedToTaskBoardTaskFormatable, requestConfiguration *AssignedToTaskBoardFormatRequestBuilderPatchRequestConfiguration)(*i2ae4187f7daee263371cb1c977df639813ab50ffa529013b7437480d1ec0158f.RequestInformation, error) {\n requestInfo := i2ae4187f7daee263371cb1c977df639813ab50ffa529013b7437480d1ec0158f.NewRequestInformation()\n requestInfo.UrlTemplate = m.urlTemplate\n requestInfo.PathParameters = m.pathParameters\n requestInfo.Method = i2ae4187f7daee263371cb1c977df639813ab50ffa529013b7437480d1ec0158f.PATCH\n requestInfo.Headers[\"Accept\"] = \"application/json\"\n requestInfo.SetContentFromParsable(ctx, m.requestAdapter, \"application/json\", body)\n if requestConfiguration != nil {\n requestInfo.AddRequestHeaders(requestConfiguration.Headers)\n requestInfo.AddRequestOptions(requestConfiguration.Options)\n }\n return requestInfo, nil\n}", "func CreateServerMatchmakingTicket(settings *playfab.Settings, postData *CreateServerMatchmakingTicketRequestModel, entityToken string) (*CreateMatchmakingTicketResultModel, error) {\n if entityToken == \"\" {\n return nil, playfab.NewCustomError(\"entityToken should not be an empty string\", playfab.ErrorGeneric)\n }\n b, errMarshal := json.Marshal(postData)\n if errMarshal != nil {\n return nil, playfab.NewCustomError(errMarshal.Error(), playfab.ErrorMarshal)\n }\n\n sourceMap, err := playfab.Request(settings, b, \"/Match/CreateServerMatchmakingTicket\", \"X-EntityToken\", entityToken)\n if err != nil {\n return nil, err\n }\n \n result := &CreateMatchmakingTicketResultModel{}\n\n config := mapstructure.DecoderConfig{\n DecodeHook: playfab.StringToDateTimeHook,\n Result: result,\n }\n \n decoder, errDecoding := mapstructure.NewDecoder(&config)\n if errDecoding != nil {\n return nil, playfab.NewCustomError(errDecoding.Error(), playfab.ErrorDecoding)\n }\n \n errDecoding = 
decoder.Decode(sourceMap)\n if errDecoding != nil {\n return nil, playfab.NewCustomError(errDecoding.Error(), playfab.ErrorDecoding)\n }\n\n return result, nil\n}", "func (a *HyperflexApiService) PatchHyperflexTarget(ctx context.Context, moid string) ApiPatchHyperflexTargetRequest {\n\treturn ApiPatchHyperflexTargetRequest{\n\t\tApiService: a,\n\t\tctx: ctx,\n\t\tmoid: moid,\n\t}\n}", "func (m *ManagedDeviceItemRequestBuilder) CreatePatchRequestInformation(ctx context.Context, body iadcd81124412c61e647227ecfc4449d8bba17de0380ddda76f641a29edf2b242.ManagedDeviceable, requestConfiguration *ManagedDeviceItemRequestBuilderPatchRequestConfiguration)(*i2ae4187f7daee263371cb1c977df639813ab50ffa529013b7437480d1ec0158f.RequestInformation, error) {\n requestInfo := i2ae4187f7daee263371cb1c977df639813ab50ffa529013b7437480d1ec0158f.NewRequestInformation()\n requestInfo.UrlTemplate = m.urlTemplate\n requestInfo.PathParameters = m.pathParameters\n requestInfo.Method = i2ae4187f7daee263371cb1c977df639813ab50ffa529013b7437480d1ec0158f.PATCH\n requestInfo.Headers[\"Accept\"] = \"application/json\"\n requestInfo.SetContentFromParsable(ctx, m.requestAdapter, \"application/json\", body)\n if requestConfiguration != nil {\n requestInfo.AddRequestHeaders(requestConfiguration.Headers)\n requestInfo.AddRequestOptions(requestConfiguration.Options)\n }\n return requestInfo, nil\n}", "func (m *TeamsAppItemRequestBuilder) CreatePatchRequestInformation(ctx context.Context, body iadcd81124412c61e647227ecfc4449d8bba17de0380ddda76f641a29edf2b242.TeamsAppable, requestConfiguration *TeamsAppItemRequestBuilderPatchRequestConfiguration)(*i2ae4187f7daee263371cb1c977df639813ab50ffa529013b7437480d1ec0158f.RequestInformation, error) {\n requestInfo := i2ae4187f7daee263371cb1c977df639813ab50ffa529013b7437480d1ec0158f.NewRequestInformation()\n requestInfo.UrlTemplate = m.urlTemplate\n requestInfo.PathParameters = m.pathParameters\n requestInfo.Method = i2ae4187f7daee263371cb1c977df639813ab50ffa529013b7437480d1ec0158f.PATCH\n requestInfo.Headers[\"Accept\"] = \"application/json\"\n requestInfo.SetContentFromParsable(ctx, m.requestAdapter, \"application/json\", body)\n if requestConfiguration != nil {\n requestInfo.AddRequestHeaders(requestConfiguration.Headers)\n requestInfo.AddRequestOptions(requestConfiguration.Options)\n }\n return requestInfo, nil\n}", "func (a *HyperflexApiService) UpdateHyperflexServerModel(ctx context.Context, moid string) ApiUpdateHyperflexServerModelRequest {\n\treturn ApiUpdateHyperflexServerModelRequest{\n\t\tApiService: a,\n\t\tctx: ctx,\n\t\tmoid: moid,\n\t}\n}", "func (m *BrandingRequestBuilder) CreatePatchRequestInformation(ctx context.Context, body iadcd81124412c61e647227ecfc4449d8bba17de0380ddda76f641a29edf2b242.OrganizationalBrandingable, requestConfiguration *BrandingRequestBuilderPatchRequestConfiguration)(*i2ae4187f7daee263371cb1c977df639813ab50ffa529013b7437480d1ec0158f.RequestInformation, error) {\n requestInfo := i2ae4187f7daee263371cb1c977df639813ab50ffa529013b7437480d1ec0158f.NewRequestInformation()\n requestInfo.UrlTemplate = m.urlTemplate\n requestInfo.PathParameters = m.pathParameters\n requestInfo.Method = i2ae4187f7daee263371cb1c977df639813ab50ffa529013b7437480d1ec0158f.PATCH\n requestInfo.Headers[\"Accept\"] = \"application/json\"\n requestInfo.SetContentFromParsable(ctx, m.requestAdapter, \"application/json\", body)\n if requestConfiguration != nil {\n requestInfo.AddRequestHeaders(requestConfiguration.Headers)\n requestInfo.AddRequestOptions(requestConfiguration.Options)\n }\n 
return requestInfo, nil\n}", "func (a *FrinxOpenconfigNetworkInstanceApiService) PutFrinxOpenconfigNetworkInstanceNetworkInstancesNetworkInstanceMplsSignalingProtocolsRsvpTeInterfaceAttributesInterfaceHellos(ctx context.Context, name string, interfaceId string, frinxOpenconfigMplsRsvpMplsrsvphellosHellosBodyParam FrinxOpenconfigMplsRsvpMplsrsvphellosHellosRequest1, nodeId string) (*http.Response, error) {\n\tvar (\n\t\tlocalVarHttpMethod = strings.ToUpper(\"Put\")\n\t\tlocalVarPostBody interface{}\n\t\tlocalVarFileName string\n\t\tlocalVarFileBytes []byte\n\t\t\n\t)\n\n\t// create path and map variables\n\tlocalVarPath := a.client.cfg.BasePath + \"/config/network-topology:network-topology/network-topology:topology/unified/network-topology:node/{node-id}/yang-ext:mount/frinx-openconfig-network-instance:network-instances/frinx-openconfig-network-instance:network-instance/{name}/frinx-openconfig-network-instance:mpls/frinx-openconfig-network-instance:signaling-protocols/frinx-openconfig-network-instance:rsvp-te/frinx-openconfig-network-instance:interface-attributes/frinx-openconfig-network-instance:interface/{interface-id}/frinx-openconfig-network-instance:hellos/\"\n\tlocalVarPath = strings.Replace(localVarPath, \"{\"+\"name\"+\"}\", fmt.Sprintf(\"%v\", name), -1)\n\tlocalVarPath = strings.Replace(localVarPath, \"{\"+\"interface-id\"+\"}\", fmt.Sprintf(\"%v\", interfaceId), -1)\n\tlocalVarPath = strings.Replace(localVarPath, \"{\"+\"node-id\"+\"}\", fmt.Sprintf(\"%v\", nodeId), -1)\n\n\tlocalVarHeaderParams := make(map[string]string)\n\tlocalVarQueryParams := url.Values{}\n\tlocalVarFormParams := url.Values{}\n\n\t// to determine the Content-Type header\n\tlocalVarHttpContentTypes := []string{\"application/json\", \"application/xml\"}\n\n\t// set Content-Type header\n\tlocalVarHttpContentType := selectHeaderContentType(localVarHttpContentTypes)\n\tif localVarHttpContentType != \"\" {\n\t\tlocalVarHeaderParams[\"Content-Type\"] = localVarHttpContentType\n\t}\n\n\t// to determine the Accept header\n\tlocalVarHttpHeaderAccepts := []string{\"application/json\", \"application/xml\"}\n\n\t// set Accept header\n\tlocalVarHttpHeaderAccept := selectHeaderAccept(localVarHttpHeaderAccepts)\n\tif localVarHttpHeaderAccept != \"\" {\n\t\tlocalVarHeaderParams[\"Accept\"] = localVarHttpHeaderAccept\n\t}\n\t// body params\n\tlocalVarPostBody = &frinxOpenconfigMplsRsvpMplsrsvphellosHellosBodyParam\n\tr, err := a.client.prepareRequest(ctx, localVarPath, localVarHttpMethod, localVarPostBody, localVarHeaderParams, localVarQueryParams, localVarFormParams, localVarFileName, localVarFileBytes)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\n\tlocalVarHttpResponse, err := a.client.callAPI(r)\n\tif err != nil || localVarHttpResponse == nil {\n\t\treturn localVarHttpResponse, err\n\t}\n\n\tlocalVarBody, err := ioutil.ReadAll(localVarHttpResponse.Body)\n\tlocalVarHttpResponse.Body.Close()\n\tif err != nil {\n\t\treturn localVarHttpResponse, err\n\t}\n\n\n\tif localVarHttpResponse.StatusCode >= 300 {\n\t\tnewErr := GenericSwaggerError{\n\t\t\tbody: localVarBody,\n\t\t\terror: localVarHttpResponse.Status,\n\t\t}\n\t\t\n\t\treturn localVarHttpResponse, newErr\n\t}\n\n\treturn localVarHttpResponse, nil\n}", "func (m *ManagedAppRegistrationItemRequestBuilder) CreatePatchRequestInformation(ctx context.Context, body iadcd81124412c61e647227ecfc4449d8bba17de0380ddda76f641a29edf2b242.ManagedAppRegistrationable, requestConfiguration 
*ManagedAppRegistrationItemRequestBuilderPatchRequestConfiguration)(*i2ae4187f7daee263371cb1c977df639813ab50ffa529013b7437480d1ec0158f.RequestInformation, error) {\n requestInfo := i2ae4187f7daee263371cb1c977df639813ab50ffa529013b7437480d1ec0158f.NewRequestInformation()\n requestInfo.UrlTemplate = m.urlTemplate\n requestInfo.PathParameters = m.pathParameters\n requestInfo.Method = i2ae4187f7daee263371cb1c977df639813ab50ffa529013b7437480d1ec0158f.PATCH\n requestInfo.Headers[\"Accept\"] = \"application/json\"\n requestInfo.SetContentFromParsable(ctx, m.requestAdapter, \"application/json\", body)\n if requestConfiguration != nil {\n requestInfo.AddRequestHeaders(requestConfiguration.Headers)\n requestInfo.AddRequestOptions(requestConfiguration.Options)\n }\n return requestInfo, nil\n}", "func (m *FeatureRolloutPolicyItemRequestBuilder) CreatePatchRequestInformation(ctx context.Context, body iadcd81124412c61e647227ecfc4449d8bba17de0380ddda76f641a29edf2b242.FeatureRolloutPolicyable, requestConfiguration *FeatureRolloutPolicyItemRequestBuilderPatchRequestConfiguration)(*i2ae4187f7daee263371cb1c977df639813ab50ffa529013b7437480d1ec0158f.RequestInformation, error) {\n requestInfo := i2ae4187f7daee263371cb1c977df639813ab50ffa529013b7437480d1ec0158f.NewRequestInformation()\n requestInfo.UrlTemplate = m.urlTemplate\n requestInfo.PathParameters = m.pathParameters\n requestInfo.Method = i2ae4187f7daee263371cb1c977df639813ab50ffa529013b7437480d1ec0158f.PATCH\n requestInfo.Headers[\"Accept\"] = \"application/json\"\n requestInfo.SetContentFromParsable(ctx, m.requestAdapter, \"application/json\", body)\n if requestConfiguration != nil {\n requestInfo.AddRequestHeaders(requestConfiguration.Headers)\n requestInfo.AddRequestOptions(requestConfiguration.Options)\n }\n return requestInfo, nil\n}", "func CreateOrUpdateMacBinding(sbClient libovsdbclient.Client, mb *sbdb.MACBinding, fields ...interface{}) error {\n\tif len(fields) == 0 {\n\t\tfields = onModelUpdatesAllNonDefault()\n\t}\n\n\topModel := operationModel{\n\t\tModel: mb,\n\t\tOnModelUpdates: fields,\n\t\tErrNotFound: false,\n\t\tBulkOp: false,\n\t}\n\n\tm := newModelClient(sbClient)\n\t_, err := m.CreateOrUpdate(opModel)\n\treturn err\n}", "func (r ApiPatchHyperflexServerModelRequest) HyperflexServerModel(hyperflexServerModel HyperflexServerModel) ApiPatchHyperflexServerModelRequest {\n\tr.hyperflexServerModel = &hyperflexServerModel\n\treturn r\n}", "func CreatePatch(in *ecr.Repository, target *v1alpha1.RepositoryParameters) (*v1alpha1.RepositoryParameters, error) {\n\tcurrentParams := &v1alpha1.RepositoryParameters{}\n\tLateInitializeRepository(currentParams, in)\n\n\tjsonPatch, err := awsclients.CreateJSONPatch(currentParams, target)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\tpatch := &v1alpha1.RepositoryParameters{}\n\tif err := json.Unmarshal(jsonPatch, patch); err != nil {\n\t\treturn nil, err\n\t}\n\treturn patch, nil\n}", "func CreateUpdateMeshFeatureRequest() (request *UpdateMeshFeatureRequest) {\n\trequest = &UpdateMeshFeatureRequest{\n\t\tRpcRequest: &requests.RpcRequest{},\n\t}\n\trequest.InitWithApiInfo(\"servicemesh\", \"2020-01-11\", \"UpdateMeshFeature\", \"servicemesh\", \"openAPI\")\n\trequest.Method = requests.POST\n\treturn\n}", "func (m *TeamworkRequestBuilder) CreatePatchRequestInformation(ctx context.Context, body iadcd81124412c61e647227ecfc4449d8bba17de0380ddda76f641a29edf2b242.UserTeamworkable, requestConfiguration 
*TeamworkRequestBuilderPatchRequestConfiguration)(*i2ae4187f7daee263371cb1c977df639813ab50ffa529013b7437480d1ec0158f.RequestInformation, error) {\n requestInfo := i2ae4187f7daee263371cb1c977df639813ab50ffa529013b7437480d1ec0158f.NewRequestInformation()\n requestInfo.UrlTemplate = m.urlTemplate\n requestInfo.PathParameters = m.pathParameters\n requestInfo.Method = i2ae4187f7daee263371cb1c977df639813ab50ffa529013b7437480d1ec0158f.PATCH\n requestInfo.Headers[\"Accept\"] = \"application/json\"\n requestInfo.SetContentFromParsable(ctx, m.requestAdapter, \"application/json\", body)\n if requestConfiguration != nil {\n requestInfo.AddRequestHeaders(requestConfiguration.Headers)\n requestInfo.AddRequestOptions(requestConfiguration.Options)\n }\n return requestInfo, nil\n}", "func (m *TeamItemRequestBuilder) CreatePatchRequestInformation(ctx context.Context, body iadcd81124412c61e647227ecfc4449d8bba17de0380ddda76f641a29edf2b242.Teamable, requestConfiguration *TeamItemRequestBuilderPatchRequestConfiguration)(*i2ae4187f7daee263371cb1c977df639813ab50ffa529013b7437480d1ec0158f.RequestInformation, error) {\n requestInfo := i2ae4187f7daee263371cb1c977df639813ab50ffa529013b7437480d1ec0158f.NewRequestInformation()\n requestInfo.UrlTemplate = m.urlTemplate\n requestInfo.PathParameters = m.pathParameters\n requestInfo.Method = i2ae4187f7daee263371cb1c977df639813ab50ffa529013b7437480d1ec0158f.PATCH\n requestInfo.Headers[\"Accept\"] = \"application/json\"\n requestInfo.SetContentFromParsable(ctx, m.requestAdapter, \"application/json\", body)\n if requestConfiguration != nil {\n requestInfo.AddRequestHeaders(requestConfiguration.Headers)\n requestInfo.AddRequestOptions(requestConfiguration.Options)\n }\n return requestInfo, nil\n}", "func (r ApiCreateHyperflexServerModelRequest) HyperflexServerModel(hyperflexServerModel HyperflexServerModel) ApiCreateHyperflexServerModelRequest {\n\tr.hyperflexServerModel = &hyperflexServerModel\n\treturn r\n}", "func (a *HyperflexApiService) CreateHyperflexHxdpVersion(ctx context.Context) ApiCreateHyperflexHxdpVersionRequest {\n\treturn ApiCreateHyperflexHxdpVersionRequest{\n\t\tApiService: a,\n\t\tctx: ctx,\n\t}\n}", "func NewPostHyperflexHxdpVersionsMoidDefault(code int) *PostHyperflexHxdpVersionsMoidDefault {\n\treturn &PostHyperflexHxdpVersionsMoidDefault{\n\t\t_statusCode: code,\n\t}\n}", "func (m *MultiValueLegacyExtendedPropertyItemRequestBuilder) CreatePatchRequestInformation(ctx context.Context, body iadcd81124412c61e647227ecfc4449d8bba17de0380ddda76f641a29edf2b242.MultiValueLegacyExtendedPropertyable, requestConfiguration *MultiValueLegacyExtendedPropertyItemRequestBuilderPatchRequestConfiguration)(*i2ae4187f7daee263371cb1c977df639813ab50ffa529013b7437480d1ec0158f.RequestInformation, error) {\n requestInfo := i2ae4187f7daee263371cb1c977df639813ab50ffa529013b7437480d1ec0158f.NewRequestInformation()\n requestInfo.UrlTemplate = m.urlTemplate\n requestInfo.PathParameters = m.pathParameters\n requestInfo.Method = i2ae4187f7daee263371cb1c977df639813ab50ffa529013b7437480d1ec0158f.PATCH\n requestInfo.Headers[\"Accept\"] = \"application/json\"\n requestInfo.SetContentFromParsable(ctx, m.requestAdapter, \"application/json\", body)\n if requestConfiguration != nil {\n requestInfo.AddRequestHeaders(requestConfiguration.Headers)\n requestInfo.AddRequestOptions(requestConfiguration.Options)\n }\n return requestInfo, nil\n}", "func (a *DefaultApiService) CreateVM(ctx _context.Context, vmConfig VmConfig) (*_nethttp.Response, error) {\n\tvar (\n\t\tlocalVarHTTPMethod = 
_nethttp.MethodPut\n\t\tlocalVarPostBody interface{}\n\t\tlocalVarFormFileName string\n\t\tlocalVarFileName string\n\t\tlocalVarFileBytes []byte\n\t)\n\n\t// create path and map variables\n\tlocalVarPath := a.client.cfg.BasePath + \"/vm.create\"\n\tlocalVarHeaderParams := make(map[string]string)\n\tlocalVarQueryParams := _neturl.Values{}\n\tlocalVarFormParams := _neturl.Values{}\n\n\t// to determine the Content-Type header\n\tlocalVarHTTPContentTypes := []string{\"application/json\"}\n\n\t// set Content-Type header\n\tlocalVarHTTPContentType := selectHeaderContentType(localVarHTTPContentTypes)\n\tif localVarHTTPContentType != \"\" {\n\t\tlocalVarHeaderParams[\"Content-Type\"] = localVarHTTPContentType\n\t}\n\n\t// to determine the Accept header\n\tlocalVarHTTPHeaderAccepts := []string{}\n\n\t// set Accept header\n\tlocalVarHTTPHeaderAccept := selectHeaderAccept(localVarHTTPHeaderAccepts)\n\tif localVarHTTPHeaderAccept != \"\" {\n\t\tlocalVarHeaderParams[\"Accept\"] = localVarHTTPHeaderAccept\n\t}\n\t// body params\n\tlocalVarPostBody = &vmConfig\n\tr, err := a.client.prepareRequest(ctx, localVarPath, localVarHTTPMethod, localVarPostBody, localVarHeaderParams, localVarQueryParams, localVarFormParams, localVarFormFileName, localVarFileName, localVarFileBytes)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\n\tlocalVarHTTPResponse, err := a.client.callAPI(r)\n\tif err != nil || localVarHTTPResponse == nil {\n\t\treturn localVarHTTPResponse, err\n\t}\n\n\tlocalVarBody, err := _ioutil.ReadAll(localVarHTTPResponse.Body)\n\tlocalVarHTTPResponse.Body.Close()\n\tif err != nil {\n\t\treturn localVarHTTPResponse, err\n\t}\n\n\tif localVarHTTPResponse.StatusCode >= 300 {\n\t\tnewErr := GenericOpenAPIError{\n\t\t\tbody: localVarBody,\n\t\t\terror: localVarHTTPResponse.Status,\n\t\t}\n\t\treturn localVarHTTPResponse, newErr\n\t}\n\n\treturn localVarHTTPResponse, nil\n}", "func (m *StoreItemRequestBuilder) CreatePatchRequestInformation(ctx context.Context, body ia3c27b33aa3d3ed80f9de797c48fbb8ed73f13887e301daf51f08450e9a634a3.Storeable, requestConfiguration *StoreItemRequestBuilderPatchRequestConfiguration)(*i2ae4187f7daee263371cb1c977df639813ab50ffa529013b7437480d1ec0158f.RequestInformation, error) {\n requestInfo := i2ae4187f7daee263371cb1c977df639813ab50ffa529013b7437480d1ec0158f.NewRequestInformation()\n requestInfo.UrlTemplate = m.urlTemplate\n requestInfo.PathParameters = m.pathParameters\n requestInfo.Method = i2ae4187f7daee263371cb1c977df639813ab50ffa529013b7437480d1ec0158f.PATCH\n requestInfo.Headers[\"Accept\"] = \"application/json\"\n requestInfo.SetContentFromParsable(ctx, m.requestAdapter, \"application/json\", body)\n if requestConfiguration != nil {\n requestInfo.AddRequestHeaders(requestConfiguration.Headers)\n requestInfo.AddRequestOptions(requestConfiguration.Options)\n }\n return requestInfo, nil\n}", "func (client VersionsClient) CreateOrUpdateResponder(resp *http.Response) (result VersionTemplatespecs, err error) {\n\terr = autorest.Respond(\n\t\tresp,\n\t\tazure.WithErrorUnlessStatusCode(http.StatusOK, http.StatusCreated),\n\t\tautorest.ByUnmarshallingJSON(&result),\n\t\tautorest.ByClosing())\n\tresult.Response = autorest.Response{Response: resp}\n\treturn\n}", "func (m *AccessReviewHistoryDefinitionItemRequestBuilder) CreatePatchRequestInformation(ctx context.Context, body iadcd81124412c61e647227ecfc4449d8bba17de0380ddda76f641a29edf2b242.AccessReviewHistoryDefinitionable, requestConfiguration 
*AccessReviewHistoryDefinitionItemRequestBuilderPatchRequestConfiguration)(*i2ae4187f7daee263371cb1c977df639813ab50ffa529013b7437480d1ec0158f.RequestInformation, error) {\n requestInfo := i2ae4187f7daee263371cb1c977df639813ab50ffa529013b7437480d1ec0158f.NewRequestInformation()\n requestInfo.UrlTemplate = m.urlTemplate\n requestInfo.PathParameters = m.pathParameters\n requestInfo.Method = i2ae4187f7daee263371cb1c977df639813ab50ffa529013b7437480d1ec0158f.PATCH\n requestInfo.Headers[\"Accept\"] = \"application/json\"\n requestInfo.SetContentFromParsable(ctx, m.requestAdapter, \"application/json\", body)\n if requestConfiguration != nil {\n requestInfo.AddRequestHeaders(requestConfiguration.Headers)\n requestInfo.AddRequestOptions(requestConfiguration.Options)\n }\n return requestInfo, nil\n}", "func (a *HyperflexApiService) PatchHyperflexLun(ctx context.Context, moid string) ApiPatchHyperflexLunRequest {\n\treturn ApiPatchHyperflexLunRequest{\n\t\tApiService: a,\n\t\tctx: ctx,\n\t\tmoid: moid,\n\t}\n}", "func NewPatchGraphicsControllersMoidCreated() *PatchGraphicsControllersMoidCreated {\n\treturn &PatchGraphicsControllersMoidCreated{}\n}", "func (a *HyperflexApiService) PatchHyperflexDrive(ctx context.Context, moid string) ApiPatchHyperflexDriveRequest {\n\treturn ApiPatchHyperflexDriveRequest{\n\t\tApiService: a,\n\t\tctx: ctx,\n\t\tmoid: moid,\n\t}\n}", "func (m *UserFlowLanguagePageItemRequestBuilder) CreatePatchRequestInformation(ctx context.Context, body iadcd81124412c61e647227ecfc4449d8bba17de0380ddda76f641a29edf2b242.UserFlowLanguagePageable, requestConfiguration *UserFlowLanguagePageItemRequestBuilderPatchRequestConfiguration)(*i2ae4187f7daee263371cb1c977df639813ab50ffa529013b7437480d1ec0158f.RequestInformation, error) {\n requestInfo := i2ae4187f7daee263371cb1c977df639813ab50ffa529013b7437480d1ec0158f.NewRequestInformation()\n requestInfo.UrlTemplate = m.urlTemplate\n requestInfo.PathParameters = m.pathParameters\n requestInfo.Method = i2ae4187f7daee263371cb1c977df639813ab50ffa529013b7437480d1ec0158f.PATCH\n requestInfo.Headers[\"Accept\"] = \"application/json\"\n requestInfo.SetContentFromParsable(ctx, m.requestAdapter, \"application/json\", body)\n if requestConfiguration != nil {\n requestInfo.AddRequestHeaders(requestConfiguration.Headers)\n requestInfo.AddRequestOptions(requestConfiguration.Options)\n }\n return requestInfo, nil\n}", "func (a *HyperflexApiService) DeleteHyperflexServerModel(ctx context.Context, moid string) ApiDeleteHyperflexServerModelRequest {\n\treturn ApiDeleteHyperflexServerModelRequest{\n\t\tApiService: a,\n\t\tctx: ctx,\n\t\tmoid: moid,\n\t}\n}", "func NewPostHyperflexHxdpVersionsMoidCreated() *PostHyperflexHxdpVersionsMoidCreated {\n\treturn &PostHyperflexHxdpVersionsMoidCreated{}\n}", "func (a *HyperflexApiService) PatchHyperflexCluster(ctx context.Context, moid string) ApiPatchHyperflexClusterRequest {\n\treturn ApiPatchHyperflexClusterRequest{\n\t\tApiService: a,\n\t\tctx: ctx,\n\t\tmoid: moid,\n\t}\n}", "func (m *FileRequestBuilder) CreatePatchRequestInformation(ctx context.Context, body iadcd81124412c61e647227ecfc4449d8bba17de0380ddda76f641a29edf2b242.AgreementFileable, requestConfiguration *FileRequestBuilderPatchRequestConfiguration)(*i2ae4187f7daee263371cb1c977df639813ab50ffa529013b7437480d1ec0158f.RequestInformation, error) {\n requestInfo := i2ae4187f7daee263371cb1c977df639813ab50ffa529013b7437480d1ec0158f.NewRequestInformation()\n requestInfo.UrlTemplate = m.urlTemplate\n requestInfo.PathParameters = m.pathParameters\n requestInfo.Method = 
i2ae4187f7daee263371cb1c977df639813ab50ffa529013b7437480d1ec0158f.PATCH\n requestInfo.Headers[\"Accept\"] = \"application/json\"\n requestInfo.SetContentFromParsable(ctx, m.requestAdapter, \"application/json\", body)\n if requestConfiguration != nil {\n requestInfo.AddRequestHeaders(requestConfiguration.Headers)\n requestInfo.AddRequestOptions(requestConfiguration.Options)\n }\n return requestInfo, nil\n}", "func (a *Client) PatchStorageFlexFlashPhysicalDrivesMoid(params *PatchStorageFlexFlashPhysicalDrivesMoidParams) (*PatchStorageFlexFlashPhysicalDrivesMoidCreated, error) {\n\t// TODO: Validate the params before sending\n\tif params == nil {\n\t\tparams = NewPatchStorageFlexFlashPhysicalDrivesMoidParams()\n\t}\n\n\tresult, err := a.transport.Submit(&runtime.ClientOperation{\n\t\tID: \"PatchStorageFlexFlashPhysicalDrivesMoid\",\n\t\tMethod: \"PATCH\",\n\t\tPathPattern: \"/storage/FlexFlashPhysicalDrives/{moid}\",\n\t\tProducesMediaTypes: []string{\"application/json\"},\n\t\tConsumesMediaTypes: []string{\"application/json\"},\n\t\tSchemes: []string{\"https\"},\n\t\tParams: params,\n\t\tReader: &PatchStorageFlexFlashPhysicalDrivesMoidReader{formats: a.formats},\n\t\tContext: params.Context,\n\t\tClient: params.HTTPClient,\n\t})\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\tsuccess, ok := result.(*PatchStorageFlexFlashPhysicalDrivesMoidCreated)\n\tif ok {\n\t\treturn success, nil\n\t}\n\t// unexpected success response\n\tunexpectedSuccess := result.(*PatchStorageFlexFlashPhysicalDrivesMoidDefault)\n\treturn nil, runtime.NewAPIError(\"unexpected success response: content available as default response in error\", unexpectedSuccess, unexpectedSuccess.Code())\n}", "func (a *FrinxOpenconfigNetworkInstanceApiService) PutFrinxOpenconfigNetworkInstanceNetworkInstancesNetworkInstanceMplsLspsConstrainedPathTunnelsTunnelP2pTunnelAttributesP2pPrimaryPath(ctx context.Context, name string, tunnelName string, frinxOpenconfigMplsP2pprimarypathstopP2pPrimaryPathBodyParam FrinxOpenconfigMplsP2pprimarypathstopP2pPrimaryPathRequest, nodeId string) (*http.Response, error) {\n\tvar (\n\t\tlocalVarHttpMethod = strings.ToUpper(\"Put\")\n\t\tlocalVarPostBody interface{}\n\t\tlocalVarFileName string\n\t\tlocalVarFileBytes []byte\n\t\t\n\t)\n\n\t// create path and map variables\n\tlocalVarPath := a.client.cfg.BasePath + \"/config/network-topology:network-topology/network-topology:topology/unified/network-topology:node/{node-id}/yang-ext:mount/frinx-openconfig-network-instance:network-instances/frinx-openconfig-network-instance:network-instance/{name}/frinx-openconfig-network-instance:mpls/frinx-openconfig-network-instance:lsps/frinx-openconfig-network-instance:constrained-path/frinx-openconfig-network-instance:tunnels/frinx-openconfig-network-instance:tunnel/{tunnel-name}/frinx-openconfig-network-instance:p2p-tunnel-attributes/frinx-openconfig-network-instance:p2p-primary-path/\"\n\tlocalVarPath = strings.Replace(localVarPath, \"{\"+\"name\"+\"}\", fmt.Sprintf(\"%v\", name), -1)\n\tlocalVarPath = strings.Replace(localVarPath, \"{\"+\"tunnel-name\"+\"}\", fmt.Sprintf(\"%v\", tunnelName), -1)\n\tlocalVarPath = strings.Replace(localVarPath, \"{\"+\"node-id\"+\"}\", fmt.Sprintf(\"%v\", nodeId), -1)\n\n\tlocalVarHeaderParams := make(map[string]string)\n\tlocalVarQueryParams := url.Values{}\n\tlocalVarFormParams := url.Values{}\n\n\t// to determine the Content-Type header\n\tlocalVarHttpContentTypes := []string{\"application/json\", \"application/xml\"}\n\n\t// set Content-Type header\n\tlocalVarHttpContentType := 
selectHeaderContentType(localVarHttpContentTypes)\n\tif localVarHttpContentType != \"\" {\n\t\tlocalVarHeaderParams[\"Content-Type\"] = localVarHttpContentType\n\t}\n\n\t// to determine the Accept header\n\tlocalVarHttpHeaderAccepts := []string{\"application/json\", \"application/xml\"}\n\n\t// set Accept header\n\tlocalVarHttpHeaderAccept := selectHeaderAccept(localVarHttpHeaderAccepts)\n\tif localVarHttpHeaderAccept != \"\" {\n\t\tlocalVarHeaderParams[\"Accept\"] = localVarHttpHeaderAccept\n\t}\n\t// body params\n\tlocalVarPostBody = &frinxOpenconfigMplsP2pprimarypathstopP2pPrimaryPathBodyParam\n\tr, err := a.client.prepareRequest(ctx, localVarPath, localVarHttpMethod, localVarPostBody, localVarHeaderParams, localVarQueryParams, localVarFormParams, localVarFileName, localVarFileBytes)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\n\tlocalVarHttpResponse, err := a.client.callAPI(r)\n\tif err != nil || localVarHttpResponse == nil {\n\t\treturn localVarHttpResponse, err\n\t}\n\n\tlocalVarBody, err := ioutil.ReadAll(localVarHttpResponse.Body)\n\tlocalVarHttpResponse.Body.Close()\n\tif err != nil {\n\t\treturn localVarHttpResponse, err\n\t}\n\n\n\tif localVarHttpResponse.StatusCode >= 300 {\n\t\tnewErr := GenericSwaggerError{\n\t\t\tbody: localVarBody,\n\t\t\terror: localVarHttpResponse.Status,\n\t\t}\n\t\t\n\t\treturn localVarHttpResponse, newErr\n\t}\n\n\treturn localVarHttpResponse, nil\n}", "func (m *DirectoryRequestBuilder) CreatePatchRequestInformation(ctx context.Context, body iadcd81124412c61e647227ecfc4449d8bba17de0380ddda76f641a29edf2b242.RbacApplicationable, requestConfiguration *DirectoryRequestBuilderPatchRequestConfiguration)(*i2ae4187f7daee263371cb1c977df639813ab50ffa529013b7437480d1ec0158f.RequestInformation, error) {\n requestInfo := i2ae4187f7daee263371cb1c977df639813ab50ffa529013b7437480d1ec0158f.NewRequestInformation()\n requestInfo.UrlTemplate = m.urlTemplate\n requestInfo.PathParameters = m.pathParameters\n requestInfo.Method = i2ae4187f7daee263371cb1c977df639813ab50ffa529013b7437480d1ec0158f.PATCH\n requestInfo.Headers[\"Accept\"] = \"application/json\"\n requestInfo.SetContentFromParsable(ctx, m.requestAdapter, \"application/json\", body)\n if requestConfiguration != nil {\n requestInfo.AddRequestHeaders(requestConfiguration.Headers)\n requestInfo.AddRequestOptions(requestConfiguration.Options)\n }\n return requestInfo, nil\n}", "func (a *FrinxOpenconfigNetworkInstanceApiService) PutFrinxOpenconfigNetworkInstanceNetworkInstancesNetworkInstanceMplsSignalingProtocolsRsvpTeInterfaceAttributesInterfaceHellosConfig(ctx context.Context, name string, interfaceId string, frinxOpenconfigMplsRsvpMplsrsvphellosHellosConfigBodyParam FrinxOpenconfigMplsRsvpMplsrsvphellosHellosConfigRequest1, nodeId string) (*http.Response, error) {\n\tvar (\n\t\tlocalVarHttpMethod = strings.ToUpper(\"Put\")\n\t\tlocalVarPostBody interface{}\n\t\tlocalVarFileName string\n\t\tlocalVarFileBytes []byte\n\t\t\n\t)\n\n\t// create path and map variables\n\tlocalVarPath := a.client.cfg.BasePath + 
\"/config/network-topology:network-topology/network-topology:topology/unified/network-topology:node/{node-id}/yang-ext:mount/frinx-openconfig-network-instance:network-instances/frinx-openconfig-network-instance:network-instance/{name}/frinx-openconfig-network-instance:mpls/frinx-openconfig-network-instance:signaling-protocols/frinx-openconfig-network-instance:rsvp-te/frinx-openconfig-network-instance:interface-attributes/frinx-openconfig-network-instance:interface/{interface-id}/frinx-openconfig-network-instance:hellos/frinx-openconfig-network-instance:config/\"\n\tlocalVarPath = strings.Replace(localVarPath, \"{\"+\"name\"+\"}\", fmt.Sprintf(\"%v\", name), -1)\n\tlocalVarPath = strings.Replace(localVarPath, \"{\"+\"interface-id\"+\"}\", fmt.Sprintf(\"%v\", interfaceId), -1)\n\tlocalVarPath = strings.Replace(localVarPath, \"{\"+\"node-id\"+\"}\", fmt.Sprintf(\"%v\", nodeId), -1)\n\n\tlocalVarHeaderParams := make(map[string]string)\n\tlocalVarQueryParams := url.Values{}\n\tlocalVarFormParams := url.Values{}\n\n\t// to determine the Content-Type header\n\tlocalVarHttpContentTypes := []string{\"application/json\", \"application/xml\"}\n\n\t// set Content-Type header\n\tlocalVarHttpContentType := selectHeaderContentType(localVarHttpContentTypes)\n\tif localVarHttpContentType != \"\" {\n\t\tlocalVarHeaderParams[\"Content-Type\"] = localVarHttpContentType\n\t}\n\n\t// to determine the Accept header\n\tlocalVarHttpHeaderAccepts := []string{\"application/json\", \"application/xml\"}\n\n\t// set Accept header\n\tlocalVarHttpHeaderAccept := selectHeaderAccept(localVarHttpHeaderAccepts)\n\tif localVarHttpHeaderAccept != \"\" {\n\t\tlocalVarHeaderParams[\"Accept\"] = localVarHttpHeaderAccept\n\t}\n\t// body params\n\tlocalVarPostBody = &frinxOpenconfigMplsRsvpMplsrsvphellosHellosConfigBodyParam\n\tr, err := a.client.prepareRequest(ctx, localVarPath, localVarHttpMethod, localVarPostBody, localVarHeaderParams, localVarQueryParams, localVarFormParams, localVarFileName, localVarFileBytes)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\n\tlocalVarHttpResponse, err := a.client.callAPI(r)\n\tif err != nil || localVarHttpResponse == nil {\n\t\treturn localVarHttpResponse, err\n\t}\n\n\tlocalVarBody, err := ioutil.ReadAll(localVarHttpResponse.Body)\n\tlocalVarHttpResponse.Body.Close()\n\tif err != nil {\n\t\treturn localVarHttpResponse, err\n\t}\n\n\n\tif localVarHttpResponse.StatusCode >= 300 {\n\t\tnewErr := GenericSwaggerError{\n\t\t\tbody: localVarBody,\n\t\t\terror: localVarHttpResponse.Status,\n\t\t}\n\t\t\n\t\treturn localVarHttpResponse, newErr\n\t}\n\n\treturn localVarHttpResponse, nil\n}", "func Created(c *routing.Context, msg string, service string) error {\n\tResponse(c, `{\"error\": false, \"msg\": \"`+msg+`\"}`, 201, service, \"application/json\")\n\treturn nil\n}", "func (m *DeviceRequestBuilder) CreatePatchRequestInformation(ctx context.Context, body iadcd81124412c61e647227ecfc4449d8bba17de0380ddda76f641a29edf2b242.Deviceable, requestConfiguration *DeviceRequestBuilderPatchRequestConfiguration)(*i2ae4187f7daee263371cb1c977df639813ab50ffa529013b7437480d1ec0158f.RequestInformation, error) {\n requestInfo := i2ae4187f7daee263371cb1c977df639813ab50ffa529013b7437480d1ec0158f.NewRequestInformation()\n requestInfo.UrlTemplate = m.urlTemplate\n requestInfo.PathParameters = m.pathParameters\n requestInfo.Method = i2ae4187f7daee263371cb1c977df639813ab50ffa529013b7437480d1ec0158f.PATCH\n requestInfo.Headers[\"Accept\"] = \"application/json\"\n requestInfo.SetContentFromParsable(ctx, m.requestAdapter, 
\"application/json\", body)\n if requestConfiguration != nil {\n requestInfo.AddRequestHeaders(requestConfiguration.Headers)\n requestInfo.AddRequestOptions(requestConfiguration.Options)\n }\n return requestInfo, nil\n}", "func (a *HyperflexApiService) PatchHyperflexServerFirmwareVersionExecute(r ApiPatchHyperflexServerFirmwareVersionRequest) (*HyperflexServerFirmwareVersion, *http.Response, error) {\n\tvar (\n\t\tlocalVarHTTPMethod = http.MethodPatch\n\t\tlocalVarPostBody interface{}\n\t\tformFiles []formFile\n\t\tlocalVarReturnValue *HyperflexServerFirmwareVersion\n\t)\n\n\tlocalBasePath, err := a.client.cfg.ServerURLWithContext(r.ctx, \"HyperflexApiService.PatchHyperflexServerFirmwareVersion\")\n\tif err != nil {\n\t\treturn localVarReturnValue, nil, &GenericOpenAPIError{error: err.Error()}\n\t}\n\n\tlocalVarPath := localBasePath + \"/api/v1/hyperflex/ServerFirmwareVersions/{Moid}\"\n\tlocalVarPath = strings.Replace(localVarPath, \"{\"+\"Moid\"+\"}\", url.PathEscape(parameterToString(r.moid, \"\")), -1)\n\n\tlocalVarHeaderParams := make(map[string]string)\n\tlocalVarQueryParams := url.Values{}\n\tlocalVarFormParams := url.Values{}\n\tif r.hyperflexServerFirmwareVersion == nil {\n\t\treturn localVarReturnValue, nil, reportError(\"hyperflexServerFirmwareVersion is required and must be specified\")\n\t}\n\n\t// to determine the Content-Type header\n\tlocalVarHTTPContentTypes := []string{\"application/json\", \"application/json-patch+json\"}\n\n\t// set Content-Type header\n\tlocalVarHTTPContentType := selectHeaderContentType(localVarHTTPContentTypes)\n\tif localVarHTTPContentType != \"\" {\n\t\tlocalVarHeaderParams[\"Content-Type\"] = localVarHTTPContentType\n\t}\n\n\t// to determine the Accept header\n\tlocalVarHTTPHeaderAccepts := []string{\"application/json\"}\n\n\t// set Accept header\n\tlocalVarHTTPHeaderAccept := selectHeaderAccept(localVarHTTPHeaderAccepts)\n\tif localVarHTTPHeaderAccept != \"\" {\n\t\tlocalVarHeaderParams[\"Accept\"] = localVarHTTPHeaderAccept\n\t}\n\tif r.ifMatch != nil {\n\t\tlocalVarHeaderParams[\"If-Match\"] = parameterToString(*r.ifMatch, \"\")\n\t}\n\t// body params\n\tlocalVarPostBody = r.hyperflexServerFirmwareVersion\n\treq, err := a.client.prepareRequest(r.ctx, localVarPath, localVarHTTPMethod, localVarPostBody, localVarHeaderParams, localVarQueryParams, localVarFormParams, formFiles)\n\tif err != nil {\n\t\treturn localVarReturnValue, nil, err\n\t}\n\n\tlocalVarHTTPResponse, err := a.client.callAPI(req)\n\tif err != nil || localVarHTTPResponse == nil {\n\t\treturn localVarReturnValue, localVarHTTPResponse, err\n\t}\n\n\tlocalVarBody, err := ioutil.ReadAll(localVarHTTPResponse.Body)\n\tlocalVarHTTPResponse.Body.Close()\n\tlocalVarHTTPResponse.Body = ioutil.NopCloser(bytes.NewBuffer(localVarBody))\n\tif err != nil {\n\t\treturn localVarReturnValue, localVarHTTPResponse, err\n\t}\n\n\tif localVarHTTPResponse.StatusCode >= 300 {\n\t\tnewErr := &GenericOpenAPIError{\n\t\t\tbody: localVarBody,\n\t\t\terror: localVarHTTPResponse.Status,\n\t\t}\n\t\tif localVarHTTPResponse.StatusCode == 400 {\n\t\t\tvar v Error\n\t\t\terr = a.client.decode(&v, localVarBody, localVarHTTPResponse.Header.Get(\"Content-Type\"))\n\t\t\tif err != nil {\n\t\t\t\tnewErr.error = err.Error()\n\t\t\t\treturn localVarReturnValue, localVarHTTPResponse, newErr\n\t\t\t}\n\t\t\tnewErr.model = v\n\t\t\treturn localVarReturnValue, localVarHTTPResponse, newErr\n\t\t}\n\t\tif localVarHTTPResponse.StatusCode == 401 {\n\t\t\tvar v Error\n\t\t\terr = a.client.decode(&v, localVarBody, 
localVarHTTPResponse.Header.Get(\"Content-Type\"))\n\t\t\tif err != nil {\n\t\t\t\tnewErr.error = err.Error()\n\t\t\t\treturn localVarReturnValue, localVarHTTPResponse, newErr\n\t\t\t}\n\t\t\tnewErr.model = v\n\t\t\treturn localVarReturnValue, localVarHTTPResponse, newErr\n\t\t}\n\t\tif localVarHTTPResponse.StatusCode == 403 {\n\t\t\tvar v Error\n\t\t\terr = a.client.decode(&v, localVarBody, localVarHTTPResponse.Header.Get(\"Content-Type\"))\n\t\t\tif err != nil {\n\t\t\t\tnewErr.error = err.Error()\n\t\t\t\treturn localVarReturnValue, localVarHTTPResponse, newErr\n\t\t\t}\n\t\t\tnewErr.model = v\n\t\t\treturn localVarReturnValue, localVarHTTPResponse, newErr\n\t\t}\n\t\tif localVarHTTPResponse.StatusCode == 404 {\n\t\t\tvar v Error\n\t\t\terr = a.client.decode(&v, localVarBody, localVarHTTPResponse.Header.Get(\"Content-Type\"))\n\t\t\tif err != nil {\n\t\t\t\tnewErr.error = err.Error()\n\t\t\t\treturn localVarReturnValue, localVarHTTPResponse, newErr\n\t\t\t}\n\t\t\tnewErr.model = v\n\t\t\treturn localVarReturnValue, localVarHTTPResponse, newErr\n\t\t}\n\t\tvar v Error\n\t\terr = a.client.decode(&v, localVarBody, localVarHTTPResponse.Header.Get(\"Content-Type\"))\n\t\tif err != nil {\n\t\t\tnewErr.error = err.Error()\n\t\t\treturn localVarReturnValue, localVarHTTPResponse, newErr\n\t\t}\n\t\tnewErr.model = v\n\t\treturn localVarReturnValue, localVarHTTPResponse, newErr\n\t}\n\n\terr = a.client.decode(&localVarReturnValue, localVarBody, localVarHTTPResponse.Header.Get(\"Content-Type\"))\n\tif err != nil {\n\t\tnewErr := &GenericOpenAPIError{\n\t\t\tbody: localVarBody,\n\t\t\terror: err.Error(),\n\t\t}\n\t\treturn localVarReturnValue, localVarHTTPResponse, newErr\n\t}\n\n\treturn localVarReturnValue, localVarHTTPResponse, nil\n}", "func createPatch(patches []k8sutils.JSONPatchOp, schedPath string) ([]byte, error) {\n\tallPatches := append(patches, k8sutils.JSONPatchOp{\n\t\tOperation: \"replace\",\n\t\tPath: schedPath,\n\t\tValue: []byte(strconv.Quote(storkScheduler)),\n\t})\n\tpatchBytes, err := json.Marshal(&allPatches)\n\tif err != nil {\n\t\treturn nil, fmt.Errorf(\"failed to marshal the patch object: %w\", err)\n\t}\n\treturn patchBytes, nil\n}", "func (a *HyperflexApiService) PatchHyperflexHypervisorVirtualMachine(ctx context.Context, moid string) ApiPatchHyperflexHypervisorVirtualMachineRequest {\n\treturn ApiPatchHyperflexHypervisorVirtualMachineRequest{\n\t\tApiService: a,\n\t\tctx: ctx,\n\t\tmoid: moid,\n\t}\n}", "func (m *TermsAndConditionsItemRequestBuilder) CreatePatchRequestInformation(ctx context.Context, body iadcd81124412c61e647227ecfc4449d8bba17de0380ddda76f641a29edf2b242.TermsAndConditionsable, requestConfiguration *TermsAndConditionsItemRequestBuilderPatchRequestConfiguration)(*i2ae4187f7daee263371cb1c977df639813ab50ffa529013b7437480d1ec0158f.RequestInformation, error) {\n requestInfo := i2ae4187f7daee263371cb1c977df639813ab50ffa529013b7437480d1ec0158f.NewRequestInformation()\n requestInfo.UrlTemplate = m.urlTemplate\n requestInfo.PathParameters = m.pathParameters\n requestInfo.Method = i2ae4187f7daee263371cb1c977df639813ab50ffa529013b7437480d1ec0158f.PATCH\n requestInfo.Headers[\"Accept\"] = \"application/json\"\n requestInfo.SetContentFromParsable(ctx, m.requestAdapter, \"application/json\", body)\n if requestConfiguration != nil {\n requestInfo.AddRequestHeaders(requestConfiguration.Headers)\n requestInfo.AddRequestOptions(requestConfiguration.Options)\n }\n return requestInfo, nil\n}", "func (m *ChatItemRequestBuilder) CreatePatchRequestInformation(ctx 
context.Context, body iadcd81124412c61e647227ecfc4449d8bba17de0380ddda76f641a29edf2b242.Chatable, requestConfiguration *ChatItemRequestBuilderPatchRequestConfiguration)(*i2ae4187f7daee263371cb1c977df639813ab50ffa529013b7437480d1ec0158f.RequestInformation, error) {\n requestInfo := i2ae4187f7daee263371cb1c977df639813ab50ffa529013b7437480d1ec0158f.NewRequestInformation()\n requestInfo.UrlTemplate = m.urlTemplate\n requestInfo.PathParameters = m.pathParameters\n requestInfo.Method = i2ae4187f7daee263371cb1c977df639813ab50ffa529013b7437480d1ec0158f.PATCH\n requestInfo.Headers[\"Accept\"] = \"application/json\"\n requestInfo.SetContentFromParsable(ctx, m.requestAdapter, \"application/json\", body)\n if requestConfiguration != nil {\n requestInfo.AddRequestHeaders(requestConfiguration.Headers)\n requestInfo.AddRequestOptions(requestConfiguration.Options)\n }\n return requestInfo, nil\n}", "func (a *Client) PatchIamAPIKeysMoid(params *PatchIamAPIKeysMoidParams) (*PatchIamAPIKeysMoidCreated, error) {\n\t// TODO: Validate the params before sending\n\tif params == nil {\n\t\tparams = NewPatchIamAPIKeysMoidParams()\n\t}\n\n\tresult, err := a.transport.Submit(&runtime.ClientOperation{\n\t\tID: \"PatchIamAPIKeysMoid\",\n\t\tMethod: \"PATCH\",\n\t\tPathPattern: \"/iam/ApiKeys/{moid}\",\n\t\tProducesMediaTypes: []string{\"application/json\"},\n\t\tConsumesMediaTypes: []string{\"application/json\"},\n\t\tSchemes: []string{\"https\"},\n\t\tParams: params,\n\t\tReader: &PatchIamAPIKeysMoidReader{formats: a.formats},\n\t\tContext: params.Context,\n\t\tClient: params.HTTPClient,\n\t})\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\tsuccess, ok := result.(*PatchIamAPIKeysMoidCreated)\n\tif ok {\n\t\treturn success, nil\n\t}\n\t// unexpected success response\n\tunexpectedSuccess := result.(*PatchIamAPIKeysMoidDefault)\n\treturn nil, runtime.NewAPIError(\"unexpected success response: content available as default response in error\", unexpectedSuccess, unexpectedSuccess.Code())\n}", "func (m *ConditionalAccessRequestBuilder) CreatePatchRequestInformation(ctx context.Context, body iadcd81124412c61e647227ecfc4449d8bba17de0380ddda76f641a29edf2b242.ConditionalAccessRootable, requestConfiguration *ConditionalAccessRequestBuilderPatchRequestConfiguration)(*i2ae4187f7daee263371cb1c977df639813ab50ffa529013b7437480d1ec0158f.RequestInformation, error) {\n requestInfo := i2ae4187f7daee263371cb1c977df639813ab50ffa529013b7437480d1ec0158f.NewRequestInformation()\n requestInfo.UrlTemplate = m.urlTemplate\n requestInfo.PathParameters = m.pathParameters\n requestInfo.Method = i2ae4187f7daee263371cb1c977df639813ab50ffa529013b7437480d1ec0158f.PATCH\n requestInfo.Headers[\"Accept\"] = \"application/json\"\n requestInfo.SetContentFromParsable(ctx, m.requestAdapter, \"application/json\", body)\n if requestConfiguration != nil {\n requestInfo.AddRequestHeaders(requestConfiguration.Headers)\n requestInfo.AddRequestOptions(requestConfiguration.Options)\n }\n return requestInfo, nil\n}", "func (client *MachineExtensionsClient) createOrUpdateCreateRequest(ctx context.Context, resourceGroupName string, name string, extensionName string, extensionParameters MachineExtension, options *MachineExtensionsClientBeginCreateOrUpdateOptions) (*policy.Request, error) {\n\turlPath := \"/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.ConnectedVMwarevSphere/virtualMachines/{name}/extensions/{extensionName}\"\n\tif client.subscriptionID == \"\" {\n\t\treturn nil, errors.New(\"parameter client.subscriptionID cannot 
be empty\")\n\t}\n\turlPath = strings.ReplaceAll(urlPath, \"{subscriptionId}\", url.PathEscape(client.subscriptionID))\n\tif resourceGroupName == \"\" {\n\t\treturn nil, errors.New(\"parameter resourceGroupName cannot be empty\")\n\t}\n\turlPath = strings.ReplaceAll(urlPath, \"{resourceGroupName}\", url.PathEscape(resourceGroupName))\n\tif name == \"\" {\n\t\treturn nil, errors.New(\"parameter name cannot be empty\")\n\t}\n\turlPath = strings.ReplaceAll(urlPath, \"{name}\", url.PathEscape(name))\n\tif extensionName == \"\" {\n\t\treturn nil, errors.New(\"parameter extensionName cannot be empty\")\n\t}\n\turlPath = strings.ReplaceAll(urlPath, \"{extensionName}\", url.PathEscape(extensionName))\n\treq, err := runtime.NewRequest(ctx, http.MethodPut, runtime.JoinPaths(client.internal.Endpoint(), urlPath))\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\treqQP := req.Raw().URL.Query()\n\treqQP.Set(\"api-version\", \"2022-01-10-preview\")\n\treq.Raw().URL.RawQuery = reqQP.Encode()\n\treq.Raw().Header[\"Accept\"] = []string{\"application/json\"}\n\treturn req, runtime.MarshalAsJSON(req, extensionParameters)\n}", "func NewPatchVnicEthNetworkPoliciesMoidCreated() *PatchVnicEthNetworkPoliciesMoidCreated {\n\treturn &PatchVnicEthNetworkPoliciesMoidCreated{}\n}", "func (a *FrinxOpenconfigNetworkInstanceApiService) PutFrinxOpenconfigNetworkInstanceNetworkInstancesNetworkInstanceMplsSignalingProtocolsRsvpTeGlobalHellos(ctx context.Context, name string, frinxOpenconfigMplsRsvpMplsrsvphellosHellosBodyParam FrinxOpenconfigMplsRsvpMplsrsvphellosHellosRequest, nodeId string) (*http.Response, error) {\n\tvar (\n\t\tlocalVarHttpMethod = strings.ToUpper(\"Put\")\n\t\tlocalVarPostBody interface{}\n\t\tlocalVarFileName string\n\t\tlocalVarFileBytes []byte\n\t\t\n\t)\n\n\t// create path and map variables\n\tlocalVarPath := a.client.cfg.BasePath + \"/config/network-topology:network-topology/network-topology:topology/unified/network-topology:node/{node-id}/yang-ext:mount/frinx-openconfig-network-instance:network-instances/frinx-openconfig-network-instance:network-instance/{name}/frinx-openconfig-network-instance:mpls/frinx-openconfig-network-instance:signaling-protocols/frinx-openconfig-network-instance:rsvp-te/frinx-openconfig-network-instance:global/frinx-openconfig-network-instance:hellos/\"\n\tlocalVarPath = strings.Replace(localVarPath, \"{\"+\"name\"+\"}\", fmt.Sprintf(\"%v\", name), -1)\n\tlocalVarPath = strings.Replace(localVarPath, \"{\"+\"node-id\"+\"}\", fmt.Sprintf(\"%v\", nodeId), -1)\n\n\tlocalVarHeaderParams := make(map[string]string)\n\tlocalVarQueryParams := url.Values{}\n\tlocalVarFormParams := url.Values{}\n\n\t// to determine the Content-Type header\n\tlocalVarHttpContentTypes := []string{\"application/json\", \"application/xml\"}\n\n\t// set Content-Type header\n\tlocalVarHttpContentType := selectHeaderContentType(localVarHttpContentTypes)\n\tif localVarHttpContentType != \"\" {\n\t\tlocalVarHeaderParams[\"Content-Type\"] = localVarHttpContentType\n\t}\n\n\t// to determine the Accept header\n\tlocalVarHttpHeaderAccepts := []string{\"application/json\", \"application/xml\"}\n\n\t// set Accept header\n\tlocalVarHttpHeaderAccept := selectHeaderAccept(localVarHttpHeaderAccepts)\n\tif localVarHttpHeaderAccept != \"\" {\n\t\tlocalVarHeaderParams[\"Accept\"] = localVarHttpHeaderAccept\n\t}\n\t// body params\n\tlocalVarPostBody = &frinxOpenconfigMplsRsvpMplsrsvphellosHellosBodyParam\n\tr, err := a.client.prepareRequest(ctx, localVarPath, localVarHttpMethod, localVarPostBody, localVarHeaderParams, 
localVarQueryParams, localVarFormParams, localVarFileName, localVarFileBytes)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\n\tlocalVarHttpResponse, err := a.client.callAPI(r)\n\tif err != nil || localVarHttpResponse == nil {\n\t\treturn localVarHttpResponse, err\n\t}\n\n\tlocalVarBody, err := ioutil.ReadAll(localVarHttpResponse.Body)\n\tlocalVarHttpResponse.Body.Close()\n\tif err != nil {\n\t\treturn localVarHttpResponse, err\n\t}\n\n\n\tif localVarHttpResponse.StatusCode >= 300 {\n\t\tnewErr := GenericSwaggerError{\n\t\t\tbody: localVarBody,\n\t\t\terror: localVarHttpResponse.Status,\n\t\t}\n\t\t\n\t\treturn localVarHttpResponse, newErr\n\t}\n\n\treturn localVarHttpResponse, nil\n}", "func CreateServerBackfillTicket(settings *playfab.Settings, postData *CreateServerBackfillTicketRequestModel, entityToken string) (*CreateServerBackfillTicketResultModel, error) {\n if entityToken == \"\" {\n return nil, playfab.NewCustomError(\"entityToken should not be an empty string\", playfab.ErrorGeneric)\n }\n b, errMarshal := json.Marshal(postData)\n if errMarshal != nil {\n return nil, playfab.NewCustomError(errMarshal.Error(), playfab.ErrorMarshal)\n }\n\n sourceMap, err := playfab.Request(settings, b, \"/Match/CreateServerBackfillTicket\", \"X-EntityToken\", entityToken)\n if err != nil {\n return nil, err\n }\n \n result := &CreateServerBackfillTicketResultModel{}\n\n config := mapstructure.DecoderConfig{\n DecodeHook: playfab.StringToDateTimeHook,\n Result: result,\n }\n \n decoder, errDecoding := mapstructure.NewDecoder(&config)\n if errDecoding != nil {\n return nil, playfab.NewCustomError(errDecoding.Error(), playfab.ErrorDecoding)\n }\n \n errDecoding = decoder.Decode(sourceMap)\n if errDecoding != nil {\n return nil, playfab.NewCustomError(errDecoding.Error(), playfab.ErrorDecoding)\n }\n\n return result, nil\n}", "func (a *HyperflexApiService) UpdateHyperflexServerModelExecute(r ApiUpdateHyperflexServerModelRequest) (*HyperflexServerModel, *http.Response, error) {\n\tvar (\n\t\tlocalVarHTTPMethod = http.MethodPost\n\t\tlocalVarPostBody interface{}\n\t\tformFiles []formFile\n\t\tlocalVarReturnValue *HyperflexServerModel\n\t)\n\n\tlocalBasePath, err := a.client.cfg.ServerURLWithContext(r.ctx, \"HyperflexApiService.UpdateHyperflexServerModel\")\n\tif err != nil {\n\t\treturn localVarReturnValue, nil, &GenericOpenAPIError{error: err.Error()}\n\t}\n\n\tlocalVarPath := localBasePath + \"/api/v1/hyperflex/ServerModels/{Moid}\"\n\tlocalVarPath = strings.Replace(localVarPath, \"{\"+\"Moid\"+\"}\", url.PathEscape(parameterToString(r.moid, \"\")), -1)\n\n\tlocalVarHeaderParams := make(map[string]string)\n\tlocalVarQueryParams := url.Values{}\n\tlocalVarFormParams := url.Values{}\n\tif r.hyperflexServerModel == nil {\n\t\treturn localVarReturnValue, nil, reportError(\"hyperflexServerModel is required and must be specified\")\n\t}\n\n\t// to determine the Content-Type header\n\tlocalVarHTTPContentTypes := []string{\"application/json\", \"application/json-patch+json\"}\n\n\t// set Content-Type header\n\tlocalVarHTTPContentType := selectHeaderContentType(localVarHTTPContentTypes)\n\tif localVarHTTPContentType != \"\" {\n\t\tlocalVarHeaderParams[\"Content-Type\"] = localVarHTTPContentType\n\t}\n\n\t// to determine the Accept header\n\tlocalVarHTTPHeaderAccepts := []string{\"application/json\"}\n\n\t// set Accept header\n\tlocalVarHTTPHeaderAccept := selectHeaderAccept(localVarHTTPHeaderAccepts)\n\tif localVarHTTPHeaderAccept != \"\" {\n\t\tlocalVarHeaderParams[\"Accept\"] = 
localVarHTTPHeaderAccept\n\t}\n\tif r.ifMatch != nil {\n\t\tlocalVarHeaderParams[\"If-Match\"] = parameterToString(*r.ifMatch, \"\")\n\t}\n\t// body params\n\tlocalVarPostBody = r.hyperflexServerModel\n\treq, err := a.client.prepareRequest(r.ctx, localVarPath, localVarHTTPMethod, localVarPostBody, localVarHeaderParams, localVarQueryParams, localVarFormParams, formFiles)\n\tif err != nil {\n\t\treturn localVarReturnValue, nil, err\n\t}\n\n\tlocalVarHTTPResponse, err := a.client.callAPI(req)\n\tif err != nil || localVarHTTPResponse == nil {\n\t\treturn localVarReturnValue, localVarHTTPResponse, err\n\t}\n\n\tlocalVarBody, err := ioutil.ReadAll(localVarHTTPResponse.Body)\n\tlocalVarHTTPResponse.Body.Close()\n\tlocalVarHTTPResponse.Body = ioutil.NopCloser(bytes.NewBuffer(localVarBody))\n\tif err != nil {\n\t\treturn localVarReturnValue, localVarHTTPResponse, err\n\t}\n\n\tif localVarHTTPResponse.StatusCode >= 300 {\n\t\tnewErr := &GenericOpenAPIError{\n\t\t\tbody: localVarBody,\n\t\t\terror: localVarHTTPResponse.Status,\n\t\t}\n\t\tif localVarHTTPResponse.StatusCode == 400 {\n\t\t\tvar v Error\n\t\t\terr = a.client.decode(&v, localVarBody, localVarHTTPResponse.Header.Get(\"Content-Type\"))\n\t\t\tif err != nil {\n\t\t\t\tnewErr.error = err.Error()\n\t\t\t\treturn localVarReturnValue, localVarHTTPResponse, newErr\n\t\t\t}\n\t\t\tnewErr.model = v\n\t\t\treturn localVarReturnValue, localVarHTTPResponse, newErr\n\t\t}\n\t\tif localVarHTTPResponse.StatusCode == 401 {\n\t\t\tvar v Error\n\t\t\terr = a.client.decode(&v, localVarBody, localVarHTTPResponse.Header.Get(\"Content-Type\"))\n\t\t\tif err != nil {\n\t\t\t\tnewErr.error = err.Error()\n\t\t\t\treturn localVarReturnValue, localVarHTTPResponse, newErr\n\t\t\t}\n\t\t\tnewErr.model = v\n\t\t\treturn localVarReturnValue, localVarHTTPResponse, newErr\n\t\t}\n\t\tif localVarHTTPResponse.StatusCode == 403 {\n\t\t\tvar v Error\n\t\t\terr = a.client.decode(&v, localVarBody, localVarHTTPResponse.Header.Get(\"Content-Type\"))\n\t\t\tif err != nil {\n\t\t\t\tnewErr.error = err.Error()\n\t\t\t\treturn localVarReturnValue, localVarHTTPResponse, newErr\n\t\t\t}\n\t\t\tnewErr.model = v\n\t\t\treturn localVarReturnValue, localVarHTTPResponse, newErr\n\t\t}\n\t\tif localVarHTTPResponse.StatusCode == 404 {\n\t\t\tvar v Error\n\t\t\terr = a.client.decode(&v, localVarBody, localVarHTTPResponse.Header.Get(\"Content-Type\"))\n\t\t\tif err != nil {\n\t\t\t\tnewErr.error = err.Error()\n\t\t\t\treturn localVarReturnValue, localVarHTTPResponse, newErr\n\t\t\t}\n\t\t\tnewErr.model = v\n\t\t\treturn localVarReturnValue, localVarHTTPResponse, newErr\n\t\t}\n\t\tvar v Error\n\t\terr = a.client.decode(&v, localVarBody, localVarHTTPResponse.Header.Get(\"Content-Type\"))\n\t\tif err != nil {\n\t\t\tnewErr.error = err.Error()\n\t\t\treturn localVarReturnValue, localVarHTTPResponse, newErr\n\t\t}\n\t\tnewErr.model = v\n\t\treturn localVarReturnValue, localVarHTTPResponse, newErr\n\t}\n\n\terr = a.client.decode(&localVarReturnValue, localVarBody, localVarHTTPResponse.Header.Get(\"Content-Type\"))\n\tif err != nil {\n\t\tnewErr := &GenericOpenAPIError{\n\t\t\tbody: localVarBody,\n\t\t\terror: err.Error(),\n\t\t}\n\t\treturn localVarReturnValue, localVarHTTPResponse, newErr\n\t}\n\n\treturn localVarReturnValue, localVarHTTPResponse, nil\n}", "func (a *HyperflexApiService) PatchHyperflexServerFirmwareVersionEntryExecute(r ApiPatchHyperflexServerFirmwareVersionEntryRequest) (*HyperflexServerFirmwareVersionEntry, *http.Response, error) {\n\tvar (\n\t\tlocalVarHTTPMethod = 
http.MethodPatch\n\t\tlocalVarPostBody interface{}\n\t\tformFiles []formFile\n\t\tlocalVarReturnValue *HyperflexServerFirmwareVersionEntry\n\t)\n\n\tlocalBasePath, err := a.client.cfg.ServerURLWithContext(r.ctx, \"HyperflexApiService.PatchHyperflexServerFirmwareVersionEntry\")\n\tif err != nil {\n\t\treturn localVarReturnValue, nil, &GenericOpenAPIError{error: err.Error()}\n\t}\n\n\tlocalVarPath := localBasePath + \"/api/v1/hyperflex/ServerFirmwareVersionEntries/{Moid}\"\n\tlocalVarPath = strings.Replace(localVarPath, \"{\"+\"Moid\"+\"}\", url.PathEscape(parameterToString(r.moid, \"\")), -1)\n\n\tlocalVarHeaderParams := make(map[string]string)\n\tlocalVarQueryParams := url.Values{}\n\tlocalVarFormParams := url.Values{}\n\tif r.hyperflexServerFirmwareVersionEntry == nil {\n\t\treturn localVarReturnValue, nil, reportError(\"hyperflexServerFirmwareVersionEntry is required and must be specified\")\n\t}\n\n\t// to determine the Content-Type header\n\tlocalVarHTTPContentTypes := []string{\"application/json\", \"application/json-patch+json\"}\n\n\t// set Content-Type header\n\tlocalVarHTTPContentType := selectHeaderContentType(localVarHTTPContentTypes)\n\tif localVarHTTPContentType != \"\" {\n\t\tlocalVarHeaderParams[\"Content-Type\"] = localVarHTTPContentType\n\t}\n\n\t// to determine the Accept header\n\tlocalVarHTTPHeaderAccepts := []string{\"application/json\"}\n\n\t// set Accept header\n\tlocalVarHTTPHeaderAccept := selectHeaderAccept(localVarHTTPHeaderAccepts)\n\tif localVarHTTPHeaderAccept != \"\" {\n\t\tlocalVarHeaderParams[\"Accept\"] = localVarHTTPHeaderAccept\n\t}\n\tif r.ifMatch != nil {\n\t\tlocalVarHeaderParams[\"If-Match\"] = parameterToString(*r.ifMatch, \"\")\n\t}\n\t// body params\n\tlocalVarPostBody = r.hyperflexServerFirmwareVersionEntry\n\treq, err := a.client.prepareRequest(r.ctx, localVarPath, localVarHTTPMethod, localVarPostBody, localVarHeaderParams, localVarQueryParams, localVarFormParams, formFiles)\n\tif err != nil {\n\t\treturn localVarReturnValue, nil, err\n\t}\n\n\tlocalVarHTTPResponse, err := a.client.callAPI(req)\n\tif err != nil || localVarHTTPResponse == nil {\n\t\treturn localVarReturnValue, localVarHTTPResponse, err\n\t}\n\n\tlocalVarBody, err := ioutil.ReadAll(localVarHTTPResponse.Body)\n\tlocalVarHTTPResponse.Body.Close()\n\tlocalVarHTTPResponse.Body = ioutil.NopCloser(bytes.NewBuffer(localVarBody))\n\tif err != nil {\n\t\treturn localVarReturnValue, localVarHTTPResponse, err\n\t}\n\n\tif localVarHTTPResponse.StatusCode >= 300 {\n\t\tnewErr := &GenericOpenAPIError{\n\t\t\tbody: localVarBody,\n\t\t\terror: localVarHTTPResponse.Status,\n\t\t}\n\t\tif localVarHTTPResponse.StatusCode == 400 {\n\t\t\tvar v Error\n\t\t\terr = a.client.decode(&v, localVarBody, localVarHTTPResponse.Header.Get(\"Content-Type\"))\n\t\t\tif err != nil {\n\t\t\t\tnewErr.error = err.Error()\n\t\t\t\treturn localVarReturnValue, localVarHTTPResponse, newErr\n\t\t\t}\n\t\t\tnewErr.model = v\n\t\t\treturn localVarReturnValue, localVarHTTPResponse, newErr\n\t\t}\n\t\tif localVarHTTPResponse.StatusCode == 401 {\n\t\t\tvar v Error\n\t\t\terr = a.client.decode(&v, localVarBody, localVarHTTPResponse.Header.Get(\"Content-Type\"))\n\t\t\tif err != nil {\n\t\t\t\tnewErr.error = err.Error()\n\t\t\t\treturn localVarReturnValue, localVarHTTPResponse, newErr\n\t\t\t}\n\t\t\tnewErr.model = v\n\t\t\treturn localVarReturnValue, localVarHTTPResponse, newErr\n\t\t}\n\t\tif localVarHTTPResponse.StatusCode == 403 {\n\t\t\tvar v Error\n\t\t\terr = a.client.decode(&v, localVarBody, 
localVarHTTPResponse.Header.Get(\"Content-Type\"))\n\t\t\tif err != nil {\n\t\t\t\tnewErr.error = err.Error()\n\t\t\t\treturn localVarReturnValue, localVarHTTPResponse, newErr\n\t\t\t}\n\t\t\tnewErr.model = v\n\t\t\treturn localVarReturnValue, localVarHTTPResponse, newErr\n\t\t}\n\t\tif localVarHTTPResponse.StatusCode == 404 {\n\t\t\tvar v Error\n\t\t\terr = a.client.decode(&v, localVarBody, localVarHTTPResponse.Header.Get(\"Content-Type\"))\n\t\t\tif err != nil {\n\t\t\t\tnewErr.error = err.Error()\n\t\t\t\treturn localVarReturnValue, localVarHTTPResponse, newErr\n\t\t\t}\n\t\t\tnewErr.model = v\n\t\t\treturn localVarReturnValue, localVarHTTPResponse, newErr\n\t\t}\n\t\tvar v Error\n\t\terr = a.client.decode(&v, localVarBody, localVarHTTPResponse.Header.Get(\"Content-Type\"))\n\t\tif err != nil {\n\t\t\tnewErr.error = err.Error()\n\t\t\treturn localVarReturnValue, localVarHTTPResponse, newErr\n\t\t}\n\t\tnewErr.model = v\n\t\treturn localVarReturnValue, localVarHTTPResponse, newErr\n\t}\n\n\terr = a.client.decode(&localVarReturnValue, localVarBody, localVarHTTPResponse.Header.Get(\"Content-Type\"))\n\tif err != nil {\n\t\tnewErr := &GenericOpenAPIError{\n\t\t\tbody: localVarBody,\n\t\t\terror: err.Error(),\n\t\t}\n\t\treturn localVarReturnValue, localVarHTTPResponse, newErr\n\t}\n\n\treturn localVarReturnValue, localVarHTTPResponse, nil\n}", "func (a *FrinxOpenconfigNetworkInstanceApiService) DeleteFrinxOpenconfigNetworkInstanceNetworkInstancesNetworkInstanceMplsLspsConstrainedPathTunnelsTunnelP2pTunnelAttributesP2pPrimaryPath(ctx context.Context, name string, tunnelName string, nodeId string) (*http.Response, error) {\n\tvar (\n\t\tlocalVarHttpMethod = strings.ToUpper(\"Delete\")\n\t\tlocalVarPostBody interface{}\n\t\tlocalVarFileName string\n\t\tlocalVarFileBytes []byte\n\t\t\n\t)\n\n\t// create path and map variables\n\tlocalVarPath := a.client.cfg.BasePath + \"/config/network-topology:network-topology/network-topology:topology/unified/network-topology:node/{node-id}/yang-ext:mount/frinx-openconfig-network-instance:network-instances/frinx-openconfig-network-instance:network-instance/{name}/frinx-openconfig-network-instance:mpls/frinx-openconfig-network-instance:lsps/frinx-openconfig-network-instance:constrained-path/frinx-openconfig-network-instance:tunnels/frinx-openconfig-network-instance:tunnel/{tunnel-name}/frinx-openconfig-network-instance:p2p-tunnel-attributes/frinx-openconfig-network-instance:p2p-primary-path/\"\n\tlocalVarPath = strings.Replace(localVarPath, \"{\"+\"name\"+\"}\", fmt.Sprintf(\"%v\", name), -1)\n\tlocalVarPath = strings.Replace(localVarPath, \"{\"+\"tunnel-name\"+\"}\", fmt.Sprintf(\"%v\", tunnelName), -1)\n\tlocalVarPath = strings.Replace(localVarPath, \"{\"+\"node-id\"+\"}\", fmt.Sprintf(\"%v\", nodeId), -1)\n\n\tlocalVarHeaderParams := make(map[string]string)\n\tlocalVarQueryParams := url.Values{}\n\tlocalVarFormParams := url.Values{}\n\n\t// to determine the Content-Type header\n\tlocalVarHttpContentTypes := []string{\"application/json\", \"application/xml\"}\n\n\t// set Content-Type header\n\tlocalVarHttpContentType := selectHeaderContentType(localVarHttpContentTypes)\n\tif localVarHttpContentType != \"\" {\n\t\tlocalVarHeaderParams[\"Content-Type\"] = localVarHttpContentType\n\t}\n\n\t// to determine the Accept header\n\tlocalVarHttpHeaderAccepts := []string{\"application/json\", \"application/xml\"}\n\n\t// set Accept header\n\tlocalVarHttpHeaderAccept := selectHeaderAccept(localVarHttpHeaderAccepts)\n\tif localVarHttpHeaderAccept != \"\" 
{\n\t\tlocalVarHeaderParams[\"Accept\"] = localVarHttpHeaderAccept\n\t}\n\tr, err := a.client.prepareRequest(ctx, localVarPath, localVarHttpMethod, localVarPostBody, localVarHeaderParams, localVarQueryParams, localVarFormParams, localVarFileName, localVarFileBytes)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\n\tlocalVarHttpResponse, err := a.client.callAPI(r)\n\tif err != nil || localVarHttpResponse == nil {\n\t\treturn localVarHttpResponse, err\n\t}\n\n\tlocalVarBody, err := ioutil.ReadAll(localVarHttpResponse.Body)\n\tlocalVarHttpResponse.Body.Close()\n\tif err != nil {\n\t\treturn localVarHttpResponse, err\n\t}\n\n\n\tif localVarHttpResponse.StatusCode >= 300 {\n\t\tnewErr := GenericSwaggerError{\n\t\t\tbody: localVarBody,\n\t\t\terror: localVarHttpResponse.Status,\n\t\t}\n\t\t\n\t\treturn localVarHttpResponse, newErr\n\t}\n\n\treturn localVarHttpResponse, nil\n}", "func (a *HyperflexApiService) PatchHyperflexHealthCheckDefinition(ctx context.Context, moid string) ApiPatchHyperflexHealthCheckDefinitionRequest {\n\treturn ApiPatchHyperflexHealthCheckDefinitionRequest{\n\t\tApiService: a,\n\t\tctx: ctx,\n\t\tmoid: moid,\n\t}\n}", "func CreatePolicyWithDefaults(nbmaster string, httpClient *http.Client, jwt string) {\r\n fmt.Printf(\"\\nSending a POST request to create %s with defaults...\\n\", testPolicyName)\r\n\r\n policy := map[string]interface{}{\r\n \"data\": map[string]interface{}{\r\n \"type\": \"policy\",\r\n \"id\": testPolicyName,\r\n \"attributes\": map[string]interface{}{\r\n \"policy\": map[string]interface{}{\r\n \"policyName\": testPolicyName,\r\n \"policyType\": \"VMware\",\r\n \"policyAttributes\": map[string]interface{}{},\r\n \"clients\":[]interface{}{},\r\n \"schedules\":[]interface{}{},\r\n \"backupSelections\": map[string]interface{}{\r\n \"selections\": []interface{}{}}}}}}\r\n\r\n policyRequest, _ := json.Marshal(policy)\r\n\r\n uri := \"https://\" + nbmaster + \":\" + port + \"/netbackup/\" + policiesUri\r\n\r\n request, _ := http.NewRequest(http.MethodPost, uri, bytes.NewBuffer(policyRequest))\r\n request.Header.Add(\"Content-Type\", contentTypeV2);\r\n request.Header.Add(\"Authorization\", jwt);\r\n\r\n response, err := httpClient.Do(request)\r\n\r\n if err != nil {\r\n fmt.Printf(\"The HTTP request failed with error: %s\\n\", err)\r\n panic(\"Unable to create policy.\\n\")\r\n } else {\r\n if response.StatusCode != 204 {\r\n printErrorResponse(response)\r\n } else {\r\n fmt.Printf(\"%s created successfully.\\n\", testPolicyName);\r\n responseDetails, _ := httputil.DumpResponse(response, true);\r\n fmt.Printf(string(responseDetails))\r\n }\r\n }\r\n}", "func (a *FrinxOpenconfigNetworkInstanceApiService) PutFrinxOpenconfigNetworkInstanceNetworkInstancesNetworkInstanceMplsLspsConstrainedPathTunnelsTunnelP2pTunnelAttributesP2pPrimaryPathP2pPrimaryPath(ctx context.Context, name string, tunnelName string, p2pPrimaryPathName string, frinxOpenconfigMplsP2pprimarypathstopP2pprimarypathP2pPrimaryPathBodyParam FrinxOpenconfigMplsP2pprimarypathstopP2pprimarypathP2pPrimaryPathRequest, nodeId string) (*http.Response, error) {\n\tvar (\n\t\tlocalVarHttpMethod = strings.ToUpper(\"Put\")\n\t\tlocalVarPostBody interface{}\n\t\tlocalVarFileName string\n\t\tlocalVarFileBytes []byte\n\t\t\n\t)\n\n\t// create path and map variables\n\tlocalVarPath := a.client.cfg.BasePath + 
\"/config/network-topology:network-topology/network-topology:topology/unified/network-topology:node/{node-id}/yang-ext:mount/frinx-openconfig-network-instance:network-instances/frinx-openconfig-network-instance:network-instance/{name}/frinx-openconfig-network-instance:mpls/frinx-openconfig-network-instance:lsps/frinx-openconfig-network-instance:constrained-path/frinx-openconfig-network-instance:tunnels/frinx-openconfig-network-instance:tunnel/{tunnel-name}/frinx-openconfig-network-instance:p2p-tunnel-attributes/frinx-openconfig-network-instance:p2p-primary-path/frinx-openconfig-network-instance:p2p-primary-path/{p2p-primary-path-name}/\"\n\tlocalVarPath = strings.Replace(localVarPath, \"{\"+\"name\"+\"}\", fmt.Sprintf(\"%v\", name), -1)\n\tlocalVarPath = strings.Replace(localVarPath, \"{\"+\"tunnel-name\"+\"}\", fmt.Sprintf(\"%v\", tunnelName), -1)\n\tlocalVarPath = strings.Replace(localVarPath, \"{\"+\"p2p-primary-path-name\"+\"}\", fmt.Sprintf(\"%v\", p2pPrimaryPathName), -1)\n\tlocalVarPath = strings.Replace(localVarPath, \"{\"+\"node-id\"+\"}\", fmt.Sprintf(\"%v\", nodeId), -1)\n\n\tlocalVarHeaderParams := make(map[string]string)\n\tlocalVarQueryParams := url.Values{}\n\tlocalVarFormParams := url.Values{}\n\n\t// to determine the Content-Type header\n\tlocalVarHttpContentTypes := []string{\"application/json\", \"application/xml\"}\n\n\t// set Content-Type header\n\tlocalVarHttpContentType := selectHeaderContentType(localVarHttpContentTypes)\n\tif localVarHttpContentType != \"\" {\n\t\tlocalVarHeaderParams[\"Content-Type\"] = localVarHttpContentType\n\t}\n\n\t// to determine the Accept header\n\tlocalVarHttpHeaderAccepts := []string{\"application/json\", \"application/xml\"}\n\n\t// set Accept header\n\tlocalVarHttpHeaderAccept := selectHeaderAccept(localVarHttpHeaderAccepts)\n\tif localVarHttpHeaderAccept != \"\" {\n\t\tlocalVarHeaderParams[\"Accept\"] = localVarHttpHeaderAccept\n\t}\n\t// body params\n\tlocalVarPostBody = &frinxOpenconfigMplsP2pprimarypathstopP2pprimarypathP2pPrimaryPathBodyParam\n\tr, err := a.client.prepareRequest(ctx, localVarPath, localVarHttpMethod, localVarPostBody, localVarHeaderParams, localVarQueryParams, localVarFormParams, localVarFileName, localVarFileBytes)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\n\tlocalVarHttpResponse, err := a.client.callAPI(r)\n\tif err != nil || localVarHttpResponse == nil {\n\t\treturn localVarHttpResponse, err\n\t}\n\n\tlocalVarBody, err := ioutil.ReadAll(localVarHttpResponse.Body)\n\tlocalVarHttpResponse.Body.Close()\n\tif err != nil {\n\t\treturn localVarHttpResponse, err\n\t}\n\n\n\tif localVarHttpResponse.StatusCode >= 300 {\n\t\tnewErr := GenericSwaggerError{\n\t\t\tbody: localVarBody,\n\t\t\terror: localVarHttpResponse.Status,\n\t\t}\n\t\t\n\t\treturn localVarHttpResponse, newErr\n\t}\n\n\treturn localVarHttpResponse, nil\n}", "func (a *HyperflexApiService) PatchHyperflexCapabilityInfo(ctx context.Context, moid string) ApiPatchHyperflexCapabilityInfoRequest {\n\treturn ApiPatchHyperflexCapabilityInfoRequest{\n\t\tApiService: a,\n\t\tctx: ctx,\n\t\tmoid: moid,\n\t}\n}", "func (a *HyperflexApiService) PatchHyperflexHypervisorHost(ctx context.Context, moid string) ApiPatchHyperflexHypervisorHostRequest {\n\treturn ApiPatchHyperflexHypervisorHostRequest{\n\t\tApiService: a,\n\t\tctx: ctx,\n\t\tmoid: moid,\n\t}\n}", "func (a *FrinxOpenconfigNetworkInstanceApiService) PutFrinxOpenconfigNetworkInstanceNetworkInstancesNetworkInstanceMplsLspsConstrainedPathNamedExplicitPathsNamedExplicitPathExplicitRouteObjects(ctx 
context.Context, name string, namedExplicitPathName string, frinxOpenconfigMplsExplicitpathstopNamedexplicitpathsNamedexplicitpathExplicitRouteObjectsBodyParam FrinxOpenconfigMplsExplicitpathstopNamedexplicitpathsNamedexplicitpathExplicitRouteObjectsRequest, nodeId string) (*http.Response, error) {\n\tvar (\n\t\tlocalVarHttpMethod = strings.ToUpper(\"Put\")\n\t\tlocalVarPostBody interface{}\n\t\tlocalVarFileName string\n\t\tlocalVarFileBytes []byte\n\t\t\n\t)\n\n\t// create path and map variables\n\tlocalVarPath := a.client.cfg.BasePath + \"/config/network-topology:network-topology/network-topology:topology/unified/network-topology:node/{node-id}/yang-ext:mount/frinx-openconfig-network-instance:network-instances/frinx-openconfig-network-instance:network-instance/{name}/frinx-openconfig-network-instance:mpls/frinx-openconfig-network-instance:lsps/frinx-openconfig-network-instance:constrained-path/frinx-openconfig-network-instance:named-explicit-paths/frinx-openconfig-network-instance:named-explicit-path/{named-explicit-path-name}/frinx-openconfig-network-instance:explicit-route-objects/\"\n\tlocalVarPath = strings.Replace(localVarPath, \"{\"+\"name\"+\"}\", fmt.Sprintf(\"%v\", name), -1)\n\tlocalVarPath = strings.Replace(localVarPath, \"{\"+\"named-explicit-path-name\"+\"}\", fmt.Sprintf(\"%v\", namedExplicitPathName), -1)\n\tlocalVarPath = strings.Replace(localVarPath, \"{\"+\"node-id\"+\"}\", fmt.Sprintf(\"%v\", nodeId), -1)\n\n\tlocalVarHeaderParams := make(map[string]string)\n\tlocalVarQueryParams := url.Values{}\n\tlocalVarFormParams := url.Values{}\n\n\t// to determine the Content-Type header\n\tlocalVarHttpContentTypes := []string{\"application/json\", \"application/xml\"}\n\n\t// set Content-Type header\n\tlocalVarHttpContentType := selectHeaderContentType(localVarHttpContentTypes)\n\tif localVarHttpContentType != \"\" {\n\t\tlocalVarHeaderParams[\"Content-Type\"] = localVarHttpContentType\n\t}\n\n\t// to determine the Accept header\n\tlocalVarHttpHeaderAccepts := []string{\"application/json\", \"application/xml\"}\n\n\t// set Accept header\n\tlocalVarHttpHeaderAccept := selectHeaderAccept(localVarHttpHeaderAccepts)\n\tif localVarHttpHeaderAccept != \"\" {\n\t\tlocalVarHeaderParams[\"Accept\"] = localVarHttpHeaderAccept\n\t}\n\t// body params\n\tlocalVarPostBody = &frinxOpenconfigMplsExplicitpathstopNamedexplicitpathsNamedexplicitpathExplicitRouteObjectsBodyParam\n\tr, err := a.client.prepareRequest(ctx, localVarPath, localVarHttpMethod, localVarPostBody, localVarHeaderParams, localVarQueryParams, localVarFormParams, localVarFileName, localVarFileBytes)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\n\tlocalVarHttpResponse, err := a.client.callAPI(r)\n\tif err != nil || localVarHttpResponse == nil {\n\t\treturn localVarHttpResponse, err\n\t}\n\n\tlocalVarBody, err := ioutil.ReadAll(localVarHttpResponse.Body)\n\tlocalVarHttpResponse.Body.Close()\n\tif err != nil {\n\t\treturn localVarHttpResponse, err\n\t}\n\n\n\tif localVarHttpResponse.StatusCode >= 300 {\n\t\tnewErr := GenericSwaggerError{\n\t\t\tbody: localVarBody,\n\t\t\terror: localVarHttpResponse.Status,\n\t\t}\n\t\t\n\t\treturn localVarHttpResponse, newErr\n\t}\n\n\treturn localVarHttpResponse, nil\n}", "func (m *DeviceCompliancePolicyStateItemRequestBuilder) CreatePatchRequestInformation(ctx context.Context, body iadcd81124412c61e647227ecfc4449d8bba17de0380ddda76f641a29edf2b242.DeviceCompliancePolicyStateable, requestConfiguration 
*DeviceCompliancePolicyStateItemRequestBuilderPatchRequestConfiguration)(*i2ae4187f7daee263371cb1c977df639813ab50ffa529013b7437480d1ec0158f.RequestInformation, error) {\n requestInfo := i2ae4187f7daee263371cb1c977df639813ab50ffa529013b7437480d1ec0158f.NewRequestInformation()\n requestInfo.UrlTemplate = m.urlTemplate\n requestInfo.PathParameters = m.pathParameters\n requestInfo.Method = i2ae4187f7daee263371cb1c977df639813ab50ffa529013b7437480d1ec0158f.PATCH\n requestInfo.Headers[\"Accept\"] = \"application/json\"\n requestInfo.SetContentFromParsable(ctx, m.requestAdapter, \"application/json\", body)\n if requestConfiguration != nil {\n requestInfo.AddRequestHeaders(requestConfiguration.Headers)\n requestInfo.AddRequestOptions(requestConfiguration.Options)\n }\n return requestInfo, nil\n}", "func (m *VersionsRequestBuilder) CreatePostRequestInformation(ctx context.Context, body iadcd81124412c61e647227ecfc4449d8bba17de0380ddda76f641a29edf2b242.ListItemVersionable, requestConfiguration *VersionsRequestBuilderPostRequestConfiguration)(*i2ae4187f7daee263371cb1c977df639813ab50ffa529013b7437480d1ec0158f.RequestInformation, error) {\n requestInfo := i2ae4187f7daee263371cb1c977df639813ab50ffa529013b7437480d1ec0158f.NewRequestInformation()\n requestInfo.UrlTemplate = m.urlTemplate\n requestInfo.PathParameters = m.pathParameters\n requestInfo.Method = i2ae4187f7daee263371cb1c977df639813ab50ffa529013b7437480d1ec0158f.POST\n requestInfo.Headers[\"Accept\"] = \"application/json\"\n requestInfo.SetContentFromParsable(ctx, m.requestAdapter, \"application/json\", body)\n if requestConfiguration != nil {\n requestInfo.AddRequestHeaders(requestConfiguration.Headers)\n requestInfo.AddRequestOptions(requestConfiguration.Options)\n }\n return requestInfo, nil\n}", "func (a *Client) PostHyperflexCapabilityInfos(params *PostHyperflexCapabilityInfosParams) (*PostHyperflexCapabilityInfosCreated, error) {\n\t// TODO: Validate the params before sending\n\tif params == nil {\n\t\tparams = NewPostHyperflexCapabilityInfosParams()\n\t}\n\n\tresult, err := a.transport.Submit(&runtime.ClientOperation{\n\t\tID: \"PostHyperflexCapabilityInfos\",\n\t\tMethod: \"POST\",\n\t\tPathPattern: \"/hyperflex/CapabilityInfos\",\n\t\tProducesMediaTypes: []string{\"application/json\"},\n\t\tConsumesMediaTypes: []string{\"application/json\"},\n\t\tSchemes: []string{\"https\"},\n\t\tParams: params,\n\t\tReader: &PostHyperflexCapabilityInfosReader{formats: a.formats},\n\t\tContext: params.Context,\n\t\tClient: params.HTTPClient,\n\t})\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\tsuccess, ok := result.(*PostHyperflexCapabilityInfosCreated)\n\tif ok {\n\t\treturn success, nil\n\t}\n\t// unexpected success response\n\tunexpectedSuccess := result.(*PostHyperflexCapabilityInfosDefault)\n\treturn nil, runtime.NewAPIError(\"unexpected success response: content available as default response in error\", unexpectedSuccess, unexpectedSuccess.Code())\n}", "func NewPatchMachineConfigurationDefault(code int) *PatchMachineConfigurationDefault {\n\treturn &PatchMachineConfigurationDefault{\n\t\t_statusCode: code,\n\t}\n}", "func (m *MobileAppAssignmentItemRequestBuilder) CreatePatchRequestInformation(ctx context.Context, body iadcd81124412c61e647227ecfc4449d8bba17de0380ddda76f641a29edf2b242.MobileAppAssignmentable, requestConfiguration *MobileAppAssignmentItemRequestBuilderPatchRequestConfiguration)(*i2ae4187f7daee263371cb1c977df639813ab50ffa529013b7437480d1ec0158f.RequestInformation, error) {\n requestInfo := 
i2ae4187f7daee263371cb1c977df639813ab50ffa529013b7437480d1ec0158f.NewRequestInformation()\n requestInfo.UrlTemplate = m.urlTemplate\n requestInfo.PathParameters = m.pathParameters\n requestInfo.Method = i2ae4187f7daee263371cb1c977df639813ab50ffa529013b7437480d1ec0158f.PATCH\n requestInfo.Headers[\"Accept\"] = \"application/json\"\n requestInfo.SetContentFromParsable(ctx, m.requestAdapter, \"application/json\", body)\n if requestConfiguration != nil {\n requestInfo.AddRequestHeaders(requestConfiguration.Headers)\n requestInfo.AddRequestOptions(requestConfiguration.Options)\n }\n return requestInfo, nil\n}", "func (m *AssignmentDefaultsRequestBuilder) CreatePatchRequestInformation(ctx context.Context, body iadcd81124412c61e647227ecfc4449d8bba17de0380ddda76f641a29edf2b242.EducationAssignmentDefaultsable, requestConfiguration *AssignmentDefaultsRequestBuilderPatchRequestConfiguration)(*i2ae4187f7daee263371cb1c977df639813ab50ffa529013b7437480d1ec0158f.RequestInformation, error) {\n requestInfo := i2ae4187f7daee263371cb1c977df639813ab50ffa529013b7437480d1ec0158f.NewRequestInformation()\n requestInfo.UrlTemplate = m.urlTemplate\n requestInfo.PathParameters = m.pathParameters\n requestInfo.Method = i2ae4187f7daee263371cb1c977df639813ab50ffa529013b7437480d1ec0158f.PATCH\n requestInfo.Headers[\"Accept\"] = \"application/json\"\n requestInfo.SetContentFromParsable(ctx, m.requestAdapter, \"application/json\", body)\n if requestConfiguration != nil {\n requestInfo.AddRequestHeaders(requestConfiguration.Headers)\n requestInfo.AddRequestOptions(requestConfiguration.Options)\n }\n return requestInfo, nil\n}", "func (m *DeviceConfigurationItemRequestBuilder) CreatePatchRequestInformation(ctx context.Context, body iadcd81124412c61e647227ecfc4449d8bba17de0380ddda76f641a29edf2b242.DeviceConfigurationable, requestConfiguration *DeviceConfigurationItemRequestBuilderPatchRequestConfiguration)(*i2ae4187f7daee263371cb1c977df639813ab50ffa529013b7437480d1ec0158f.RequestInformation, error) {\n requestInfo := i2ae4187f7daee263371cb1c977df639813ab50ffa529013b7437480d1ec0158f.NewRequestInformation()\n requestInfo.UrlTemplate = m.urlTemplate\n requestInfo.PathParameters = m.pathParameters\n requestInfo.Method = i2ae4187f7daee263371cb1c977df639813ab50ffa529013b7437480d1ec0158f.PATCH\n requestInfo.Headers[\"Accept\"] = \"application/json\"\n requestInfo.SetContentFromParsable(ctx, m.requestAdapter, \"application/json\", body)\n if requestConfiguration != nil {\n requestInfo.AddRequestHeaders(requestConfiguration.Headers)\n requestInfo.AddRequestOptions(requestConfiguration.Options)\n }\n return requestInfo, nil\n}", "func (a *FrinxOpenconfigNetworkInstanceApiService) DeleteFrinxOpenconfigNetworkInstanceNetworkInstancesNetworkInstanceMplsLspsConstrainedPathTunnelsTunnelP2pTunnelAttributesP2pPrimaryPathP2pPrimaryPath(ctx context.Context, name string, tunnelName string, p2pPrimaryPathName string, nodeId string) (*http.Response, error) {\n\tvar (\n\t\tlocalVarHttpMethod = strings.ToUpper(\"Delete\")\n\t\tlocalVarPostBody interface{}\n\t\tlocalVarFileName string\n\t\tlocalVarFileBytes []byte\n\t\t\n\t)\n\n\t// create path and map variables\n\tlocalVarPath := a.client.cfg.BasePath + 
\"/config/network-topology:network-topology/network-topology:topology/unified/network-topology:node/{node-id}/yang-ext:mount/frinx-openconfig-network-instance:network-instances/frinx-openconfig-network-instance:network-instance/{name}/frinx-openconfig-network-instance:mpls/frinx-openconfig-network-instance:lsps/frinx-openconfig-network-instance:constrained-path/frinx-openconfig-network-instance:tunnels/frinx-openconfig-network-instance:tunnel/{tunnel-name}/frinx-openconfig-network-instance:p2p-tunnel-attributes/frinx-openconfig-network-instance:p2p-primary-path/frinx-openconfig-network-instance:p2p-primary-path/{p2p-primary-path-name}/\"\n\tlocalVarPath = strings.Replace(localVarPath, \"{\"+\"name\"+\"}\", fmt.Sprintf(\"%v\", name), -1)\n\tlocalVarPath = strings.Replace(localVarPath, \"{\"+\"tunnel-name\"+\"}\", fmt.Sprintf(\"%v\", tunnelName), -1)\n\tlocalVarPath = strings.Replace(localVarPath, \"{\"+\"p2p-primary-path-name\"+\"}\", fmt.Sprintf(\"%v\", p2pPrimaryPathName), -1)\n\tlocalVarPath = strings.Replace(localVarPath, \"{\"+\"node-id\"+\"}\", fmt.Sprintf(\"%v\", nodeId), -1)\n\n\tlocalVarHeaderParams := make(map[string]string)\n\tlocalVarQueryParams := url.Values{}\n\tlocalVarFormParams := url.Values{}\n\n\t// to determine the Content-Type header\n\tlocalVarHttpContentTypes := []string{\"application/json\", \"application/xml\"}\n\n\t// set Content-Type header\n\tlocalVarHttpContentType := selectHeaderContentType(localVarHttpContentTypes)\n\tif localVarHttpContentType != \"\" {\n\t\tlocalVarHeaderParams[\"Content-Type\"] = localVarHttpContentType\n\t}\n\n\t// to determine the Accept header\n\tlocalVarHttpHeaderAccepts := []string{\"application/json\", \"application/xml\"}\n\n\t// set Accept header\n\tlocalVarHttpHeaderAccept := selectHeaderAccept(localVarHttpHeaderAccepts)\n\tif localVarHttpHeaderAccept != \"\" {\n\t\tlocalVarHeaderParams[\"Accept\"] = localVarHttpHeaderAccept\n\t}\n\tr, err := a.client.prepareRequest(ctx, localVarPath, localVarHttpMethod, localVarPostBody, localVarHeaderParams, localVarQueryParams, localVarFormParams, localVarFileName, localVarFileBytes)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\n\tlocalVarHttpResponse, err := a.client.callAPI(r)\n\tif err != nil || localVarHttpResponse == nil {\n\t\treturn localVarHttpResponse, err\n\t}\n\n\tlocalVarBody, err := ioutil.ReadAll(localVarHttpResponse.Body)\n\tlocalVarHttpResponse.Body.Close()\n\tif err != nil {\n\t\treturn localVarHttpResponse, err\n\t}\n\n\n\tif localVarHttpResponse.StatusCode >= 300 {\n\t\tnewErr := GenericSwaggerError{\n\t\t\tbody: localVarBody,\n\t\t\terror: localVarHttpResponse.Status,\n\t\t}\n\t\t\n\t\treturn localVarHttpResponse, newErr\n\t}\n\n\treturn localVarHttpResponse, nil\n}", "func (m *ShiftPreferencesRequestBuilder) CreatePatchRequestInformation(ctx context.Context, body iadcd81124412c61e647227ecfc4449d8bba17de0380ddda76f641a29edf2b242.ShiftPreferencesable, requestConfiguration *ShiftPreferencesRequestBuilderPatchRequestConfiguration)(*i2ae4187f7daee263371cb1c977df639813ab50ffa529013b7437480d1ec0158f.RequestInformation, error) {\n requestInfo := i2ae4187f7daee263371cb1c977df639813ab50ffa529013b7437480d1ec0158f.NewRequestInformation()\n requestInfo.UrlTemplate = m.urlTemplate\n requestInfo.PathParameters = m.pathParameters\n requestInfo.Method = i2ae4187f7daee263371cb1c977df639813ab50ffa529013b7437480d1ec0158f.PATCH\n requestInfo.Headers[\"Accept\"] = \"application/json\"\n requestInfo.SetContentFromParsable(ctx, m.requestAdapter, \"application/json\", body)\n if 
requestConfiguration != nil {\n requestInfo.AddRequestHeaders(requestConfiguration.Headers)\n requestInfo.AddRequestOptions(requestConfiguration.Options)\n }\n return requestInfo, nil\n}", "func NewObmsPatchByIDDefault(code int) *ObmsPatchByIDDefault {\n\treturn &ObmsPatchByIDDefault{\n\t\t_statusCode: code,\n\t}\n}", "func (m *SubjectRightsRequestItemRequestBuilder) CreatePatchRequestInformation(ctx context.Context, body iadcd81124412c61e647227ecfc4449d8bba17de0380ddda76f641a29edf2b242.SubjectRightsRequestable, requestConfiguration *SubjectRightsRequestItemRequestBuilderPatchRequestConfiguration)(*i2ae4187f7daee263371cb1c977df639813ab50ffa529013b7437480d1ec0158f.RequestInformation, error) {\n requestInfo := i2ae4187f7daee263371cb1c977df639813ab50ffa529013b7437480d1ec0158f.NewRequestInformation()\n requestInfo.UrlTemplate = m.urlTemplate\n requestInfo.PathParameters = m.pathParameters\n requestInfo.Method = i2ae4187f7daee263371cb1c977df639813ab50ffa529013b7437480d1ec0158f.PATCH\n requestInfo.Headers[\"Accept\"] = \"application/json\"\n requestInfo.SetContentFromParsable(ctx, m.requestAdapter, \"application/json\", body)\n if requestConfiguration != nil {\n requestInfo.AddRequestHeaders(requestConfiguration.Headers)\n requestInfo.AddRequestOptions(requestConfiguration.Options)\n }\n return requestInfo, nil\n}", "func (a *HyperflexApiService) PatchHyperflexHypervisorVirtualMachineExecute(r ApiPatchHyperflexHypervisorVirtualMachineRequest) (*HyperflexHypervisorVirtualMachine, *http.Response, error) {\n\tvar (\n\t\tlocalVarHTTPMethod = http.MethodPatch\n\t\tlocalVarPostBody interface{}\n\t\tformFiles []formFile\n\t\tlocalVarReturnValue *HyperflexHypervisorVirtualMachine\n\t)\n\n\tlocalBasePath, err := a.client.cfg.ServerURLWithContext(r.ctx, \"HyperflexApiService.PatchHyperflexHypervisorVirtualMachine\")\n\tif err != nil {\n\t\treturn localVarReturnValue, nil, &GenericOpenAPIError{error: err.Error()}\n\t}\n\n\tlocalVarPath := localBasePath + \"/api/v1/hyperflex/HypervisorVirtualMachines/{Moid}\"\n\tlocalVarPath = strings.Replace(localVarPath, \"{\"+\"Moid\"+\"}\", url.PathEscape(parameterToString(r.moid, \"\")), -1)\n\n\tlocalVarHeaderParams := make(map[string]string)\n\tlocalVarQueryParams := url.Values{}\n\tlocalVarFormParams := url.Values{}\n\tif r.hyperflexHypervisorVirtualMachine == nil {\n\t\treturn localVarReturnValue, nil, reportError(\"hyperflexHypervisorVirtualMachine is required and must be specified\")\n\t}\n\n\t// to determine the Content-Type header\n\tlocalVarHTTPContentTypes := []string{\"application/json\", \"application/json-patch+json\"}\n\n\t// set Content-Type header\n\tlocalVarHTTPContentType := selectHeaderContentType(localVarHTTPContentTypes)\n\tif localVarHTTPContentType != \"\" {\n\t\tlocalVarHeaderParams[\"Content-Type\"] = localVarHTTPContentType\n\t}\n\n\t// to determine the Accept header\n\tlocalVarHTTPHeaderAccepts := []string{\"application/json\"}\n\n\t// set Accept header\n\tlocalVarHTTPHeaderAccept := selectHeaderAccept(localVarHTTPHeaderAccepts)\n\tif localVarHTTPHeaderAccept != \"\" {\n\t\tlocalVarHeaderParams[\"Accept\"] = localVarHTTPHeaderAccept\n\t}\n\tif r.ifMatch != nil {\n\t\tlocalVarHeaderParams[\"If-Match\"] = parameterToString(*r.ifMatch, \"\")\n\t}\n\t// body params\n\tlocalVarPostBody = r.hyperflexHypervisorVirtualMachine\n\treq, err := a.client.prepareRequest(r.ctx, localVarPath, localVarHTTPMethod, localVarPostBody, localVarHeaderParams, localVarQueryParams, localVarFormParams, formFiles)\n\tif err != nil {\n\t\treturn localVarReturnValue, nil, 
err\n\t}\n\n\tlocalVarHTTPResponse, err := a.client.callAPI(req)\n\tif err != nil || localVarHTTPResponse == nil {\n\t\treturn localVarReturnValue, localVarHTTPResponse, err\n\t}\n\n\tlocalVarBody, err := ioutil.ReadAll(localVarHTTPResponse.Body)\n\tlocalVarHTTPResponse.Body.Close()\n\tlocalVarHTTPResponse.Body = ioutil.NopCloser(bytes.NewBuffer(localVarBody))\n\tif err != nil {\n\t\treturn localVarReturnValue, localVarHTTPResponse, err\n\t}\n\n\tif localVarHTTPResponse.StatusCode >= 300 {\n\t\tnewErr := &GenericOpenAPIError{\n\t\t\tbody: localVarBody,\n\t\t\terror: localVarHTTPResponse.Status,\n\t\t}\n\t\tif localVarHTTPResponse.StatusCode == 400 {\n\t\t\tvar v Error\n\t\t\terr = a.client.decode(&v, localVarBody, localVarHTTPResponse.Header.Get(\"Content-Type\"))\n\t\t\tif err != nil {\n\t\t\t\tnewErr.error = err.Error()\n\t\t\t\treturn localVarReturnValue, localVarHTTPResponse, newErr\n\t\t\t}\n\t\t\tnewErr.model = v\n\t\t\treturn localVarReturnValue, localVarHTTPResponse, newErr\n\t\t}\n\t\tif localVarHTTPResponse.StatusCode == 401 {\n\t\t\tvar v Error\n\t\t\terr = a.client.decode(&v, localVarBody, localVarHTTPResponse.Header.Get(\"Content-Type\"))\n\t\t\tif err != nil {\n\t\t\t\tnewErr.error = err.Error()\n\t\t\t\treturn localVarReturnValue, localVarHTTPResponse, newErr\n\t\t\t}\n\t\t\tnewErr.model = v\n\t\t\treturn localVarReturnValue, localVarHTTPResponse, newErr\n\t\t}\n\t\tif localVarHTTPResponse.StatusCode == 403 {\n\t\t\tvar v Error\n\t\t\terr = a.client.decode(&v, localVarBody, localVarHTTPResponse.Header.Get(\"Content-Type\"))\n\t\t\tif err != nil {\n\t\t\t\tnewErr.error = err.Error()\n\t\t\t\treturn localVarReturnValue, localVarHTTPResponse, newErr\n\t\t\t}\n\t\t\tnewErr.model = v\n\t\t\treturn localVarReturnValue, localVarHTTPResponse, newErr\n\t\t}\n\t\tif localVarHTTPResponse.StatusCode == 404 {\n\t\t\tvar v Error\n\t\t\terr = a.client.decode(&v, localVarBody, localVarHTTPResponse.Header.Get(\"Content-Type\"))\n\t\t\tif err != nil {\n\t\t\t\tnewErr.error = err.Error()\n\t\t\t\treturn localVarReturnValue, localVarHTTPResponse, newErr\n\t\t\t}\n\t\t\tnewErr.model = v\n\t\t\treturn localVarReturnValue, localVarHTTPResponse, newErr\n\t\t}\n\t\tvar v Error\n\t\terr = a.client.decode(&v, localVarBody, localVarHTTPResponse.Header.Get(\"Content-Type\"))\n\t\tif err != nil {\n\t\t\tnewErr.error = err.Error()\n\t\t\treturn localVarReturnValue, localVarHTTPResponse, newErr\n\t\t}\n\t\tnewErr.model = v\n\t\treturn localVarReturnValue, localVarHTTPResponse, newErr\n\t}\n\n\terr = a.client.decode(&localVarReturnValue, localVarBody, localVarHTTPResponse.Header.Get(\"Content-Type\"))\n\tif err != nil {\n\t\tnewErr := &GenericOpenAPIError{\n\t\t\tbody: localVarBody,\n\t\t\terror: err.Error(),\n\t\t}\n\t\treturn localVarReturnValue, localVarHTTPResponse, newErr\n\t}\n\n\treturn localVarReturnValue, localVarHTTPResponse, nil\n}", "func (client *ManagedClustersClient) createOrUpdateCreateRequest(ctx context.Context, resourceGroupName string, resourceName string, parameters ManagedCluster, options *ManagedClustersClientBeginCreateOrUpdateOptions) (*policy.Request, error) {\n\turlPath := \"/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.ContainerService/managedClusters/{resourceName}\"\n\tif client.subscriptionID == \"\" {\n\t\treturn nil, errors.New(\"parameter client.subscriptionID cannot be empty\")\n\t}\n\turlPath = strings.ReplaceAll(urlPath, \"{subscriptionId}\", url.PathEscape(client.subscriptionID))\n\tif resourceGroupName == \"\" {\n\t\treturn nil, 
errors.New(\"parameter resourceGroupName cannot be empty\")\n\t}\n\turlPath = strings.ReplaceAll(urlPath, \"{resourceGroupName}\", url.PathEscape(resourceGroupName))\n\tif resourceName == \"\" {\n\t\treturn nil, errors.New(\"parameter resourceName cannot be empty\")\n\t}\n\turlPath = strings.ReplaceAll(urlPath, \"{resourceName}\", url.PathEscape(resourceName))\n\treq, err := runtime.NewRequest(ctx, http.MethodPut, runtime.JoinPaths(client.internal.Endpoint(), urlPath))\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\treqQP := req.Raw().URL.Query()\n\treqQP.Set(\"api-version\", \"2020-11-01\")\n\treq.Raw().URL.RawQuery = reqQP.Encode()\n\treq.Raw().Header[\"Accept\"] = []string{\"application/json\"}\n\treturn req, runtime.MarshalAsJSON(req, parameters)\n}", "func CreateBucket(w http.ResponseWriter, r *http.Request) *appError {\n decoder := json.NewDecoder(r.Body)\n var ecsBucket ECSBucket\n err := decoder.Decode(&ecsBucket)\n if err != nil {\n return &appError{err: err, status: http.StatusBadRequest, json: \"Can't decode JSON data\"}\n }\n headers := make(map[string][]string)\n if ecsBucket.ReplicationGroup != \"\" {\n headers[\"x-emc-vpool\"] = []string{ecsBucket.ReplicationGroup}\n }\n if ecsBucket.MetadataSearch != \"\" {\n headers[\"x-emc-metadata-search\"] = []string{ecsBucket.MetadataSearch}\n }\n if ecsBucket.EnableADO {\n headers[\"x-emc-is-stale-allowed\"] = []string{\"true\"}\n } else {\n headers[\"x-emc-is-stale-allowed\"] = []string{\"false\"}\n }\n if ecsBucket.EnableFS {\n headers[\"x-emc-file-system-access-enabled\"] = []string{\"true\"}\n } else {\n headers[\"x-emc-file-system-access-enabled\"] = []string{\"false\"}\n }\n if ecsBucket.EnableCompliance {\n headers[\"x-emc-compliance-enabled\"] = []string{\"true\"}\n } else {\n headers[\"x-emc-compliance-enabled\"] = []string{\"false\"}\n }\n if ecsBucket.EnableEncryption {\n headers[\"x-emc-server-side-encryption-enabled\"] = []string{\"true\"}\n } else {\n headers[\"x-emc-server-side-encryption-enabled\"] = []string{\"false\"}\n }\n retentionEnabled := false\n headers[\"x-emc-retention-period\"] = []string{\"0\"}\n if ecsBucket.Retention != \"\" {\n days, err := strconv.ParseInt(ecsBucket.Retention, 10, 64)\n if err == nil {\n if days > 0 {\n seconds := days * 24 * 3600\n headers[\"x-emc-retention-period\"] = []string{int64toString(seconds)}\n retentionEnabled = true\n }\n }\n }\n var expirationCurrentVersions int64\n expirationCurrentVersions = 0\n if ecsBucket.ExpirationCurrentVersions != \"\" {\n days, err := strconv.ParseInt(ecsBucket.ExpirationCurrentVersions, 10, 64)\n if err == nil {\n expirationCurrentVersions = days\n }\n }\n var expirationNonCurrentVersions int64\n expirationNonCurrentVersions = 0\n if ecsBucket.ExpirationNonCurrentVersions != \"\" {\n days, err := strconv.ParseInt(ecsBucket.ExpirationNonCurrentVersions, 10, 64)\n if err == nil && ecsBucket.EnableVersioning {\n expirationNonCurrentVersions = days\n }\n }\n var bucketCreateResponse Response\n if ecsBucket.Api == \"s3\" {\n s3, err := getS3(r)\n if err != nil {\n return &appError{err: err, status: http.StatusInternalServerError, json: http.StatusText(http.StatusInternalServerError)}\n }\n bucketCreateResponse, err = s3Request(s3, ecsBucket.Name, \"PUT\", \"/\", headers, \"\")\n if err != nil {\n return &appError{err: err, status: http.StatusInternalServerError, json: err.Error()}\n }\n versioningStatusOK := true\n lifecyclePolicyStatusOK := true\n // If the bucket has been created\n if bucketCreateResponse.Code == 200 {\n if !retentionEnabled && 
ecsBucket.EnableVersioning {\n // Enable versioning\n enableVersioningHeaders := map[string][]string{}\n enableVersioningHeaders[\"Content-Type\"] = []string{\"application/xml\"}\n versioningConfiguration := `\n <VersioningConfiguration xmlns=\"http://s3.amazonaws.com/doc/2006-03-01/\">\n <Status>Enabled</Status>\n <MfaDelete>Disabled</MfaDelete>\n </VersioningConfiguration>\n `\n enableVersioningResponse, _ := s3Request(s3, ecsBucket.Name, \"PUT\", \"/?versioning\", enableVersioningHeaders, versioningConfiguration)\n if enableVersioningResponse.Code != 200 {\n versioningStatusOK = false\n }\n }\n if expirationCurrentVersions > 0 || expirationNonCurrentVersions > 0 {\n lifecyclePolicyHeaders := map[string][]string{}\n lifecyclePolicyHeaders[\"Content-Type\"] = []string{\"application/xml\"}\n lifecyclePolicyConfiguration := `\n <LifecycleConfiguration>\n <Rule>\n <ID>expiration</ID>\n <Prefix></Prefix>\n <Status>Enabled</Status>\n `\n if expirationCurrentVersions > 0 && expirationNonCurrentVersions > 0 {\n // Enable expiration for both current and non current versions\n lifecyclePolicyConfiguration += \"<Expiration><Days>\" + ecsBucket.ExpirationCurrentVersions + \"</Days></Expiration>\"\n lifecyclePolicyConfiguration += \"<NoncurrentVersionExpiration><NoncurrentDays>\" + ecsBucket.ExpirationNonCurrentVersions + \"</NoncurrentDays></NoncurrentVersionExpiration>\"\n } else {\n if expirationCurrentVersions > 0 {\n // Enable expiration for current versions only\n lifecyclePolicyConfiguration += \"<Expiration><Days>\" + ecsBucket.ExpirationCurrentVersions + \"</Days></Expiration>\"\n }\n if expirationNonCurrentVersions > 0 {\n // Enable expiration for non current versions only\n // To fix a bug in ECS 3.0 where an expiration for non current version can't be set if there's no expiration set for current versions\n lifecyclePolicyConfiguration += \"<Expiration><Days>1000000</Days></Expiration>\"\n lifecyclePolicyConfiguration += \"<NoncurrentVersionExpiration><NoncurrentDays>\" + ecsBucket.ExpirationNonCurrentVersions + \"</NoncurrentDays></NoncurrentVersionExpiration>\"\n }\n }\n lifecyclePolicyConfiguration += `\n </Rule>\n </LifecycleConfiguration>\n `\n lifecyclePolicyResponse, _ := s3Request(s3, ecsBucket.Name, \"PUT\", \"/?lifecycle\", lifecyclePolicyHeaders, lifecyclePolicyConfiguration)\n if lifecyclePolicyResponse.Code != 200 {\n lifecyclePolicyStatusOK = false\n }\n }\n if versioningStatusOK && lifecyclePolicyStatusOK {\n rendering.JSON(w, http.StatusOK, \"\")\n } else {\n message := \"\"\n if !versioningStatusOK {\n message += \" Versioning can't be enabled.\"\n }\n if !lifecyclePolicyStatusOK {\n message += \" Expiration can't be set.\"\n }\n rendering.JSON(w, http.StatusOK, message)\n }\n } else {\n return &appError{err: err, status: http.StatusInternalServerError, xml: bucketCreateResponse.Body}\n }\n } else if ecsBucket.Api == \"swift\" {\n s3, err := getS3(r)\n if err != nil {\n return &appError{err: err, status: http.StatusInternalServerError, json: http.StatusText(http.StatusInternalServerError)}\n }\n bucketCreateResponse, err = swiftRequest(ecsBucket.Endpoint, s3.AccessKey, ecsBucket.Password, ecsBucket.Name, \"PUT\", \"/\", headers, \"\")\n if err != nil {\n return &appError{err: err, status: http.StatusInternalServerError, json: err.Error()}\n }\n if bucketCreateResponse.Code >= 200 && bucketCreateResponse.Code < 300 {\n rendering.JSON(w, http.StatusOK, ecsBucket.Name)\n } else {\n return &appError{err: err, status: http.StatusInternalServerError, xml: 
bucketCreateResponse.Body}\n }\n } else if ecsBucket.Api == \"atmos\" {\n s3, err := getS3(r)\n if err != nil {\n return &appError{err: err, status: http.StatusInternalServerError, json: http.StatusText(http.StatusInternalServerError)}\n }\n bucketCreateResponse, err = atmosRequest(ecsBucket.Endpoint, s3.AccessKey, s3.SecretKey, \"\", \"PUT\", \"/rest/subtenant\", headers, \"\")\n if err != nil {\n return &appError{err: err, status: http.StatusInternalServerError, json: err.Error()}\n }\n if bucketCreateResponse.Code >= 200 && bucketCreateResponse.Code < 300 {\n rendering.JSON(w, http.StatusOK, bucketCreateResponse.ResponseHeaders[\"Subtenantid\"][0])\n } else {\n return &appError{err: err, status: http.StatusInternalServerError, xml: bucketCreateResponse.Body}\n }\n }\n\n return nil\n}", "func CreateModifyHostAvailabilityRequest() (request *ModifyHostAvailabilityRequest) {\n\trequest = &ModifyHostAvailabilityRequest{\n\t\tRpcRequest: &requests.RpcRequest{},\n\t}\n\trequest.InitWithApiInfo(\"Cms\", \"2019-01-01\", \"ModifyHostAvailability\", \"cms\", \"openAPI\")\n\trequest.Method = requests.POST\n\treturn\n}", "func NewPatchManagementEntitiesMoidDefault(code int) *PatchManagementEntitiesMoidDefault {\n\treturn &PatchManagementEntitiesMoidDefault{\n\t\t_statusCode: code,\n\t}\n}", "func ExampleVirtualMachinesClient_BeginCreateOrUpdate() {\n\tcred, err := azidentity.NewDefaultAzureCredential(nil)\n\tif err != nil {\n\t\tlog.Fatalf(\"failed to obtain a credential: %v\", err)\n\t}\n\tctx := context.Background()\n\tclient, err := armscvmm.NewVirtualMachinesClient(\"fd3c3665-1729-4b7b-9a38-238e83b0f98b\", cred, nil)\n\tif err != nil {\n\t\tlog.Fatalf(\"failed to create client: %v\", err)\n\t}\n\tpoller, err := client.BeginCreateOrUpdate(ctx,\n\t\t\"testrg\",\n\t\t\"DemoVM\",\n\t\tarmscvmm.VirtualMachine{\n\t\t\tExtendedLocation: &armscvmm.ExtendedLocation{\n\t\t\t\tName: to.Ptr(\"/subscriptions/a5015e1c-867f-4533-8541-85cd470d0cfb/resourceGroups/demoRG/providers/Microsoft.Arc/customLocations/contoso\"),\n\t\t\t\tType: to.Ptr(\"customLocation\"),\n\t\t\t},\n\t\t\tLocation: to.Ptr(\"East US\"),\n\t\t\tProperties: &armscvmm.VirtualMachineProperties{\n\t\t\t\tCloudID: to.Ptr(\"/subscriptions/fd3c3665-1729-4b7b-9a38-238e83b0f98b/resourceGroups/testrg/providers/Microsoft.SCVMM/Clouds/HRCloud\"),\n\t\t\t\tHardwareProfile: &armscvmm.HardwareProfile{\n\t\t\t\t\tCPUCount: to.Ptr[int32](4),\n\t\t\t\t\tMemoryMB: to.Ptr[int32](4096),\n\t\t\t\t},\n\t\t\t\tTemplateID: to.Ptr(\"/subscriptions/fd3c3665-1729-4b7b-9a38-238e83b0f98b/resourceGroups/testrg/providers/Microsoft.SCVMM/VirtualMachineTemplates/HRVirtualMachineTemplate\"),\n\t\t\t\tVmmServerID: to.Ptr(\"/subscriptions/fd3c3665-1729-4b7b-9a38-238e83b0f98b/resourceGroups/testrg/providers/Microsoft.SCVMM/VMMServers/ContosoVMMServer\"),\n\t\t\t},\n\t\t},\n\t\tnil)\n\tif err != nil {\n\t\tlog.Fatalf(\"failed to finish the request: %v\", err)\n\t}\n\tres, err := poller.PollUntilDone(ctx, nil)\n\tif err != nil {\n\t\tlog.Fatalf(\"failed to pull the result: %v\", err)\n\t}\n\t// TODO: use response item\n\t_ = res\n}", "func (client *VirtualMachinesClient) installPatchesCreateRequest(ctx context.Context, resourceGroupName string, name string, installPatchesInput VirtualMachineInstallPatchesParameters, options *VirtualMachinesClientBeginInstallPatchesOptions) (*policy.Request, error) {\n\turlPath := \"/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.ConnectedVMwarevSphere/virtualMachines/{name}/installPatches\"\n\tif 
client.subscriptionID == \"\" {\n\t\treturn nil, errors.New(\"parameter client.subscriptionID cannot be empty\")\n\t}\n\turlPath = strings.ReplaceAll(urlPath, \"{subscriptionId}\", url.PathEscape(client.subscriptionID))\n\tif resourceGroupName == \"\" {\n\t\treturn nil, errors.New(\"parameter resourceGroupName cannot be empty\")\n\t}\n\turlPath = strings.ReplaceAll(urlPath, \"{resourceGroupName}\", url.PathEscape(resourceGroupName))\n\tif name == \"\" {\n\t\treturn nil, errors.New(\"parameter name cannot be empty\")\n\t}\n\turlPath = strings.ReplaceAll(urlPath, \"{name}\", url.PathEscape(name))\n\treq, err := runtime.NewRequest(ctx, http.MethodPost, runtime.JoinPaths(client.internal.Endpoint(), urlPath))\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\treqQP := req.Raw().URL.Query()\n\treqQP.Set(\"api-version\", \"2022-01-10-preview\")\n\treq.Raw().URL.RawQuery = reqQP.Encode()\n\treq.Raw().Header[\"Accept\"] = []string{\"application/json\"}\n\treturn req, runtime.MarshalAsJSON(req, installPatchesInput)\n}", "func (a *HyperflexApiService) PatchHyperflexAppCatalog(ctx context.Context, moid string) ApiPatchHyperflexAppCatalogRequest {\n\treturn ApiPatchHyperflexAppCatalogRequest{\n\t\tApiService: a,\n\t\tctx: ctx,\n\t\tmoid: moid,\n\t}\n}" ]
[ "0.6463573", "0.6011306", "0.5853393", "0.5564866", "0.54266864", "0.5424272", "0.5382946", "0.5317569", "0.5276128", "0.5265758", "0.52430844", "0.5211391", "0.51730454", "0.5164064", "0.51593494", "0.5128178", "0.5002956", "0.49900427", "0.49820733", "0.48817182", "0.48650065", "0.48533982", "0.48239967", "0.48061916", "0.4799206", "0.47807443", "0.47794232", "0.4775439", "0.47631925", "0.47564268", "0.4737574", "0.4737563", "0.47304243", "0.4727436", "0.47265634", "0.47222215", "0.47201434", "0.47148913", "0.47020724", "0.47019318", "0.4697584", "0.46952006", "0.46903864", "0.46790403", "0.4673561", "0.46712402", "0.4668383", "0.4666413", "0.46601078", "0.46575642", "0.46562728", "0.4648167", "0.4643713", "0.46359974", "0.46337214", "0.46299526", "0.46296754", "0.4596289", "0.4595918", "0.4589187", "0.45869714", "0.4583643", "0.4578703", "0.45669663", "0.45642132", "0.4554308", "0.45474136", "0.45461333", "0.45433715", "0.45417568", "0.4541641", "0.45392743", "0.45276093", "0.45238316", "0.4517676", "0.45145142", "0.45101273", "0.451003", "0.4497223", "0.44970334", "0.44947472", "0.4479417", "0.44704142", "0.44641834", "0.4460614", "0.44591895", "0.44560847", "0.44545755", "0.44528335", "0.4451317", "0.44485673", "0.44483346", "0.44474995", "0.44451666", "0.44442546", "0.44408393", "0.44392857", "0.443905", "0.4433928", "0.4431209" ]
0.6815672
0
NewPatchHyperflexServerModelsMoidDefault creates a PatchHyperflexServerModelsMoidDefault with default headers values
func NewPatchHyperflexServerModelsMoidDefault(code int) *PatchHyperflexServerModelsMoidDefault { return &PatchHyperflexServerModelsMoidDefault{ _statusCode: code, } }
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "func (a *HyperflexApiService) PatchHyperflexServerModel(ctx context.Context, moid string) ApiPatchHyperflexServerModelRequest {\n\treturn ApiPatchHyperflexServerModelRequest{\n\t\tApiService: a,\n\t\tctx: ctx,\n\t\tmoid: moid,\n\t}\n}", "func NewPatchHyperflexSoftwareVersionPoliciesMoidDefault(code int) *PatchHyperflexSoftwareVersionPoliciesMoidDefault {\n\treturn &PatchHyperflexSoftwareVersionPoliciesMoidDefault{\n\t\t_statusCode: code,\n\t}\n}", "func NewGetHyperflexServerModelsMoidDefault(code int) *GetHyperflexServerModelsMoidDefault {\n\treturn &GetHyperflexServerModelsMoidDefault{\n\t\t_statusCode: code,\n\t}\n}", "func (a *HyperflexApiService) PatchHyperflexServerFirmwareVersion(ctx context.Context, moid string) ApiPatchHyperflexServerFirmwareVersionRequest {\n\treturn ApiPatchHyperflexServerFirmwareVersionRequest{\n\t\tApiService: a,\n\t\tctx: ctx,\n\t\tmoid: moid,\n\t}\n}", "func (a *HyperflexApiService) PatchHyperflexServerModelExecute(r ApiPatchHyperflexServerModelRequest) (*HyperflexServerModel, *http.Response, error) {\n\tvar (\n\t\tlocalVarHTTPMethod = http.MethodPatch\n\t\tlocalVarPostBody interface{}\n\t\tformFiles []formFile\n\t\tlocalVarReturnValue *HyperflexServerModel\n\t)\n\n\tlocalBasePath, err := a.client.cfg.ServerURLWithContext(r.ctx, \"HyperflexApiService.PatchHyperflexServerModel\")\n\tif err != nil {\n\t\treturn localVarReturnValue, nil, &GenericOpenAPIError{error: err.Error()}\n\t}\n\n\tlocalVarPath := localBasePath + \"/api/v1/hyperflex/ServerModels/{Moid}\"\n\tlocalVarPath = strings.Replace(localVarPath, \"{\"+\"Moid\"+\"}\", url.PathEscape(parameterToString(r.moid, \"\")), -1)\n\n\tlocalVarHeaderParams := make(map[string]string)\n\tlocalVarQueryParams := url.Values{}\n\tlocalVarFormParams := url.Values{}\n\tif r.hyperflexServerModel == nil {\n\t\treturn localVarReturnValue, nil, reportError(\"hyperflexServerModel is required and must be specified\")\n\t}\n\n\t// to determine the Content-Type header\n\tlocalVarHTTPContentTypes := []string{\"application/json\", \"application/json-patch+json\"}\n\n\t// set Content-Type header\n\tlocalVarHTTPContentType := selectHeaderContentType(localVarHTTPContentTypes)\n\tif localVarHTTPContentType != \"\" {\n\t\tlocalVarHeaderParams[\"Content-Type\"] = localVarHTTPContentType\n\t}\n\n\t// to determine the Accept header\n\tlocalVarHTTPHeaderAccepts := []string{\"application/json\"}\n\n\t// set Accept header\n\tlocalVarHTTPHeaderAccept := selectHeaderAccept(localVarHTTPHeaderAccepts)\n\tif localVarHTTPHeaderAccept != \"\" {\n\t\tlocalVarHeaderParams[\"Accept\"] = localVarHTTPHeaderAccept\n\t}\n\tif r.ifMatch != nil {\n\t\tlocalVarHeaderParams[\"If-Match\"] = parameterToString(*r.ifMatch, \"\")\n\t}\n\t// body params\n\tlocalVarPostBody = r.hyperflexServerModel\n\treq, err := a.client.prepareRequest(r.ctx, localVarPath, localVarHTTPMethod, localVarPostBody, localVarHeaderParams, localVarQueryParams, localVarFormParams, formFiles)\n\tif err != nil {\n\t\treturn localVarReturnValue, nil, err\n\t}\n\n\tlocalVarHTTPResponse, err := a.client.callAPI(req)\n\tif err != nil || localVarHTTPResponse == nil {\n\t\treturn localVarReturnValue, localVarHTTPResponse, err\n\t}\n\n\tlocalVarBody, err := ioutil.ReadAll(localVarHTTPResponse.Body)\n\tlocalVarHTTPResponse.Body.Close()\n\tlocalVarHTTPResponse.Body = ioutil.NopCloser(bytes.NewBuffer(localVarBody))\n\tif err != nil {\n\t\treturn localVarReturnValue, localVarHTTPResponse, err\n\t}\n\n\tif localVarHTTPResponse.StatusCode >= 300 {\n\t\tnewErr := &GenericOpenAPIError{\n\t\t\tbody: 
localVarBody,\n\t\t\terror: localVarHTTPResponse.Status,\n\t\t}\n\t\tif localVarHTTPResponse.StatusCode == 400 {\n\t\t\tvar v Error\n\t\t\terr = a.client.decode(&v, localVarBody, localVarHTTPResponse.Header.Get(\"Content-Type\"))\n\t\t\tif err != nil {\n\t\t\t\tnewErr.error = err.Error()\n\t\t\t\treturn localVarReturnValue, localVarHTTPResponse, newErr\n\t\t\t}\n\t\t\tnewErr.model = v\n\t\t\treturn localVarReturnValue, localVarHTTPResponse, newErr\n\t\t}\n\t\tif localVarHTTPResponse.StatusCode == 401 {\n\t\t\tvar v Error\n\t\t\terr = a.client.decode(&v, localVarBody, localVarHTTPResponse.Header.Get(\"Content-Type\"))\n\t\t\tif err != nil {\n\t\t\t\tnewErr.error = err.Error()\n\t\t\t\treturn localVarReturnValue, localVarHTTPResponse, newErr\n\t\t\t}\n\t\t\tnewErr.model = v\n\t\t\treturn localVarReturnValue, localVarHTTPResponse, newErr\n\t\t}\n\t\tif localVarHTTPResponse.StatusCode == 403 {\n\t\t\tvar v Error\n\t\t\terr = a.client.decode(&v, localVarBody, localVarHTTPResponse.Header.Get(\"Content-Type\"))\n\t\t\tif err != nil {\n\t\t\t\tnewErr.error = err.Error()\n\t\t\t\treturn localVarReturnValue, localVarHTTPResponse, newErr\n\t\t\t}\n\t\t\tnewErr.model = v\n\t\t\treturn localVarReturnValue, localVarHTTPResponse, newErr\n\t\t}\n\t\tif localVarHTTPResponse.StatusCode == 404 {\n\t\t\tvar v Error\n\t\t\terr = a.client.decode(&v, localVarBody, localVarHTTPResponse.Header.Get(\"Content-Type\"))\n\t\t\tif err != nil {\n\t\t\t\tnewErr.error = err.Error()\n\t\t\t\treturn localVarReturnValue, localVarHTTPResponse, newErr\n\t\t\t}\n\t\t\tnewErr.model = v\n\t\t\treturn localVarReturnValue, localVarHTTPResponse, newErr\n\t\t}\n\t\tvar v Error\n\t\terr = a.client.decode(&v, localVarBody, localVarHTTPResponse.Header.Get(\"Content-Type\"))\n\t\tif err != nil {\n\t\t\tnewErr.error = err.Error()\n\t\t\treturn localVarReturnValue, localVarHTTPResponse, newErr\n\t\t}\n\t\tnewErr.model = v\n\t\treturn localVarReturnValue, localVarHTTPResponse, newErr\n\t}\n\n\terr = a.client.decode(&localVarReturnValue, localVarBody, localVarHTTPResponse.Header.Get(\"Content-Type\"))\n\tif err != nil {\n\t\tnewErr := &GenericOpenAPIError{\n\t\t\tbody: localVarBody,\n\t\t\terror: err.Error(),\n\t\t}\n\t\treturn localVarReturnValue, localVarHTTPResponse, newErr\n\t}\n\n\treturn localVarReturnValue, localVarHTTPResponse, nil\n}", "func NewPatchMachineConfigurationDefault(code int) *PatchMachineConfigurationDefault {\n\treturn &PatchMachineConfigurationDefault{\n\t\t_statusCode: code,\n\t}\n}", "func (a *HyperflexApiService) PatchHyperflexHxdpVersion(ctx context.Context, moid string) ApiPatchHyperflexHxdpVersionRequest {\n\treturn ApiPatchHyperflexHxdpVersionRequest{\n\t\tApiService: a,\n\t\tctx: ctx,\n\t\tmoid: moid,\n\t}\n}", "func NewObmsPatchByIDDefault(code int) *ObmsPatchByIDDefault {\n\treturn &ObmsPatchByIDDefault{\n\t\t_statusCode: code,\n\t}\n}", "func (a *HyperflexApiService) PatchHyperflexServerFirmwareVersionEntry(ctx context.Context, moid string) ApiPatchHyperflexServerFirmwareVersionEntryRequest {\n\treturn ApiPatchHyperflexServerFirmwareVersionEntryRequest{\n\t\tApiService: a,\n\t\tctx: ctx,\n\t\tmoid: moid,\n\t}\n}", "func NewPatchVnicEthNetworkPoliciesMoidDefault(code int) *PatchVnicEthNetworkPoliciesMoidDefault {\n\treturn &PatchVnicEthNetworkPoliciesMoidDefault{\n\t\t_statusCode: code,\n\t}\n}", "func NewPostHyperflexHxdpVersionsMoidDefault(code int) *PostHyperflexHxdpVersionsMoidDefault {\n\treturn &PostHyperflexHxdpVersionsMoidDefault{\n\t\t_statusCode: code,\n\t}\n}", "func (a *Client) 
PatchHyperflexCapabilityInfosMoid(params *PatchHyperflexCapabilityInfosMoidParams) (*PatchHyperflexCapabilityInfosMoidCreated, error) {\n\t// TODO: Validate the params before sending\n\tif params == nil {\n\t\tparams = NewPatchHyperflexCapabilityInfosMoidParams()\n\t}\n\n\tresult, err := a.transport.Submit(&runtime.ClientOperation{\n\t\tID: \"PatchHyperflexCapabilityInfosMoid\",\n\t\tMethod: \"PATCH\",\n\t\tPathPattern: \"/hyperflex/CapabilityInfos/{moid}\",\n\t\tProducesMediaTypes: []string{\"application/json\"},\n\t\tConsumesMediaTypes: []string{\"application/json\"},\n\t\tSchemes: []string{\"https\"},\n\t\tParams: params,\n\t\tReader: &PatchHyperflexCapabilityInfosMoidReader{formats: a.formats},\n\t\tContext: params.Context,\n\t\tClient: params.HTTPClient,\n\t})\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\tsuccess, ok := result.(*PatchHyperflexCapabilityInfosMoidCreated)\n\tif ok {\n\t\treturn success, nil\n\t}\n\t// unexpected success response\n\tunexpectedSuccess := result.(*PatchHyperflexCapabilityInfosMoidDefault)\n\treturn nil, runtime.NewAPIError(\"unexpected success response: content available as default response in error\", unexpectedSuccess, unexpectedSuccess.Code())\n}", "func NewPatchApplianceDiagSettingsMoidDefault(code int) *PatchApplianceDiagSettingsMoidDefault {\n\treturn &PatchApplianceDiagSettingsMoidDefault{\n\t\t_statusCode: code,\n\t}\n}", "func (a *HyperflexApiService) UpdateHyperflexServerModel(ctx context.Context, moid string) ApiUpdateHyperflexServerModelRequest {\n\treturn ApiUpdateHyperflexServerModelRequest{\n\t\tApiService: a,\n\t\tctx: ctx,\n\t\tmoid: moid,\n\t}\n}", "func NewPatchGraphicsControllersMoidDefault(code int) *PatchGraphicsControllersMoidDefault {\n\treturn &PatchGraphicsControllersMoidDefault{\n\t\t_statusCode: code,\n\t}\n}", "func NewPatchHyperflexServerModelsMoidCreated() *PatchHyperflexServerModelsMoidCreated {\n\treturn &PatchHyperflexServerModelsMoidCreated{}\n}", "func (r ApiPatchHyperflexServerModelRequest) HyperflexServerModel(hyperflexServerModel HyperflexServerModel) ApiPatchHyperflexServerModelRequest {\n\tr.hyperflexServerModel = &hyperflexServerModel\n\treturn r\n}", "func NewIbmsPatchByIDDefault(code int) *IbmsPatchByIDDefault {\n\treturn &IbmsPatchByIDDefault{\n\t\t_statusCode: code,\n\t}\n}", "func (a *HyperflexApiService) PatchHyperflexTarget(ctx context.Context, moid string) ApiPatchHyperflexTargetRequest {\n\treturn ApiPatchHyperflexTargetRequest{\n\t\tApiService: a,\n\t\tctx: ctx,\n\t\tmoid: moid,\n\t}\n}", "func (a *HyperflexApiService) PatchHyperflexInitiatorGroup(ctx context.Context, moid string) ApiPatchHyperflexInitiatorGroupRequest {\n\treturn ApiPatchHyperflexInitiatorGroupRequest{\n\t\tApiService: a,\n\t\tctx: ctx,\n\t\tmoid: moid,\n\t}\n}", "func NewPatchApplianceUpgradePoliciesMoidDefault(code int) *PatchApplianceUpgradePoliciesMoidDefault {\n\treturn &PatchApplianceUpgradePoliciesMoidDefault{\n\t\t_statusCode: code,\n\t}\n}", "func NewPatchManagementEntitiesMoidDefault(code int) *PatchManagementEntitiesMoidDefault {\n\treturn &PatchManagementEntitiesMoidDefault{\n\t\t_statusCode: code,\n\t}\n}", "func (a *Client) PatchHyperflexClusterProfilesMoid(params *PatchHyperflexClusterProfilesMoidParams) (*PatchHyperflexClusterProfilesMoidCreated, error) {\n\t// TODO: Validate the params before sending\n\tif params == nil {\n\t\tparams = NewPatchHyperflexClusterProfilesMoidParams()\n\t}\n\n\tresult, err := a.transport.Submit(&runtime.ClientOperation{\n\t\tID: \"PatchHyperflexClusterProfilesMoid\",\n\t\tMethod: 
\"PATCH\",\n\t\tPathPattern: \"/hyperflex/ClusterProfiles/{moid}\",\n\t\tProducesMediaTypes: []string{\"application/json\"},\n\t\tConsumesMediaTypes: []string{\"application/json\"},\n\t\tSchemes: []string{\"https\"},\n\t\tParams: params,\n\t\tReader: &PatchHyperflexClusterProfilesMoidReader{formats: a.formats},\n\t\tContext: params.Context,\n\t\tClient: params.HTTPClient,\n\t})\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\tsuccess, ok := result.(*PatchHyperflexClusterProfilesMoidCreated)\n\tif ok {\n\t\treturn success, nil\n\t}\n\t// unexpected success response\n\tunexpectedSuccess := result.(*PatchHyperflexClusterProfilesMoidDefault)\n\treturn nil, runtime.NewAPIError(\"unexpected success response: content available as default response in error\", unexpectedSuccess, unexpectedSuccess.Code())\n}", "func (req *Request) DefaultHeaders(mKey string) (err error) {\n\treq.Header.Add(HeaderXDate, time.Now().UTC().Format(\"Mon, 02 Jan 2006 15:04:05 GMT\"))\n\treq.Header.Add(HeaderVersion, SupportedAPIVersion)\n\treq.Header.Add(HeaderUserAgent, UserAgent)\n\n\t// Auth\n\tparts := req.Method + \"\\n\" +\n\t\treq.rType + \"\\n\" +\n\t\treq.rLink + \"\\n\" +\n\t\treq.Header.Get(HeaderXDate) + \"\\n\" +\n\t\treq.Header.Get(\"Date\") + \"\\n\"\n\n\tpartsLower := strings.ToLower(parts)\n\n\tsign, err := authorize(partsLower, mKey)\n\tif err != nil {\n\t\treturn err\n\t}\n\n\tmasterToken := \"master\"\n\ttokenVersion := \"1.0\"\n\treq.Header.Add(HeaderAuth, url.QueryEscape(\"type=\"+masterToken+\"&ver=\"+tokenVersion+\"&sig=\"+sign))\n\treturn\n}", "func (a *HyperflexApiService) PatchHyperflexIscsiNetwork(ctx context.Context, moid string) ApiPatchHyperflexIscsiNetworkRequest {\n\treturn ApiPatchHyperflexIscsiNetworkRequest{\n\t\tApiService: a,\n\t\tctx: ctx,\n\t\tmoid: moid,\n\t}\n}", "func (a *HyperflexApiService) PatchHyperflexDrive(ctx context.Context, moid string) ApiPatchHyperflexDriveRequest {\n\treturn ApiPatchHyperflexDriveRequest{\n\t\tApiService: a,\n\t\tctx: ctx,\n\t\tmoid: moid,\n\t}\n}", "func (a *Client) PatchHyperflexClusterStoragePoliciesMoid(params *PatchHyperflexClusterStoragePoliciesMoidParams) (*PatchHyperflexClusterStoragePoliciesMoidCreated, error) {\n\t// TODO: Validate the params before sending\n\tif params == nil {\n\t\tparams = NewPatchHyperflexClusterStoragePoliciesMoidParams()\n\t}\n\n\tresult, err := a.transport.Submit(&runtime.ClientOperation{\n\t\tID: \"PatchHyperflexClusterStoragePoliciesMoid\",\n\t\tMethod: \"PATCH\",\n\t\tPathPattern: \"/hyperflex/ClusterStoragePolicies/{moid}\",\n\t\tProducesMediaTypes: []string{\"application/json\"},\n\t\tConsumesMediaTypes: []string{\"application/json\"},\n\t\tSchemes: []string{\"https\"},\n\t\tParams: params,\n\t\tReader: &PatchHyperflexClusterStoragePoliciesMoidReader{formats: a.formats},\n\t\tContext: params.Context,\n\t\tClient: params.HTTPClient,\n\t})\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\tsuccess, ok := result.(*PatchHyperflexClusterStoragePoliciesMoidCreated)\n\tif ok {\n\t\treturn success, nil\n\t}\n\t// unexpected success response\n\tunexpectedSuccess := result.(*PatchHyperflexClusterStoragePoliciesMoidDefault)\n\treturn nil, runtime.NewAPIError(\"unexpected success response: content available as default response in error\", unexpectedSuccess, unexpectedSuccess.Code())\n}", "func NewPatchLicenseCustomerOpsMoidDefault(code int) *PatchLicenseCustomerOpsMoidDefault {\n\treturn &PatchLicenseCustomerOpsMoidDefault{\n\t\t_statusCode: code,\n\t}\n}", "func (a *HyperflexApiService) CreateHyperflexServerModel(ctx 
context.Context) ApiCreateHyperflexServerModelRequest {\n\treturn ApiCreateHyperflexServerModelRequest{\n\t\tApiService: a,\n\t\tctx: ctx,\n\t}\n}", "func (a *HyperflexApiService) PatchHyperflexHypervisorVirtualMachine(ctx context.Context, moid string) ApiPatchHyperflexHypervisorVirtualMachineRequest {\n\treturn ApiPatchHyperflexHypervisorVirtualMachineRequest{\n\t\tApiService: a,\n\t\tctx: ctx,\n\t\tmoid: moid,\n\t}\n}", "func patchDefaultPodSpecs(defaultPodSpecs v1.PodSpec, override crv1alpha1.JSONMap) (v1.PodSpec, error) {\n\t// Merge default specs and override specs with StrategicMergePatch\n\tmergedPatch, err := strategicMergeJsonPatch(defaultPodSpecs, override)\n\tif err != nil {\n\t\treturn v1.PodSpec{}, err\n\t}\n\n\t// Convert merged json to v1.PodSPec object\n\tpodSpec := v1.PodSpec{}\n\terr = json.Unmarshal(mergedPatch, &podSpec)\n\tif err != nil {\n\t\treturn podSpec, err\n\t}\n\treturn podSpec, err\n}", "func NewDefaultRESTMapper(versions []string, f VersionInterfacesFunc) *DefaultRESTMapper {\n\tmapping := make(map[string]typeMeta)\n\treverse := make(map[typeMeta]string)\n\tscopes := make(map[typeMeta]RESTScope)\n\t// TODO: verify name mappings work correctly when versions differ\n\n\treturn &DefaultRESTMapper{\n\t\tmapping: mapping,\n\t\treverse: reverse,\n\t\tscopes: scopes,\n\t\tversions: versions,\n\t\tinterfacesFunc: f,\n\t}\n}", "func (a *HyperflexApiService) PatchHyperflexServerFirmwareVersionExecute(r ApiPatchHyperflexServerFirmwareVersionRequest) (*HyperflexServerFirmwareVersion, *http.Response, error) {\n\tvar (\n\t\tlocalVarHTTPMethod = http.MethodPatch\n\t\tlocalVarPostBody interface{}\n\t\tformFiles []formFile\n\t\tlocalVarReturnValue *HyperflexServerFirmwareVersion\n\t)\n\n\tlocalBasePath, err := a.client.cfg.ServerURLWithContext(r.ctx, \"HyperflexApiService.PatchHyperflexServerFirmwareVersion\")\n\tif err != nil {\n\t\treturn localVarReturnValue, nil, &GenericOpenAPIError{error: err.Error()}\n\t}\n\n\tlocalVarPath := localBasePath + \"/api/v1/hyperflex/ServerFirmwareVersions/{Moid}\"\n\tlocalVarPath = strings.Replace(localVarPath, \"{\"+\"Moid\"+\"}\", url.PathEscape(parameterToString(r.moid, \"\")), -1)\n\n\tlocalVarHeaderParams := make(map[string]string)\n\tlocalVarQueryParams := url.Values{}\n\tlocalVarFormParams := url.Values{}\n\tif r.hyperflexServerFirmwareVersion == nil {\n\t\treturn localVarReturnValue, nil, reportError(\"hyperflexServerFirmwareVersion is required and must be specified\")\n\t}\n\n\t// to determine the Content-Type header\n\tlocalVarHTTPContentTypes := []string{\"application/json\", \"application/json-patch+json\"}\n\n\t// set Content-Type header\n\tlocalVarHTTPContentType := selectHeaderContentType(localVarHTTPContentTypes)\n\tif localVarHTTPContentType != \"\" {\n\t\tlocalVarHeaderParams[\"Content-Type\"] = localVarHTTPContentType\n\t}\n\n\t// to determine the Accept header\n\tlocalVarHTTPHeaderAccepts := []string{\"application/json\"}\n\n\t// set Accept header\n\tlocalVarHTTPHeaderAccept := selectHeaderAccept(localVarHTTPHeaderAccepts)\n\tif localVarHTTPHeaderAccept != \"\" {\n\t\tlocalVarHeaderParams[\"Accept\"] = localVarHTTPHeaderAccept\n\t}\n\tif r.ifMatch != nil {\n\t\tlocalVarHeaderParams[\"If-Match\"] = parameterToString(*r.ifMatch, \"\")\n\t}\n\t// body params\n\tlocalVarPostBody = r.hyperflexServerFirmwareVersion\n\treq, err := a.client.prepareRequest(r.ctx, localVarPath, localVarHTTPMethod, localVarPostBody, localVarHeaderParams, localVarQueryParams, localVarFormParams, formFiles)\n\tif err != nil {\n\t\treturn 
localVarReturnValue, nil, err\n\t}\n\n\tlocalVarHTTPResponse, err := a.client.callAPI(req)\n\tif err != nil || localVarHTTPResponse == nil {\n\t\treturn localVarReturnValue, localVarHTTPResponse, err\n\t}\n\n\tlocalVarBody, err := ioutil.ReadAll(localVarHTTPResponse.Body)\n\tlocalVarHTTPResponse.Body.Close()\n\tlocalVarHTTPResponse.Body = ioutil.NopCloser(bytes.NewBuffer(localVarBody))\n\tif err != nil {\n\t\treturn localVarReturnValue, localVarHTTPResponse, err\n\t}\n\n\tif localVarHTTPResponse.StatusCode >= 300 {\n\t\tnewErr := &GenericOpenAPIError{\n\t\t\tbody: localVarBody,\n\t\t\terror: localVarHTTPResponse.Status,\n\t\t}\n\t\tif localVarHTTPResponse.StatusCode == 400 {\n\t\t\tvar v Error\n\t\t\terr = a.client.decode(&v, localVarBody, localVarHTTPResponse.Header.Get(\"Content-Type\"))\n\t\t\tif err != nil {\n\t\t\t\tnewErr.error = err.Error()\n\t\t\t\treturn localVarReturnValue, localVarHTTPResponse, newErr\n\t\t\t}\n\t\t\tnewErr.model = v\n\t\t\treturn localVarReturnValue, localVarHTTPResponse, newErr\n\t\t}\n\t\tif localVarHTTPResponse.StatusCode == 401 {\n\t\t\tvar v Error\n\t\t\terr = a.client.decode(&v, localVarBody, localVarHTTPResponse.Header.Get(\"Content-Type\"))\n\t\t\tif err != nil {\n\t\t\t\tnewErr.error = err.Error()\n\t\t\t\treturn localVarReturnValue, localVarHTTPResponse, newErr\n\t\t\t}\n\t\t\tnewErr.model = v\n\t\t\treturn localVarReturnValue, localVarHTTPResponse, newErr\n\t\t}\n\t\tif localVarHTTPResponse.StatusCode == 403 {\n\t\t\tvar v Error\n\t\t\terr = a.client.decode(&v, localVarBody, localVarHTTPResponse.Header.Get(\"Content-Type\"))\n\t\t\tif err != nil {\n\t\t\t\tnewErr.error = err.Error()\n\t\t\t\treturn localVarReturnValue, localVarHTTPResponse, newErr\n\t\t\t}\n\t\t\tnewErr.model = v\n\t\t\treturn localVarReturnValue, localVarHTTPResponse, newErr\n\t\t}\n\t\tif localVarHTTPResponse.StatusCode == 404 {\n\t\t\tvar v Error\n\t\t\terr = a.client.decode(&v, localVarBody, localVarHTTPResponse.Header.Get(\"Content-Type\"))\n\t\t\tif err != nil {\n\t\t\t\tnewErr.error = err.Error()\n\t\t\t\treturn localVarReturnValue, localVarHTTPResponse, newErr\n\t\t\t}\n\t\t\tnewErr.model = v\n\t\t\treturn localVarReturnValue, localVarHTTPResponse, newErr\n\t\t}\n\t\tvar v Error\n\t\terr = a.client.decode(&v, localVarBody, localVarHTTPResponse.Header.Get(\"Content-Type\"))\n\t\tif err != nil {\n\t\t\tnewErr.error = err.Error()\n\t\t\treturn localVarReturnValue, localVarHTTPResponse, newErr\n\t\t}\n\t\tnewErr.model = v\n\t\treturn localVarReturnValue, localVarHTTPResponse, newErr\n\t}\n\n\terr = a.client.decode(&localVarReturnValue, localVarBody, localVarHTTPResponse.Header.Get(\"Content-Type\"))\n\tif err != nil {\n\t\tnewErr := &GenericOpenAPIError{\n\t\t\tbody: localVarBody,\n\t\t\terror: err.Error(),\n\t\t}\n\t\treturn localVarReturnValue, localVarHTTPResponse, newErr\n\t}\n\n\treturn localVarReturnValue, localVarHTTPResponse, nil\n}", "func setDefaultFields(partialConfigOut *cellularPartialMconfig) {\n\tpartialConfigOut.Enodebd.LogLevel = protos.LogLevel_INFO\n\tpartialConfigOut.Mobilityd.LogLevel = protos.LogLevel_INFO\n\tpartialConfigOut.Mme.LogLevel = protos.LogLevel_INFO\n\tpartialConfigOut.Pipelined.LogLevel = protos.LogLevel_INFO\n\tpartialConfigOut.Subscriberdb.LogLevel = protos.LogLevel_INFO\n\tpartialConfigOut.Policydb.LogLevel = protos.LogLevel_INFO\n\tpartialConfigOut.Sessiond.LogLevel = protos.LogLevel_INFO\n\n\tpartialConfigOut.Mme.MmeCode = 1\n\tpartialConfigOut.Mme.MmeGid = 1\n}", "func (mw *azureManagedControlPlaneWebhook) Default(ctx context.Context, obj 
runtime.Object) error {\n\tm, ok := obj.(*AzureManagedControlPlane)\n\tif !ok {\n\t\treturn apierrors.NewBadRequest(\"expected an AzureManagedControlPlane\")\n\t}\n\tif m.Spec.NetworkPlugin == nil {\n\t\tnetworkPlugin := \"azure\"\n\t\tm.Spec.NetworkPlugin = &networkPlugin\n\t}\n\tif m.Spec.LoadBalancerSKU == nil {\n\t\tloadBalancerSKU := \"Standard\"\n\t\tm.Spec.LoadBalancerSKU = &loadBalancerSKU\n\t}\n\n\tif m.Spec.Version != \"\" && !strings.HasPrefix(m.Spec.Version, \"v\") {\n\t\tnormalizedVersion := \"v\" + m.Spec.Version\n\t\tm.Spec.Version = normalizedVersion\n\t}\n\n\tif m.Spec.Identity == nil {\n\t\tm.Spec.Identity = &Identity{\n\t\t\tType: ManagedControlPlaneIdentityTypeSystemAssigned,\n\t\t}\n\t}\n\n\tif err := m.setDefaultSSHPublicKey(); err != nil {\n\t\tctrl.Log.WithName(\"AzureManagedControlPlaneWebHookLogger\").Error(err, \"setDefaultSSHPublicKey failed\")\n\t}\n\n\tm.setDefaultNodeResourceGroupName()\n\tm.setDefaultVirtualNetwork()\n\tm.setDefaultSubnet()\n\tm.setDefaultSku()\n\tm.setDefaultAutoScalerProfile()\n\n\treturn nil\n}", "func NewGetHyperflexServerFirmwareVersionsDefault(code int) *GetHyperflexServerFirmwareVersionsDefault {\n\treturn &GetHyperflexServerFirmwareVersionsDefault{\n\t\t_statusCode: code,\n\t}\n}", "func (a *HyperflexApiService) PatchHyperflexLun(ctx context.Context, moid string) ApiPatchHyperflexLunRequest {\n\treturn ApiPatchHyperflexLunRequest{\n\t\tApiService: a,\n\t\tctx: ctx,\n\t\tmoid: moid,\n\t}\n}", "func defaultModel(namespaces []string, protocols []v1.Protocol, ports []int32) *Model {\n\tif framework.NodeOSDistroIs(\"windows\") {\n\t\treturn NewWindowsModel(namespaces, []string{\"a\", \"b\", \"c\"}, ports)\n\t}\n\treturn NewModel(namespaces, []string{\"a\", \"b\", \"c\"}, ports, protocols)\n}", "func (a *HyperflexApiService) PatchHyperflexCluster(ctx context.Context, moid string) ApiPatchHyperflexClusterRequest {\n\treturn ApiPatchHyperflexClusterRequest{\n\t\tApiService: a,\n\t\tctx: ctx,\n\t\tmoid: moid,\n\t}\n}", "func (a *HyperflexApiService) PatchHyperflexHypervisorHost(ctx context.Context, moid string) ApiPatchHyperflexHypervisorHostRequest {\n\treturn ApiPatchHyperflexHypervisorHostRequest{\n\t\tApiService: a,\n\t\tctx: ctx,\n\t\tmoid: moid,\n\t}\n}", "func (a *HyperflexApiService) PatchHyperflexFeatureLimitInternal(ctx context.Context, moid string) ApiPatchHyperflexFeatureLimitInternalRequest {\n\treturn ApiPatchHyperflexFeatureLimitInternalRequest{\n\t\tApiService: a,\n\t\tctx: ctx,\n\t\tmoid: moid,\n\t}\n}", "func addingDefaultFieldsToSchema(crdName string, schemaRaw []byte) ([]byte, error) {\n\tvar schema struct {\n\t\tProperties map[string]interface{} `json:\"properties\"`\n\t}\n\t_ = json.Unmarshal(schemaRaw, &schema)\n\n\tif len(schema.Properties) < 1 {\n\t\tlogging.V(6).Info(\"crd schema has no properties\", \"name\", crdName)\n\t\treturn schemaRaw, nil\n\t}\n\n\tif schema.Properties[\"apiVersion\"] == nil {\n\t\tapiVersionDefRaw := `{\"description\":\"APIVersion defines the versioned schema of this representation of an object. Servers should convert recognized schemas to the latest internal value, and may reject unrecognized values. 
More info: https://git.k8s.io/community/contributors/devel/sig-architecture/api-conventions.md#resources\",\"type\":\"string\"}`\n\t\tapiVersionDef := make(map[string]interface{})\n\t\t_ = json.Unmarshal([]byte(apiVersionDefRaw), &apiVersionDef)\n\t\tschema.Properties[\"apiVersion\"] = apiVersionDef\n\t}\n\n\tif schema.Properties[\"metadata\"] == nil {\n\t\tmetadataDefRaw := `{\"$ref\":\"#/definitions/io.k8s.apimachinery.pkg.apis.meta.v1.ObjectMeta\",\"description\":\"Standard object's metadata. More info: https://git.k8s.io/community/contributors/devel/sig-architecture/api-conventions.md#metadata\"}`\n\t\tmetadataDef := make(map[string]interface{})\n\t\t_ = json.Unmarshal([]byte(metadataDefRaw), &metadataDef)\n\t\tschema.Properties[\"metadata\"] = metadataDef\n\t}\n\n\tschemaWithDefaultFields, _ := json.Marshal(schema)\n\n\treturn schemaWithDefaultFields, nil\n}", "func (a *HyperflexApiService) UpdateHyperflexServerModelExecute(r ApiUpdateHyperflexServerModelRequest) (*HyperflexServerModel, *http.Response, error) {\n\tvar (\n\t\tlocalVarHTTPMethod = http.MethodPost\n\t\tlocalVarPostBody interface{}\n\t\tformFiles []formFile\n\t\tlocalVarReturnValue *HyperflexServerModel\n\t)\n\n\tlocalBasePath, err := a.client.cfg.ServerURLWithContext(r.ctx, \"HyperflexApiService.UpdateHyperflexServerModel\")\n\tif err != nil {\n\t\treturn localVarReturnValue, nil, &GenericOpenAPIError{error: err.Error()}\n\t}\n\n\tlocalVarPath := localBasePath + \"/api/v1/hyperflex/ServerModels/{Moid}\"\n\tlocalVarPath = strings.Replace(localVarPath, \"{\"+\"Moid\"+\"}\", url.PathEscape(parameterToString(r.moid, \"\")), -1)\n\n\tlocalVarHeaderParams := make(map[string]string)\n\tlocalVarQueryParams := url.Values{}\n\tlocalVarFormParams := url.Values{}\n\tif r.hyperflexServerModel == nil {\n\t\treturn localVarReturnValue, nil, reportError(\"hyperflexServerModel is required and must be specified\")\n\t}\n\n\t// to determine the Content-Type header\n\tlocalVarHTTPContentTypes := []string{\"application/json\", \"application/json-patch+json\"}\n\n\t// set Content-Type header\n\tlocalVarHTTPContentType := selectHeaderContentType(localVarHTTPContentTypes)\n\tif localVarHTTPContentType != \"\" {\n\t\tlocalVarHeaderParams[\"Content-Type\"] = localVarHTTPContentType\n\t}\n\n\t// to determine the Accept header\n\tlocalVarHTTPHeaderAccepts := []string{\"application/json\"}\n\n\t// set Accept header\n\tlocalVarHTTPHeaderAccept := selectHeaderAccept(localVarHTTPHeaderAccepts)\n\tif localVarHTTPHeaderAccept != \"\" {\n\t\tlocalVarHeaderParams[\"Accept\"] = localVarHTTPHeaderAccept\n\t}\n\tif r.ifMatch != nil {\n\t\tlocalVarHeaderParams[\"If-Match\"] = parameterToString(*r.ifMatch, \"\")\n\t}\n\t// body params\n\tlocalVarPostBody = r.hyperflexServerModel\n\treq, err := a.client.prepareRequest(r.ctx, localVarPath, localVarHTTPMethod, localVarPostBody, localVarHeaderParams, localVarQueryParams, localVarFormParams, formFiles)\n\tif err != nil {\n\t\treturn localVarReturnValue, nil, err\n\t}\n\n\tlocalVarHTTPResponse, err := a.client.callAPI(req)\n\tif err != nil || localVarHTTPResponse == nil {\n\t\treturn localVarReturnValue, localVarHTTPResponse, err\n\t}\n\n\tlocalVarBody, err := ioutil.ReadAll(localVarHTTPResponse.Body)\n\tlocalVarHTTPResponse.Body.Close()\n\tlocalVarHTTPResponse.Body = ioutil.NopCloser(bytes.NewBuffer(localVarBody))\n\tif err != nil {\n\t\treturn localVarReturnValue, localVarHTTPResponse, err\n\t}\n\n\tif localVarHTTPResponse.StatusCode >= 300 {\n\t\tnewErr := &GenericOpenAPIError{\n\t\t\tbody: 
localVarBody,\n\t\t\terror: localVarHTTPResponse.Status,\n\t\t}\n\t\tif localVarHTTPResponse.StatusCode == 400 {\n\t\t\tvar v Error\n\t\t\terr = a.client.decode(&v, localVarBody, localVarHTTPResponse.Header.Get(\"Content-Type\"))\n\t\t\tif err != nil {\n\t\t\t\tnewErr.error = err.Error()\n\t\t\t\treturn localVarReturnValue, localVarHTTPResponse, newErr\n\t\t\t}\n\t\t\tnewErr.model = v\n\t\t\treturn localVarReturnValue, localVarHTTPResponse, newErr\n\t\t}\n\t\tif localVarHTTPResponse.StatusCode == 401 {\n\t\t\tvar v Error\n\t\t\terr = a.client.decode(&v, localVarBody, localVarHTTPResponse.Header.Get(\"Content-Type\"))\n\t\t\tif err != nil {\n\t\t\t\tnewErr.error = err.Error()\n\t\t\t\treturn localVarReturnValue, localVarHTTPResponse, newErr\n\t\t\t}\n\t\t\tnewErr.model = v\n\t\t\treturn localVarReturnValue, localVarHTTPResponse, newErr\n\t\t}\n\t\tif localVarHTTPResponse.StatusCode == 403 {\n\t\t\tvar v Error\n\t\t\terr = a.client.decode(&v, localVarBody, localVarHTTPResponse.Header.Get(\"Content-Type\"))\n\t\t\tif err != nil {\n\t\t\t\tnewErr.error = err.Error()\n\t\t\t\treturn localVarReturnValue, localVarHTTPResponse, newErr\n\t\t\t}\n\t\t\tnewErr.model = v\n\t\t\treturn localVarReturnValue, localVarHTTPResponse, newErr\n\t\t}\n\t\tif localVarHTTPResponse.StatusCode == 404 {\n\t\t\tvar v Error\n\t\t\terr = a.client.decode(&v, localVarBody, localVarHTTPResponse.Header.Get(\"Content-Type\"))\n\t\t\tif err != nil {\n\t\t\t\tnewErr.error = err.Error()\n\t\t\t\treturn localVarReturnValue, localVarHTTPResponse, newErr\n\t\t\t}\n\t\t\tnewErr.model = v\n\t\t\treturn localVarReturnValue, localVarHTTPResponse, newErr\n\t\t}\n\t\tvar v Error\n\t\terr = a.client.decode(&v, localVarBody, localVarHTTPResponse.Header.Get(\"Content-Type\"))\n\t\tif err != nil {\n\t\t\tnewErr.error = err.Error()\n\t\t\treturn localVarReturnValue, localVarHTTPResponse, newErr\n\t\t}\n\t\tnewErr.model = v\n\t\treturn localVarReturnValue, localVarHTTPResponse, newErr\n\t}\n\n\terr = a.client.decode(&localVarReturnValue, localVarBody, localVarHTTPResponse.Header.Get(\"Content-Type\"))\n\tif err != nil {\n\t\tnewErr := &GenericOpenAPIError{\n\t\t\tbody: localVarBody,\n\t\t\terror: err.Error(),\n\t\t}\n\t\treturn localVarReturnValue, localVarHTTPResponse, newErr\n\t}\n\n\treturn localVarReturnValue, localVarHTTPResponse, nil\n}", "func (a *HyperflexApiService) PatchHyperflexNodeProfile(ctx context.Context, moid string) ApiPatchHyperflexNodeProfileRequest {\n\treturn ApiPatchHyperflexNodeProfileRequest{\n\t\tApiService: a,\n\t\tctx: ctx,\n\t\tmoid: moid,\n\t}\n}", "func NewHyperflexServerFirmwareVersionEntryAllOfWithDefaults() *HyperflexServerFirmwareVersionEntryAllOf {\n\tthis := HyperflexServerFirmwareVersionEntryAllOf{}\n\tvar classId string = \"hyperflex.ServerFirmwareVersionEntry\"\n\tthis.ClassId = classId\n\tvar objectType string = \"hyperflex.ServerFirmwareVersionEntry\"\n\tthis.ObjectType = objectType\n\tvar serverPlatform string = \"M5\"\n\tthis.ServerPlatform = &serverPlatform\n\treturn &this\n}", "func (c *Client) ModifyLaunchTemplateDefaultVersion(request *ModifyLaunchTemplateDefaultVersionRequest) (response *ModifyLaunchTemplateDefaultVersionResponse, err error) {\n return c.ModifyLaunchTemplateDefaultVersionWithContext(context.Background(), request)\n}", "func (a *HyperflexApiService) PatchHyperflexServerFirmwareVersionEntryExecute(r ApiPatchHyperflexServerFirmwareVersionEntryRequest) (*HyperflexServerFirmwareVersionEntry, *http.Response, error) {\n\tvar (\n\t\tlocalVarHTTPMethod = 
http.MethodPatch\n\t\tlocalVarPostBody interface{}\n\t\tformFiles []formFile\n\t\tlocalVarReturnValue *HyperflexServerFirmwareVersionEntry\n\t)\n\n\tlocalBasePath, err := a.client.cfg.ServerURLWithContext(r.ctx, \"HyperflexApiService.PatchHyperflexServerFirmwareVersionEntry\")\n\tif err != nil {\n\t\treturn localVarReturnValue, nil, &GenericOpenAPIError{error: err.Error()}\n\t}\n\n\tlocalVarPath := localBasePath + \"/api/v1/hyperflex/ServerFirmwareVersionEntries/{Moid}\"\n\tlocalVarPath = strings.Replace(localVarPath, \"{\"+\"Moid\"+\"}\", url.PathEscape(parameterToString(r.moid, \"\")), -1)\n\n\tlocalVarHeaderParams := make(map[string]string)\n\tlocalVarQueryParams := url.Values{}\n\tlocalVarFormParams := url.Values{}\n\tif r.hyperflexServerFirmwareVersionEntry == nil {\n\t\treturn localVarReturnValue, nil, reportError(\"hyperflexServerFirmwareVersionEntry is required and must be specified\")\n\t}\n\n\t// to determine the Content-Type header\n\tlocalVarHTTPContentTypes := []string{\"application/json\", \"application/json-patch+json\"}\n\n\t// set Content-Type header\n\tlocalVarHTTPContentType := selectHeaderContentType(localVarHTTPContentTypes)\n\tif localVarHTTPContentType != \"\" {\n\t\tlocalVarHeaderParams[\"Content-Type\"] = localVarHTTPContentType\n\t}\n\n\t// to determine the Accept header\n\tlocalVarHTTPHeaderAccepts := []string{\"application/json\"}\n\n\t// set Accept header\n\tlocalVarHTTPHeaderAccept := selectHeaderAccept(localVarHTTPHeaderAccepts)\n\tif localVarHTTPHeaderAccept != \"\" {\n\t\tlocalVarHeaderParams[\"Accept\"] = localVarHTTPHeaderAccept\n\t}\n\tif r.ifMatch != nil {\n\t\tlocalVarHeaderParams[\"If-Match\"] = parameterToString(*r.ifMatch, \"\")\n\t}\n\t// body params\n\tlocalVarPostBody = r.hyperflexServerFirmwareVersionEntry\n\treq, err := a.client.prepareRequest(r.ctx, localVarPath, localVarHTTPMethod, localVarPostBody, localVarHeaderParams, localVarQueryParams, localVarFormParams, formFiles)\n\tif err != nil {\n\t\treturn localVarReturnValue, nil, err\n\t}\n\n\tlocalVarHTTPResponse, err := a.client.callAPI(req)\n\tif err != nil || localVarHTTPResponse == nil {\n\t\treturn localVarReturnValue, localVarHTTPResponse, err\n\t}\n\n\tlocalVarBody, err := ioutil.ReadAll(localVarHTTPResponse.Body)\n\tlocalVarHTTPResponse.Body.Close()\n\tlocalVarHTTPResponse.Body = ioutil.NopCloser(bytes.NewBuffer(localVarBody))\n\tif err != nil {\n\t\treturn localVarReturnValue, localVarHTTPResponse, err\n\t}\n\n\tif localVarHTTPResponse.StatusCode >= 300 {\n\t\tnewErr := &GenericOpenAPIError{\n\t\t\tbody: localVarBody,\n\t\t\terror: localVarHTTPResponse.Status,\n\t\t}\n\t\tif localVarHTTPResponse.StatusCode == 400 {\n\t\t\tvar v Error\n\t\t\terr = a.client.decode(&v, localVarBody, localVarHTTPResponse.Header.Get(\"Content-Type\"))\n\t\t\tif err != nil {\n\t\t\t\tnewErr.error = err.Error()\n\t\t\t\treturn localVarReturnValue, localVarHTTPResponse, newErr\n\t\t\t}\n\t\t\tnewErr.model = v\n\t\t\treturn localVarReturnValue, localVarHTTPResponse, newErr\n\t\t}\n\t\tif localVarHTTPResponse.StatusCode == 401 {\n\t\t\tvar v Error\n\t\t\terr = a.client.decode(&v, localVarBody, localVarHTTPResponse.Header.Get(\"Content-Type\"))\n\t\t\tif err != nil {\n\t\t\t\tnewErr.error = err.Error()\n\t\t\t\treturn localVarReturnValue, localVarHTTPResponse, newErr\n\t\t\t}\n\t\t\tnewErr.model = v\n\t\t\treturn localVarReturnValue, localVarHTTPResponse, newErr\n\t\t}\n\t\tif localVarHTTPResponse.StatusCode == 403 {\n\t\t\tvar v Error\n\t\t\terr = a.client.decode(&v, localVarBody, 
localVarHTTPResponse.Header.Get(\"Content-Type\"))\n\t\t\tif err != nil {\n\t\t\t\tnewErr.error = err.Error()\n\t\t\t\treturn localVarReturnValue, localVarHTTPResponse, newErr\n\t\t\t}\n\t\t\tnewErr.model = v\n\t\t\treturn localVarReturnValue, localVarHTTPResponse, newErr\n\t\t}\n\t\tif localVarHTTPResponse.StatusCode == 404 {\n\t\t\tvar v Error\n\t\t\terr = a.client.decode(&v, localVarBody, localVarHTTPResponse.Header.Get(\"Content-Type\"))\n\t\t\tif err != nil {\n\t\t\t\tnewErr.error = err.Error()\n\t\t\t\treturn localVarReturnValue, localVarHTTPResponse, newErr\n\t\t\t}\n\t\t\tnewErr.model = v\n\t\t\treturn localVarReturnValue, localVarHTTPResponse, newErr\n\t\t}\n\t\tvar v Error\n\t\terr = a.client.decode(&v, localVarBody, localVarHTTPResponse.Header.Get(\"Content-Type\"))\n\t\tif err != nil {\n\t\t\tnewErr.error = err.Error()\n\t\t\treturn localVarReturnValue, localVarHTTPResponse, newErr\n\t\t}\n\t\tnewErr.model = v\n\t\treturn localVarReturnValue, localVarHTTPResponse, newErr\n\t}\n\n\terr = a.client.decode(&localVarReturnValue, localVarBody, localVarHTTPResponse.Header.Get(\"Content-Type\"))\n\tif err != nil {\n\t\tnewErr := &GenericOpenAPIError{\n\t\t\tbody: localVarBody,\n\t\t\terror: err.Error(),\n\t\t}\n\t\treturn localVarReturnValue, localVarHTTPResponse, newErr\n\t}\n\n\treturn localVarReturnValue, localVarHTTPResponse, nil\n}", "func (a *HyperflexApiService) PatchHyperflexCapabilityInfo(ctx context.Context, moid string) ApiPatchHyperflexCapabilityInfoRequest {\n\treturn ApiPatchHyperflexCapabilityInfoRequest{\n\t\tApiService: a,\n\t\tctx: ctx,\n\t\tmoid: moid,\n\t}\n}", "func DefaultHeader(k, v string) Opt {\n\treturn func(c *Client) Opt {\n\t\told, found := c.header[k]\n\t\told = append([]string{}, old...) // clone\n\t\tc.header.Add(k, v)\n\t\treturn func(c *Client) Opt {\n\t\t\tif found {\n\t\t\t\tc.header[k] = old\n\t\t\t} else {\n\t\t\t\tc.header.Del(k)\n\t\t\t}\n\t\t\treturn DefaultHeader(k, v)\n\t\t}\n\t}\n}", "func (a *HyperflexApiService) PatchHyperflexClusterNetworkPolicy(ctx context.Context, moid string) ApiPatchHyperflexClusterNetworkPolicyRequest {\n\treturn ApiPatchHyperflexClusterNetworkPolicyRequest{\n\t\tApiService: a,\n\t\tctx: ctx,\n\t\tmoid: moid,\n\t}\n}", "func NewDefaultVersion(spec string) Version {\n\treturn &DefaultVersion{spec, map[*Capability]bool{}}\n}", "func NewCfnModuleDefaultVersion_Override(c CfnModuleDefaultVersion, scope constructs.Construct, id *string, props *CfnModuleDefaultVersionProps) {\n\t_init_.Initialize()\n\n\t_jsii_.Create(\n\t\t\"aws-cdk-lib.aws_cloudformation.CfnModuleDefaultVersion\",\n\t\t[]interface{}{scope, id, props},\n\t\tc,\n\t)\n}", "func NewPatchEquipmentIoCardsMoidDefault(code int) *PatchEquipmentIoCardsMoidDefault {\n\treturn &PatchEquipmentIoCardsMoidDefault{\n\t\t_statusCode: code,\n\t}\n}", "func (a *HyperflexApiService) PatchHyperflexFeatureLimitExternal(ctx context.Context, moid string) ApiPatchHyperflexFeatureLimitExternalRequest {\n\treturn ApiPatchHyperflexFeatureLimitExternalRequest{\n\t\tApiService: a,\n\t\tctx: ctx,\n\t\tmoid: moid,\n\t}\n}", "func SetDefaultHeaders(w http.ResponseWriter) {\n\tfor k, v := range defaultHeaders {\n\t\tw.Header().Set(k, v)\n\t}\n}", "func (r ApiUpdateHyperflexServerModelRequest) HyperflexServerModel(hyperflexServerModel HyperflexServerModel) ApiUpdateHyperflexServerModelRequest {\n\tr.hyperflexServerModel = &hyperflexServerModel\n\treturn r\n}", "func (a *FrinxOpenconfigNetworkInstanceApiService) 
PutFrinxOpenconfigNetworkInstanceNetworkInstancesNetworkInstanceProtocolsProtocolBgpGlobalDefaultRouteDistanceConfig(ctx context.Context, name string, identifier string, protocolName string, frinxOpenconfigBgpBgpglobalbaseDefaultroutedistanceConfigBodyParam FrinxOpenconfigBgpBgpglobalbaseDefaultroutedistanceConfigRequest, nodeId string) (*http.Response, error) {\n\tvar (\n\t\tlocalVarHttpMethod = strings.ToUpper(\"Put\")\n\t\tlocalVarPostBody interface{}\n\t\tlocalVarFileName string\n\t\tlocalVarFileBytes []byte\n\t\t\n\t)\n\n\t// create path and map variables\n\tlocalVarPath := a.client.cfg.BasePath + \"/config/network-topology:network-topology/network-topology:topology/unified/network-topology:node/{node-id}/yang-ext:mount/frinx-openconfig-network-instance:network-instances/frinx-openconfig-network-instance:network-instance/{name}/frinx-openconfig-network-instance:protocols/frinx-openconfig-network-instance:protocol/{identifier}/{protocol-name}/frinx-openconfig-network-instance:bgp/frinx-openconfig-network-instance:global/frinx-openconfig-network-instance:default-route-distance/frinx-openconfig-network-instance:config/\"\n\tlocalVarPath = strings.Replace(localVarPath, \"{\"+\"name\"+\"}\", fmt.Sprintf(\"%v\", name), -1)\n\tlocalVarPath = strings.Replace(localVarPath, \"{\"+\"identifier\"+\"}\", fmt.Sprintf(\"%v\", identifier), -1)\n\tlocalVarPath = strings.Replace(localVarPath, \"{\"+\"protocol-name\"+\"}\", fmt.Sprintf(\"%v\", protocolName), -1)\n\tlocalVarPath = strings.Replace(localVarPath, \"{\"+\"node-id\"+\"}\", fmt.Sprintf(\"%v\", nodeId), -1)\n\n\tlocalVarHeaderParams := make(map[string]string)\n\tlocalVarQueryParams := url.Values{}\n\tlocalVarFormParams := url.Values{}\n\n\t// to determine the Content-Type header\n\tlocalVarHttpContentTypes := []string{\"application/json\", \"application/xml\"}\n\n\t// set Content-Type header\n\tlocalVarHttpContentType := selectHeaderContentType(localVarHttpContentTypes)\n\tif localVarHttpContentType != \"\" {\n\t\tlocalVarHeaderParams[\"Content-Type\"] = localVarHttpContentType\n\t}\n\n\t// to determine the Accept header\n\tlocalVarHttpHeaderAccepts := []string{\"application/json\", \"application/xml\"}\n\n\t// set Accept header\n\tlocalVarHttpHeaderAccept := selectHeaderAccept(localVarHttpHeaderAccepts)\n\tif localVarHttpHeaderAccept != \"\" {\n\t\tlocalVarHeaderParams[\"Accept\"] = localVarHttpHeaderAccept\n\t}\n\t// body params\n\tlocalVarPostBody = &frinxOpenconfigBgpBgpglobalbaseDefaultroutedistanceConfigBodyParam\n\tr, err := a.client.prepareRequest(ctx, localVarPath, localVarHttpMethod, localVarPostBody, localVarHeaderParams, localVarQueryParams, localVarFormParams, localVarFileName, localVarFileBytes)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\n\tlocalVarHttpResponse, err := a.client.callAPI(r)\n\tif err != nil || localVarHttpResponse == nil {\n\t\treturn localVarHttpResponse, err\n\t}\n\n\tlocalVarBody, err := ioutil.ReadAll(localVarHttpResponse.Body)\n\tlocalVarHttpResponse.Body.Close()\n\tif err != nil {\n\t\treturn localVarHttpResponse, err\n\t}\n\n\n\tif localVarHttpResponse.StatusCode >= 300 {\n\t\tnewErr := GenericSwaggerError{\n\t\t\tbody: localVarBody,\n\t\t\terror: localVarHttpResponse.Status,\n\t\t}\n\t\t\n\t\treturn localVarHttpResponse, newErr\n\t}\n\n\treturn localVarHttpResponse, nil\n}", "func (client *RedfishClient) SetDefaultBootOrder(device *RedfishDevice, uri string) (*http.Response, error) {\n\n\tendpoint := \"https://\" + device.Host + uri\n\n\treq, err := http.NewRequest(http.MethodPost, endpoint, nil)\n\tif 
err != nil {\n\t\treturn nil, err\n\t}\n\treq.Close = true\n\treq.Header.Set(\"Accept\", \"application/json\")\n\tauth := device.Username + \":\" + string(device.Password)\n\tBasicauth := \"Basic \" + base64.StdEncoding.EncodeToString([]byte(auth))\n\treq.Header.Add(\"Authorization\", Basicauth)\n\treq.Header.Set(\"Content-Type\", \"application/json\")\n\treq.Close = true\n\tvar resp *http.Response\n\tlutilconf.TLSConfMutex.RLock()\n\tresp, err = client.httpClient.Do(req)\n\tlutilconf.TLSConfMutex.RUnlock()\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\n\treturn resp, nil\n}", "func (a *FrinxOpenconfigNetworkInstanceApiService) PutFrinxOpenconfigNetworkInstanceNetworkInstancesNetworkInstanceProtocolsProtocolBgpGlobalDefaultRouteDistance(ctx context.Context, name string, identifier string, protocolName string, frinxOpenconfigBgpBgpglobalbaseDefaultRouteDistanceBodyParam FrinxOpenconfigBgpBgpglobalbaseDefaultRouteDistanceRequest, nodeId string) (*http.Response, error) {\n\tvar (\n\t\tlocalVarHttpMethod = strings.ToUpper(\"Put\")\n\t\tlocalVarPostBody interface{}\n\t\tlocalVarFileName string\n\t\tlocalVarFileBytes []byte\n\t\t\n\t)\n\n\t// create path and map variables\n\tlocalVarPath := a.client.cfg.BasePath + \"/config/network-topology:network-topology/network-topology:topology/unified/network-topology:node/{node-id}/yang-ext:mount/frinx-openconfig-network-instance:network-instances/frinx-openconfig-network-instance:network-instance/{name}/frinx-openconfig-network-instance:protocols/frinx-openconfig-network-instance:protocol/{identifier}/{protocol-name}/frinx-openconfig-network-instance:bgp/frinx-openconfig-network-instance:global/frinx-openconfig-network-instance:default-route-distance/\"\n\tlocalVarPath = strings.Replace(localVarPath, \"{\"+\"name\"+\"}\", fmt.Sprintf(\"%v\", name), -1)\n\tlocalVarPath = strings.Replace(localVarPath, \"{\"+\"identifier\"+\"}\", fmt.Sprintf(\"%v\", identifier), -1)\n\tlocalVarPath = strings.Replace(localVarPath, \"{\"+\"protocol-name\"+\"}\", fmt.Sprintf(\"%v\", protocolName), -1)\n\tlocalVarPath = strings.Replace(localVarPath, \"{\"+\"node-id\"+\"}\", fmt.Sprintf(\"%v\", nodeId), -1)\n\n\tlocalVarHeaderParams := make(map[string]string)\n\tlocalVarQueryParams := url.Values{}\n\tlocalVarFormParams := url.Values{}\n\n\t// to determine the Content-Type header\n\tlocalVarHttpContentTypes := []string{\"application/json\", \"application/xml\"}\n\n\t// set Content-Type header\n\tlocalVarHttpContentType := selectHeaderContentType(localVarHttpContentTypes)\n\tif localVarHttpContentType != \"\" {\n\t\tlocalVarHeaderParams[\"Content-Type\"] = localVarHttpContentType\n\t}\n\n\t// to determine the Accept header\n\tlocalVarHttpHeaderAccepts := []string{\"application/json\", \"application/xml\"}\n\n\t// set Accept header\n\tlocalVarHttpHeaderAccept := selectHeaderAccept(localVarHttpHeaderAccepts)\n\tif localVarHttpHeaderAccept != \"\" {\n\t\tlocalVarHeaderParams[\"Accept\"] = localVarHttpHeaderAccept\n\t}\n\t// body params\n\tlocalVarPostBody = &frinxOpenconfigBgpBgpglobalbaseDefaultRouteDistanceBodyParam\n\tr, err := a.client.prepareRequest(ctx, localVarPath, localVarHttpMethod, localVarPostBody, localVarHeaderParams, localVarQueryParams, localVarFormParams, localVarFileName, localVarFileBytes)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\n\tlocalVarHttpResponse, err := a.client.callAPI(r)\n\tif err != nil || localVarHttpResponse == nil {\n\t\treturn localVarHttpResponse, err\n\t}\n\n\tlocalVarBody, err := 
ioutil.ReadAll(localVarHttpResponse.Body)\n\tlocalVarHttpResponse.Body.Close()\n\tif err != nil {\n\t\treturn localVarHttpResponse, err\n\t}\n\n\n\tif localVarHttpResponse.StatusCode >= 300 {\n\t\tnewErr := GenericSwaggerError{\n\t\t\tbody: localVarBody,\n\t\t\terror: localVarHttpResponse.Status,\n\t\t}\n\t\t\n\t\treturn localVarHttpResponse, newErr\n\t}\n\n\treturn localVarHttpResponse, nil\n}", "func NewDefaultRouterTable(service string, conf *cheshire.ServerConfig) (*RouterTable, error) {\n\trt := NewRouterTable(service)\n\t//create new router entry\n\tentry := dynmap.New()\n\tjsonPort, ok := conf.GetInt(\"ports.json\")\n\tif !ok {\n\t\treturn nil, fmt.Errorf(\"No ports.json in server config\")\n\t}\n\tentry.PutWithDot(\"ports.json\", jsonPort)\n\n\n\thttpPort, ok := conf.GetInt(\"ports.http\")\n\tif !ok {\n\t\treturn nil, fmt.Errorf(\"No ports.http in server config\")\n\t}\n\tentry.PutWithDot(\"ports.http\", httpPort)\n\t\n\tbinPort, ok := conf.GetInt(\"ports.bin\")\n\tif !ok {\n\t\treturn nil, fmt.Errorf(\"No ports.bin in server config\")\n\t}\n\tentry.PutWithDot(\"ports.bin\", binPort)\n\n\tbroadcastAddress,ok := conf.GetString(\"broadcast_address\")\n\tif !ok {\n\t\treturn nil, fmt.Errorf(\"No broadcast_address found in server config\")\n\t}\n\n\tentry.Put(\"address\", broadcastAddress)\n\n\tpartitions := make([]int, 512)\n\t//add all partitions\n\tfor i :=0; i <512; i++{\n\t\tpartitions[i] = i\n\t}\n\tentry.Put(\"partitions\", partitions)\n\n\te, err := ToRouterEntry(entry)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\trt, err = rt.AddEntries(e)\n\treturn rt, err\n}", "func NewListClusterPatchPatchesDefault(code int) *ListClusterPatchPatchesDefault {\n\treturn &ListClusterPatchPatchesDefault{\n\t\t_statusCode: code,\n\t}\n}", "func DefaultPatchHealthMenstruationPersonalInfo(ctx context.Context, in *HealthMenstruationPersonalInfo, updateMask *field_mask1.FieldMask, db *gorm1.DB) (*HealthMenstruationPersonalInfo, error) {\n\tif in == nil {\n\t\treturn nil, errors1.NilArgumentError\n\t}\n\tvar pbObj HealthMenstruationPersonalInfo\n\tvar err error\n\tif hook, ok := interface{}(&pbObj).(HealthMenstruationPersonalInfoWithBeforePatchRead); ok {\n\t\tif db, err = hook.BeforePatchRead(ctx, in, updateMask, db); err != nil {\n\t\t\treturn nil, err\n\t\t}\n\t}\n\tpbReadRes, err := DefaultReadHealthMenstruationPersonalInfo(ctx, &HealthMenstruationPersonalInfo{Id: in.GetId()}, db)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\tpbObj = *pbReadRes\n\tif hook, ok := interface{}(&pbObj).(HealthMenstruationPersonalInfoWithBeforePatchApplyFieldMask); ok {\n\t\tif db, err = hook.BeforePatchApplyFieldMask(ctx, in, updateMask, db); err != nil {\n\t\t\treturn nil, err\n\t\t}\n\t}\n\tif _, err := DefaultApplyFieldMaskHealthMenstruationPersonalInfo(ctx, &pbObj, in, updateMask, \"\", db); err != nil {\n\t\treturn nil, err\n\t}\n\tif hook, ok := interface{}(&pbObj).(HealthMenstruationPersonalInfoWithBeforePatchSave); ok {\n\t\tif db, err = hook.BeforePatchSave(ctx, in, updateMask, db); err != nil {\n\t\t\treturn nil, err\n\t\t}\n\t}\n\tpbResponse, err := DefaultStrictUpdateHealthMenstruationPersonalInfo(ctx, &pbObj, db)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\tif hook, ok := interface{}(pbResponse).(HealthMenstruationPersonalInfoWithAfterPatchSave); ok {\n\t\tif err = hook.AfterPatchSave(ctx, in, updateMask, db); err != nil {\n\t\t\treturn nil, err\n\t\t}\n\t}\n\treturn pbResponse, nil\n}", "func (m *MetaSpec) CloneDefaultMeta() *MetaSpec {\n\tret := *m\n\tret.MetaFile = defaultMetaFile\n\treturn 
&ret\n}", "func GetDefaultHeaders() map[string]string {\n\treturn map[string]string{\n\t\t\"content-type\": \"application/json\",\n\t}\n}", "func NewPatchAddonDefault(code int) *PatchAddonDefault {\n\treturn &PatchAddonDefault{\n\t\t_statusCode: code,\n\t}\n}", "func NewPostHyperflexClusterProfilesMoidDefault(code int) *PostHyperflexClusterProfilesMoidDefault {\n\treturn &PostHyperflexClusterProfilesMoidDefault{\n\t\t_statusCode: code,\n\t}\n}", "func (client IdentityClient) updateTagDefault(ctx context.Context, request common.OCIRequest, binaryReqBody *common.OCIReadSeekCloser, extraHeaders map[string]string) (common.OCIResponse, error) {\n\n\thttpRequest, err := request.HTTPRequest(http.MethodPut, \"/tagDefaults/{tagDefaultId}\", binaryReqBody, extraHeaders)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\n\tvar response UpdateTagDefaultResponse\n\tvar httpResponse *http.Response\n\thttpResponse, err = client.Call(ctx, &httpRequest)\n\tdefer common.CloseBodyIfValid(httpResponse)\n\tresponse.RawResponse = httpResponse\n\tif err != nil {\n\t\treturn response, err\n\t}\n\n\terr = common.UnmarshalResponse(httpResponse, &response)\n\treturn response, err\n}", "func (r *KeystoneAPI) Default() {\n\tkeystoneapilog.Info(\"default\", \"name\", r.Name)\n\n\tr.Spec.Default()\n}", "func (a *Client) PostHyperflexCapabilityInfosMoid(params *PostHyperflexCapabilityInfosMoidParams) (*PostHyperflexCapabilityInfosMoidCreated, error) {\n\t// TODO: Validate the params before sending\n\tif params == nil {\n\t\tparams = NewPostHyperflexCapabilityInfosMoidParams()\n\t}\n\n\tresult, err := a.transport.Submit(&runtime.ClientOperation{\n\t\tID: \"PostHyperflexCapabilityInfosMoid\",\n\t\tMethod: \"POST\",\n\t\tPathPattern: \"/hyperflex/CapabilityInfos/{moid}\",\n\t\tProducesMediaTypes: []string{\"application/json\"},\n\t\tConsumesMediaTypes: []string{\"application/json\"},\n\t\tSchemes: []string{\"https\"},\n\t\tParams: params,\n\t\tReader: &PostHyperflexCapabilityInfosMoidReader{formats: a.formats},\n\t\tContext: params.Context,\n\t\tClient: params.HTTPClient,\n\t})\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\tsuccess, ok := result.(*PostHyperflexCapabilityInfosMoidCreated)\n\tif ok {\n\t\treturn success, nil\n\t}\n\t// unexpected success response\n\tunexpectedSuccess := result.(*PostHyperflexCapabilityInfosMoidDefault)\n\treturn nil, runtime.NewAPIError(\"unexpected success response: content available as default response in error\", unexpectedSuccess, unexpectedSuccess.Code())\n}", "func NewPatchClusterV2Default(code int) *PatchClusterV2Default {\n\treturn &PatchClusterV2Default{\n\t\t_statusCode: code,\n\t}\n}", "func (a *HyperflexApiService) PatchHyperflexClusterReplicationNetworkPolicy(ctx context.Context, moid string) ApiPatchHyperflexClusterReplicationNetworkPolicyRequest {\n\treturn ApiPatchHyperflexClusterReplicationNetworkPolicyRequest{\n\t\tApiService: a,\n\t\tctx: ctx,\n\t\tmoid: moid,\n\t}\n}", "func (a *HyperflexApiService) PatchHyperflexVolume(ctx context.Context, moid string) ApiPatchHyperflexVolumeRequest {\n\treturn ApiPatchHyperflexVolumeRequest{\n\t\tApiService: a,\n\t\tctx: ctx,\n\t\tmoid: moid,\n\t}\n}", "func (r ApiCreateHyperflexServerModelRequest) HyperflexServerModel(hyperflexServerModel HyperflexServerModel) ApiCreateHyperflexServerModelRequest {\n\tr.hyperflexServerModel = &hyperflexServerModel\n\treturn r\n}", "func (a *HyperflexApiService) PatchHyperflexHxdpVersionExecute(r ApiPatchHyperflexHxdpVersionRequest) (*HyperflexHxdpVersion, *http.Response, error) {\n\tvar 
(\n\t\tlocalVarHTTPMethod = http.MethodPatch\n\t\tlocalVarPostBody interface{}\n\t\tformFiles []formFile\n\t\tlocalVarReturnValue *HyperflexHxdpVersion\n\t)\n\n\tlocalBasePath, err := a.client.cfg.ServerURLWithContext(r.ctx, \"HyperflexApiService.PatchHyperflexHxdpVersion\")\n\tif err != nil {\n\t\treturn localVarReturnValue, nil, &GenericOpenAPIError{error: err.Error()}\n\t}\n\n\tlocalVarPath := localBasePath + \"/api/v1/hyperflex/HxdpVersions/{Moid}\"\n\tlocalVarPath = strings.Replace(localVarPath, \"{\"+\"Moid\"+\"}\", url.PathEscape(parameterToString(r.moid, \"\")), -1)\n\n\tlocalVarHeaderParams := make(map[string]string)\n\tlocalVarQueryParams := url.Values{}\n\tlocalVarFormParams := url.Values{}\n\tif r.hyperflexHxdpVersion == nil {\n\t\treturn localVarReturnValue, nil, reportError(\"hyperflexHxdpVersion is required and must be specified\")\n\t}\n\n\t// to determine the Content-Type header\n\tlocalVarHTTPContentTypes := []string{\"application/json\", \"application/json-patch+json\"}\n\n\t// set Content-Type header\n\tlocalVarHTTPContentType := selectHeaderContentType(localVarHTTPContentTypes)\n\tif localVarHTTPContentType != \"\" {\n\t\tlocalVarHeaderParams[\"Content-Type\"] = localVarHTTPContentType\n\t}\n\n\t// to determine the Accept header\n\tlocalVarHTTPHeaderAccepts := []string{\"application/json\"}\n\n\t// set Accept header\n\tlocalVarHTTPHeaderAccept := selectHeaderAccept(localVarHTTPHeaderAccepts)\n\tif localVarHTTPHeaderAccept != \"\" {\n\t\tlocalVarHeaderParams[\"Accept\"] = localVarHTTPHeaderAccept\n\t}\n\tif r.ifMatch != nil {\n\t\tlocalVarHeaderParams[\"If-Match\"] = parameterToString(*r.ifMatch, \"\")\n\t}\n\t// body params\n\tlocalVarPostBody = r.hyperflexHxdpVersion\n\treq, err := a.client.prepareRequest(r.ctx, localVarPath, localVarHTTPMethod, localVarPostBody, localVarHeaderParams, localVarQueryParams, localVarFormParams, formFiles)\n\tif err != nil {\n\t\treturn localVarReturnValue, nil, err\n\t}\n\n\tlocalVarHTTPResponse, err := a.client.callAPI(req)\n\tif err != nil || localVarHTTPResponse == nil {\n\t\treturn localVarReturnValue, localVarHTTPResponse, err\n\t}\n\n\tlocalVarBody, err := ioutil.ReadAll(localVarHTTPResponse.Body)\n\tlocalVarHTTPResponse.Body.Close()\n\tlocalVarHTTPResponse.Body = ioutil.NopCloser(bytes.NewBuffer(localVarBody))\n\tif err != nil {\n\t\treturn localVarReturnValue, localVarHTTPResponse, err\n\t}\n\n\tif localVarHTTPResponse.StatusCode >= 300 {\n\t\tnewErr := &GenericOpenAPIError{\n\t\t\tbody: localVarBody,\n\t\t\terror: localVarHTTPResponse.Status,\n\t\t}\n\t\tif localVarHTTPResponse.StatusCode == 400 {\n\t\t\tvar v Error\n\t\t\terr = a.client.decode(&v, localVarBody, localVarHTTPResponse.Header.Get(\"Content-Type\"))\n\t\t\tif err != nil {\n\t\t\t\tnewErr.error = err.Error()\n\t\t\t\treturn localVarReturnValue, localVarHTTPResponse, newErr\n\t\t\t}\n\t\t\tnewErr.model = v\n\t\t\treturn localVarReturnValue, localVarHTTPResponse, newErr\n\t\t}\n\t\tif localVarHTTPResponse.StatusCode == 401 {\n\t\t\tvar v Error\n\t\t\terr = a.client.decode(&v, localVarBody, localVarHTTPResponse.Header.Get(\"Content-Type\"))\n\t\t\tif err != nil {\n\t\t\t\tnewErr.error = err.Error()\n\t\t\t\treturn localVarReturnValue, localVarHTTPResponse, newErr\n\t\t\t}\n\t\t\tnewErr.model = v\n\t\t\treturn localVarReturnValue, localVarHTTPResponse, newErr\n\t\t}\n\t\tif localVarHTTPResponse.StatusCode == 403 {\n\t\t\tvar v Error\n\t\t\terr = a.client.decode(&v, localVarBody, localVarHTTPResponse.Header.Get(\"Content-Type\"))\n\t\t\tif err != nil 
{\n\t\t\t\tnewErr.error = err.Error()\n\t\t\t\treturn localVarReturnValue, localVarHTTPResponse, newErr\n\t\t\t}\n\t\t\tnewErr.model = v\n\t\t\treturn localVarReturnValue, localVarHTTPResponse, newErr\n\t\t}\n\t\tif localVarHTTPResponse.StatusCode == 404 {\n\t\t\tvar v Error\n\t\t\terr = a.client.decode(&v, localVarBody, localVarHTTPResponse.Header.Get(\"Content-Type\"))\n\t\t\tif err != nil {\n\t\t\t\tnewErr.error = err.Error()\n\t\t\t\treturn localVarReturnValue, localVarHTTPResponse, newErr\n\t\t\t}\n\t\t\tnewErr.model = v\n\t\t\treturn localVarReturnValue, localVarHTTPResponse, newErr\n\t\t}\n\t\tvar v Error\n\t\terr = a.client.decode(&v, localVarBody, localVarHTTPResponse.Header.Get(\"Content-Type\"))\n\t\tif err != nil {\n\t\t\tnewErr.error = err.Error()\n\t\t\treturn localVarReturnValue, localVarHTTPResponse, newErr\n\t\t}\n\t\tnewErr.model = v\n\t\treturn localVarReturnValue, localVarHTTPResponse, newErr\n\t}\n\n\terr = a.client.decode(&localVarReturnValue, localVarBody, localVarHTTPResponse.Header.Get(\"Content-Type\"))\n\tif err != nil {\n\t\tnewErr := &GenericOpenAPIError{\n\t\t\tbody: localVarBody,\n\t\t\terror: err.Error(),\n\t\t}\n\t\treturn localVarReturnValue, localVarHTTPResponse, newErr\n\t}\n\n\treturn localVarReturnValue, localVarHTTPResponse, nil\n}", "func (a *HyperflexApiService) CreateHyperflexServerModelExecute(r ApiCreateHyperflexServerModelRequest) (*HyperflexServerModel, *http.Response, error) {\n\tvar (\n\t\tlocalVarHTTPMethod = http.MethodPost\n\t\tlocalVarPostBody interface{}\n\t\tformFiles []formFile\n\t\tlocalVarReturnValue *HyperflexServerModel\n\t)\n\n\tlocalBasePath, err := a.client.cfg.ServerURLWithContext(r.ctx, \"HyperflexApiService.CreateHyperflexServerModel\")\n\tif err != nil {\n\t\treturn localVarReturnValue, nil, &GenericOpenAPIError{error: err.Error()}\n\t}\n\n\tlocalVarPath := localBasePath + \"/api/v1/hyperflex/ServerModels\"\n\n\tlocalVarHeaderParams := make(map[string]string)\n\tlocalVarQueryParams := url.Values{}\n\tlocalVarFormParams := url.Values{}\n\tif r.hyperflexServerModel == nil {\n\t\treturn localVarReturnValue, nil, reportError(\"hyperflexServerModel is required and must be specified\")\n\t}\n\n\t// to determine the Content-Type header\n\tlocalVarHTTPContentTypes := []string{\"application/json\"}\n\n\t// set Content-Type header\n\tlocalVarHTTPContentType := selectHeaderContentType(localVarHTTPContentTypes)\n\tif localVarHTTPContentType != \"\" {\n\t\tlocalVarHeaderParams[\"Content-Type\"] = localVarHTTPContentType\n\t}\n\n\t// to determine the Accept header\n\tlocalVarHTTPHeaderAccepts := []string{\"application/json\"}\n\n\t// set Accept header\n\tlocalVarHTTPHeaderAccept := selectHeaderAccept(localVarHTTPHeaderAccepts)\n\tif localVarHTTPHeaderAccept != \"\" {\n\t\tlocalVarHeaderParams[\"Accept\"] = localVarHTTPHeaderAccept\n\t}\n\tif r.ifMatch != nil {\n\t\tlocalVarHeaderParams[\"If-Match\"] = parameterToString(*r.ifMatch, \"\")\n\t}\n\tif r.ifNoneMatch != nil {\n\t\tlocalVarHeaderParams[\"If-None-Match\"] = parameterToString(*r.ifNoneMatch, \"\")\n\t}\n\t// body params\n\tlocalVarPostBody = r.hyperflexServerModel\n\treq, err := a.client.prepareRequest(r.ctx, localVarPath, localVarHTTPMethod, localVarPostBody, localVarHeaderParams, localVarQueryParams, localVarFormParams, formFiles)\n\tif err != nil {\n\t\treturn localVarReturnValue, nil, err\n\t}\n\n\tlocalVarHTTPResponse, err := a.client.callAPI(req)\n\tif err != nil || localVarHTTPResponse == nil {\n\t\treturn localVarReturnValue, localVarHTTPResponse, 
err\n\t}\n\n\tlocalVarBody, err := ioutil.ReadAll(localVarHTTPResponse.Body)\n\tlocalVarHTTPResponse.Body.Close()\n\tlocalVarHTTPResponse.Body = ioutil.NopCloser(bytes.NewBuffer(localVarBody))\n\tif err != nil {\n\t\treturn localVarReturnValue, localVarHTTPResponse, err\n\t}\n\n\tif localVarHTTPResponse.StatusCode >= 300 {\n\t\tnewErr := &GenericOpenAPIError{\n\t\t\tbody: localVarBody,\n\t\t\terror: localVarHTTPResponse.Status,\n\t\t}\n\t\tif localVarHTTPResponse.StatusCode == 400 {\n\t\t\tvar v Error\n\t\t\terr = a.client.decode(&v, localVarBody, localVarHTTPResponse.Header.Get(\"Content-Type\"))\n\t\t\tif err != nil {\n\t\t\t\tnewErr.error = err.Error()\n\t\t\t\treturn localVarReturnValue, localVarHTTPResponse, newErr\n\t\t\t}\n\t\t\tnewErr.model = v\n\t\t\treturn localVarReturnValue, localVarHTTPResponse, newErr\n\t\t}\n\t\tif localVarHTTPResponse.StatusCode == 401 {\n\t\t\tvar v Error\n\t\t\terr = a.client.decode(&v, localVarBody, localVarHTTPResponse.Header.Get(\"Content-Type\"))\n\t\t\tif err != nil {\n\t\t\t\tnewErr.error = err.Error()\n\t\t\t\treturn localVarReturnValue, localVarHTTPResponse, newErr\n\t\t\t}\n\t\t\tnewErr.model = v\n\t\t\treturn localVarReturnValue, localVarHTTPResponse, newErr\n\t\t}\n\t\tif localVarHTTPResponse.StatusCode == 403 {\n\t\t\tvar v Error\n\t\t\terr = a.client.decode(&v, localVarBody, localVarHTTPResponse.Header.Get(\"Content-Type\"))\n\t\t\tif err != nil {\n\t\t\t\tnewErr.error = err.Error()\n\t\t\t\treturn localVarReturnValue, localVarHTTPResponse, newErr\n\t\t\t}\n\t\t\tnewErr.model = v\n\t\t\treturn localVarReturnValue, localVarHTTPResponse, newErr\n\t\t}\n\t\tif localVarHTTPResponse.StatusCode == 404 {\n\t\t\tvar v Error\n\t\t\terr = a.client.decode(&v, localVarBody, localVarHTTPResponse.Header.Get(\"Content-Type\"))\n\t\t\tif err != nil {\n\t\t\t\tnewErr.error = err.Error()\n\t\t\t\treturn localVarReturnValue, localVarHTTPResponse, newErr\n\t\t\t}\n\t\t\tnewErr.model = v\n\t\t\treturn localVarReturnValue, localVarHTTPResponse, newErr\n\t\t}\n\t\tvar v Error\n\t\terr = a.client.decode(&v, localVarBody, localVarHTTPResponse.Header.Get(\"Content-Type\"))\n\t\tif err != nil {\n\t\t\tnewErr.error = err.Error()\n\t\t\treturn localVarReturnValue, localVarHTTPResponse, newErr\n\t\t}\n\t\tnewErr.model = v\n\t\treturn localVarReturnValue, localVarHTTPResponse, newErr\n\t}\n\n\terr = a.client.decode(&localVarReturnValue, localVarBody, localVarHTTPResponse.Header.Get(\"Content-Type\"))\n\tif err != nil {\n\t\tnewErr := &GenericOpenAPIError{\n\t\t\tbody: localVarBody,\n\t\t\terror: err.Error(),\n\t\t}\n\t\treturn localVarReturnValue, localVarHTTPResponse, newErr\n\t}\n\n\treturn localVarReturnValue, localVarHTTPResponse, nil\n}", "func (a *HyperflexApiService) PatchHyperflexHypervisorVirtualMachineExecute(r ApiPatchHyperflexHypervisorVirtualMachineRequest) (*HyperflexHypervisorVirtualMachine, *http.Response, error) {\n\tvar (\n\t\tlocalVarHTTPMethod = http.MethodPatch\n\t\tlocalVarPostBody interface{}\n\t\tformFiles []formFile\n\t\tlocalVarReturnValue *HyperflexHypervisorVirtualMachine\n\t)\n\n\tlocalBasePath, err := a.client.cfg.ServerURLWithContext(r.ctx, \"HyperflexApiService.PatchHyperflexHypervisorVirtualMachine\")\n\tif err != nil {\n\t\treturn localVarReturnValue, nil, &GenericOpenAPIError{error: err.Error()}\n\t}\n\n\tlocalVarPath := localBasePath + \"/api/v1/hyperflex/HypervisorVirtualMachines/{Moid}\"\n\tlocalVarPath = strings.Replace(localVarPath, \"{\"+\"Moid\"+\"}\", url.PathEscape(parameterToString(r.moid, \"\")), -1)\n\n\tlocalVarHeaderParams 
:= make(map[string]string)\n\tlocalVarQueryParams := url.Values{}\n\tlocalVarFormParams := url.Values{}\n\tif r.hyperflexHypervisorVirtualMachine == nil {\n\t\treturn localVarReturnValue, nil, reportError(\"hyperflexHypervisorVirtualMachine is required and must be specified\")\n\t}\n\n\t// to determine the Content-Type header\n\tlocalVarHTTPContentTypes := []string{\"application/json\", \"application/json-patch+json\"}\n\n\t// set Content-Type header\n\tlocalVarHTTPContentType := selectHeaderContentType(localVarHTTPContentTypes)\n\tif localVarHTTPContentType != \"\" {\n\t\tlocalVarHeaderParams[\"Content-Type\"] = localVarHTTPContentType\n\t}\n\n\t// to determine the Accept header\n\tlocalVarHTTPHeaderAccepts := []string{\"application/json\"}\n\n\t// set Accept header\n\tlocalVarHTTPHeaderAccept := selectHeaderAccept(localVarHTTPHeaderAccepts)\n\tif localVarHTTPHeaderAccept != \"\" {\n\t\tlocalVarHeaderParams[\"Accept\"] = localVarHTTPHeaderAccept\n\t}\n\tif r.ifMatch != nil {\n\t\tlocalVarHeaderParams[\"If-Match\"] = parameterToString(*r.ifMatch, \"\")\n\t}\n\t// body params\n\tlocalVarPostBody = r.hyperflexHypervisorVirtualMachine\n\treq, err := a.client.prepareRequest(r.ctx, localVarPath, localVarHTTPMethod, localVarPostBody, localVarHeaderParams, localVarQueryParams, localVarFormParams, formFiles)\n\tif err != nil {\n\t\treturn localVarReturnValue, nil, err\n\t}\n\n\tlocalVarHTTPResponse, err := a.client.callAPI(req)\n\tif err != nil || localVarHTTPResponse == nil {\n\t\treturn localVarReturnValue, localVarHTTPResponse, err\n\t}\n\n\tlocalVarBody, err := ioutil.ReadAll(localVarHTTPResponse.Body)\n\tlocalVarHTTPResponse.Body.Close()\n\tlocalVarHTTPResponse.Body = ioutil.NopCloser(bytes.NewBuffer(localVarBody))\n\tif err != nil {\n\t\treturn localVarReturnValue, localVarHTTPResponse, err\n\t}\n\n\tif localVarHTTPResponse.StatusCode >= 300 {\n\t\tnewErr := &GenericOpenAPIError{\n\t\t\tbody: localVarBody,\n\t\t\terror: localVarHTTPResponse.Status,\n\t\t}\n\t\tif localVarHTTPResponse.StatusCode == 400 {\n\t\t\tvar v Error\n\t\t\terr = a.client.decode(&v, localVarBody, localVarHTTPResponse.Header.Get(\"Content-Type\"))\n\t\t\tif err != nil {\n\t\t\t\tnewErr.error = err.Error()\n\t\t\t\treturn localVarReturnValue, localVarHTTPResponse, newErr\n\t\t\t}\n\t\t\tnewErr.model = v\n\t\t\treturn localVarReturnValue, localVarHTTPResponse, newErr\n\t\t}\n\t\tif localVarHTTPResponse.StatusCode == 401 {\n\t\t\tvar v Error\n\t\t\terr = a.client.decode(&v, localVarBody, localVarHTTPResponse.Header.Get(\"Content-Type\"))\n\t\t\tif err != nil {\n\t\t\t\tnewErr.error = err.Error()\n\t\t\t\treturn localVarReturnValue, localVarHTTPResponse, newErr\n\t\t\t}\n\t\t\tnewErr.model = v\n\t\t\treturn localVarReturnValue, localVarHTTPResponse, newErr\n\t\t}\n\t\tif localVarHTTPResponse.StatusCode == 403 {\n\t\t\tvar v Error\n\t\t\terr = a.client.decode(&v, localVarBody, localVarHTTPResponse.Header.Get(\"Content-Type\"))\n\t\t\tif err != nil {\n\t\t\t\tnewErr.error = err.Error()\n\t\t\t\treturn localVarReturnValue, localVarHTTPResponse, newErr\n\t\t\t}\n\t\t\tnewErr.model = v\n\t\t\treturn localVarReturnValue, localVarHTTPResponse, newErr\n\t\t}\n\t\tif localVarHTTPResponse.StatusCode == 404 {\n\t\t\tvar v Error\n\t\t\terr = a.client.decode(&v, localVarBody, localVarHTTPResponse.Header.Get(\"Content-Type\"))\n\t\t\tif err != nil {\n\t\t\t\tnewErr.error = err.Error()\n\t\t\t\treturn localVarReturnValue, localVarHTTPResponse, newErr\n\t\t\t}\n\t\t\tnewErr.model = v\n\t\t\treturn localVarReturnValue, localVarHTTPResponse, 
newErr\n\t\t}\n\t\tvar v Error\n\t\terr = a.client.decode(&v, localVarBody, localVarHTTPResponse.Header.Get(\"Content-Type\"))\n\t\tif err != nil {\n\t\t\tnewErr.error = err.Error()\n\t\t\treturn localVarReturnValue, localVarHTTPResponse, newErr\n\t\t}\n\t\tnewErr.model = v\n\t\treturn localVarReturnValue, localVarHTTPResponse, newErr\n\t}\n\n\terr = a.client.decode(&localVarReturnValue, localVarBody, localVarHTTPResponse.Header.Get(\"Content-Type\"))\n\tif err != nil {\n\t\tnewErr := &GenericOpenAPIError{\n\t\t\tbody: localVarBody,\n\t\t\terror: err.Error(),\n\t\t}\n\t\treturn localVarReturnValue, localVarHTTPResponse, newErr\n\t}\n\n\treturn localVarReturnValue, localVarHTTPResponse, nil\n}", "func DefaultPatchSetHealthMenstruationPersonalInfo(ctx context.Context, objects []*HealthMenstruationPersonalInfo, updateMasks []*field_mask1.FieldMask, db *gorm1.DB) ([]*HealthMenstruationPersonalInfo, error) {\n\tif len(objects) != len(updateMasks) {\n\t\treturn nil, fmt.Errorf(errors1.BadRepeatedFieldMaskTpl, len(updateMasks), len(objects))\n\t}\n\n\tresults := make([]*HealthMenstruationPersonalInfo, 0, len(objects))\n\tfor i, patcher := range objects {\n\t\tpbResponse, err := DefaultPatchHealthMenstruationPersonalInfo(ctx, patcher, updateMasks[i], db)\n\t\tif err != nil {\n\t\t\treturn nil, err\n\t\t}\n\n\t\tresults = append(results, pbResponse)\n\t}\n\n\treturn results, nil\n}", "func (enterpriseManagement *EnterpriseManagementV1) SetDefaultHeaders(headers http.Header) {\n\tenterpriseManagement.Service.SetDefaultHeaders(headers)\n}", "func (a *FrinxOpenconfigNetworkInstanceApiService) PutFrinxOpenconfigNetworkInstanceNetworkInstancesNetworkInstanceMplsSignalingProtocolsRsvpTeInterfaceAttributesInterfaceHellos(ctx context.Context, name string, interfaceId string, frinxOpenconfigMplsRsvpMplsrsvphellosHellosBodyParam FrinxOpenconfigMplsRsvpMplsrsvphellosHellosRequest1, nodeId string) (*http.Response, error) {\n\tvar (\n\t\tlocalVarHttpMethod = strings.ToUpper(\"Put\")\n\t\tlocalVarPostBody interface{}\n\t\tlocalVarFileName string\n\t\tlocalVarFileBytes []byte\n\t\t\n\t)\n\n\t// create path and map variables\n\tlocalVarPath := a.client.cfg.BasePath + \"/config/network-topology:network-topology/network-topology:topology/unified/network-topology:node/{node-id}/yang-ext:mount/frinx-openconfig-network-instance:network-instances/frinx-openconfig-network-instance:network-instance/{name}/frinx-openconfig-network-instance:mpls/frinx-openconfig-network-instance:signaling-protocols/frinx-openconfig-network-instance:rsvp-te/frinx-openconfig-network-instance:interface-attributes/frinx-openconfig-network-instance:interface/{interface-id}/frinx-openconfig-network-instance:hellos/\"\n\tlocalVarPath = strings.Replace(localVarPath, \"{\"+\"name\"+\"}\", fmt.Sprintf(\"%v\", name), -1)\n\tlocalVarPath = strings.Replace(localVarPath, \"{\"+\"interface-id\"+\"}\", fmt.Sprintf(\"%v\", interfaceId), -1)\n\tlocalVarPath = strings.Replace(localVarPath, \"{\"+\"node-id\"+\"}\", fmt.Sprintf(\"%v\", nodeId), -1)\n\n\tlocalVarHeaderParams := make(map[string]string)\n\tlocalVarQueryParams := url.Values{}\n\tlocalVarFormParams := url.Values{}\n\n\t// to determine the Content-Type header\n\tlocalVarHttpContentTypes := []string{\"application/json\", \"application/xml\"}\n\n\t// set Content-Type header\n\tlocalVarHttpContentType := selectHeaderContentType(localVarHttpContentTypes)\n\tif localVarHttpContentType != \"\" {\n\t\tlocalVarHeaderParams[\"Content-Type\"] = localVarHttpContentType\n\t}\n\n\t// to determine the Accept 
header\n\tlocalVarHttpHeaderAccepts := []string{\"application/json\", \"application/xml\"}\n\n\t// set Accept header\n\tlocalVarHttpHeaderAccept := selectHeaderAccept(localVarHttpHeaderAccepts)\n\tif localVarHttpHeaderAccept != \"\" {\n\t\tlocalVarHeaderParams[\"Accept\"] = localVarHttpHeaderAccept\n\t}\n\t// body params\n\tlocalVarPostBody = &frinxOpenconfigMplsRsvpMplsrsvphellosHellosBodyParam\n\tr, err := a.client.prepareRequest(ctx, localVarPath, localVarHttpMethod, localVarPostBody, localVarHeaderParams, localVarQueryParams, localVarFormParams, localVarFileName, localVarFileBytes)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\n\tlocalVarHttpResponse, err := a.client.callAPI(r)\n\tif err != nil || localVarHttpResponse == nil {\n\t\treturn localVarHttpResponse, err\n\t}\n\n\tlocalVarBody, err := ioutil.ReadAll(localVarHttpResponse.Body)\n\tlocalVarHttpResponse.Body.Close()\n\tif err != nil {\n\t\treturn localVarHttpResponse, err\n\t}\n\n\n\tif localVarHttpResponse.StatusCode >= 300 {\n\t\tnewErr := GenericSwaggerError{\n\t\t\tbody: localVarBody,\n\t\t\terror: localVarHttpResponse.Status,\n\t\t}\n\t\t\n\t\treturn localVarHttpResponse, newErr\n\t}\n\n\treturn localVarHttpResponse, nil\n}", "func PatchHCO(ctx context.Context, cl kubecli.KubevirtClient, patch []byte) error {\n\thcoGVR := schema.GroupVersionResource{Group: v1beta1.SchemeGroupVersion.Group, Version: v1beta1.SchemeGroupVersion.Version, Resource: resource}\n\n\t_, err := cl.DynamicClient().Resource(hcoGVR).Namespace(flags.KubeVirtInstallNamespace).Patch(ctx, hcoutil.HyperConvergedName, types.JSONPatchType, patch, metav1.PatchOptions{})\n\treturn err\n}", "func (a *HyperflexApiService) DeleteHyperflexServerModel(ctx context.Context, moid string) ApiDeleteHyperflexServerModelRequest {\n\treturn ApiDeleteHyperflexServerModelRequest{\n\t\tApiService: a,\n\t\tctx: ctx,\n\t\tmoid: moid,\n\t}\n}", "func (a *DefaultApiService) BootVM(ctx _context.Context) (*_nethttp.Response, error) {\n\tvar (\n\t\tlocalVarHTTPMethod = _nethttp.MethodPut\n\t\tlocalVarPostBody interface{}\n\t\tlocalVarFormFileName string\n\t\tlocalVarFileName string\n\t\tlocalVarFileBytes []byte\n\t)\n\n\t// create path and map variables\n\tlocalVarPath := a.client.cfg.BasePath + \"/vm.boot\"\n\tlocalVarHeaderParams := make(map[string]string)\n\tlocalVarQueryParams := _neturl.Values{}\n\tlocalVarFormParams := _neturl.Values{}\n\n\t// to determine the Content-Type header\n\tlocalVarHTTPContentTypes := []string{}\n\n\t// set Content-Type header\n\tlocalVarHTTPContentType := selectHeaderContentType(localVarHTTPContentTypes)\n\tif localVarHTTPContentType != \"\" {\n\t\tlocalVarHeaderParams[\"Content-Type\"] = localVarHTTPContentType\n\t}\n\n\t// to determine the Accept header\n\tlocalVarHTTPHeaderAccepts := []string{}\n\n\t// set Accept header\n\tlocalVarHTTPHeaderAccept := selectHeaderAccept(localVarHTTPHeaderAccepts)\n\tif localVarHTTPHeaderAccept != \"\" {\n\t\tlocalVarHeaderParams[\"Accept\"] = localVarHTTPHeaderAccept\n\t}\n\tr, err := a.client.prepareRequest(ctx, localVarPath, localVarHTTPMethod, localVarPostBody, localVarHeaderParams, localVarQueryParams, localVarFormParams, localVarFormFileName, localVarFileName, localVarFileBytes)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\n\tlocalVarHTTPResponse, err := a.client.callAPI(r)\n\tif err != nil || localVarHTTPResponse == nil {\n\t\treturn localVarHTTPResponse, err\n\t}\n\n\tlocalVarBody, err := _ioutil.ReadAll(localVarHTTPResponse.Body)\n\tlocalVarHTTPResponse.Body.Close()\n\tif err != nil {\n\t\treturn 
localVarHTTPResponse, err\n\t}\n\n\tif localVarHTTPResponse.StatusCode >= 300 {\n\t\tnewErr := GenericOpenAPIError{\n\t\t\tbody: localVarBody,\n\t\t\terror: localVarHTTPResponse.Status,\n\t\t}\n\t\treturn localVarHTTPResponse, newErr\n\t}\n\n\treturn localVarHTTPResponse, nil\n}", "func PatchPipelineDefaultParameter(text string) (string, error) {\n\tdefaultBucket := GetStringConfig(DefaultBucketNameEnvVar)\n\tprojectId := GetStringConfig(ProjectIDEnvVar)\n\ttoPatch := map[string]string{\n\t\t\"{{kfp-default-bucket}}\": defaultBucket,\n\t\t\"{{kfp-project-id}}\": projectId,\n\t}\n\tfor key, value := range toPatch {\n\t\ttext = strings.Replace(text, key, value, -1)\n\t}\n\treturn text, nil\n}", "func (a *HyperflexApiService) PatchHyperflexClusterReplicationNetworkPolicyExecute(r ApiPatchHyperflexClusterReplicationNetworkPolicyRequest) (*HyperflexClusterReplicationNetworkPolicy, *http.Response, error) {\n\tvar (\n\t\tlocalVarHTTPMethod = http.MethodPatch\n\t\tlocalVarPostBody interface{}\n\t\tformFiles []formFile\n\t\tlocalVarReturnValue *HyperflexClusterReplicationNetworkPolicy\n\t)\n\n\tlocalBasePath, err := a.client.cfg.ServerURLWithContext(r.ctx, \"HyperflexApiService.PatchHyperflexClusterReplicationNetworkPolicy\")\n\tif err != nil {\n\t\treturn localVarReturnValue, nil, &GenericOpenAPIError{error: err.Error()}\n\t}\n\n\tlocalVarPath := localBasePath + \"/api/v1/hyperflex/ClusterReplicationNetworkPolicies/{Moid}\"\n\tlocalVarPath = strings.Replace(localVarPath, \"{\"+\"Moid\"+\"}\", url.PathEscape(parameterToString(r.moid, \"\")), -1)\n\n\tlocalVarHeaderParams := make(map[string]string)\n\tlocalVarQueryParams := url.Values{}\n\tlocalVarFormParams := url.Values{}\n\tif r.hyperflexClusterReplicationNetworkPolicy == nil {\n\t\treturn localVarReturnValue, nil, reportError(\"hyperflexClusterReplicationNetworkPolicy is required and must be specified\")\n\t}\n\n\t// to determine the Content-Type header\n\tlocalVarHTTPContentTypes := []string{\"application/json\", \"application/json-patch+json\"}\n\n\t// set Content-Type header\n\tlocalVarHTTPContentType := selectHeaderContentType(localVarHTTPContentTypes)\n\tif localVarHTTPContentType != \"\" {\n\t\tlocalVarHeaderParams[\"Content-Type\"] = localVarHTTPContentType\n\t}\n\n\t// to determine the Accept header\n\tlocalVarHTTPHeaderAccepts := []string{\"application/json\"}\n\n\t// set Accept header\n\tlocalVarHTTPHeaderAccept := selectHeaderAccept(localVarHTTPHeaderAccepts)\n\tif localVarHTTPHeaderAccept != \"\" {\n\t\tlocalVarHeaderParams[\"Accept\"] = localVarHTTPHeaderAccept\n\t}\n\tif r.ifMatch != nil {\n\t\tlocalVarHeaderParams[\"If-Match\"] = parameterToString(*r.ifMatch, \"\")\n\t}\n\t// body params\n\tlocalVarPostBody = r.hyperflexClusterReplicationNetworkPolicy\n\treq, err := a.client.prepareRequest(r.ctx, localVarPath, localVarHTTPMethod, localVarPostBody, localVarHeaderParams, localVarQueryParams, localVarFormParams, formFiles)\n\tif err != nil {\n\t\treturn localVarReturnValue, nil, err\n\t}\n\n\tlocalVarHTTPResponse, err := a.client.callAPI(req)\n\tif err != nil || localVarHTTPResponse == nil {\n\t\treturn localVarReturnValue, localVarHTTPResponse, err\n\t}\n\n\tlocalVarBody, err := ioutil.ReadAll(localVarHTTPResponse.Body)\n\tlocalVarHTTPResponse.Body.Close()\n\tlocalVarHTTPResponse.Body = ioutil.NopCloser(bytes.NewBuffer(localVarBody))\n\tif err != nil {\n\t\treturn localVarReturnValue, localVarHTTPResponse, err\n\t}\n\n\tif localVarHTTPResponse.StatusCode >= 300 {\n\t\tnewErr := &GenericOpenAPIError{\n\t\t\tbody: 
localVarBody,\n\t\t\terror: localVarHTTPResponse.Status,\n\t\t}\n\t\tif localVarHTTPResponse.StatusCode == 400 {\n\t\t\tvar v Error\n\t\t\terr = a.client.decode(&v, localVarBody, localVarHTTPResponse.Header.Get(\"Content-Type\"))\n\t\t\tif err != nil {\n\t\t\t\tnewErr.error = err.Error()\n\t\t\t\treturn localVarReturnValue, localVarHTTPResponse, newErr\n\t\t\t}\n\t\t\tnewErr.model = v\n\t\t\treturn localVarReturnValue, localVarHTTPResponse, newErr\n\t\t}\n\t\tif localVarHTTPResponse.StatusCode == 401 {\n\t\t\tvar v Error\n\t\t\terr = a.client.decode(&v, localVarBody, localVarHTTPResponse.Header.Get(\"Content-Type\"))\n\t\t\tif err != nil {\n\t\t\t\tnewErr.error = err.Error()\n\t\t\t\treturn localVarReturnValue, localVarHTTPResponse, newErr\n\t\t\t}\n\t\t\tnewErr.model = v\n\t\t\treturn localVarReturnValue, localVarHTTPResponse, newErr\n\t\t}\n\t\tif localVarHTTPResponse.StatusCode == 403 {\n\t\t\tvar v Error\n\t\t\terr = a.client.decode(&v, localVarBody, localVarHTTPResponse.Header.Get(\"Content-Type\"))\n\t\t\tif err != nil {\n\t\t\t\tnewErr.error = err.Error()\n\t\t\t\treturn localVarReturnValue, localVarHTTPResponse, newErr\n\t\t\t}\n\t\t\tnewErr.model = v\n\t\t\treturn localVarReturnValue, localVarHTTPResponse, newErr\n\t\t}\n\t\tif localVarHTTPResponse.StatusCode == 404 {\n\t\t\tvar v Error\n\t\t\terr = a.client.decode(&v, localVarBody, localVarHTTPResponse.Header.Get(\"Content-Type\"))\n\t\t\tif err != nil {\n\t\t\t\tnewErr.error = err.Error()\n\t\t\t\treturn localVarReturnValue, localVarHTTPResponse, newErr\n\t\t\t}\n\t\t\tnewErr.model = v\n\t\t\treturn localVarReturnValue, localVarHTTPResponse, newErr\n\t\t}\n\t\tvar v Error\n\t\terr = a.client.decode(&v, localVarBody, localVarHTTPResponse.Header.Get(\"Content-Type\"))\n\t\tif err != nil {\n\t\t\tnewErr.error = err.Error()\n\t\t\treturn localVarReturnValue, localVarHTTPResponse, newErr\n\t\t}\n\t\tnewErr.model = v\n\t\treturn localVarReturnValue, localVarHTTPResponse, newErr\n\t}\n\n\terr = a.client.decode(&localVarReturnValue, localVarBody, localVarHTTPResponse.Header.Get(\"Content-Type\"))\n\tif err != nil {\n\t\tnewErr := &GenericOpenAPIError{\n\t\t\tbody: localVarBody,\n\t\t\terror: err.Error(),\n\t\t}\n\t\treturn localVarReturnValue, localVarHTTPResponse, newErr\n\t}\n\n\treturn localVarReturnValue, localVarHTTPResponse, nil\n}", "func (a *HyperflexApiService) PatchHyperflexAppCatalog(ctx context.Context, moid string) ApiPatchHyperflexAppCatalogRequest {\n\treturn ApiPatchHyperflexAppCatalogRequest{\n\t\tApiService: a,\n\t\tctx: ctx,\n\t\tmoid: moid,\n\t}\n}", "func (a *HyperflexApiService) PatchHyperflexSoftwareVersionPolicy(ctx context.Context, moid string) ApiPatchHyperflexSoftwareVersionPolicyRequest {\n\treturn ApiPatchHyperflexSoftwareVersionPolicyRequest{\n\t\tApiService: a,\n\t\tctx: ctx,\n\t\tmoid: moid,\n\t}\n}", "func (a *FrinxOpenconfigNetworkInstanceApiService) PutFrinxOpenconfigNetworkInstanceNetworkInstancesNetworkInstanceMplsSignalingProtocolsRsvpTeInterfaceAttributesInterfaceHellosConfig(ctx context.Context, name string, interfaceId string, frinxOpenconfigMplsRsvpMplsrsvphellosHellosConfigBodyParam FrinxOpenconfigMplsRsvpMplsrsvphellosHellosConfigRequest1, nodeId string) (*http.Response, error) {\n\tvar (\n\t\tlocalVarHttpMethod = strings.ToUpper(\"Put\")\n\t\tlocalVarPostBody interface{}\n\t\tlocalVarFileName string\n\t\tlocalVarFileBytes []byte\n\t\t\n\t)\n\n\t// create path and map variables\n\tlocalVarPath := a.client.cfg.BasePath + 
\"/config/network-topology:network-topology/network-topology:topology/unified/network-topology:node/{node-id}/yang-ext:mount/frinx-openconfig-network-instance:network-instances/frinx-openconfig-network-instance:network-instance/{name}/frinx-openconfig-network-instance:mpls/frinx-openconfig-network-instance:signaling-protocols/frinx-openconfig-network-instance:rsvp-te/frinx-openconfig-network-instance:interface-attributes/frinx-openconfig-network-instance:interface/{interface-id}/frinx-openconfig-network-instance:hellos/frinx-openconfig-network-instance:config/\"\n\tlocalVarPath = strings.Replace(localVarPath, \"{\"+\"name\"+\"}\", fmt.Sprintf(\"%v\", name), -1)\n\tlocalVarPath = strings.Replace(localVarPath, \"{\"+\"interface-id\"+\"}\", fmt.Sprintf(\"%v\", interfaceId), -1)\n\tlocalVarPath = strings.Replace(localVarPath, \"{\"+\"node-id\"+\"}\", fmt.Sprintf(\"%v\", nodeId), -1)\n\n\tlocalVarHeaderParams := make(map[string]string)\n\tlocalVarQueryParams := url.Values{}\n\tlocalVarFormParams := url.Values{}\n\n\t// to determine the Content-Type header\n\tlocalVarHttpContentTypes := []string{\"application/json\", \"application/xml\"}\n\n\t// set Content-Type header\n\tlocalVarHttpContentType := selectHeaderContentType(localVarHttpContentTypes)\n\tif localVarHttpContentType != \"\" {\n\t\tlocalVarHeaderParams[\"Content-Type\"] = localVarHttpContentType\n\t}\n\n\t// to determine the Accept header\n\tlocalVarHttpHeaderAccepts := []string{\"application/json\", \"application/xml\"}\n\n\t// set Accept header\n\tlocalVarHttpHeaderAccept := selectHeaderAccept(localVarHttpHeaderAccepts)\n\tif localVarHttpHeaderAccept != \"\" {\n\t\tlocalVarHeaderParams[\"Accept\"] = localVarHttpHeaderAccept\n\t}\n\t// body params\n\tlocalVarPostBody = &frinxOpenconfigMplsRsvpMplsrsvphellosHellosConfigBodyParam\n\tr, err := a.client.prepareRequest(ctx, localVarPath, localVarHttpMethod, localVarPostBody, localVarHeaderParams, localVarQueryParams, localVarFormParams, localVarFileName, localVarFileBytes)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\n\tlocalVarHttpResponse, err := a.client.callAPI(r)\n\tif err != nil || localVarHttpResponse == nil {\n\t\treturn localVarHttpResponse, err\n\t}\n\n\tlocalVarBody, err := ioutil.ReadAll(localVarHttpResponse.Body)\n\tlocalVarHttpResponse.Body.Close()\n\tif err != nil {\n\t\treturn localVarHttpResponse, err\n\t}\n\n\n\tif localVarHttpResponse.StatusCode >= 300 {\n\t\tnewErr := GenericSwaggerError{\n\t\t\tbody: localVarBody,\n\t\t\terror: localVarHttpResponse.Status,\n\t\t}\n\t\t\n\t\treturn localVarHttpResponse, newErr\n\t}\n\n\treturn localVarHttpResponse, nil\n}", "func CreatePolicyWithDefaults(nbmaster string, httpClient *http.Client, jwt string) {\r\n fmt.Printf(\"\\nSending a POST request to create %s with defaults...\\n\", testPolicyName)\r\n\r\n policy := map[string]interface{}{\r\n \"data\": map[string]interface{}{\r\n \"type\": \"policy\",\r\n \"id\": testPolicyName,\r\n \"attributes\": map[string]interface{}{\r\n \"policy\": map[string]interface{}{\r\n \"policyName\": testPolicyName,\r\n \"policyType\": \"VMware\",\r\n \"policyAttributes\": map[string]interface{}{},\r\n \"clients\":[]interface{}{},\r\n \"schedules\":[]interface{}{},\r\n \"backupSelections\": map[string]interface{}{\r\n \"selections\": []interface{}{}}}}}}\r\n\r\n policyRequest, _ := json.Marshal(policy)\r\n\r\n uri := \"https://\" + nbmaster + \":\" + port + \"/netbackup/\" + policiesUri\r\n\r\n request, _ := http.NewRequest(http.MethodPost, uri, bytes.NewBuffer(policyRequest))\r\n 
request.Header.Add(\"Content-Type\", contentTypeV2);\r\n request.Header.Add(\"Authorization\", jwt);\r\n\r\n response, err := httpClient.Do(request)\r\n\r\n if err != nil {\r\n fmt.Printf(\"The HTTP request failed with error: %s\\n\", err)\r\n panic(\"Unable to create policy.\\n\")\r\n } else {\r\n if response.StatusCode != 204 {\r\n printErrorResponse(response)\r\n } else {\r\n fmt.Printf(\"%s created successfully.\\n\", testPolicyName);\r\n responseDetails, _ := httputil.DumpResponse(response, true);\r\n fmt.Printf(string(responseDetails))\r\n }\r\n }\r\n}", "func (o ApiOperationResponseHeaderOutput) DefaultValue() pulumi.StringPtrOutput {\n\treturn o.ApplyT(func(v ApiOperationResponseHeader) *string { return v.DefaultValue }).(pulumi.StringPtrOutput)\n}", "func NewPatchComponentHubDefault(code int) *PatchComponentHubDefault {\n\treturn &PatchComponentHubDefault{\n\t\t_statusCode: code,\n\t}\n}", "func NewPatchDeviceGroupPropertyByNameDefault(code int) *PatchDeviceGroupPropertyByNameDefault {\n\treturn &PatchDeviceGroupPropertyByNameDefault{\n\t\t_statusCode: code,\n\t}\n}", "func (a *HyperflexApiService) PatchHyperflexNodeConfigPolicy(ctx context.Context, moid string) ApiPatchHyperflexNodeConfigPolicyRequest {\n\treturn ApiPatchHyperflexNodeConfigPolicyRequest{\n\t\tApiService: a,\n\t\tctx: ctx,\n\t\tmoid: moid,\n\t}\n}", "func NewPatchServiceAccountTokenDefault(code int) *PatchServiceAccountTokenDefault {\n\treturn &PatchServiceAccountTokenDefault{\n\t\t_statusCode: code,\n\t}\n}", "func (a *HyperflexApiService) PatchHyperflexHealthCheckDefinition(ctx context.Context, moid string) ApiPatchHyperflexHealthCheckDefinitionRequest {\n\treturn ApiPatchHyperflexHealthCheckDefinitionRequest{\n\t\tApiService: a,\n\t\tctx: ctx,\n\t\tmoid: moid,\n\t}\n}", "func (a *HyperflexApiService) PatchHyperflexClusterNetworkPolicyExecute(r ApiPatchHyperflexClusterNetworkPolicyRequest) (*HyperflexClusterNetworkPolicy, *http.Response, error) {\n\tvar (\n\t\tlocalVarHTTPMethod = http.MethodPatch\n\t\tlocalVarPostBody interface{}\n\t\tformFiles []formFile\n\t\tlocalVarReturnValue *HyperflexClusterNetworkPolicy\n\t)\n\n\tlocalBasePath, err := a.client.cfg.ServerURLWithContext(r.ctx, \"HyperflexApiService.PatchHyperflexClusterNetworkPolicy\")\n\tif err != nil {\n\t\treturn localVarReturnValue, nil, &GenericOpenAPIError{error: err.Error()}\n\t}\n\n\tlocalVarPath := localBasePath + \"/api/v1/hyperflex/ClusterNetworkPolicies/{Moid}\"\n\tlocalVarPath = strings.Replace(localVarPath, \"{\"+\"Moid\"+\"}\", url.PathEscape(parameterToString(r.moid, \"\")), -1)\n\n\tlocalVarHeaderParams := make(map[string]string)\n\tlocalVarQueryParams := url.Values{}\n\tlocalVarFormParams := url.Values{}\n\tif r.hyperflexClusterNetworkPolicy == nil {\n\t\treturn localVarReturnValue, nil, reportError(\"hyperflexClusterNetworkPolicy is required and must be specified\")\n\t}\n\n\t// to determine the Content-Type header\n\tlocalVarHTTPContentTypes := []string{\"application/json\", \"application/json-patch+json\"}\n\n\t// set Content-Type header\n\tlocalVarHTTPContentType := selectHeaderContentType(localVarHTTPContentTypes)\n\tif localVarHTTPContentType != \"\" {\n\t\tlocalVarHeaderParams[\"Content-Type\"] = localVarHTTPContentType\n\t}\n\n\t// to determine the Accept header\n\tlocalVarHTTPHeaderAccepts := []string{\"application/json\"}\n\n\t// set Accept header\n\tlocalVarHTTPHeaderAccept := selectHeaderAccept(localVarHTTPHeaderAccepts)\n\tif localVarHTTPHeaderAccept != \"\" {\n\t\tlocalVarHeaderParams[\"Accept\"] = 
localVarHTTPHeaderAccept\n\t}\n\tif r.ifMatch != nil {\n\t\tlocalVarHeaderParams[\"If-Match\"] = parameterToString(*r.ifMatch, \"\")\n\t}\n\t// body params\n\tlocalVarPostBody = r.hyperflexClusterNetworkPolicy\n\treq, err := a.client.prepareRequest(r.ctx, localVarPath, localVarHTTPMethod, localVarPostBody, localVarHeaderParams, localVarQueryParams, localVarFormParams, formFiles)\n\tif err != nil {\n\t\treturn localVarReturnValue, nil, err\n\t}\n\n\tlocalVarHTTPResponse, err := a.client.callAPI(req)\n\tif err != nil || localVarHTTPResponse == nil {\n\t\treturn localVarReturnValue, localVarHTTPResponse, err\n\t}\n\n\tlocalVarBody, err := ioutil.ReadAll(localVarHTTPResponse.Body)\n\tlocalVarHTTPResponse.Body.Close()\n\tlocalVarHTTPResponse.Body = ioutil.NopCloser(bytes.NewBuffer(localVarBody))\n\tif err != nil {\n\t\treturn localVarReturnValue, localVarHTTPResponse, err\n\t}\n\n\tif localVarHTTPResponse.StatusCode >= 300 {\n\t\tnewErr := &GenericOpenAPIError{\n\t\t\tbody: localVarBody,\n\t\t\terror: localVarHTTPResponse.Status,\n\t\t}\n\t\tif localVarHTTPResponse.StatusCode == 400 {\n\t\t\tvar v Error\n\t\t\terr = a.client.decode(&v, localVarBody, localVarHTTPResponse.Header.Get(\"Content-Type\"))\n\t\t\tif err != nil {\n\t\t\t\tnewErr.error = err.Error()\n\t\t\t\treturn localVarReturnValue, localVarHTTPResponse, newErr\n\t\t\t}\n\t\t\tnewErr.model = v\n\t\t\treturn localVarReturnValue, localVarHTTPResponse, newErr\n\t\t}\n\t\tif localVarHTTPResponse.StatusCode == 401 {\n\t\t\tvar v Error\n\t\t\terr = a.client.decode(&v, localVarBody, localVarHTTPResponse.Header.Get(\"Content-Type\"))\n\t\t\tif err != nil {\n\t\t\t\tnewErr.error = err.Error()\n\t\t\t\treturn localVarReturnValue, localVarHTTPResponse, newErr\n\t\t\t}\n\t\t\tnewErr.model = v\n\t\t\treturn localVarReturnValue, localVarHTTPResponse, newErr\n\t\t}\n\t\tif localVarHTTPResponse.StatusCode == 403 {\n\t\t\tvar v Error\n\t\t\terr = a.client.decode(&v, localVarBody, localVarHTTPResponse.Header.Get(\"Content-Type\"))\n\t\t\tif err != nil {\n\t\t\t\tnewErr.error = err.Error()\n\t\t\t\treturn localVarReturnValue, localVarHTTPResponse, newErr\n\t\t\t}\n\t\t\tnewErr.model = v\n\t\t\treturn localVarReturnValue, localVarHTTPResponse, newErr\n\t\t}\n\t\tif localVarHTTPResponse.StatusCode == 404 {\n\t\t\tvar v Error\n\t\t\terr = a.client.decode(&v, localVarBody, localVarHTTPResponse.Header.Get(\"Content-Type\"))\n\t\t\tif err != nil {\n\t\t\t\tnewErr.error = err.Error()\n\t\t\t\treturn localVarReturnValue, localVarHTTPResponse, newErr\n\t\t\t}\n\t\t\tnewErr.model = v\n\t\t\treturn localVarReturnValue, localVarHTTPResponse, newErr\n\t\t}\n\t\tvar v Error\n\t\terr = a.client.decode(&v, localVarBody, localVarHTTPResponse.Header.Get(\"Content-Type\"))\n\t\tif err != nil {\n\t\t\tnewErr.error = err.Error()\n\t\t\treturn localVarReturnValue, localVarHTTPResponse, newErr\n\t\t}\n\t\tnewErr.model = v\n\t\treturn localVarReturnValue, localVarHTTPResponse, newErr\n\t}\n\n\terr = a.client.decode(&localVarReturnValue, localVarBody, localVarHTTPResponse.Header.Get(\"Content-Type\"))\n\tif err != nil {\n\t\tnewErr := &GenericOpenAPIError{\n\t\t\tbody: localVarBody,\n\t\t\terror: err.Error(),\n\t\t}\n\t\treturn localVarReturnValue, localVarHTTPResponse, newErr\n\t}\n\n\treturn localVarReturnValue, localVarHTTPResponse, nil\n}", "func (s *Service) NewDefaultProtocol(n *onet.TreeNodeInstance) (onet.ProtocolInstance, error) {\n\tsuite := pairing.NewSuiteBn256()\n\treturn simpleblscosi.NewProtocol(n, s.vf, n.Tree().ID, s.atomicCoinReserved, s.coinToAtomic, 
s.distances, suite)\n}", "func (a *HyperflexApiService) PatchHyperflexSoftwareDistributionVersion(ctx context.Context, moid string) ApiPatchHyperflexSoftwareDistributionVersionRequest {\n\treturn ApiPatchHyperflexSoftwareDistributionVersionRequest{\n\t\tApiService: a,\n\t\tctx: ctx,\n\t\tmoid: moid,\n\t}\n}", "func (in *KubeadmControlPlane) Default() {\n\tif in.Spec.Replicas == nil {\n\t\treplicas := int32(1)\n\t\tin.Spec.Replicas = &replicas\n\t}\n\n\tif in.Spec.InfrastructureTemplate.Namespace == \"\" {\n\t\tin.Spec.InfrastructureTemplate.Namespace = in.Namespace\n\t}\n}", "func NewDcimDeviceTypesPartialUpdateDefault(code int) *DcimDeviceTypesPartialUpdateDefault {\n\treturn &DcimDeviceTypesPartialUpdateDefault{\n\t\t_statusCode: code,\n\t}\n}", "func (a *HyperflexApiService) PatchHyperflexClusterProfile(ctx context.Context, moid string) ApiPatchHyperflexClusterProfileRequest {\n\treturn ApiPatchHyperflexClusterProfileRequest{\n\t\tApiService: a,\n\t\tctx: ctx,\n\t\tmoid: moid,\n\t}\n}" ]
[ "0.65078557", "0.6498812", "0.60001904", "0.5752356", "0.5675827", "0.5667362", "0.562648", "0.5618662", "0.5546495", "0.55310225", "0.5496955", "0.5437951", "0.5423619", "0.5329679", "0.5311653", "0.5289661", "0.52641594", "0.5258812", "0.5217607", "0.5207181", "0.51895684", "0.51798344", "0.50799507", "0.5079737", "0.5043948", "0.5038465", "0.49899444", "0.49787086", "0.4972202", "0.49707282", "0.4965929", "0.49656725", "0.4937933", "0.49308035", "0.49204817", "0.4885882", "0.48857686", "0.48852357", "0.48834184", "0.4866311", "0.48658496", "0.48629063", "0.48556185", "0.48494962", "0.4845805", "0.48414928", "0.4827615", "0.4812", "0.4799438", "0.47977", "0.47919875", "0.47874925", "0.4781681", "0.47779602", "0.4736421", "0.47345906", "0.47327027", "0.47278404", "0.47240114", "0.47238174", "0.4714797", "0.47093296", "0.4697075", "0.4696061", "0.46923476", "0.4690881", "0.46869522", "0.46850264", "0.46813032", "0.467793", "0.46763468", "0.4675994", "0.4670821", "0.46683684", "0.46519616", "0.46515903", "0.4639064", "0.46336806", "0.4629725", "0.46290746", "0.4628224", "0.4626354", "0.46246707", "0.46160603", "0.46143126", "0.46108532", "0.46076944", "0.4605813", "0.45938042", "0.45933944", "0.45872077", "0.45848864", "0.45722878", "0.45719472", "0.4557223", "0.45533863", "0.45524812", "0.45523733", "0.4551562", "0.45485985" ]
0.7576664
0