hunk | file | file_path | label | commit_url | dependency_score
---|---|---|---|---|---
dict | string (length 0–11.8M) | string (length 2–234) | int64 (0–1) | string (length 74–103) | sequence (length 5)
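Each row pairs a `hunk` object (a code window with per-line keep/replace/add labels, the replacement lines in `after_edit`, the target `file_path`, the edit `type`, and `edit_start_line_idx`) with the full text of a candidate `file`, that file's `file_path`, a binary relevance `label`, the source `commit_url`, and a five-element `dependency_score` vector. Below is a minimal sketch of consuming one row, assuming the rows have been exported as one JSON object per line with exactly these field names; `rows.jsonl`, `load_rows`, and `apply_replace_hunk` are illustrative names, not part of the dataset.

```python
import json

# Minimal sketch, illustrative only: assumes each row of this dump was exported
# as one JSON object per line using the column names from the table above.
def load_rows(path):
    with open(path) as f:
        for line in f:
            yield json.loads(line)

def apply_replace_hunk(hunk):
    """Rebuild the edited code window for a 'replace' hunk whose 'replace'
    labels form a single contiguous span (the case in the rows shown below)."""
    out, substituted = [], False
    for src, tag in zip(hunk["code_window"], hunk["labels"]):
        if tag == "keep":
            out.append(src)
        elif tag == "replace" and not substituted:
            out.extend(hunk["after_edit"])  # substitute the whole span once
            substituted = True
    return out

for row in load_rows("rows.jsonl"):  # hypothetical export path
    hunk = row["hunk"]
    print(row["file_path"], row["label"], row["dependency_score"])
    if hunk["type"] == "replace":
        print("".join(apply_replace_hunk(hunk)))  # window lines keep their '\n'
```

For `"add"` hunks, `after_edit` would instead be inserted after `edit_start_line_idx` rather than substituted; that branch is omitted from the sketch.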
{
"id": 3,
"code_window": [
" ReactiveEffectOptions\n",
"} from '@vue/reactivity'\n",
"import { queueJob, queuePostFlushCb } from './scheduler'\n",
"import { EMPTY_OBJ, isObject, isArray } from '@vue/shared'\n",
"import { recordEffect } from './apiState'\n",
"\n",
"export interface WatchOptions {\n",
" lazy?: boolean\n",
" flush?: 'pre' | 'post' | 'sync'\n"
],
"labels": [
"keep",
"keep",
"keep",
"replace",
"keep",
"keep",
"keep",
"keep",
"keep"
],
"after_edit": [
"import { EMPTY_OBJ, isObject, isArray, isFunction } from '@vue/shared'\n"
],
"file_path": "packages/runtime-core/src/apiWatch.ts",
"type": "replace",
"edit_start_line_idx": 8
} | {
"packages": [
"packages/*"
],
"version": "3.0.0-alpha.1"
}
| lerna.json | 0 | https://github.com/vuejs/core/commit/36ab2ab9806d85653f928446316158538f0c840c | [
0.00017354909505229443,
0.00017354909505229443,
0.00017354909505229443,
0.00017354909505229443,
0
] |
{
"id": 3,
"code_window": [
" ReactiveEffectOptions\n",
"} from '@vue/reactivity'\n",
"import { queueJob, queuePostFlushCb } from './scheduler'\n",
"import { EMPTY_OBJ, isObject, isArray } from '@vue/shared'\n",
"import { recordEffect } from './apiState'\n",
"\n",
"export interface WatchOptions {\n",
" lazy?: boolean\n",
" flush?: 'pre' | 'post' | 'sync'\n"
],
"labels": [
"keep",
"keep",
"keep",
"replace",
"keep",
"keep",
"keep",
"keep",
"keep"
],
"after_edit": [
"import { EMPTY_OBJ, isObject, isArray, isFunction } from '@vue/shared'\n"
],
"file_path": "packages/runtime-core/src/apiWatch.ts",
"type": "replace",
"edit_start_line_idx": 8
} | /*
Produce production builds and stitch together d.ts files.
To specify the package to build, simply pass its name and the desired build
formats to output (defaults to `buildOptions.formats` specified in that package,
or "esm,cjs"):
```
# name supports fuzzy match. will build all packages with name containing "dom":
yarn build dom
# specify the format to output
yarn build core --formats cjs
```
*/
const fs = require('fs-extra')
const path = require('path')
const zlib = require('zlib')
const chalk = require('chalk')
const execa = require('execa')
const dts = require('dts-bundle')
const { targets, fuzzyMatchTarget } = require('./utils')
const args = require('minimist')(process.argv.slice(2))
const target = args._[0]
const formats = args.formats || args.f
const buildAllMatching = args.all || args.a
;(async () => {
if (!target) {
await buildAll(targets)
checkAllSizes(targets)
} else {
await buildAll(fuzzyMatchTarget(target, buildAllMatching))
checkAllSizes(fuzzyMatchTarget(target, buildAllMatching))
}
})()
async function buildAll(targets) {
for (const target of targets) {
await build(target)
}
}
async function build(target) {
const pkgDir = path.resolve(`packages/${target}`)
const pkg = require(`${pkgDir}/package.json`)
await fs.remove(`${pkgDir}/dist`)
await execa(
'rollup',
[
'-c',
'--environment',
`NODE_ENV:production,` +
`TARGET:${target}` +
(formats ? `,FORMATS:${formats}` : ``)
],
{ stdio: 'inherit' }
)
if (pkg.types) {
const dtsOptions = {
name: target === 'vue' ? target : `@vue/${target}`,
main: `${pkgDir}/dist/packages/${target}/src/index.d.ts`,
out: `${pkgDir}/${pkg.types}`
}
dts.bundle(dtsOptions)
console.log()
console.log(
chalk.blue(chalk.bold(`generated typings at ${dtsOptions.out}`))
)
await fs.remove(`${pkgDir}/dist/packages`)
}
}
function checkAllSizes(targets) {
console.log()
for (const target of targets) {
checkSize(target)
}
console.log()
}
function checkSize(target) {
const pkgDir = path.resolve(`packages/${target}`)
const esmProdBuild = `${pkgDir}/dist/${target}.esm-browser.prod.js`
if (fs.existsSync(esmProdBuild)) {
const file = fs.readFileSync(esmProdBuild)
const minSize = (file.length / 1024).toFixed(2) + 'kb'
const gzipped = zlib.gzipSync(file)
const gzipSize = (gzipped.length / 1024).toFixed(2) + 'kb'
console.log(
`${chalk.gray(chalk.bold(target))} min:${minSize} / gzip:${gzipSize}`
)
}
}
| scripts/build.js | 0 | https://github.com/vuejs/core/commit/36ab2ab9806d85653f928446316158538f0c840c | [
0.00017728102102410048,
0.0001737589482218027,
0.00016574267647229135,
0.00017416744958609343,
0.00000316444129566662
] |
{
"id": 4,
"code_window": [
" | ((value: any, oldValue: any, onCleanup: CleanupRegistrator) => any)\n",
" | WatchOptions,\n",
" options?: WatchOptions\n",
"): StopHandle {\n",
" if (typeof effectOrOptions === 'function') {\n",
" // effect callback as 2nd argument - this is a source watcher\n",
" return doWatch(effectOrSource, effectOrOptions, options)\n",
" } else {\n"
],
"labels": [
"keep",
"keep",
"keep",
"keep",
"replace",
"keep",
"keep",
"keep"
],
"after_edit": [
" if (isFunction(effectOrOptions)) {\n"
],
"file_path": "packages/runtime-core/src/apiWatch.ts",
"type": "replace",
"edit_start_line_idx": 62
} | import {
effect,
stop,
isRef,
Ref,
ReactiveEffectOptions
} from '@vue/reactivity'
import { queueJob, queuePostFlushCb } from './scheduler'
import { EMPTY_OBJ, isObject, isArray } from '@vue/shared'
import { recordEffect } from './apiState'
export interface WatchOptions {
lazy?: boolean
flush?: 'pre' | 'post' | 'sync'
deep?: boolean
onTrack?: ReactiveEffectOptions['onTrack']
onTrigger?: ReactiveEffectOptions['onTrigger']
}
type StopHandle = () => void
type WatcherSource<T = any> = Ref<T> | (() => T)
type MapSources<T> = {
[K in keyof T]: T[K] extends WatcherSource<infer V> ? V : never
}
type CleanupRegistrator = (invalidate: () => void) => void
type SimpleEffect = (onCleanup: CleanupRegistrator) => void
const invoke = (fn: Function) => fn()
export function watch(effect: SimpleEffect, options?: WatchOptions): StopHandle
export function watch<T>(
source: WatcherSource<T>,
cb: (newValue: T, oldValue: T, onCleanup: CleanupRegistrator) => any,
options?: WatchOptions
): StopHandle
export function watch<T extends WatcherSource<unknown>[]>(
sources: T,
cb: (
newValues: MapSources<T>,
oldValues: MapSources<T>,
onCleanup: CleanupRegistrator
) => any,
options?: WatchOptions
): StopHandle
// implementation
export function watch(
effectOrSource:
| WatcherSource<unknown>
| WatcherSource<unknown>[]
| SimpleEffect,
effectOrOptions?:
| ((value: any, oldValue: any, onCleanup: CleanupRegistrator) => any)
| WatchOptions,
options?: WatchOptions
): StopHandle {
if (typeof effectOrOptions === 'function') {
// effect callback as 2nd argument - this is a source watcher
return doWatch(effectOrSource, effectOrOptions, options)
} else {
// 2nd argument is either missing or an options object
// - this is a simple effect watcher
return doWatch(effectOrSource, null, effectOrOptions)
}
}
function doWatch(
source: WatcherSource | WatcherSource[] | SimpleEffect,
cb:
| ((newValue: any, oldValue: any, onCleanup: CleanupRegistrator) => any)
| null,
{ lazy, deep, flush, onTrack, onTrigger }: WatchOptions = EMPTY_OBJ
): StopHandle {
const scheduler =
flush === 'sync' ? invoke : flush === 'pre' ? queueJob : queuePostFlushCb
const baseGetter = isArray(source)
? () => source.map(s => (isRef(s) ? s.value : s()))
: isRef(source)
? () => source.value
: () => source(registerCleanup)
const getter = deep ? () => traverse(baseGetter()) : baseGetter
let cleanup: any
const registerCleanup: CleanupRegistrator = (fn: () => void) => {
// TODO wrap the cleanup fn for error handling
cleanup = runner.onStop = fn
}
let oldValue: any
const applyCb = cb
? () => {
const newValue = runner()
if (deep || newValue !== oldValue) {
// cleanup before running cb again
if (cleanup) {
cleanup()
}
// TODO handle error (including ASYNC)
try {
cb(newValue, oldValue, registerCleanup)
} catch (e) {}
oldValue = newValue
}
}
: void 0
const runner = effect(getter, {
lazy: true,
// so it runs before component update effects in pre flush mode
computed: true,
onTrack,
onTrigger,
scheduler: applyCb ? () => scheduler(applyCb) : void 0
})
if (!lazy) {
applyCb && scheduler(applyCb)
} else {
oldValue = runner()
}
recordEffect(runner)
return () => {
stop(runner)
}
}
function traverse(value: any, seen: Set<any> = new Set()) {
if (!isObject(value) || seen.has(value)) {
return
}
seen.add(value)
if (isArray(value)) {
for (let i = 0; i < value.length; i++) {
traverse(value[i], seen)
}
} else if (value instanceof Map || value instanceof Set) {
;(value as any).forEach((v: any) => {
traverse(v, seen)
})
} else {
for (const key in value) {
traverse(value[key], seen)
}
}
return value
}
| packages/runtime-core/src/apiWatch.ts | 1 | https://github.com/vuejs/core/commit/36ab2ab9806d85653f928446316158538f0c840c | [
0.9934679865837097,
0.0655224397778511,
0.0001686259056441486,
0.0015558801824226975,
0.23967286944389343
] |
{
"id": 4,
"code_window": [
" | ((value: any, oldValue: any, onCleanup: CleanupRegistrator) => any)\n",
" | WatchOptions,\n",
" options?: WatchOptions\n",
"): StopHandle {\n",
" if (typeof effectOrOptions === 'function') {\n",
" // effect callback as 2nd argument - this is a source watcher\n",
" return doWatch(effectOrSource, effectOrOptions, options)\n",
" } else {\n"
],
"labels": [
"keep",
"keep",
"keep",
"keep",
"replace",
"keep",
"keep",
"keep"
],
"after_edit": [
" if (isFunction(effectOrOptions)) {\n"
],
"file_path": "packages/runtime-core/src/apiWatch.ts",
"type": "replace",
"edit_start_line_idx": 62
} | import { ComponentInstance } from './component'
import { VNode, NormalizedChildren, normalizeVNode, VNodeChild } from './vnode'
import { isArray, isFunction } from '@vue/shared'
import { SLOTS_CHILDREN } from './typeFlags'
export type Slot = (...args: any[]) => VNode[]
export type Slots = Readonly<{
[name: string]: Slot
}>
export type RawSlots = {
[name: string]: unknown
}
const normalizeSlotValue = (value: unknown): VNode[] =>
isArray(value)
? value.map(normalizeVNode)
: [normalizeVNode(value as VNodeChild)]
const normalizeSlot = (rawSlot: Function): Slot => (props: any) =>
normalizeSlotValue(rawSlot(props))
export function resolveSlots(
instance: ComponentInstance,
children: NormalizedChildren
) {
let slots: Slots | void
if (instance.vnode.shapeFlag & SLOTS_CHILDREN) {
if ((children as any)._normalized) {
// pre-normalized slots object generated by compiler
slots = children as Slots
} else {
slots = {}
for (const key in children as RawSlots) {
let value = (children as RawSlots)[key]
if (isFunction(value)) {
;(slots as any)[key] = normalizeSlot(value)
} else {
if (__DEV__) {
// TODO show tip on using functions
console.log('use function slots!')
}
value = normalizeSlotValue(value)
;(slots as any)[key] = () => value
}
}
}
} else if (children !== null) {
// non slot object children (direct value) passed to a component
if (__DEV__) {
// TODO show tip on using functions
console.log('use function slots!')
}
const normalized = normalizeSlotValue(children)
slots = { default: () => normalized }
}
if (slots !== void 0) {
instance.slots = slots
}
}
| packages/runtime-core/src/componentSlots.ts | 0 | https://github.com/vuejs/core/commit/36ab2ab9806d85653f928446316158538f0c840c | [
0.0021166459191590548,
0.0005493867793120444,
0.00017051080067176372,
0.0002015409991145134,
0.0007067882688716054
] |
{
"id": 4,
"code_window": [
" | ((value: any, oldValue: any, onCleanup: CleanupRegistrator) => any)\n",
" | WatchOptions,\n",
" options?: WatchOptions\n",
"): StopHandle {\n",
" if (typeof effectOrOptions === 'function') {\n",
" // effect callback as 2nd argument - this is a source watcher\n",
" return doWatch(effectOrSource, effectOrOptions, options)\n",
" } else {\n"
],
"labels": [
"keep",
"keep",
"keep",
"keep",
"replace",
"keep",
"keep",
"keep"
],
"after_edit": [
" if (isFunction(effectOrOptions)) {\n"
],
"file_path": "packages/runtime-core/src/apiWatch.ts",
"type": "replace",
"edit_start_line_idx": 62
} | 'use strict'
if (process.env.NODE_ENV === 'production') {
module.exports = require('./dist/vue.cjs.prod.js')
} else {
module.exports = require('./dist/vue.cjs.js')
}
| packages/vue-compat/index.js | 0 | https://github.com/vuejs/core/commit/36ab2ab9806d85653f928446316158538f0c840c | [
0.00017277504957746714,
0.00017277504957746714,
0.00017277504957746714,
0.00017277504957746714,
0
] |
{
"id": 4,
"code_window": [
" | ((value: any, oldValue: any, onCleanup: CleanupRegistrator) => any)\n",
" | WatchOptions,\n",
" options?: WatchOptions\n",
"): StopHandle {\n",
" if (typeof effectOrOptions === 'function') {\n",
" // effect callback as 2nd argument - this is a source watcher\n",
" return doWatch(effectOrSource, effectOrOptions, options)\n",
" } else {\n"
],
"labels": [
"keep",
"keep",
"keep",
"keep",
"replace",
"keep",
"keep",
"keep"
],
"after_edit": [
" if (isFunction(effectOrOptions)) {\n"
],
"file_path": "packages/runtime-core/src/apiWatch.ts",
"type": "replace",
"edit_start_line_idx": 62
} | {
"name": "@vue/runtime-test",
"version": "3.0.0-alpha.1",
"description": "@vue/runtime-test",
"main": "index.js",
"module": "dist/runtime-test.esm-bundler.js",
"types": "dist/index.d.ts",
"repository": {
"type": "git",
"url": "git+https://github.com/vuejs/vue.git"
},
"buildOptions": {
"name": "VueTestRuntime",
"formats": ["esm", "cjs", "global"]
},
"keywords": [
"vue"
],
"author": "Evan You",
"license": "MIT",
"bugs": {
"url": "https://github.com/vuejs/vue/issues"
},
"homepage": "https://github.com/vuejs/vue/tree/dev/packages/runtime-test#readme",
"dependencies": {
"@vue/runtime-core": "3.0.0-alpha.1"
}
}
| packages/runtime-test/package.json | 0 | https://github.com/vuejs/core/commit/36ab2ab9806d85653f928446316158538f0c840c | [
0.00017662839672993869,
0.00017575327365193516,
0.00017443302203901112,
0.00017619844584260136,
9.499268571744324e-7
] |
{
"id": 0,
"code_window": [
"package mysql\n",
"\n",
"import (\n",
"\t\"fmt\"\n",
"\t\"regexp\"\n",
"\n",
"\t\"github.com/grafana/grafana/pkg/tsdb\"\n",
")\n",
"\n",
"//const rsString = `(?:\"([^\"]*)\")`;\n"
],
"labels": [
"keep",
"keep",
"keep",
"keep",
"add",
"keep",
"keep",
"keep",
"keep",
"keep"
],
"after_edit": [
"\t\"strings\"\n",
"\t\"time\"\n"
],
"file_path": "pkg/tsdb/mysql/macros.go",
"type": "add",
"edit_start_line_idx": 5
} | package mysql
import (
"fmt"
"regexp"
"github.com/grafana/grafana/pkg/tsdb"
)
//const rsString = `(?:"([^"]*)")`;
const rsIdentifier = `([_a-zA-Z0-9]+)`
const sExpr = `\$` + rsIdentifier + `\(([^\)]*)\)`
type MySqlMacroEngine struct {
TimeRange *tsdb.TimeRange
}
func NewMysqlMacroEngine() tsdb.SqlMacroEngine {
return &MySqlMacroEngine{}
}
func (m *MySqlMacroEngine) Interpolate(timeRange *tsdb.TimeRange, sql string) (string, error) {
m.TimeRange = timeRange
rExp, _ := regexp.Compile(sExpr)
var macroError error
sql = replaceAllStringSubmatchFunc(rExp, sql, func(groups []string) string {
res, err := m.evaluateMacro(groups[1], groups[2:])
if err != nil && macroError == nil {
macroError = err
return "macro_error()"
}
return res
})
if macroError != nil {
return "", macroError
}
return sql, nil
}
func replaceAllStringSubmatchFunc(re *regexp.Regexp, str string, repl func([]string) string) string {
result := ""
lastIndex := 0
for _, v := range re.FindAllSubmatchIndex([]byte(str), -1) {
groups := []string{}
for i := 0; i < len(v); i += 2 {
groups = append(groups, str[v[i]:v[i+1]])
}
result += str[lastIndex:v[0]] + repl(groups)
lastIndex = v[1]
}
return result + str[lastIndex:]
}
func (m *MySqlMacroEngine) evaluateMacro(name string, args []string) (string, error) {
switch name {
case "__time":
if len(args) == 0 {
return "", fmt.Errorf("missing time column argument for macro %v", name)
}
return fmt.Sprintf("UNIX_TIMESTAMP(%s) as time_sec", args[0]), nil
case "__timeFilter":
if len(args) == 0 {
return "", fmt.Errorf("missing time column argument for macro %v", name)
}
return fmt.Sprintf("%s >= FROM_UNIXTIME(%d) AND %s <= FROM_UNIXTIME(%d)", args[0], uint64(m.TimeRange.GetFromAsMsEpoch()/1000), args[0], uint64(m.TimeRange.GetToAsMsEpoch()/1000)), nil
case "__timeFrom":
return fmt.Sprintf("FROM_UNIXTIME(%d)", uint64(m.TimeRange.GetFromAsMsEpoch()/1000)), nil
case "__timeTo":
return fmt.Sprintf("FROM_UNIXTIME(%d)", uint64(m.TimeRange.GetToAsMsEpoch()/1000)), nil
case "__unixEpochFilter":
if len(args) == 0 {
return "", fmt.Errorf("missing time column argument for macro %v", name)
}
return fmt.Sprintf("%s >= %d AND %s <= %d", args[0], uint64(m.TimeRange.GetFromAsMsEpoch()/1000), args[0], uint64(m.TimeRange.GetToAsMsEpoch()/1000)), nil
case "__unixEpochFrom":
return fmt.Sprintf("%d", uint64(m.TimeRange.GetFromAsMsEpoch()/1000)), nil
case "__unixEpochTo":
return fmt.Sprintf("%d", uint64(m.TimeRange.GetToAsMsEpoch()/1000)), nil
default:
return "", fmt.Errorf("Unknown macro %v", name)
}
}
| pkg/tsdb/mysql/macros.go | 1 | https://github.com/grafana/grafana/commit/34da0711abf93fa54376a21452660d2a9f4545df | [
0.9982296824455261,
0.20959347486495972,
0.00016865305951796472,
0.00020377215696498752,
0.3926261365413666
] |
{
"id": 0,
"code_window": [
"package mysql\n",
"\n",
"import (\n",
"\t\"fmt\"\n",
"\t\"regexp\"\n",
"\n",
"\t\"github.com/grafana/grafana/pkg/tsdb\"\n",
")\n",
"\n",
"//const rsString = `(?:\"([^\"]*)\")`;\n"
],
"labels": [
"keep",
"keep",
"keep",
"keep",
"add",
"keep",
"keep",
"keep",
"keep",
"keep"
],
"after_edit": [
"\t\"strings\"\n",
"\t\"time\"\n"
],
"file_path": "pkg/tsdb/mysql/macros.go",
"type": "add",
"edit_start_line_idx": 5
} | package imguploader
import (
"context"
"fmt"
"io/ioutil"
"net/http"
"os"
"github.com/grafana/grafana/pkg/log"
"github.com/grafana/grafana/pkg/util"
"golang.org/x/oauth2/google"
)
const (
tokenUrl string = "https://www.googleapis.com/auth/devstorage.read_write"
uploadUrl string = "https://www.googleapis.com/upload/storage/v1/b/%s/o?uploadType=media&name=%s&predefinedAcl=publicRead"
)
type GCSUploader struct {
keyFile string
bucket string
log log.Logger
}
func NewGCSUploader(keyFile, bucket string) *GCSUploader {
return &GCSUploader{
keyFile: keyFile,
bucket: bucket,
log: log.New("gcsuploader"),
}
}
func (u *GCSUploader) Upload(ctx context.Context, imageDiskPath string) (string, error) {
key := util.GetRandomString(20) + ".png"
u.log.Debug("Opening key file ", u.keyFile)
data, err := ioutil.ReadFile(u.keyFile)
if err != nil {
return "", err
}
u.log.Debug("Creating JWT conf")
conf, err := google.JWTConfigFromJSON(data, tokenUrl)
if err != nil {
return "", err
}
u.log.Debug("Creating HTTP client")
client := conf.Client(ctx)
err = u.uploadFile(client, imageDiskPath, key)
if err != nil {
return "", err
}
return fmt.Sprintf("https://storage.googleapis.com/%s/%s", u.bucket, key), nil
}
func (u *GCSUploader) uploadFile(client *http.Client, imageDiskPath, key string) error {
u.log.Debug("Opening image file ", imageDiskPath)
fileReader, err := os.Open(imageDiskPath)
if err != nil {
return err
}
reqUrl := fmt.Sprintf(uploadUrl, u.bucket, key)
u.log.Debug("Request URL: ", reqUrl)
req, err := http.NewRequest("POST", reqUrl, fileReader)
if err != nil {
return err
}
req.Header.Add("Content-Type", "image/png")
u.log.Debug("Sending POST request to GCS")
resp, err := client.Do(req)
if err != nil {
return err
}
if resp.StatusCode != 200 {
return fmt.Errorf("GCS response status code %d", resp.StatusCode)
}
return nil
}
| pkg/components/imguploader/gcsuploader.go | 0 | https://github.com/grafana/grafana/commit/34da0711abf93fa54376a21452660d2a9f4545df | [
0.0033967867493629456,
0.0006587921525351703,
0.0001675301609793678,
0.00017071088950615376,
0.0010344741167500615
] |
{
"id": 0,
"code_window": [
"package mysql\n",
"\n",
"import (\n",
"\t\"fmt\"\n",
"\t\"regexp\"\n",
"\n",
"\t\"github.com/grafana/grafana/pkg/tsdb\"\n",
")\n",
"\n",
"//const rsString = `(?:\"([^\"]*)\")`;\n"
],
"labels": [
"keep",
"keep",
"keep",
"keep",
"add",
"keep",
"keep",
"keep",
"keep",
"keep"
],
"after_edit": [
"\t\"strings\"\n",
"\t\"time\"\n"
],
"file_path": "pkg/tsdb/mysql/macros.go",
"type": "add",
"edit_start_line_idx": 5
} | _0 | vendor/github.com/jmespath/go-jmespath/fuzz/corpus/expr-288 | 0 | https://github.com/grafana/grafana/commit/34da0711abf93fa54376a21452660d2a9f4545df | [
0.00017473017214797437,
0.00017473017214797437,
0.00017473017214797437,
0.00017473017214797437,
0
] |
{
"id": 0,
"code_window": [
"package mysql\n",
"\n",
"import (\n",
"\t\"fmt\"\n",
"\t\"regexp\"\n",
"\n",
"\t\"github.com/grafana/grafana/pkg/tsdb\"\n",
")\n",
"\n",
"//const rsString = `(?:\"([^\"]*)\")`;\n"
],
"labels": [
"keep",
"keep",
"keep",
"keep",
"add",
"keep",
"keep",
"keep",
"keep",
"keep"
],
"after_edit": [
"\t\"strings\"\n",
"\t\"time\"\n"
],
"file_path": "pkg/tsdb/mysql/macros.go",
"type": "add",
"edit_start_line_idx": 5
} | // Copyright (c) 2017 Uber Technologies, Inc.
//
// Permission is hereby granted, free of charge, to any person obtaining a copy
// of this software and associated documentation files (the "Software"), to deal
// in the Software without restriction, including without limitation the rights
// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
// copies of the Software, and to permit persons to whom the Software is
// furnished to do so, subject to the following conditions:
//
// The above copyright notice and this permission notice shall be included in
// all copies or substantial portions of the Software.
//
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
// THE SOFTWARE.
package remote
import (
"fmt"
"net/url"
"sync"
"time"
"github.com/uber/jaeger-client-go/internal/baggage"
thrift "github.com/uber/jaeger-client-go/thrift-gen/baggage"
"github.com/uber/jaeger-client-go/utils"
)
type httpBaggageRestrictionManagerProxy struct {
url string
}
func newHTTPBaggageRestrictionManagerProxy(hostPort, serviceName string) *httpBaggageRestrictionManagerProxy {
v := url.Values{}
v.Set("service", serviceName)
return &httpBaggageRestrictionManagerProxy{
url: fmt.Sprintf("http://%s/baggageRestrictions?%s", hostPort, v.Encode()),
}
}
func (s *httpBaggageRestrictionManagerProxy) GetBaggageRestrictions(serviceName string) ([]*thrift.BaggageRestriction, error) {
var out []*thrift.BaggageRestriction
if err := utils.GetJSON(s.url, &out); err != nil {
return nil, err
}
return out, nil
}
// RestrictionManager manages baggage restrictions by retrieving baggage restrictions from agent
type RestrictionManager struct {
options
mux sync.RWMutex
serviceName string
restrictions map[string]*baggage.Restriction
thriftProxy thrift.BaggageRestrictionManager
pollStopped sync.WaitGroup
stopPoll chan struct{}
invalidRestriction *baggage.Restriction
validRestriction *baggage.Restriction
// Determines if the manager has successfully retrieved baggage restrictions from agent
initialized bool
}
// NewRestrictionManager returns a BaggageRestrictionManager that polls the agent for the latest
// baggage restrictions.
func NewRestrictionManager(serviceName string, options ...Option) *RestrictionManager {
// TODO there is a developing use case where a single tracer can generate traces on behalf of many services.
// restrictionsMap will need to exist per service
opts := applyOptions(options...)
m := &RestrictionManager{
serviceName: serviceName,
options: opts,
restrictions: make(map[string]*baggage.Restriction),
thriftProxy: newHTTPBaggageRestrictionManagerProxy(opts.hostPort, serviceName),
stopPoll: make(chan struct{}),
invalidRestriction: baggage.NewRestriction(false, 0),
validRestriction: baggage.NewRestriction(true, defaultMaxValueLength),
}
m.pollStopped.Add(1)
go m.pollManager()
return m
}
// isReady returns true if the manager has retrieved baggage restrictions from the remote source.
func (m *RestrictionManager) isReady() bool {
m.mux.RLock()
defer m.mux.RUnlock()
return m.initialized
}
// GetRestriction implements RestrictionManager#GetRestriction.
func (m *RestrictionManager) GetRestriction(key string) *baggage.Restriction {
m.mux.RLock()
defer m.mux.RUnlock()
if !m.initialized {
if m.denyBaggageOnInitializationFailure {
return m.invalidRestriction
}
return m.validRestriction
}
if restriction, ok := m.restrictions[key]; ok {
return restriction
}
return m.invalidRestriction
}
// Close stops remote polling and closes the RemoteRestrictionManager.
func (m *RestrictionManager) Close() error {
close(m.stopPoll)
m.pollStopped.Wait()
return nil
}
func (m *RestrictionManager) pollManager() {
defer m.pollStopped.Done()
// attempt to initialize baggage restrictions
if err := m.updateRestrictions(); err != nil {
m.logger.Error(fmt.Sprintf("Failed to initialize baggage restrictions: %s", err.Error()))
}
ticker := time.NewTicker(m.refreshInterval)
defer ticker.Stop()
for {
select {
case <-ticker.C:
if err := m.updateRestrictions(); err != nil {
m.logger.Error(fmt.Sprintf("Failed to update baggage restrictions: %s", err.Error()))
}
case <-m.stopPoll:
return
}
}
}
func (m *RestrictionManager) updateRestrictions() error {
restrictions, err := m.thriftProxy.GetBaggageRestrictions(m.serviceName)
if err != nil {
m.metrics.BaggageRestrictionsUpdateFailure.Inc(1)
return err
}
newRestrictions := m.parseRestrictions(restrictions)
m.metrics.BaggageRestrictionsUpdateSuccess.Inc(1)
m.mux.Lock()
defer m.mux.Unlock()
m.initialized = true
m.restrictions = newRestrictions
return nil
}
func (m *RestrictionManager) parseRestrictions(restrictions []*thrift.BaggageRestriction) map[string]*baggage.Restriction {
setters := make(map[string]*baggage.Restriction, len(restrictions))
for _, restriction := range restrictions {
setters[restriction.BaggageKey] = baggage.NewRestriction(true, int(restriction.MaxValueLength))
}
return setters
}
| vendor/github.com/uber/jaeger-client-go/internal/baggage/remote/restriction_manager.go | 0 | https://github.com/grafana/grafana/commit/34da0711abf93fa54376a21452660d2a9f4545df | [
0.000383886945201084,
0.0001850984408520162,
0.00016418857558164746,
0.0001698453415883705,
0.00005041652548243292
] |
{
"id": 1,
"code_window": [
"\tvar macroError error\n",
"\n",
"\tsql = replaceAllStringSubmatchFunc(rExp, sql, func(groups []string) string {\n",
"\t\tres, err := m.evaluateMacro(groups[1], groups[2:])\n",
"\t\tif err != nil && macroError == nil {\n",
"\t\t\tmacroError = err\n",
"\t\t\treturn \"macro_error()\"\n"
],
"labels": [
"keep",
"keep",
"keep",
"replace",
"keep",
"keep",
"keep"
],
"after_edit": [
"\t\tres, err := m.evaluateMacro(groups[1], strings.Split(groups[2], \",\"))\n"
],
"file_path": "pkg/tsdb/mysql/macros.go",
"type": "replace",
"edit_start_line_idx": 27
} | <query-editor-row query-ctrl="ctrl" can-collapse="false">
<div class="gf-form-inline">
<div class="gf-form gf-form--grow">
<code-editor content="ctrl.target.rawSql" datasource="ctrl.datasource" on-change="ctrl.panelCtrl.refresh()" data-mode="sql">
</code-editor>
</div>
</div>
<div class="gf-form-inline">
<div class="gf-form">
<label class="gf-form-label query-keyword">Format as</label>
<div class="gf-form-select-wrapper">
<select class="gf-form-input gf-size-auto" ng-model="ctrl.target.format" ng-options="f.value as f.text for f in ctrl.formats" ng-change="ctrl.refresh()"></select>
</div>
</div>
<div class="gf-form">
<label class="gf-form-label query-keyword" ng-click="ctrl.showHelp = !ctrl.showHelp">
Show Help
<i class="fa fa-caret-down" ng-show="ctrl.showHelp"></i>
<i class="fa fa-caret-right" ng-hide="ctrl.showHelp"></i>
</label>
</div>
<div class="gf-form" ng-show="ctrl.lastQueryMeta">
<label class="gf-form-label query-keyword" ng-click="ctrl.showLastQuerySQL = !ctrl.showLastQuerySQL">
Generated SQL
<i class="fa fa-caret-down" ng-show="ctrl.showLastQuerySQL"></i>
<i class="fa fa-caret-right" ng-hide="ctrl.showLastQuerySQL"></i>
</label>
</div>
<div class="gf-form gf-form--grow">
<div class="gf-form-label gf-form-label--grow"></div>
</div>
</div>
<div class="gf-form" ng-show="ctrl.showLastQuerySQL">
<pre class="gf-form-pre">{{ctrl.lastQueryMeta.sql}}</pre>
</div>
<div class="gf-form" ng-show="ctrl.showHelp">
<pre class="gf-form-pre alert alert-info">Time series:
- return column named time_sec (UTC in seconds), use UNIX_TIMESTAMP(column)
- return column named value for the time point value
- return column named metric to represent the series name
Table:
- return any set of columns
Macros:
- $__time(column) -> UNIX_TIMESTAMP(column) as time_sec
- $__timeFilter(column) -> UNIX_TIMESTAMP(time_date_time) ≥ 1492750877 AND UNIX_TIMESTAMP(time_date_time) ≤ 1492750877
- $__unixEpochFilter(column) -> time_unix_epoch > 1492750877 AND time_unix_epoch < 1492750877
- $__timeGroup(column,'5m') -> (extract(epoch from "dateColumn")/extract(epoch from '5m'::interval))::int
Or build your own conditionals using these macros which just return the values:
- $__timeFrom() -> FROM_UNIXTIME(1492750877)
- $__timeTo() -> FROM_UNIXTIME(1492750877)
- $__unixEpochFrom() -> 1492750877
- $__unixEpochTo() -> 1492750877
</pre>
</div>
</div>
<div class="gf-form" ng-show="ctrl.lastQueryError">
<pre class="gf-form-pre alert alert-error">{{ctrl.lastQueryError}}</pre>
</div>
</query-editor-row>
| public/app/plugins/datasource/mysql/partials/query.editor.html | 1 | https://github.com/grafana/grafana/commit/34da0711abf93fa54376a21452660d2a9f4545df | [
0.00017315174045506865,
0.0001700512511888519,
0.00016614710330031812,
0.0001697363768471405,
0.000002183325477744802
] |
{
"id": 1,
"code_window": [
"\tvar macroError error\n",
"\n",
"\tsql = replaceAllStringSubmatchFunc(rExp, sql, func(groups []string) string {\n",
"\t\tres, err := m.evaluateMacro(groups[1], groups[2:])\n",
"\t\tif err != nil && macroError == nil {\n",
"\t\t\tmacroError = err\n",
"\t\t\treturn \"macro_error()\"\n"
],
"labels": [
"keep",
"keep",
"keep",
"replace",
"keep",
"keep",
"keep"
],
"after_edit": [
"\t\tres, err := m.evaluateMacro(groups[1], strings.Split(groups[2], \",\"))\n"
],
"file_path": "pkg/tsdb/mysql/macros.go",
"type": "replace",
"edit_start_line_idx": 27
} | // Copyright 2010 The Go Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.
/*
Package html implements an HTML5-compliant tokenizer and parser.
Tokenization is done by creating a Tokenizer for an io.Reader r. It is the
caller's responsibility to ensure that r provides UTF-8 encoded HTML.
z := html.NewTokenizer(r)
Given a Tokenizer z, the HTML is tokenized by repeatedly calling z.Next(),
which parses the next token and returns its type, or an error:
for {
tt := z.Next()
if tt == html.ErrorToken {
// ...
return ...
}
// Process the current token.
}
There are two APIs for retrieving the current token. The high-level API is to
call Token; the low-level API is to call Text or TagName / TagAttr. Both APIs
allow optionally calling Raw after Next but before Token, Text, TagName, or
TagAttr. In EBNF notation, the valid call sequence per token is:
Next {Raw} [ Token | Text | TagName {TagAttr} ]
Token returns an independent data structure that completely describes a token.
Entities (such as "<") are unescaped, tag names and attribute keys are
lower-cased, and attributes are collected into a []Attribute. For example:
for {
if z.Next() == html.ErrorToken {
// Returning io.EOF indicates success.
return z.Err()
}
emitToken(z.Token())
}
The low-level API performs fewer allocations and copies, but the contents of
the []byte values returned by Text, TagName and TagAttr may change on the next
call to Next. For example, to extract an HTML page's anchor text:
depth := 0
for {
tt := z.Next()
switch tt {
case ErrorToken:
return z.Err()
case TextToken:
if depth > 0 {
// emitBytes should copy the []byte it receives,
// if it doesn't process it immediately.
emitBytes(z.Text())
}
case StartTagToken, EndTagToken:
tn, _ := z.TagName()
if len(tn) == 1 && tn[0] == 'a' {
if tt == StartTagToken {
depth++
} else {
depth--
}
}
}
}
Parsing is done by calling Parse with an io.Reader, which returns the root of
the parse tree (the document element) as a *Node. It is the caller's
responsibility to ensure that the Reader provides UTF-8 encoded HTML. For
example, to process each anchor node in depth-first order:
doc, err := html.Parse(r)
if err != nil {
// ...
}
var f func(*html.Node)
f = func(n *html.Node) {
if n.Type == html.ElementNode && n.Data == "a" {
// Do something with n...
}
for c := n.FirstChild; c != nil; c = c.NextSibling {
f(c)
}
}
f(doc)
The relevant specifications include:
https://html.spec.whatwg.org/multipage/syntax.html and
https://html.spec.whatwg.org/multipage/syntax.html#tokenization
*/
package html // import "golang.org/x/net/html"
// The tokenization algorithm implemented by this package is not a line-by-line
// transliteration of the relatively verbose state-machine in the WHATWG
// specification. A more direct approach is used instead, where the program
// counter implies the state, such as whether it is tokenizing a tag or a text
// node. Specification compliance is verified by checking expected and actual
// outputs over a test suite rather than aiming for algorithmic fidelity.
// TODO(nigeltao): Does a DOM API belong in this package or a separate one?
// TODO(nigeltao): How does parsing interact with a JavaScript engine?
| vendor/golang.org/x/net/html/doc.go | 0 | https://github.com/grafana/grafana/commit/34da0711abf93fa54376a21452660d2a9f4545df | [
0.00017552016652189195,
0.0001691081706667319,
0.00016164070984814316,
0.00016719073755666614,
0.000004926624569634441
] |
{
"id": 1,
"code_window": [
"\tvar macroError error\n",
"\n",
"\tsql = replaceAllStringSubmatchFunc(rExp, sql, func(groups []string) string {\n",
"\t\tres, err := m.evaluateMacro(groups[1], groups[2:])\n",
"\t\tif err != nil && macroError == nil {\n",
"\t\t\tmacroError = err\n",
"\t\t\treturn \"macro_error()\"\n"
],
"labels": [
"keep",
"keep",
"keep",
"replace",
"keep",
"keep",
"keep"
],
"after_edit": [
"\t\tres, err := m.evaluateMacro(groups[1], strings.Split(groups[2], \",\"))\n"
],
"file_path": "pkg/tsdb/mysql/macros.go",
"type": "replace",
"edit_start_line_idx": 27
} | foo.[abc, def] | vendor/github.com/jmespath/go-jmespath/fuzz/corpus/expr-552 | 0 | https://github.com/grafana/grafana/commit/34da0711abf93fa54376a21452660d2a9f4545df | [
0.0001784177147783339,
0.0001784177147783339,
0.0001784177147783339,
0.0001784177147783339,
0
] |
{
"id": 1,
"code_window": [
"\tvar macroError error\n",
"\n",
"\tsql = replaceAllStringSubmatchFunc(rExp, sql, func(groups []string) string {\n",
"\t\tres, err := m.evaluateMacro(groups[1], groups[2:])\n",
"\t\tif err != nil && macroError == nil {\n",
"\t\t\tmacroError = err\n",
"\t\t\treturn \"macro_error()\"\n"
],
"labels": [
"keep",
"keep",
"keep",
"replace",
"keep",
"keep",
"keep"
],
"after_edit": [
"\t\tres, err := m.evaluateMacro(groups[1], strings.Split(groups[2], \",\"))\n"
],
"file_path": "pkg/tsdb/mysql/macros.go",
"type": "replace",
"edit_start_line_idx": 27
} | /*
* Copyright (c) 2013 Dave Collins <[email protected]>
*
* Permission to use, copy, modify, and distribute this software for any
* purpose with or without fee is hereby granted, provided that the above
* copyright notice and this permission notice appear in all copies.
*
* THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
* WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
* MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
* ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
* WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
* ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF
* OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
*/
package spew_test
import (
"fmt"
"reflect"
"testing"
"github.com/davecgh/go-spew/spew"
)
// custom type to test Stinger interface on non-pointer receiver.
type stringer string
// String implements the Stringer interface for testing invocation of custom
// stringers on types with non-pointer receivers.
func (s stringer) String() string {
return "stringer " + string(s)
}
// custom type to test Stinger interface on pointer receiver.
type pstringer string
// String implements the Stringer interface for testing invocation of custom
// stringers on types with only pointer receivers.
func (s *pstringer) String() string {
return "stringer " + string(*s)
}
// xref1 and xref2 are cross referencing structs for testing circular reference
// detection.
type xref1 struct {
ps2 *xref2
}
type xref2 struct {
ps1 *xref1
}
// indirCir1, indirCir2, and indirCir3 are used to generate an indirect circular
// reference for testing detection.
type indirCir1 struct {
ps2 *indirCir2
}
type indirCir2 struct {
ps3 *indirCir3
}
type indirCir3 struct {
ps1 *indirCir1
}
// embed is used to test embedded structures.
type embed struct {
a string
}
// embedwrap is used to test embedded structures.
type embedwrap struct {
*embed
e *embed
}
// panicer is used to intentionally cause a panic for testing spew properly
// handles them
type panicer int
func (p panicer) String() string {
panic("test panic")
}
// customError is used to test custom error interface invocation.
type customError int
func (e customError) Error() string {
return fmt.Sprintf("error: %d", int(e))
}
// stringizeWants converts a slice of wanted test output into a format suitable
// for a test error message.
func stringizeWants(wants []string) string {
s := ""
for i, want := range wants {
if i > 0 {
s += fmt.Sprintf("want%d: %s", i+1, want)
} else {
s += "want: " + want
}
}
return s
}
// testFailed returns whether or not a test failed by checking if the result
// of the test is in the slice of wanted strings.
func testFailed(result string, wants []string) bool {
for _, want := range wants {
if result == want {
return false
}
}
return true
}
type sortableStruct struct {
x int
}
func (ss sortableStruct) String() string {
return fmt.Sprintf("ss.%d", ss.x)
}
type unsortableStruct struct {
x int
}
type sortTestCase struct {
input []reflect.Value
expected []reflect.Value
}
func helpTestSortValues(tests []sortTestCase, cs *spew.ConfigState, t *testing.T) {
getInterfaces := func(values []reflect.Value) []interface{} {
interfaces := []interface{}{}
for _, v := range values {
interfaces = append(interfaces, v.Interface())
}
return interfaces
}
for _, test := range tests {
spew.SortValues(test.input, cs)
// reflect.DeepEqual cannot really make sense of reflect.Value,
// probably because of all the pointer tricks. For instance,
// v(2.0) != v(2.0) on a 32-bits system. Turn them into interface{}
// instead.
input := getInterfaces(test.input)
expected := getInterfaces(test.expected)
if !reflect.DeepEqual(input, expected) {
t.Errorf("Sort mismatch:\n %v != %v", input, expected)
}
}
}
// TestSortValues ensures the sort functionality for reflect.Value based sorting
// works as intended.
func TestSortValues(t *testing.T) {
v := reflect.ValueOf
a := v("a")
b := v("b")
c := v("c")
embedA := v(embed{"a"})
embedB := v(embed{"b"})
embedC := v(embed{"c"})
tests := []sortTestCase{
// No values.
{
[]reflect.Value{},
[]reflect.Value{},
},
// Bools.
{
[]reflect.Value{v(false), v(true), v(false)},
[]reflect.Value{v(false), v(false), v(true)},
},
// Ints.
{
[]reflect.Value{v(2), v(1), v(3)},
[]reflect.Value{v(1), v(2), v(3)},
},
// Uints.
{
[]reflect.Value{v(uint8(2)), v(uint8(1)), v(uint8(3))},
[]reflect.Value{v(uint8(1)), v(uint8(2)), v(uint8(3))},
},
// Floats.
{
[]reflect.Value{v(2.0), v(1.0), v(3.0)},
[]reflect.Value{v(1.0), v(2.0), v(3.0)},
},
// Strings.
{
[]reflect.Value{b, a, c},
[]reflect.Value{a, b, c},
},
// Array
{
[]reflect.Value{v([3]int{3, 2, 1}), v([3]int{1, 3, 2}), v([3]int{1, 2, 3})},
[]reflect.Value{v([3]int{1, 2, 3}), v([3]int{1, 3, 2}), v([3]int{3, 2, 1})},
},
// Uintptrs.
{
[]reflect.Value{v(uintptr(2)), v(uintptr(1)), v(uintptr(3))},
[]reflect.Value{v(uintptr(1)), v(uintptr(2)), v(uintptr(3))},
},
// SortableStructs.
{
// Note: not sorted - DisableMethods is set.
[]reflect.Value{v(sortableStruct{2}), v(sortableStruct{1}), v(sortableStruct{3})},
[]reflect.Value{v(sortableStruct{2}), v(sortableStruct{1}), v(sortableStruct{3})},
},
// UnsortableStructs.
{
// Note: not sorted - SpewKeys is false.
[]reflect.Value{v(unsortableStruct{2}), v(unsortableStruct{1}), v(unsortableStruct{3})},
[]reflect.Value{v(unsortableStruct{2}), v(unsortableStruct{1}), v(unsortableStruct{3})},
},
// Invalid.
{
[]reflect.Value{embedB, embedA, embedC},
[]reflect.Value{embedB, embedA, embedC},
},
}
cs := spew.ConfigState{DisableMethods: true, SpewKeys: false}
helpTestSortValues(tests, &cs, t)
}
// TestSortValuesWithMethods ensures the sort functionality for reflect.Value
// based sorting works as intended when using string methods.
func TestSortValuesWithMethods(t *testing.T) {
v := reflect.ValueOf
a := v("a")
b := v("b")
c := v("c")
tests := []sortTestCase{
// Ints.
{
[]reflect.Value{v(2), v(1), v(3)},
[]reflect.Value{v(1), v(2), v(3)},
},
// Strings.
{
[]reflect.Value{b, a, c},
[]reflect.Value{a, b, c},
},
// SortableStructs.
{
[]reflect.Value{v(sortableStruct{2}), v(sortableStruct{1}), v(sortableStruct{3})},
[]reflect.Value{v(sortableStruct{1}), v(sortableStruct{2}), v(sortableStruct{3})},
},
// UnsortableStructs.
{
// Note: not sorted - SpewKeys is false.
[]reflect.Value{v(unsortableStruct{2}), v(unsortableStruct{1}), v(unsortableStruct{3})},
[]reflect.Value{v(unsortableStruct{2}), v(unsortableStruct{1}), v(unsortableStruct{3})},
},
}
cs := spew.ConfigState{DisableMethods: false, SpewKeys: false}
helpTestSortValues(tests, &cs, t)
}
// TestSortValuesWithSpew ensures the sort functionality for reflect.Value
// based sorting works as intended when using spew to stringify keys.
func TestSortValuesWithSpew(t *testing.T) {
v := reflect.ValueOf
a := v("a")
b := v("b")
c := v("c")
tests := []sortTestCase{
// Ints.
{
[]reflect.Value{v(2), v(1), v(3)},
[]reflect.Value{v(1), v(2), v(3)},
},
// Strings.
{
[]reflect.Value{b, a, c},
[]reflect.Value{a, b, c},
},
// SortableStructs.
{
[]reflect.Value{v(sortableStruct{2}), v(sortableStruct{1}), v(sortableStruct{3})},
[]reflect.Value{v(sortableStruct{1}), v(sortableStruct{2}), v(sortableStruct{3})},
},
// UnsortableStructs.
{
[]reflect.Value{v(unsortableStruct{2}), v(unsortableStruct{1}), v(unsortableStruct{3})},
[]reflect.Value{v(unsortableStruct{1}), v(unsortableStruct{2}), v(unsortableStruct{3})},
},
}
cs := spew.ConfigState{DisableMethods: true, SpewKeys: true}
helpTestSortValues(tests, &cs, t)
}
| vendor/github.com/davecgh/go-spew/spew/common_test.go | 0 | https://github.com/grafana/grafana/commit/34da0711abf93fa54376a21452660d2a9f4545df | [
0.0011495721992105246,
0.0002509068581275642,
0.00016236644296441227,
0.0001744153705658391,
0.00023623199376743287
] |
{
"id": 2,
"code_window": [
"\tcase \"__timeFrom\":\n",
"\t\treturn fmt.Sprintf(\"FROM_UNIXTIME(%d)\", uint64(m.TimeRange.GetFromAsMsEpoch()/1000)), nil\n",
"\tcase \"__timeTo\":\n",
"\t\treturn fmt.Sprintf(\"FROM_UNIXTIME(%d)\", uint64(m.TimeRange.GetToAsMsEpoch()/1000)), nil\n",
"\tcase \"__unixEpochFilter\":\n",
"\t\tif len(args) == 0 {\n",
"\t\t\treturn \"\", fmt.Errorf(\"missing time column argument for macro %v\", name)\n"
],
"labels": [
"keep",
"keep",
"keep",
"add",
"keep",
"keep",
"keep"
],
"after_edit": [
"\tcase \"__timeGroup\":\n",
"\t\tif len(args) != 2 {\n",
"\t\t\treturn \"\", fmt.Errorf(\"macro %v needs time column and interval\", name)\n",
"\t\t}\n",
"\t\tinterval, err := time.ParseDuration(strings.Trim(args[1], `'\" `))\n",
"\t\tif err != nil {\n",
"\t\t\treturn \"\", fmt.Errorf(\"error parsing interval %v\", args[1])\n",
"\t\t}\n",
"\t\treturn fmt.Sprintf(\"cast(cast(UNIX_TIMESTAMP(%s)/(%.0f) as signed)*%.0f as signed)\", args[0], interval.Seconds(), interval.Seconds()), nil\n"
],
"file_path": "pkg/tsdb/mysql/macros.go",
"type": "add",
"edit_start_line_idx": 75
} | <query-editor-row query-ctrl="ctrl" can-collapse="false">
<div class="gf-form-inline">
<div class="gf-form gf-form--grow">
<code-editor content="ctrl.target.rawSql" datasource="ctrl.datasource" on-change="ctrl.panelCtrl.refresh()" data-mode="sql">
</code-editor>
</div>
</div>
<div class="gf-form-inline">
<div class="gf-form">
<label class="gf-form-label query-keyword">Format as</label>
<div class="gf-form-select-wrapper">
<select class="gf-form-input gf-size-auto" ng-model="ctrl.target.format" ng-options="f.value as f.text for f in ctrl.formats" ng-change="ctrl.refresh()"></select>
</div>
</div>
<div class="gf-form">
<label class="gf-form-label query-keyword" ng-click="ctrl.showHelp = !ctrl.showHelp">
Show Help
<i class="fa fa-caret-down" ng-show="ctrl.showHelp"></i>
<i class="fa fa-caret-right" ng-hide="ctrl.showHelp"></i>
</label>
</div>
<div class="gf-form" ng-show="ctrl.lastQueryMeta">
<label class="gf-form-label query-keyword" ng-click="ctrl.showLastQuerySQL = !ctrl.showLastQuerySQL">
Generated SQL
<i class="fa fa-caret-down" ng-show="ctrl.showLastQuerySQL"></i>
<i class="fa fa-caret-right" ng-hide="ctrl.showLastQuerySQL"></i>
</label>
</div>
<div class="gf-form gf-form--grow">
<div class="gf-form-label gf-form-label--grow"></div>
</div>
</div>
<div class="gf-form" ng-show="ctrl.showLastQuerySQL">
<pre class="gf-form-pre">{{ctrl.lastQueryMeta.sql}}</pre>
</div>
<div class="gf-form" ng-show="ctrl.showHelp">
<pre class="gf-form-pre alert alert-info">Time series:
- return column named <i>time</i> (UTC in seconds or timestamp)
- return column(s) with numeric datatype as values
- (Optional: return column named <i>metric</i> to represent the series name. If no column named metric is found the column name of the value column is used as series name)
Table:
- return any set of columns
Macros:
- $__time(column) -> column as "time"
- $__timeEpoch -> extract(epoch from column) as "time"
- $__timeFilter(column) -> column ≥ to_timestamp(1492750877) AND column ≤ to_timestamp(1492750877)
- $__unixEpochFilter(column) -> column > 1492750877 AND column < 1492750877
To group by time use $__timeGroup:
-> (extract(epoch from column)/extract(epoch from column::interval))::int
Example of group by and order by with $__timeGroup:
SELECT
min(date_time_col) AS time_sec,
sum(value_double) as value
FROM yourtable
group by $__timeGroup(date_time_col, '1h')
order by $__timeGroup(date_time_col, '1h') ASC
Or build your own conditionals using these macros which just return the values:
- $__timeFrom() -> to_timestamp(1492750877)
- $__timeTo() -> to_timestamp(1492750877)
- $__unixEpochFrom() -> 1492750877
- $__unixEpochTo() -> 1492750877
</pre>
</div>
</div>
<div class="gf-form" ng-show="ctrl.lastQueryError">
<pre class="gf-form-pre alert alert-error">{{ctrl.lastQueryError}}</pre>
</div>
</query-editor-row>
| public/app/plugins/datasource/postgres/partials/query.editor.html | 1 | https://github.com/grafana/grafana/commit/34da0711abf93fa54376a21452660d2a9f4545df | [
0.01586439460515976,
0.002507910830900073,
0.00016908056568354368,
0.00017170993669424206,
0.005137499887496233
] |
{
"id": 2,
"code_window": [
"\tcase \"__timeFrom\":\n",
"\t\treturn fmt.Sprintf(\"FROM_UNIXTIME(%d)\", uint64(m.TimeRange.GetFromAsMsEpoch()/1000)), nil\n",
"\tcase \"__timeTo\":\n",
"\t\treturn fmt.Sprintf(\"FROM_UNIXTIME(%d)\", uint64(m.TimeRange.GetToAsMsEpoch()/1000)), nil\n",
"\tcase \"__unixEpochFilter\":\n",
"\t\tif len(args) == 0 {\n",
"\t\t\treturn \"\", fmt.Errorf(\"missing time column argument for macro %v\", name)\n"
],
"labels": [
"keep",
"keep",
"keep",
"add",
"keep",
"keep",
"keep"
],
"after_edit": [
"\tcase \"__timeGroup\":\n",
"\t\tif len(args) != 2 {\n",
"\t\t\treturn \"\", fmt.Errorf(\"macro %v needs time column and interval\", name)\n",
"\t\t}\n",
"\t\tinterval, err := time.ParseDuration(strings.Trim(args[1], `'\" `))\n",
"\t\tif err != nil {\n",
"\t\t\treturn \"\", fmt.Errorf(\"error parsing interval %v\", args[1])\n",
"\t\t}\n",
"\t\treturn fmt.Sprintf(\"cast(cast(UNIX_TIMESTAMP(%s)/(%.0f) as signed)*%.0f as signed)\", args[0], interval.Seconds(), interval.Seconds()), nil\n"
],
"file_path": "pkg/tsdb/mysql/macros.go",
"type": "add",
"edit_start_line_idx": 75
} | // +build codegen
package api
import (
"bytes"
"fmt"
"text/template"
)
// A ShapeValidationType is the type of validation that a shape needs
type ShapeValidationType int
const (
// ShapeValidationRequired states the shape must be set
ShapeValidationRequired = iota
// ShapeValidationMinVal states the shape must have at least a number of
// elements, or for numbers a minimum value
ShapeValidationMinVal
// ShapeValidationNested states the shape has nested values that need
// to be validated
ShapeValidationNested
)
// A ShapeValidation contains information about a shape and the type of validation
// that is needed
type ShapeValidation struct {
// Name of the shape to be validated
Name string
// Reference to the shape within the context the shape is referenced
Ref *ShapeRef
// Type of validation needed
Type ShapeValidationType
}
var validationGoCodeTmpls = template.Must(template.New("validationGoCodeTmpls").Parse(`
{{ define "requiredValue" -}}
if s.{{ .Name }} == nil {
invalidParams.Add(request.NewErrParamRequired("{{ .Name }}"))
}
{{- end }}
{{ define "minLen" -}}
if s.{{ .Name }} != nil && len(s.{{ .Name }}) < {{ .Ref.Shape.Min }} {
invalidParams.Add(request.NewErrParamMinLen("{{ .Name }}", {{ .Ref.Shape.Min }}))
}
{{- end }}
{{ define "minLenString" -}}
if s.{{ .Name }} != nil && len(*s.{{ .Name }}) < {{ .Ref.Shape.Min }} {
invalidParams.Add(request.NewErrParamMinLen("{{ .Name }}", {{ .Ref.Shape.Min }}))
}
{{- end }}
{{ define "minVal" -}}
if s.{{ .Name }} != nil && *s.{{ .Name }} < {{ .Ref.Shape.Min }} {
invalidParams.Add(request.NewErrParamMinValue("{{ .Name }}", {{ .Ref.Shape.Min }}))
}
{{- end }}
{{ define "nestedMapList" -}}
if s.{{ .Name }} != nil {
for i, v := range s.{{ .Name }} {
if v == nil { continue }
if err := v.Validate(); err != nil {
invalidParams.AddNested(fmt.Sprintf("%s[%v]", "{{ .Name }}", i), err.(request.ErrInvalidParams))
}
}
}
{{- end }}
{{ define "nestedStruct" -}}
if s.{{ .Name }} != nil {
if err := s.{{ .Name }}.Validate(); err != nil {
invalidParams.AddNested("{{ .Name }}", err.(request.ErrInvalidParams))
}
}
{{- end }}
`))
// GoCode returns the generated Go code for the Shape with its validation type.
func (sv ShapeValidation) GoCode() string {
var err error
w := &bytes.Buffer{}
switch sv.Type {
case ShapeValidationRequired:
err = validationGoCodeTmpls.ExecuteTemplate(w, "requiredValue", sv)
case ShapeValidationMinVal:
switch sv.Ref.Shape.Type {
case "list", "map", "blob":
err = validationGoCodeTmpls.ExecuteTemplate(w, "minLen", sv)
case "string":
err = validationGoCodeTmpls.ExecuteTemplate(w, "minLenString", sv)
case "integer", "long", "float", "double":
err = validationGoCodeTmpls.ExecuteTemplate(w, "minVal", sv)
default:
panic(fmt.Sprintf("ShapeValidation.GoCode, %s's type %s, no min value handling",
sv.Name, sv.Ref.Shape.Type))
}
case ShapeValidationNested:
switch sv.Ref.Shape.Type {
case "map", "list":
err = validationGoCodeTmpls.ExecuteTemplate(w, "nestedMapList", sv)
default:
err = validationGoCodeTmpls.ExecuteTemplate(w, "nestedStruct", sv)
}
default:
panic(fmt.Sprintf("ShapeValidation.GoCode, %s's type %d, unknown validation type",
sv.Name, sv.Type))
}
if err != nil {
panic(fmt.Sprintf("ShapeValidation.GoCode failed, err: %v", err))
}
return w.String()
}
// A ShapeValidations is a collection of shape validations needed nested within
// a parent shape
type ShapeValidations []ShapeValidation
var validateShapeTmpl = template.Must(template.New("ValidateShape").Parse(`
// Validate inspects the fields of the type to determine if they are valid.
func (s *{{ .Shape.ShapeName }}) Validate() error {
invalidParams := request.ErrInvalidParams{Context: "{{ .Shape.ShapeName }}"}
{{ range $_, $v := .Validations -}}
{{ $v.GoCode }}
{{ end }}
if invalidParams.Len() > 0 {
return invalidParams
}
return nil
}
`))
// GoCode generates the Go code needed to perform validations for the
// shape and its nested fields.
func (vs ShapeValidations) GoCode(shape *Shape) string {
buf := &bytes.Buffer{}
validateShapeTmpl.Execute(buf, map[string]interface{}{
"Shape": shape,
"Validations": vs,
})
return buf.String()
}
// Has returns true or false if the ShapeValidations already contains the
// the reference and validation type.
func (vs ShapeValidations) Has(ref *ShapeRef, typ ShapeValidationType) bool {
for _, v := range vs {
if v.Ref == ref && v.Type == typ {
return true
}
}
return false
}
| vendor/github.com/aws/aws-sdk-go/private/model/api/shape_validation.go | 0 | https://github.com/grafana/grafana/commit/34da0711abf93fa54376a21452660d2a9f4545df | [
0.00018070300575345755,
0.00017027136345859617,
0.00016363717440981418,
0.0001693920057732612,
0.000004172751687292475
] |
{
"id": 2,
"code_window": [
"\tcase \"__timeFrom\":\n",
"\t\treturn fmt.Sprintf(\"FROM_UNIXTIME(%d)\", uint64(m.TimeRange.GetFromAsMsEpoch()/1000)), nil\n",
"\tcase \"__timeTo\":\n",
"\t\treturn fmt.Sprintf(\"FROM_UNIXTIME(%d)\", uint64(m.TimeRange.GetToAsMsEpoch()/1000)), nil\n",
"\tcase \"__unixEpochFilter\":\n",
"\t\tif len(args) == 0 {\n",
"\t\t\treturn \"\", fmt.Errorf(\"missing time column argument for macro %v\", name)\n"
],
"labels": [
"keep",
"keep",
"keep",
"add",
"keep",
"keep",
"keep"
],
"after_edit": [
"\tcase \"__timeGroup\":\n",
"\t\tif len(args) != 2 {\n",
"\t\t\treturn \"\", fmt.Errorf(\"macro %v needs time column and interval\", name)\n",
"\t\t}\n",
"\t\tinterval, err := time.ParseDuration(strings.Trim(args[1], `'\" `))\n",
"\t\tif err != nil {\n",
"\t\t\treturn \"\", fmt.Errorf(\"error parsing interval %v\", args[1])\n",
"\t\t}\n",
"\t\treturn fmt.Sprintf(\"cast(cast(UNIX_TIMESTAMP(%s)/(%.0f) as signed)*%.0f as signed)\", args[0], interval.Seconds(), interval.Seconds()), nil\n"
],
"file_path": "pkg/tsdb/mysql/macros.go",
"type": "add",
"edit_start_line_idx": 75
} | // Copyright 2012 Aaron Jacobs. All Rights Reserved.
// Author: [email protected] (Aaron Jacobs)
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package oglematchers
import (
"errors"
"fmt"
"reflect"
)
// Return a matcher that matches non-nil pointers whose pointee matches the
// wrapped matcher.
func Pointee(m Matcher) Matcher {
return &pointeeMatcher{m}
}
type pointeeMatcher struct {
wrapped Matcher
}
func (m *pointeeMatcher) Matches(c interface{}) (err error) {
// Make sure the candidate is of the appropriate type.
cv := reflect.ValueOf(c)
if !cv.IsValid() || cv.Kind() != reflect.Ptr {
return NewFatalError("which is not a pointer")
}
// Make sure the candidate is non-nil.
if cv.IsNil() {
return NewFatalError("")
}
// Defer to the wrapped matcher. Fix up empty errors so that failure messages
// are more helpful than just printing a pointer for "Actual".
pointee := cv.Elem().Interface()
err = m.wrapped.Matches(pointee)
if err != nil && err.Error() == "" {
s := fmt.Sprintf("whose pointee is %v", pointee)
if _, ok := err.(*FatalError); ok {
err = NewFatalError(s)
} else {
err = errors.New(s)
}
}
return err
}
func (m *pointeeMatcher) Description() string {
return fmt.Sprintf("pointee(%s)", m.wrapped.Description())
}
| vendor/github.com/smartystreets/assertions/internal/oglematchers/pointee.go | 0 | https://github.com/grafana/grafana/commit/34da0711abf93fa54376a21452660d2a9f4545df | [
0.00020083863637410104,
0.0001776761346263811,
0.00016489758854731917,
0.00017614637908991426,
0.000011964862096647266
] |
{
"id": 2,
"code_window": [
"\tcase \"__timeFrom\":\n",
"\t\treturn fmt.Sprintf(\"FROM_UNIXTIME(%d)\", uint64(m.TimeRange.GetFromAsMsEpoch()/1000)), nil\n",
"\tcase \"__timeTo\":\n",
"\t\treturn fmt.Sprintf(\"FROM_UNIXTIME(%d)\", uint64(m.TimeRange.GetToAsMsEpoch()/1000)), nil\n",
"\tcase \"__unixEpochFilter\":\n",
"\t\tif len(args) == 0 {\n",
"\t\t\treturn \"\", fmt.Errorf(\"missing time column argument for macro %v\", name)\n"
],
"labels": [
"keep",
"keep",
"keep",
"add",
"keep",
"keep",
"keep"
],
"after_edit": [
"\tcase \"__timeGroup\":\n",
"\t\tif len(args) != 2 {\n",
"\t\t\treturn \"\", fmt.Errorf(\"macro %v needs time column and interval\", name)\n",
"\t\t}\n",
"\t\tinterval, err := time.ParseDuration(strings.Trim(args[1], `'\" `))\n",
"\t\tif err != nil {\n",
"\t\t\treturn \"\", fmt.Errorf(\"error parsing interval %v\", args[1])\n",
"\t\t}\n",
"\t\treturn fmt.Sprintf(\"cast(cast(UNIX_TIMESTAMP(%s)/(%.0f) as signed)*%.0f as signed)\", args[0], interval.Seconds(), interval.Seconds()), nil\n"
],
"file_path": "pkg/tsdb/mysql/macros.go",
"type": "add",
"edit_start_line_idx": 75
} | ##################### Grafana Configuration Example #####################
#
# Everything has defaults so you only need to uncomment things you want to
# change
# possible values : production, development
; app_mode = production
# instance name, defaults to HOSTNAME environment variable value or hostname if HOSTNAME var is empty
; instance_name = ${HOSTNAME}
#################################### Paths ####################################
[paths]
# Path to where grafana can store temp files, sessions, and the sqlite3 db (if that is used)
#
;data = /var/lib/grafana
#
# Directory where grafana can store logs
#
;logs = /var/log/grafana
#
# Directory where grafana will automatically scan and look for plugins
#
;plugins = /var/lib/grafana/plugins
#
#################################### Server ####################################
[server]
# Protocol (http, https, socket)
;protocol = http
# The ip address to bind to, empty will bind to all interfaces
;http_addr =
# The http port to use
;http_port = 3000
# The public facing domain name used to access grafana from a browser
;domain = localhost
# Redirect to correct domain if host header does not match domain
# Prevents DNS rebinding attacks
;enforce_domain = false
# The full public facing url you use in browser, used for redirects and emails
# If you use reverse proxy and sub path specify full url (with sub path)
;root_url = http://localhost:3000
# Log web requests
;router_logging = false
# the path relative working path
;static_root_path = public
# enable gzip
;enable_gzip = false
# https certs & key file
;cert_file =
;cert_key =
# Unix socket path
;socket =
#################################### Database ####################################
[database]
# You can configure the database connection by specifying type, host, name, user and password
# as separate properties or as one string using the url property.
# Either "mysql", "postgres" or "sqlite3", it's your choice
;type = sqlite3
;host = 127.0.0.1:3306
;name = grafana
;user = root
# If the password contains # or ; you have to wrap it with triple quotes. Ex """#password;"""
;password =
# Use either URL or the previous fields to configure the database
# Example: mysql://user:secret@host:port/database
;url =
# For "postgres" only, either "disable", "require" or "verify-full"
;ssl_mode = disable
# For "sqlite3" only, path relative to data_path setting
;path = grafana.db
# Max idle conn setting default is 2
;max_idle_conn = 2
# Max conn setting default is 0 (mean not set)
;max_open_conn =
#################################### Session ####################################
[session]
# Either "memory", "file", "redis", "mysql", "postgres", default is "file"
;provider = file
# Provider config options
# memory: not have any config yet
# file: session dir path, is relative to grafana data_path
# redis: config like redis server e.g. `addr=127.0.0.1:6379,pool_size=100,db=grafana`
# mysql: go-sql-driver/mysql dsn config string, e.g. `user:password@tcp(127.0.0.1:3306)/database_name`
# postgres: user=a password=b host=localhost port=5432 dbname=c sslmode=disable
;provider_config = sessions
# Session cookie name
;cookie_name = grafana_sess
# If you use session in https only, default is false
;cookie_secure = false
# Session life time, default is 86400
;session_life_time = 86400
#################################### Data proxy ###########################
[dataproxy]
# This enables data proxy logging, default is false
;logging = false
#################################### Analytics ####################################
[analytics]
# Server reporting, sends usage counters to stats.grafana.org every 24 hours.
# No ip addresses are being tracked, only simple counters to track
# running instances, dashboard and error counts. It is very helpful to us.
# Change this option to false to disable reporting.
;reporting_enabled = true
# Set to false to disable all checks to https://grafana.net
# for new versions (grafana itself and plugins), check is used
# in some UI views to notify that grafana or plugin update exists
# This option does not cause any auto updates, nor send any information
# only a GET request to http://grafana.com to get latest versions
;check_for_updates = true
# Google Analytics universal tracking code, only enabled if you specify an id here
;google_analytics_ua_id =
#################################### Security ####################################
[security]
# default admin user, created on startup
;admin_user = admin
# default admin password, can be changed before first start of grafana, or in profile settings
;admin_password = admin
# used for signing
;secret_key = SW2YcwTIb9zpOOhoPsMm
# Auto-login remember days
;login_remember_days = 7
;cookie_username = grafana_user
;cookie_remember_name = grafana_remember
# disable gravatar profile images
;disable_gravatar = false
# data source proxy whitelist (ip_or_domain:port separated by spaces)
;data_source_proxy_whitelist =
[snapshots]
# snapshot sharing options
;external_enabled = true
;external_snapshot_url = https://snapshots-origin.raintank.io
;external_snapshot_name = Publish to snapshot.raintank.io
# remove expired snapshot
;snapshot_remove_expired = true
# remove snapshots after 90 days
;snapshot_TTL_days = 90
#################################### Users ####################################
[users]
# disable user signup / registration
;allow_sign_up = true
# Allow non admin users to create organizations
;allow_org_create = true
# Set to true to automatically assign new users to the default organization (id 1)
;auto_assign_org = true
# Default role new users will be automatically assigned (if disabled above is set to true)
;auto_assign_org_role = Viewer
# Background text for the user field on the login page
;login_hint = email or username
# Default UI theme ("dark" or "light")
;default_theme = dark
# External user management, these options affect the organization users view
;external_manage_link_url =
;external_manage_link_name =
;external_manage_info =
[auth]
# Set to true to disable (hide) the login form, useful if you use OAuth, defaults to false
;disable_login_form = false
# Set to true to disable the signout link in the side menu. useful if you use auth.proxy, defaults to false
;disable_signout_menu = false
#################################### Anonymous Auth ##########################
[auth.anonymous]
# enable anonymous access
;enabled = false
# specify organization name that should be used for unauthenticated users
;org_name = Main Org.
# specify role for unauthenticated users
;org_role = Viewer
#################################### Github Auth ##########################
[auth.github]
;enabled = false
;allow_sign_up = true
;client_id = some_id
;client_secret = some_secret
;scopes = user:email,read:org
;auth_url = https://github.com/login/oauth/authorize
;token_url = https://github.com/login/oauth/access_token
;api_url = https://api.github.com/user
;team_ids =
;allowed_organizations =
#################################### Google Auth ##########################
[auth.google]
;enabled = false
;allow_sign_up = true
;client_id = some_client_id
;client_secret = some_client_secret
;scopes = https://www.googleapis.com/auth/userinfo.profile https://www.googleapis.com/auth/userinfo.email
;auth_url = https://accounts.google.com/o/oauth2/auth
;token_url = https://accounts.google.com/o/oauth2/token
;api_url = https://www.googleapis.com/oauth2/v1/userinfo
;allowed_domains =
#################################### Generic OAuth ##########################
[auth.generic_oauth]
;enabled = false
;name = OAuth
;allow_sign_up = true
;client_id = some_id
;client_secret = some_secret
;scopes = user:email,read:org
;auth_url = https://foo.bar/login/oauth/authorize
;token_url = https://foo.bar/login/oauth/access_token
;api_url = https://foo.bar/user
;team_ids =
;allowed_organizations =
#################################### Grafana.com Auth ####################
[auth.grafana_com]
;enabled = false
;allow_sign_up = true
;client_id = some_id
;client_secret = some_secret
;scopes = user:email
;allowed_organizations =
#################################### Auth Proxy ##########################
[auth.proxy]
;enabled = false
;header_name = X-WEBAUTH-USER
;header_property = username
;auto_sign_up = true
;ldap_sync_ttl = 60
;whitelist = 192.168.1.1, 192.168.2.1
#################################### Basic Auth ##########################
[auth.basic]
;enabled = true
#################################### Auth LDAP ##########################
[auth.ldap]
;enabled = false
;config_file = /etc/grafana/ldap.toml
;allow_sign_up = true
#################################### SMTP / Emailing ##########################
[smtp]
;enabled = false
;host = localhost:25
;user =
# If the password contains # or ; you have to wrap it with triple quotes. Ex """#password;"""
;password =
;cert_file =
;key_file =
;skip_verify = false
;from_address = [email protected]
;from_name = Grafana
# EHLO identity in SMTP dialog (defaults to instance_name)
;ehlo_identity = dashboard.example.com
[emails]
;welcome_email_on_sign_up = false
#################################### Logging ##########################
[log]
# Either "console", "file", "syslog". Default is console and file
# Use space to separate multiple modes, e.g. "console file"
;mode = console file
# Either "debug", "info", "warn", "error", "critical", default is "info"
;level = info
# optional settings to set different levels for specific loggers. Ex filters = sqlstore:debug
;filters =
# For "console" mode only
[log.console]
;level =
# log line format, valid options are text, console and json
;format = console
# For "file" mode only
[log.file]
;level =
# log line format, valid options are text, console and json
;format = text
# This enables automated log rotation (switch for the following options), default is true
;log_rotate = true
# Max line number of single file, default is 1000000
;max_lines = 1000000
# Max size shift of single file, default is 28 means 1 << 28, 256MB
;max_size_shift = 28
# Segment log daily, default is true
;daily_rotate = true
# Expired days of log file(delete after max days), default is 7
;max_days = 7
[log.syslog]
;level =
# log line format, valid options are text, console and json
;format = text
# Syslog network type and address. This can be udp, tcp, or unix. If left blank, the default unix endpoints will be used.
;network =
;address =
# Syslog facility. user, daemon and local0 through local7 are valid.
;facility =
# Syslog tag. By default, the process' argv[0] is used.
;tag =
;#################################### Dashboard JSON files ##########################
[dashboards.json]
;enabled = false
;path = /var/lib/grafana/dashboards
#################################### Alerting ############################
[alerting]
# Disable alerting engine & UI features
;enabled = true
# Makes it possible to turn off alert rule execution but alerting UI is visible
;execute_alerts = true
#################################### Internal Grafana Metrics ##########################
# Metrics available at HTTP API Url /api/metrics
[metrics]
# Disable / Enable internal metrics
;enabled = true
# Publish interval
;interval_seconds = 10
# Send internal metrics to Graphite
[metrics.graphite]
# Enable by setting the address setting (ex localhost:2003)
;address =
;prefix = prod.grafana.%(instance_name)s.
#################################### Distributed tracing ############
[tracing.jaeger]
# Enable by setting the address sending traces to jaeger (ex localhost:6831)
;address = localhost:6831
# Tag that will always be included in when creating new spans. ex (tag1:value1,tag2:value2)
;always_included_tag = tag1:value1
# Type specifies the type of the sampler: const, probabilistic, rateLimiting, or remote
;sampler_type = const
# jaeger samplerconfig param
# for "const" sampler, 0 or 1 for always false/true respectively
# for "probabilistic" sampler, a probability between 0 and 1
# for "rateLimiting" sampler, the number of spans per second
# for "remote" sampler, param is the same as for "probabilistic"
# and indicates the initial sampling rate before the actual one
# is received from the mothership
;sampler_param = 1
#################################### Grafana.com integration ##########################
# Url used to import dashboards directly from Grafana.com
[grafana_com]
;url = https://grafana.com
#################################### External image storage ##########################
[external_image_storage]
# Used for uploading images to public servers so they can be included in slack/email messages.
# you can choose between (s3, webdav, gcs)
;provider =
[external_image_storage.s3]
;bucket =
;region =
;path =
;access_key =
;secret_key =
[external_image_storage.webdav]
;url =
;public_url =
;username =
;password =
[external_image_storage.gcs]
;key_file =
;bucket =
| conf/sample.ini | 0 | https://github.com/grafana/grafana/commit/34da0711abf93fa54376a21452660d2a9f4545df | [
0.00024471181677654386,
0.00016988090646918863,
0.00015792730846442282,
0.00016856755246408284,
0.000012094385965610854
] |
{
"id": 3,
"code_window": [
"\t\t\tSo(sql, ShouldEqual, \"select FROM_UNIXTIME(18446744066914186738)\")\n",
"\t\t})\n",
"\n",
"\t\tConvey(\"interpolate __timeTo function\", func() {\n",
"\t\t\tsql, err := engine.Interpolate(timeRange, \"select $__timeTo(time_column)\")\n",
"\t\t\tSo(err, ShouldBeNil)\n",
"\n"
],
"labels": [
"keep",
"keep",
"add",
"keep",
"keep",
"keep",
"keep"
],
"after_edit": [
"\t\tConvey(\"interpolate __timeGroup function\", func() {\n",
"\n",
"\t\t\tsql, err := engine.Interpolate(timeRange, \"GROUP BY $__timeGroup(time_column,'5m')\")\n",
"\t\t\tSo(err, ShouldBeNil)\n",
"\n",
"\t\t\tSo(sql, ShouldEqual, \"GROUP BY cast(cast(UNIX_TIMESTAMP(time_column)/(300) as signed)*300 as signed)\")\n",
"\t\t})\n",
"\n"
],
"file_path": "pkg/tsdb/mysql/macros_test.go",
"type": "add",
"edit_start_line_idx": 42
} | <query-editor-row query-ctrl="ctrl" can-collapse="false">
<div class="gf-form-inline">
<div class="gf-form gf-form--grow">
<code-editor content="ctrl.target.rawSql" datasource="ctrl.datasource" on-change="ctrl.panelCtrl.refresh()" data-mode="sql">
</code-editor>
</div>
</div>
<div class="gf-form-inline">
<div class="gf-form">
<label class="gf-form-label query-keyword">Format as</label>
<div class="gf-form-select-wrapper">
<select class="gf-form-input gf-size-auto" ng-model="ctrl.target.format" ng-options="f.value as f.text for f in ctrl.formats" ng-change="ctrl.refresh()"></select>
</div>
</div>
<div class="gf-form">
<label class="gf-form-label query-keyword" ng-click="ctrl.showHelp = !ctrl.showHelp">
Show Help
<i class="fa fa-caret-down" ng-show="ctrl.showHelp"></i>
<i class="fa fa-caret-right" ng-hide="ctrl.showHelp"></i>
</label>
</div>
<div class="gf-form" ng-show="ctrl.lastQueryMeta">
<label class="gf-form-label query-keyword" ng-click="ctrl.showLastQuerySQL = !ctrl.showLastQuerySQL">
Generated SQL
<i class="fa fa-caret-down" ng-show="ctrl.showLastQuerySQL"></i>
<i class="fa fa-caret-right" ng-hide="ctrl.showLastQuerySQL"></i>
</label>
</div>
<div class="gf-form gf-form--grow">
<div class="gf-form-label gf-form-label--grow"></div>
</div>
</div>
<div class="gf-form" ng-show="ctrl.showLastQuerySQL">
<pre class="gf-form-pre">{{ctrl.lastQueryMeta.sql}}</pre>
</div>
<div class="gf-form" ng-show="ctrl.showHelp">
<pre class="gf-form-pre alert alert-info">Time series:
- return column named <i>time</i> (UTC in seconds or timestamp)
- return column(s) with numeric datatype as values
- (Optional: return column named <i>metric</i> to represent the series name. If no column named metric is found the column name of the value column is used as series name)
Table:
- return any set of columns
Macros:
- $__time(column) -> column as "time"
- $__timeEpoch -> extract(epoch from column) as "time"
- $__timeFilter(column) -> column ≥ to_timestamp(1492750877) AND column ≤ to_timestamp(1492750877)
- $__unixEpochFilter(column) -> column > 1492750877 AND column < 1492750877
To group by time use $__timeGroup:
-> (extract(epoch from column)/extract(epoch from column::interval))::int
Example of group by and order by with $__timeGroup:
SELECT
min(date_time_col) AS time_sec,
sum(value_double) as value
FROM yourtable
group by $__timeGroup(date_time_col, '1h')
order by $__timeGroup(date_time_col, '1h') ASC
Or build your own conditionals using these macros which just return the values:
- $__timeFrom() -> to_timestamp(1492750877)
- $__timeTo() -> to_timestamp(1492750877)
- $__unixEpochFrom() -> 1492750877
- $__unixEpochTo() -> 1492750877
</pre>
</div>
</div>
<div class="gf-form" ng-show="ctrl.lastQueryError">
<pre class="gf-form-pre alert alert-error">{{ctrl.lastQueryError}}</pre>
</div>
</query-editor-row>
| public/app/plugins/datasource/postgres/partials/query.editor.html | 1 | https://github.com/grafana/grafana/commit/34da0711abf93fa54376a21452660d2a9f4545df | [
0.0008668816881254315,
0.0002593037788756192,
0.00016314843378495425,
0.00017195170221384615,
0.00022974281455390155
] |
{
"id": 3,
"code_window": [
"\t\t\tSo(sql, ShouldEqual, \"select FROM_UNIXTIME(18446744066914186738)\")\n",
"\t\t})\n",
"\n",
"\t\tConvey(\"interpolate __timeTo function\", func() {\n",
"\t\t\tsql, err := engine.Interpolate(timeRange, \"select $__timeTo(time_column)\")\n",
"\t\t\tSo(err, ShouldBeNil)\n",
"\n"
],
"labels": [
"keep",
"keep",
"add",
"keep",
"keep",
"keep",
"keep"
],
"after_edit": [
"\t\tConvey(\"interpolate __timeGroup function\", func() {\n",
"\n",
"\t\t\tsql, err := engine.Interpolate(timeRange, \"GROUP BY $__timeGroup(time_column,'5m')\")\n",
"\t\t\tSo(err, ShouldBeNil)\n",
"\n",
"\t\t\tSo(sql, ShouldEqual, \"GROUP BY cast(cast(UNIX_TIMESTAMP(time_column)/(300) as signed)*300 as signed)\")\n",
"\t\t})\n",
"\n"
],
"file_path": "pkg/tsdb/mysql/macros_test.go",
"type": "add",
"edit_start_line_idx": 42
} | to_number(`false`) | vendor/github.com/jmespath/go-jmespath/fuzz/corpus/expr-171 | 0 | https://github.com/grafana/grafana/commit/34da0711abf93fa54376a21452660d2a9f4545df | [
0.00017521785048302263,
0.00017521785048302263,
0.00017521785048302263,
0.00017521785048302263,
0
] |
{
"id": 3,
"code_window": [
"\t\t\tSo(sql, ShouldEqual, \"select FROM_UNIXTIME(18446744066914186738)\")\n",
"\t\t})\n",
"\n",
"\t\tConvey(\"interpolate __timeTo function\", func() {\n",
"\t\t\tsql, err := engine.Interpolate(timeRange, \"select $__timeTo(time_column)\")\n",
"\t\t\tSo(err, ShouldBeNil)\n",
"\n"
],
"labels": [
"keep",
"keep",
"add",
"keep",
"keep",
"keep",
"keep"
],
"after_edit": [
"\t\tConvey(\"interpolate __timeGroup function\", func() {\n",
"\n",
"\t\t\tsql, err := engine.Interpolate(timeRange, \"GROUP BY $__timeGroup(time_column,'5m')\")\n",
"\t\t\tSo(err, ShouldBeNil)\n",
"\n",
"\t\t\tSo(sql, ShouldEqual, \"GROUP BY cast(cast(UNIX_TIMESTAMP(time_column)/(300) as signed)*300 as signed)\")\n",
"\t\t})\n",
"\n"
],
"file_path": "pkg/tsdb/mysql/macros_test.go",
"type": "add",
"edit_start_line_idx": 42
} | package dashdiffs
import (
"bytes"
"errors"
"fmt"
"html/template"
"sort"
diff "github.com/yudai/gojsondiff"
)
type ChangeType int
const (
ChangeNil ChangeType = iota
ChangeAdded
ChangeDeleted
ChangeOld
ChangeNew
ChangeUnchanged
)
var (
// changeTypeToSymbol is used for populating the terminating character in
// the diff
changeTypeToSymbol = map[ChangeType]string{
ChangeNil: "",
ChangeAdded: "+",
ChangeDeleted: "-",
ChangeOld: "-",
ChangeNew: "+",
}
// changeTypeToName is used for populating class names in the diff
changeTypeToName = map[ChangeType]string{
ChangeNil: "same",
ChangeAdded: "added",
ChangeDeleted: "deleted",
ChangeOld: "old",
ChangeNew: "new",
}
)
var (
// tplJSONDiffWrapper is the template that wraps a diff
tplJSONDiffWrapper = `{{ define "JSONDiffWrapper" -}}
{{ range $index, $element := . }}
{{ template "JSONDiffLine" $element }}
{{ end }}
{{ end }}`
// tplJSONDiffLine is the template that prints each line in a diff
tplJSONDiffLine = `{{ define "JSONDiffLine" -}}
<p id="l{{ .LineNum }}" class="diff-line diff-json-{{ cton .Change }}">
<span class="diff-line-number">
{{if .LeftLine }}{{ .LeftLine }}{{ end }}
</span>
<span class="diff-line-number">
{{if .RightLine }}{{ .RightLine }}{{ end }}
</span>
<span class="diff-value diff-indent-{{ .Indent }}" title="{{ .Text }}">
{{ .Text }}
</span>
<span class="diff-line-icon">{{ ctos .Change }}</span>
</p>
{{ end }}`
)
var diffTplFuncs = template.FuncMap{
"ctos": func(c ChangeType) string {
if symbol, ok := changeTypeToSymbol[c]; ok {
return symbol
}
return ""
},
"cton": func(c ChangeType) string {
if name, ok := changeTypeToName[c]; ok {
return name
}
return ""
},
}
// JSONLine contains the data required to render each line of the JSON diff
// and contains the data required to produce the tokens output in the basic
// diff.
type JSONLine struct {
LineNum int `json:"line"`
LeftLine int `json:"leftLine"`
RightLine int `json:"rightLine"`
Indent int `json:"indent"`
Text string `json:"text"`
Change ChangeType `json:"changeType"`
Key string `json:"key"`
Val interface{} `json:"value"`
}
func NewJSONFormatter(left interface{}) *JSONFormatter {
tpl := template.Must(template.New("JSONDiffWrapper").Funcs(diffTplFuncs).Parse(tplJSONDiffWrapper))
tpl = template.Must(tpl.New("JSONDiffLine").Funcs(diffTplFuncs).Parse(tplJSONDiffLine))
return &JSONFormatter{
left: left,
Lines: []*JSONLine{},
tpl: tpl,
path: []string{},
size: []int{},
lineCount: 0,
inArray: []bool{},
}
}
type JSONFormatter struct {
left interface{}
path []string
size []int
inArray []bool
lineCount int
leftLine int
rightLine int
line *AsciiLine
Lines []*JSONLine
tpl *template.Template
}
type AsciiLine struct {
// the type of change
change ChangeType
// the actual changes - no formatting
key string
val interface{}
// level of indentation for the current line
indent int
// buffer containing the fully formatted line
buffer *bytes.Buffer
}
func (f *JSONFormatter) Format(diff diff.Diff) (result string, err error) {
if v, ok := f.left.(map[string]interface{}); ok {
f.formatObject(v, diff)
} else if v, ok := f.left.([]interface{}); ok {
f.formatArray(v, diff)
} else {
return "", fmt.Errorf("expected map[string]interface{} or []interface{}, got %T",
f.left)
}
b := &bytes.Buffer{}
err = f.tpl.ExecuteTemplate(b, "JSONDiffWrapper", f.Lines)
if err != nil {
fmt.Printf("%v\n", err)
return "", err
}
return b.String(), nil
}
func (f *JSONFormatter) formatObject(left map[string]interface{}, df diff.Diff) {
f.addLineWith(ChangeNil, "{")
f.push("ROOT", len(left), false)
f.processObject(left, df.Deltas())
f.pop()
f.addLineWith(ChangeNil, "}")
}
func (f *JSONFormatter) formatArray(left []interface{}, df diff.Diff) {
f.addLineWith(ChangeNil, "[")
f.push("ROOT", len(left), true)
f.processArray(left, df.Deltas())
f.pop()
f.addLineWith(ChangeNil, "]")
}
func (f *JSONFormatter) processArray(array []interface{}, deltas []diff.Delta) error {
patchedIndex := 0
for index, value := range array {
f.processItem(value, deltas, diff.Index(index))
patchedIndex++
}
// additional Added
for _, delta := range deltas {
switch delta.(type) {
case *diff.Added:
d := delta.(*diff.Added)
// skip items already processed
if int(d.Position.(diff.Index)) < len(array) {
continue
}
f.printRecursive(d.Position.String(), d.Value, ChangeAdded)
}
}
return nil
}
func (f *JSONFormatter) processObject(object map[string]interface{}, deltas []diff.Delta) error {
names := sortKeys(object)
for _, name := range names {
value := object[name]
f.processItem(value, deltas, diff.Name(name))
}
// Added
for _, delta := range deltas {
switch delta.(type) {
case *diff.Added:
d := delta.(*diff.Added)
f.printRecursive(d.Position.String(), d.Value, ChangeAdded)
}
}
return nil
}
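// processItem renders a single value at the given position, applying any matching deltas; values with no matching delta are printed unchanged.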
func (f *JSONFormatter) processItem(value interface{}, deltas []diff.Delta, position diff.Position) error {
matchedDeltas := f.searchDeltas(deltas, position)
positionStr := position.String()
if len(matchedDeltas) > 0 {
for _, matchedDelta := range matchedDeltas {
switch matchedDelta.(type) {
case *diff.Object:
d := matchedDelta.(*diff.Object)
switch value.(type) {
case map[string]interface{}:
//ok
default:
return errors.New("Type mismatch")
}
o := value.(map[string]interface{})
f.newLine(ChangeNil)
f.printKey(positionStr)
f.print("{")
f.closeLine()
f.push(positionStr, len(o), false)
f.processObject(o, d.Deltas)
f.pop()
f.newLine(ChangeNil)
f.print("}")
f.printComma()
f.closeLine()
case *diff.Array:
d := matchedDelta.(*diff.Array)
switch value.(type) {
case []interface{}:
//ok
default:
return errors.New("Type mismatch")
}
a := value.([]interface{})
f.newLine(ChangeNil)
f.printKey(positionStr)
f.print("[")
f.closeLine()
f.push(positionStr, len(a), true)
f.processArray(a, d.Deltas)
f.pop()
f.newLine(ChangeNil)
f.print("]")
f.printComma()
f.closeLine()
case *diff.Added:
d := matchedDelta.(*diff.Added)
f.printRecursive(positionStr, d.Value, ChangeAdded)
f.size[len(f.size)-1]++
case *diff.Modified:
d := matchedDelta.(*diff.Modified)
savedSize := f.size[len(f.size)-1]
f.printRecursive(positionStr, d.OldValue, ChangeOld)
f.size[len(f.size)-1] = savedSize
f.printRecursive(positionStr, d.NewValue, ChangeNew)
case *diff.TextDiff:
savedSize := f.size[len(f.size)-1]
d := matchedDelta.(*diff.TextDiff)
f.printRecursive(positionStr, d.OldValue, ChangeOld)
f.size[len(f.size)-1] = savedSize
f.printRecursive(positionStr, d.NewValue, ChangeNew)
case *diff.Deleted:
d := matchedDelta.(*diff.Deleted)
f.printRecursive(positionStr, d.Value, ChangeDeleted)
default:
return errors.New("Unknown Delta type detected")
}
}
} else {
f.printRecursive(positionStr, value, ChangeUnchanged)
}
return nil
}
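// searchDeltas returns the deltas whose pre- or post-position matches the given position.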
func (f *JSONFormatter) searchDeltas(deltas []diff.Delta, position diff.Position) (results []diff.Delta) {
results = make([]diff.Delta, 0)
for _, delta := range deltas {
switch delta.(type) {
case diff.PostDelta:
if delta.(diff.PostDelta).PostPosition() == position {
results = append(results, delta)
}
case diff.PreDelta:
if delta.(diff.PreDelta).PrePosition() == position {
results = append(results, delta)
}
default:
panic("heh")
}
}
return
}
func (f *JSONFormatter) push(name string, size int, array bool) {
f.path = append(f.path, name)
f.size = append(f.size, size)
f.inArray = append(f.inArray, array)
}
func (f *JSONFormatter) pop() {
f.path = f.path[0 : len(f.path)-1]
f.size = f.size[0 : len(f.size)-1]
f.inArray = f.inArray[0 : len(f.inArray)-1]
}
func (f *JSONFormatter) addLineWith(change ChangeType, value string) {
f.line = &AsciiLine{
change: change,
indent: len(f.path),
buffer: bytes.NewBufferString(value),
}
f.closeLine()
}
func (f *JSONFormatter) newLine(change ChangeType) {
f.line = &AsciiLine{
change: change,
indent: len(f.path),
buffer: bytes.NewBuffer([]byte{}),
}
}
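// closeLine assigns left/right line numbers based on the change type and appends the finished line to f.Lines.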
func (f *JSONFormatter) closeLine() {
leftLine := 0
rightLine := 0
f.lineCount++
switch f.line.change {
case ChangeAdded, ChangeNew:
f.rightLine++
rightLine = f.rightLine
case ChangeDeleted, ChangeOld:
f.leftLine++
leftLine = f.leftLine
case ChangeNil, ChangeUnchanged:
f.rightLine++
f.leftLine++
rightLine = f.rightLine
leftLine = f.leftLine
}
s := f.line.buffer.String()
f.Lines = append(f.Lines, &JSONLine{
LineNum: f.lineCount,
RightLine: rightLine,
LeftLine: leftLine,
Indent: f.line.indent,
Text: s,
Change: f.line.change,
Key: f.line.key,
Val: f.line.val,
})
}
func (f *JSONFormatter) printKey(name string) {
if !f.inArray[len(f.inArray)-1] {
f.line.key = name
fmt.Fprintf(f.line.buffer, `"%s": `, name)
}
}
func (f *JSONFormatter) printComma() {
f.size[len(f.size)-1]--
if f.size[len(f.size)-1] > 0 {
f.line.buffer.WriteRune(',')
}
}
func (f *JSONFormatter) printValue(value interface{}) {
switch value.(type) {
case string:
f.line.val = value
fmt.Fprintf(f.line.buffer, `"%s"`, value)
case nil:
f.line.val = "null"
f.line.buffer.WriteString("null")
default:
f.line.val = value
fmt.Fprintf(f.line.buffer, `%#v`, value)
}
}
func (f *JSONFormatter) print(a string) {
f.line.buffer.WriteString(a)
}
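// printRecursive walks maps, slices and scalar values, emitting a line for each entry with the given change type.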
func (f *JSONFormatter) printRecursive(name string, value interface{}, change ChangeType) {
switch value.(type) {
case map[string]interface{}:
f.newLine(change)
f.printKey(name)
f.print("{")
f.closeLine()
m := value.(map[string]interface{})
size := len(m)
f.push(name, size, false)
keys := sortKeys(m)
for _, key := range keys {
f.printRecursive(key, m[key], change)
}
f.pop()
f.newLine(change)
f.print("}")
f.printComma()
f.closeLine()
case []interface{}:
f.newLine(change)
f.printKey(name)
f.print("[")
f.closeLine()
s := value.([]interface{})
size := len(s)
f.push("", size, true)
for _, item := range s {
f.printRecursive("", item, change)
}
f.pop()
f.newLine(change)
f.print("]")
f.printComma()
f.closeLine()
default:
f.newLine(change)
f.printKey(name)
f.printValue(value)
f.printComma()
f.closeLine()
}
}
func sortKeys(m map[string]interface{}) (keys []string) {
keys = make([]string, 0, len(m))
for key := range m {
keys = append(keys, key)
}
sort.Strings(keys)
return
}
| pkg/components/dashdiffs/formatter_json.go | 0 | https://github.com/grafana/grafana/commit/34da0711abf93fa54376a21452660d2a9f4545df | [
0.000267396098934114,
0.0001743403699947521,
0.00016329505888279527,
0.00017276970902457833,
0.000014152098628983367
] |
{
"id": 3,
"code_window": [
"\t\t\tSo(sql, ShouldEqual, \"select FROM_UNIXTIME(18446744066914186738)\")\n",
"\t\t})\n",
"\n",
"\t\tConvey(\"interpolate __timeTo function\", func() {\n",
"\t\t\tsql, err := engine.Interpolate(timeRange, \"select $__timeTo(time_column)\")\n",
"\t\t\tSo(err, ShouldBeNil)\n",
"\n"
],
"labels": [
"keep",
"keep",
"add",
"keep",
"keep",
"keep",
"keep"
],
"after_edit": [
"\t\tConvey(\"interpolate __timeGroup function\", func() {\n",
"\n",
"\t\t\tsql, err := engine.Interpolate(timeRange, \"GROUP BY $__timeGroup(time_column,'5m')\")\n",
"\t\t\tSo(err, ShouldBeNil)\n",
"\n",
"\t\t\tSo(sql, ShouldEqual, \"GROUP BY cast(cast(UNIX_TIMESTAMP(time_column)/(300) as signed)*300 as signed)\")\n",
"\t\t})\n",
"\n"
],
"file_path": "pkg/tsdb/mysql/macros_test.go",
"type": "add",
"edit_start_line_idx": 42
} | package graphite
import (
"context"
"encoding/json"
"fmt"
"io/ioutil"
"net/http"
"net/url"
"path"
"regexp"
"strings"
"golang.org/x/net/context/ctxhttp"
"github.com/grafana/grafana/pkg/log"
"github.com/grafana/grafana/pkg/models"
"github.com/grafana/grafana/pkg/setting"
"github.com/grafana/grafana/pkg/tsdb"
opentracing "github.com/opentracing/opentracing-go"
)
type GraphiteExecutor struct {
HttpClient *http.Client
}
func NewGraphiteExecutor(datasource *models.DataSource) (tsdb.TsdbQueryEndpoint, error) {
return &GraphiteExecutor{}, nil
}
var (
glog log.Logger
)
func init() {
glog = log.New("tsdb.graphite")
tsdb.RegisterTsdbQueryEndpoint("graphite", NewGraphiteExecutor)
}
func (e *GraphiteExecutor) Query(ctx context.Context, dsInfo *models.DataSource, tsdbQuery *tsdb.TsdbQuery) (*tsdb.Response, error) {
result := &tsdb.Response{}
from := "-" + formatTimeRange(tsdbQuery.TimeRange.From)
until := formatTimeRange(tsdbQuery.TimeRange.To)
var target string
formData := url.Values{
"from": []string{from},
"until": []string{until},
"format": []string{"json"},
"maxDataPoints": []string{"500"},
}
for _, query := range tsdbQuery.Queries {
if fullTarget, err := query.Model.Get("targetFull").String(); err == nil {
target = fixIntervalFormat(fullTarget)
} else {
target = fixIntervalFormat(query.Model.Get("target").MustString())
}
}
formData["target"] = []string{target}
if setting.Env == setting.DEV {
glog.Debug("Graphite request", "params", formData)
}
req, err := e.createRequest(dsInfo, formData)
if err != nil {
return nil, err
}
httpClient, err := dsInfo.GetHttpClient()
if err != nil {
return nil, err
}
span, ctx := opentracing.StartSpanFromContext(ctx, "graphite query")
span.SetTag("target", target)
span.SetTag("from", from)
span.SetTag("until", until)
defer span.Finish()
opentracing.GlobalTracer().Inject(
span.Context(),
opentracing.HTTPHeaders,
opentracing.HTTPHeadersCarrier(req.Header))
res, err := ctxhttp.Do(ctx, httpClient, req)
if err != nil {
return nil, err
}
data, err := e.parseResponse(res)
if err != nil {
return nil, err
}
result.Results = make(map[string]*tsdb.QueryResult)
queryRes := tsdb.NewQueryResult()
for _, series := range data {
queryRes.Series = append(queryRes.Series, &tsdb.TimeSeries{
Name: series.Target,
Points: series.DataPoints,
})
if setting.Env == setting.DEV {
glog.Debug("Graphite response", "target", series.Target, "datapoints", len(series.DataPoints))
}
}
result.Results["A"] = queryRes
return result, nil
}
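// parseResponse reads the HTTP response body and unmarshals the Graphite JSON render result.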
func (e *GraphiteExecutor) parseResponse(res *http.Response) ([]TargetResponseDTO, error) {
body, err := ioutil.ReadAll(res.Body)
defer res.Body.Close()
if err != nil {
return nil, err
}
if res.StatusCode/100 != 2 {
glog.Info("Request failed", "status", res.Status, "body", string(body))
return nil, fmt.Errorf("Request failed status: %v", res.Status)
}
var data []TargetResponseDTO
err = json.Unmarshal(body, &data)
if err != nil {
glog.Info("Failed to unmarshal graphite response", "error", err, "status", res.Status, "body", string(body))
return nil, err
}
return data, nil
}
func (e *GraphiteExecutor) createRequest(dsInfo *models.DataSource, data url.Values) (*http.Request, error) {
u, _ := url.Parse(dsInfo.Url)
u.Path = path.Join(u.Path, "render")
req, err := http.NewRequest(http.MethodPost, u.String(), strings.NewReader(data.Encode()))
if err != nil {
glog.Info("Failed to create request", "error", err)
return nil, fmt.Errorf("Failed to create request. error: %v", err)
}
req.Header.Set("Content-Type", "application/x-www-form-urlencoded")
if dsInfo.BasicAuth {
req.SetBasicAuth(dsInfo.BasicAuthUser, dsInfo.BasicAuthPassword)
}
return req, err
}
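// formatTimeRange converts Grafana's relative time syntax to Graphite units ("m" -> "min", "M" -> "mon"); "now" is passed through unchanged.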
func formatTimeRange(input string) string {
if input == "now" {
return input
}
return strings.Replace(strings.Replace(input, "m", "min", -1), "M", "mon", -1)
}
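// fixIntervalFormat rewrites quoted interval arguments inside the target expression, e.g. '5m' becomes '5min' and '1M' becomes '1mon'.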
func fixIntervalFormat(target string) string {
rMinute := regexp.MustCompile(`'(\d+)m'`)
rMin := regexp.MustCompile("m")
target = rMinute.ReplaceAllStringFunc(target, func(m string) string {
return rMin.ReplaceAllString(m, "min")
})
rMonth := regexp.MustCompile(`'(\d+)M'`)
rMon := regexp.MustCompile("M")
target = rMonth.ReplaceAllStringFunc(target, func(M string) string {
return rMon.ReplaceAllString(M, "mon")
})
return target
}
| pkg/tsdb/graphite/graphite.go | 0 | https://github.com/grafana/grafana/commit/34da0711abf93fa54376a21452660d2a9f4545df | [
0.0009176740422844887,
0.00021158049639780074,
0.00016265510930679739,
0.0001717207778710872,
0.00017129137995652854
] |
{
"id": 4,
"code_window": [
"\t\"fmt\"\n",
"\t\"regexp\"\n",
"\t\"strings\"\n",
"\n",
"\t\"github.com/grafana/grafana/pkg/tsdb\"\n",
")\n"
],
"labels": [
"keep",
"keep",
"add",
"keep",
"keep",
"keep"
],
"after_edit": [
"\t\"time\"\n"
],
"file_path": "pkg/tsdb/postgres/macros.go",
"type": "add",
"edit_start_line_idx": 6
} | <query-editor-row query-ctrl="ctrl" can-collapse="false">
<div class="gf-form-inline">
<div class="gf-form gf-form--grow">
<code-editor content="ctrl.target.rawSql" datasource="ctrl.datasource" on-change="ctrl.panelCtrl.refresh()" data-mode="sql">
</code-editor>
</div>
</div>
<div class="gf-form-inline">
<div class="gf-form">
<label class="gf-form-label query-keyword">Format as</label>
<div class="gf-form-select-wrapper">
<select class="gf-form-input gf-size-auto" ng-model="ctrl.target.format" ng-options="f.value as f.text for f in ctrl.formats" ng-change="ctrl.refresh()"></select>
</div>
</div>
<div class="gf-form">
<label class="gf-form-label query-keyword" ng-click="ctrl.showHelp = !ctrl.showHelp">
Show Help
<i class="fa fa-caret-down" ng-show="ctrl.showHelp"></i>
<i class="fa fa-caret-right" ng-hide="ctrl.showHelp"></i>
</label>
</div>
<div class="gf-form" ng-show="ctrl.lastQueryMeta">
<label class="gf-form-label query-keyword" ng-click="ctrl.showLastQuerySQL = !ctrl.showLastQuerySQL">
Generated SQL
<i class="fa fa-caret-down" ng-show="ctrl.showLastQuerySQL"></i>
<i class="fa fa-caret-right" ng-hide="ctrl.showLastQuerySQL"></i>
</label>
</div>
<div class="gf-form gf-form--grow">
<div class="gf-form-label gf-form-label--grow"></div>
</div>
</div>
<div class="gf-form" ng-show="ctrl.showLastQuerySQL">
<pre class="gf-form-pre">{{ctrl.lastQueryMeta.sql}}</pre>
</div>
<div class="gf-form" ng-show="ctrl.showHelp">
<pre class="gf-form-pre alert alert-info">Time series:
- return column named <i>time</i> (UTC in seconds or timestamp)
- return column(s) with numeric datatype as values
- (Optional: return column named <i>metric</i> to represent the series name. If no column named metric is found the column name of the value column is used as series name)
Table:
- return any set of columns
Macros:
- $__time(column) -> column as "time"
- $__timeEpoch -> extract(epoch from column) as "time"
- $__timeFilter(column) -> column ≥ to_timestamp(1492750877) AND column ≤ to_timestamp(1492750877)
- $__unixEpochFilter(column) -> column > 1492750877 AND column < 1492750877
To group by time use $__timeGroup:
-> (extract(epoch from column)/extract(epoch from column::interval))::int
Example of group by and order by with $__timeGroup:
SELECT
min(date_time_col) AS time_sec,
sum(value_double) as value
FROM yourtable
group by $__timeGroup(date_time_col, '1h')
order by $__timeGroup(date_time_col, '1h') ASC
Or build your own conditionals using these macros which just return the values:
- $__timeFrom() -> to_timestamp(1492750877)
- $__timeTo() -> to_timestamp(1492750877)
- $__unixEpochFrom() -> 1492750877
- $__unixEpochTo() -> 1492750877
</pre>
</div>
</div>
<div class="gf-form" ng-show="ctrl.lastQueryError">
<pre class="gf-form-pre alert alert-error">{{ctrl.lastQueryError}}</pre>
</div>
</query-editor-row>
| public/app/plugins/datasource/postgres/partials/query.editor.html | 1 | https://github.com/grafana/grafana/commit/34da0711abf93fa54376a21452660d2a9f4545df | [
0.00017014829791150987,
0.00016853862325660884,
0.0001667505275690928,
0.0001685891766101122,
0.0000010974401902785758
] |
{
"id": 4,
"code_window": [
"\t\"fmt\"\n",
"\t\"regexp\"\n",
"\t\"strings\"\n",
"\n",
"\t\"github.com/grafana/grafana/pkg/tsdb\"\n",
")\n"
],
"labels": [
"keep",
"keep",
"add",
"keep",
"keep",
"keep"
],
"after_edit": [
"\t\"time\"\n"
],
"file_path": "pkg/tsdb/postgres/macros.go",
"type": "add",
"edit_start_line_idx": 6
} | elasticsearch1:
image: elasticsearch:1.7.6
command: elasticsearch -Des.network.host=0.0.0.0
ports:
- "11200:9200"
- "11300:9300"
volumes:
- ./blocks/elastic/elasticsearch.yml:/usr/share/elasticsearch/config/elasticsearch.yml
| docker/blocks/elastic1/fig | 0 | https://github.com/grafana/grafana/commit/34da0711abf93fa54376a21452660d2a9f4545df | [
0.00016758257697802037,
0.00016758257697802037,
0.00016758257697802037,
0.00016758257697802037,
0
] |
{
"id": 4,
"code_window": [
"\t\"fmt\"\n",
"\t\"regexp\"\n",
"\t\"strings\"\n",
"\n",
"\t\"github.com/grafana/grafana/pkg/tsdb\"\n",
")\n"
],
"labels": [
"keep",
"keep",
"add",
"keep",
"keep",
"keep"
],
"after_edit": [
"\t\"time\"\n"
],
"file_path": "pkg/tsdb/postgres/macros.go",
"type": "add",
"edit_start_line_idx": 6
} | *.sub1.foo | vendor/github.com/jmespath/go-jmespath/fuzz/corpus/expr-590 | 0 | https://github.com/grafana/grafana/commit/34da0711abf93fa54376a21452660d2a9f4545df | [
0.0001776055432856083,
0.0001776055432856083,
0.0001776055432856083,
0.0001776055432856083,
0
] |
{
"id": 4,
"code_window": [
"\t\"fmt\"\n",
"\t\"regexp\"\n",
"\t\"strings\"\n",
"\n",
"\t\"github.com/grafana/grafana/pkg/tsdb\"\n",
")\n"
],
"labels": [
"keep",
"keep",
"add",
"keep",
"keep",
"keep"
],
"after_edit": [
"\t\"time\"\n"
],
"file_path": "pkg/tsdb/postgres/macros.go",
"type": "add",
"edit_start_line_idx": 6
} | // Copyright 2009 The Go Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.
// +build darwin dragonfly freebsd linux netbsd openbsd solaris
package unix
func itoa(val int) string { // do it here rather than with fmt to avoid dependency
if val < 0 {
return "-" + uitoa(uint(-val))
}
return uitoa(uint(val))
}
func uitoa(val uint) string {
var buf [32]byte // big enough for int64
i := len(buf) - 1
for val >= 10 {
buf[i] = byte(val%10 + '0')
i--
val /= 10
}
buf[i] = byte(val + '0')
return string(buf[i:])
}
| vendor/golang.org/x/sys/unix/str.go | 0 | https://github.com/grafana/grafana/commit/34da0711abf93fa54376a21452660d2a9f4545df | [
0.0001824788050726056,
0.00017614102398511022,
0.00017111179477069527,
0.00017483248666394502,
0.0000047319090299424715
] |
{
"id": 5,
"code_window": [
"\tcase \"__timeTo\":\n",
"\t\treturn fmt.Sprintf(\"to_timestamp(%d)\", uint64(m.TimeRange.GetToAsMsEpoch()/1000)), nil\n",
"\tcase \"__timeGroup\":\n",
"\t\tif len(args) < 2 {\n",
"\t\t\treturn \"\", fmt.Errorf(\"macro %v needs time column and interval\", name)\n",
"\t\t}\n"
],
"labels": [
"keep",
"keep",
"keep",
"replace",
"keep",
"keep"
],
"after_edit": [
"\t\tif len(args) != 2 {\n"
],
"file_path": "pkg/tsdb/postgres/macros.go",
"type": "replace",
"edit_start_line_idx": 82
} | package postgres
import (
"testing"
"github.com/grafana/grafana/pkg/tsdb"
. "github.com/smartystreets/goconvey/convey"
)
func TestMacroEngine(t *testing.T) {
Convey("MacroEngine", t, func() {
engine := &PostgresMacroEngine{}
timeRange := &tsdb.TimeRange{From: "5m", To: "now"}
Convey("interpolate __time function", func() {
sql, err := engine.Interpolate(nil, "select $__time(time_column)")
So(err, ShouldBeNil)
So(sql, ShouldEqual, "select time_column AS \"time\"")
})
Convey("interpolate __time function wrapped in aggregation", func() {
sql, err := engine.Interpolate(nil, "select min($__time(time_column))")
So(err, ShouldBeNil)
So(sql, ShouldEqual, "select min(time_column AS \"time\")")
})
Convey("interpolate __timeFilter function", func() {
sql, err := engine.Interpolate(timeRange, "WHERE $__timeFilter(time_column)")
So(err, ShouldBeNil)
So(sql, ShouldEqual, "WHERE extract(epoch from time_column) BETWEEN 18446744066914186738 AND 18446744066914187038")
})
Convey("interpolate __timeFrom function", func() {
sql, err := engine.Interpolate(timeRange, "select $__timeFrom(time_column)")
So(err, ShouldBeNil)
So(sql, ShouldEqual, "select to_timestamp(18446744066914186738)")
})
Convey("interpolate __timeGroup function", func() {
sql, err := engine.Interpolate(timeRange, "GROUP BY $__timeGroup(time_column,'5m')")
So(err, ShouldBeNil)
So(sql, ShouldEqual, "GROUP BY (extract(epoch from \"time_column\")/extract(epoch from '5m'::interval))::int*extract(epoch from '5m'::interval)")
})
Convey("interpolate __timeTo function", func() {
sql, err := engine.Interpolate(timeRange, "select $__timeTo(time_column)")
So(err, ShouldBeNil)
So(sql, ShouldEqual, "select to_timestamp(18446744066914187038)")
})
Convey("interpolate __unixEpochFilter function", func() {
sql, err := engine.Interpolate(timeRange, "select $__unixEpochFilter(18446744066914186738)")
So(err, ShouldBeNil)
So(sql, ShouldEqual, "select 18446744066914186738 >= 18446744066914186738 AND 18446744066914186738 <= 18446744066914187038")
})
Convey("interpolate __unixEpochFrom function", func() {
sql, err := engine.Interpolate(timeRange, "select $__unixEpochFrom()")
So(err, ShouldBeNil)
So(sql, ShouldEqual, "select 18446744066914186738")
})
Convey("interpolate __unixEpochTo function", func() {
sql, err := engine.Interpolate(timeRange, "select $__unixEpochTo()")
So(err, ShouldBeNil)
So(sql, ShouldEqual, "select 18446744066914187038")
})
})
}
| pkg/tsdb/postgres/macros_test.go | 1 | https://github.com/grafana/grafana/commit/34da0711abf93fa54376a21452660d2a9f4545df | [
0.0034188791178166866,
0.0005787528934888542,
0.0001662375871092081,
0.00018678307242225856,
0.001007390907034278
] |
{
"id": 5,
"code_window": [
"\tcase \"__timeTo\":\n",
"\t\treturn fmt.Sprintf(\"to_timestamp(%d)\", uint64(m.TimeRange.GetToAsMsEpoch()/1000)), nil\n",
"\tcase \"__timeGroup\":\n",
"\t\tif len(args) < 2 {\n",
"\t\t\treturn \"\", fmt.Errorf(\"macro %v needs time column and interval\", name)\n",
"\t\t}\n"
],
"labels": [
"keep",
"keep",
"keep",
"replace",
"keep",
"keep"
],
"after_edit": [
"\t\tif len(args) != 2 {\n"
],
"file_path": "pkg/tsdb/postgres/macros.go",
"type": "replace",
"edit_start_line_idx": 82
} | ---
page_title: Plugin panel
page_description: Panel plugins for Grafana
page_keywords: grafana, plugins, documentation
---
+++
title = "Installing Plugins"
type = "docs"
[menu.docs]
parent = "developing"
weight = 1
+++
# Panels
Panels are the main building blocks of dashboards.
## Panel development
Examples
- [clock-panel](https://github.com/grafana/clock-panel)
- [singlestat-panel](https://github.com/grafana/grafana/blob/master/public/app/plugins/panel/singlestat/module.ts)
| docs/sources/plugins/developing/panels.md | 0 | https://github.com/grafana/grafana/commit/34da0711abf93fa54376a21452660d2a9f4545df | [
0.000175083230715245,
0.00017228687647730112,
0.00017080240650102496,
0.00017097496311180294,
0.0000019785823042184347
] |
{
"id": 5,
"code_window": [
"\tcase \"__timeTo\":\n",
"\t\treturn fmt.Sprintf(\"to_timestamp(%d)\", uint64(m.TimeRange.GetToAsMsEpoch()/1000)), nil\n",
"\tcase \"__timeGroup\":\n",
"\t\tif len(args) < 2 {\n",
"\t\t\treturn \"\", fmt.Errorf(\"macro %v needs time column and interval\", name)\n",
"\t\t}\n"
],
"labels": [
"keep",
"keep",
"keep",
"replace",
"keep",
"keep"
],
"after_edit": [
"\t\tif len(args) != 2 {\n"
],
"file_path": "pkg/tsdb/postgres/macros.go",
"type": "replace",
"edit_start_line_idx": 82
} | // Copyright (c) 2017 Uber Technologies, Inc.
//
// Permission is hereby granted, free of charge, to any person obtaining a copy
// of this software and associated documentation files (the "Software"), to deal
// in the Software without restriction, including without limitation the rights
// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
// copies of the Software, and to permit persons to whom the Software is
// furnished to do so, subject to the following conditions:
//
// The above copyright notice and this permission notice shall be included in
// all copies or substantial portions of the Software.
//
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
// THE SOFTWARE.
package jaeger
import (
"github.com/opentracing/opentracing-go/log"
"github.com/uber/jaeger-client-go/internal/baggage"
)
// baggageSetter is an actor that can set a baggage value on a Span given certain
// restrictions (eg. maxValueLength).
type baggageSetter struct {
restrictionManager baggage.RestrictionManager
metrics *Metrics
}
func newBaggageSetter(restrictionManager baggage.RestrictionManager, metrics *Metrics) *baggageSetter {
return &baggageSetter{
restrictionManager: restrictionManager,
metrics: metrics,
}
}
// (NB) span should hold the lock before making this call
func (s *baggageSetter) setBaggage(span *Span, key, value string) {
var truncated bool
var prevItem string
restriction := s.restrictionManager.GetRestriction(key)
if !restriction.KeyAllowed() {
s.logFields(span, key, value, prevItem, truncated, restriction.KeyAllowed())
s.metrics.BaggageUpdateFailure.Inc(1)
return
}
if len(value) > restriction.MaxValueLength() {
truncated = true
value = value[:restriction.MaxValueLength()]
s.metrics.BaggageTruncate.Inc(1)
}
prevItem = span.context.baggage[key]
s.logFields(span, key, value, prevItem, truncated, restriction.KeyAllowed())
span.context = span.context.WithBaggageItem(key, value)
s.metrics.BaggageUpdateSuccess.Inc(1)
}
func (s *baggageSetter) logFields(span *Span, key, value, prevItem string, truncated, valid bool) {
if !span.context.IsSampled() {
return
}
fields := []log.Field{
log.String("event", "baggage"),
log.String("key", key),
log.String("value", value),
}
if prevItem != "" {
fields = append(fields, log.String("override", "true"))
}
if truncated {
fields = append(fields, log.String("truncated", "true"))
}
if !valid {
fields = append(fields, log.String("invalid", "true"))
}
span.logFieldsNoLocking(fields...)
}
| vendor/github.com/uber/jaeger-client-go/baggage_setter.go | 0 | https://github.com/grafana/grafana/commit/34da0711abf93fa54376a21452660d2a9f4545df | [
0.00017707812367007136,
0.00017182579904329032,
0.00016611510363873094,
0.00017340038903057575,
0.0000035548814594221767
] |
{
"id": 5,
"code_window": [
"\tcase \"__timeTo\":\n",
"\t\treturn fmt.Sprintf(\"to_timestamp(%d)\", uint64(m.TimeRange.GetToAsMsEpoch()/1000)), nil\n",
"\tcase \"__timeGroup\":\n",
"\t\tif len(args) < 2 {\n",
"\t\t\treturn \"\", fmt.Errorf(\"macro %v needs time column and interval\", name)\n",
"\t\t}\n"
],
"labels": [
"keep",
"keep",
"keep",
"replace",
"keep",
"keep"
],
"after_edit": [
"\t\tif len(args) != 2 {\n"
],
"file_path": "pkg/tsdb/postgres/macros.go",
"type": "replace",
"edit_start_line_idx": 82
} | // Copyright 2015 The Prometheus Authors
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package model
import (
"encoding/json"
"fmt"
"regexp"
"time"
)
// Matcher describes how to match the value of a given label.
type Matcher struct {
Name LabelName `json:"name"`
Value string `json:"value"`
IsRegex bool `json:"isRegex"`
}
func (m *Matcher) UnmarshalJSON(b []byte) error {
type plain Matcher
if err := json.Unmarshal(b, (*plain)(m)); err != nil {
return err
}
if len(m.Name) == 0 {
return fmt.Errorf("label name in matcher must not be empty")
}
if m.IsRegex {
if _, err := regexp.Compile(m.Value); err != nil {
return err
}
}
return nil
}
// Validate returns true iff all fields of the matcher have valid values.
func (m *Matcher) Validate() error {
if !m.Name.IsValid() {
return fmt.Errorf("invalid name %q", m.Name)
}
if m.IsRegex {
if _, err := regexp.Compile(m.Value); err != nil {
return fmt.Errorf("invalid regular expression %q", m.Value)
}
} else if !LabelValue(m.Value).IsValid() || len(m.Value) == 0 {
return fmt.Errorf("invalid value %q", m.Value)
}
return nil
}
// Silence defines the representation of a silence definition
// in the Prometheus eco-system.
type Silence struct {
ID uint64 `json:"id,omitempty"`
Matchers []*Matcher `json:"matchers"`
StartsAt time.Time `json:"startsAt"`
EndsAt time.Time `json:"endsAt"`
CreatedAt time.Time `json:"createdAt,omitempty"`
CreatedBy string `json:"createdBy"`
Comment string `json:"comment,omitempty"`
}
// Validate returns true iff all fields of the silence have valid values.
func (s *Silence) Validate() error {
if len(s.Matchers) == 0 {
return fmt.Errorf("at least one matcher required")
}
for _, m := range s.Matchers {
if err := m.Validate(); err != nil {
return fmt.Errorf("invalid matcher: %s", err)
}
}
if s.StartsAt.IsZero() {
return fmt.Errorf("start time missing")
}
if s.EndsAt.IsZero() {
return fmt.Errorf("end time missing")
}
if s.EndsAt.Before(s.StartsAt) {
return fmt.Errorf("start time must be before end time")
}
if s.CreatedBy == "" {
return fmt.Errorf("creator information missing")
}
if s.Comment == "" {
return fmt.Errorf("comment missing")
}
if s.CreatedAt.IsZero() {
return fmt.Errorf("creation timestamp missing")
}
return nil
}
| vendor/github.com/prometheus/common/model/silence.go | 0 | https://github.com/grafana/grafana/commit/34da0711abf93fa54376a21452660d2a9f4545df | [
0.0021584301721304655,
0.0003581794153433293,
0.00016451062401756644,
0.00017289370589423925,
0.0005696121952496469
] |
{
"id": 6,
"code_window": [
"\t\t\treturn \"\", fmt.Errorf(\"macro %v needs time column and interval\", name)\n",
"\t\t}\n",
"\t\treturn fmt.Sprintf(\"(extract(epoch from \\\"%s\\\")/extract(epoch from %s::interval))::int*extract(epoch from %s::interval)\", args[0], args[1], args[1]), nil\n",
"\tcase \"__unixEpochFilter\":\n",
"\t\tif len(args) == 0 {\n",
"\t\t\treturn \"\", fmt.Errorf(\"missing time column argument for macro %v\", name)\n",
"\t\t}\n",
"\t\treturn fmt.Sprintf(\"%s >= %d AND %s <= %d\", args[0], uint64(m.TimeRange.GetFromAsMsEpoch()/1000), args[0], uint64(m.TimeRange.GetToAsMsEpoch()/1000)), nil\n"
],
"labels": [
"keep",
"keep",
"replace",
"keep",
"keep",
"keep",
"keep",
"keep"
],
"after_edit": [
"\t\tinterval, err := time.ParseDuration(strings.Trim(args[1], `' `))\n",
"\t\tif err != nil {\n",
"\t\t\treturn \"\", fmt.Errorf(\"error parsing interval %v\", args[1])\n",
"\t\t}\n",
"\t\treturn fmt.Sprintf(\"(extract(epoch from \\\"%s\\\")/%v)::bigint*%v\", args[0], interval.Seconds(), interval.Seconds()), nil\n"
],
"file_path": "pkg/tsdb/postgres/macros.go",
"type": "replace",
"edit_start_line_idx": 85
} | <query-editor-row query-ctrl="ctrl" can-collapse="false">
<div class="gf-form-inline">
<div class="gf-form gf-form--grow">
<code-editor content="ctrl.target.rawSql" datasource="ctrl.datasource" on-change="ctrl.panelCtrl.refresh()" data-mode="sql">
</code-editor>
</div>
</div>
<div class="gf-form-inline">
<div class="gf-form">
<label class="gf-form-label query-keyword">Format as</label>
<div class="gf-form-select-wrapper">
<select class="gf-form-input gf-size-auto" ng-model="ctrl.target.format" ng-options="f.value as f.text for f in ctrl.formats" ng-change="ctrl.refresh()"></select>
</div>
</div>
<div class="gf-form">
<label class="gf-form-label query-keyword" ng-click="ctrl.showHelp = !ctrl.showHelp">
Show Help
<i class="fa fa-caret-down" ng-show="ctrl.showHelp"></i>
<i class="fa fa-caret-right" ng-hide="ctrl.showHelp"></i>
</label>
</div>
<div class="gf-form" ng-show="ctrl.lastQueryMeta">
<label class="gf-form-label query-keyword" ng-click="ctrl.showLastQuerySQL = !ctrl.showLastQuerySQL">
Generated SQL
<i class="fa fa-caret-down" ng-show="ctrl.showLastQuerySQL"></i>
<i class="fa fa-caret-right" ng-hide="ctrl.showLastQuerySQL"></i>
</label>
</div>
<div class="gf-form gf-form--grow">
<div class="gf-form-label gf-form-label--grow"></div>
</div>
</div>
<div class="gf-form" ng-show="ctrl.showLastQuerySQL">
<pre class="gf-form-pre">{{ctrl.lastQueryMeta.sql}}</pre>
</div>
<div class="gf-form" ng-show="ctrl.showHelp">
<pre class="gf-form-pre alert alert-info">Time series:
- return column named <i>time</i> (UTC in seconds or timestamp)
- return column(s) with numeric datatype as values
- (Optional: return column named <i>metric</i> to represent the series name. If no column named metric is found the column name of the value column is used as series name)
Table:
- return any set of columns
Macros:
- $__time(column) -> column as "time"
- $__timeEpoch -> extract(epoch from column) as "time"
- $__timeFilter(column) -> column ≥ to_timestamp(1492750877) AND column ≤ to_timestamp(1492750877)
- $__unixEpochFilter(column) -> column > 1492750877 AND column < 1492750877
To group by time use $__timeGroup:
-> (extract(epoch from column)/extract(epoch from column::interval))::int
Example of group by and order by with $__timeGroup:
SELECT
min(date_time_col) AS time_sec,
sum(value_double) as value
FROM yourtable
group by $__timeGroup(date_time_col, '1h')
order by $__timeGroup(date_time_col, '1h') ASC
Or build your own conditionals using these macros which just return the values:
- $__timeFrom() -> to_timestamp(1492750877)
- $__timeTo() -> to_timestamp(1492750877)
- $__unixEpochFrom() -> 1492750877
- $__unixEpochTo() -> 1492750877
</pre>
</div>
</div>
<div class="gf-form" ng-show="ctrl.lastQueryError">
<pre class="gf-form-pre alert alert-error">{{ctrl.lastQueryError}}</pre>
</div>
</query-editor-row>
| public/app/plugins/datasource/postgres/partials/query.editor.html | 1 | https://github.com/grafana/grafana/commit/34da0711abf93fa54376a21452660d2a9f4545df | [
0.00754962395876646,
0.0011336409952491522,
0.00016909830446820706,
0.00017311066039837897,
0.002426908351480961
] |
{
"id": 6,
"code_window": [
"\t\t\treturn \"\", fmt.Errorf(\"macro %v needs time column and interval\", name)\n",
"\t\t}\n",
"\t\treturn fmt.Sprintf(\"(extract(epoch from \\\"%s\\\")/extract(epoch from %s::interval))::int*extract(epoch from %s::interval)\", args[0], args[1], args[1]), nil\n",
"\tcase \"__unixEpochFilter\":\n",
"\t\tif len(args) == 0 {\n",
"\t\t\treturn \"\", fmt.Errorf(\"missing time column argument for macro %v\", name)\n",
"\t\t}\n",
"\t\treturn fmt.Sprintf(\"%s >= %d AND %s <= %d\", args[0], uint64(m.TimeRange.GetFromAsMsEpoch()/1000), args[0], uint64(m.TimeRange.GetToAsMsEpoch()/1000)), nil\n"
],
"labels": [
"keep",
"keep",
"replace",
"keep",
"keep",
"keep",
"keep",
"keep"
],
"after_edit": [
"\t\tinterval, err := time.ParseDuration(strings.Trim(args[1], `' `))\n",
"\t\tif err != nil {\n",
"\t\t\treturn \"\", fmt.Errorf(\"error parsing interval %v\", args[1])\n",
"\t\t}\n",
"\t\treturn fmt.Sprintf(\"(extract(epoch from \\\"%s\\\")/%v)::bigint*%v\", args[0], interval.Seconds(), interval.Seconds()), nil\n"
],
"file_path": "pkg/tsdb/postgres/macros.go",
"type": "replace",
"edit_start_line_idx": 85
} | a.{foo: bar} | vendor/github.com/jmespath/go-jmespath/fuzz/corpus/expr-553 | 0 | https://github.com/grafana/grafana/commit/34da0711abf93fa54376a21452660d2a9f4545df | [
0.0001662493305047974,
0.0001662493305047974,
0.0001662493305047974,
0.0001662493305047974,
0
] |
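The hunk above swaps the interval-cast expansion of $__timeGroup for one computed with time.ParseDuration. A minimal standalone sketch of that new expansion (stdlib only; in the real pkg/tsdb/postgres/macros.go this lives inside evaluateMacro and also needs the extra time and strings imports):

package main

import (
	"fmt"
	"strings"
	"time"
)

// timeGroupExpr mirrors the after_edit above: the interval argument (e.g. "'5m'")
// is trimmed and parsed with time.ParseDuration, and the epoch is bucketed by
// whole seconds instead of casting the literal to a Postgres interval.
func timeGroupExpr(column, intervalArg string) (string, error) {
	interval, err := time.ParseDuration(strings.Trim(intervalArg, `' `))
	if err != nil {
		return "", fmt.Errorf("error parsing interval %v", intervalArg)
	}
	return fmt.Sprintf("(extract(epoch from \"%s\")/%v)::bigint*%v",
		column, interval.Seconds(), interval.Seconds()), nil
}

func main() {
	expr, err := timeGroupExpr("time_column", "'5m'")
	if err != nil {
		panic(err)
	}
	fmt.Println(expr) // (extract(epoch from "time_column")/300)::bigint*300
}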
{
"id": 6,
"code_window": [
"\t\t\treturn \"\", fmt.Errorf(\"macro %v needs time column and interval\", name)\n",
"\t\t}\n",
"\t\treturn fmt.Sprintf(\"(extract(epoch from \\\"%s\\\")/extract(epoch from %s::interval))::int*extract(epoch from %s::interval)\", args[0], args[1], args[1]), nil\n",
"\tcase \"__unixEpochFilter\":\n",
"\t\tif len(args) == 0 {\n",
"\t\t\treturn \"\", fmt.Errorf(\"missing time column argument for macro %v\", name)\n",
"\t\t}\n",
"\t\treturn fmt.Sprintf(\"%s >= %d AND %s <= %d\", args[0], uint64(m.TimeRange.GetFromAsMsEpoch()/1000), args[0], uint64(m.TimeRange.GetToAsMsEpoch()/1000)), nil\n"
],
"labels": [
"keep",
"keep",
"replace",
"keep",
"keep",
"keep",
"keep",
"keep"
],
"after_edit": [
"\t\tinterval, err := time.ParseDuration(strings.Trim(args[1], `' `))\n",
"\t\tif err != nil {\n",
"\t\t\treturn \"\", fmt.Errorf(\"error parsing interval %v\", args[1])\n",
"\t\t}\n",
"\t\treturn fmt.Sprintf(\"(extract(epoch from \\\"%s\\\")/%v)::bigint*%v\", args[0], interval.Seconds(), interval.Seconds()), nil\n"
],
"file_path": "pkg/tsdb/postgres/macros.go",
"type": "replace",
"edit_start_line_idx": 85
} | package session
import (
"crypto/tls"
"crypto/x509"
"fmt"
"io"
"io/ioutil"
"net/http"
"os"
"github.com/aws/aws-sdk-go/aws"
"github.com/aws/aws-sdk-go/aws/awserr"
"github.com/aws/aws-sdk-go/aws/client"
"github.com/aws/aws-sdk-go/aws/corehandlers"
"github.com/aws/aws-sdk-go/aws/credentials"
"github.com/aws/aws-sdk-go/aws/credentials/stscreds"
"github.com/aws/aws-sdk-go/aws/defaults"
"github.com/aws/aws-sdk-go/aws/endpoints"
"github.com/aws/aws-sdk-go/aws/request"
)
// A Session provides a central location to create service clients from and
// store configurations and request handlers for those services.
//
// Sessions are safe to create service clients concurrently, but it is not safe
// to mutate the Session concurrently.
//
// The Session satisfies the service client's client.ClientConfigProvider.
type Session struct {
Config *aws.Config
Handlers request.Handlers
}
// New creates a new instance of the handlers merging in the provided configs
// on top of the SDK's default configurations. Once the Session is created it
// can be mutated to modify the Config or Handlers. The Session is safe to be
// read concurrently, but it should not be written to concurrently.
//
// If the AWS_SDK_LOAD_CONFIG environment is set to a truthy value, the New
// method could now encounter an error when loading the configuration. When
// The environment variable is set, and an error occurs, New will return a
// session that will fail all requests reporting the error that occurred while
// loading the session. Use NewSession to get the error when creating the
// session.
//
// If the AWS_SDK_LOAD_CONFIG environment variable is set to a truthy value
// the shared config file (~/.aws/config) will also be loaded, in addition to
// the shared credentials file (~/.aws/credentials). Values set in both the
// shared config, and shared credentials will be taken from the shared
// credentials file.
//
// Deprecated: Use NewSession functions to create sessions instead. NewSession
// has the same functionality as New except an error can be returned when the
// func is called instead of waiting to receive an error until a request is made.
func New(cfgs ...*aws.Config) *Session {
// load initial config from environment
envCfg := loadEnvConfig()
if envCfg.EnableSharedConfig {
s, err := newSession(Options{}, envCfg, cfgs...)
if err != nil {
// Old session.New expected all errors to be discovered when
// a request is made, and would report the errors then. This
// needs to be replicated if an error occurs while creating
// the session.
msg := "failed to create session with AWS_SDK_LOAD_CONFIG enabled. " +
"Use session.NewSession to handle errors occurring during session creation."
// Session creation failed, need to report the error and prevent
// any requests from succeeding.
s = &Session{Config: defaults.Config()}
s.Config.MergeIn(cfgs...)
s.Config.Logger.Log("ERROR:", msg, "Error:", err)
s.Handlers.Validate.PushBack(func(r *request.Request) {
r.Error = err
})
}
return s
}
return deprecatedNewSession(cfgs...)
}
// NewSession returns a new Session created from SDK defaults, config files,
// environment, and user provided config files. Once the Session is created
// it can be mutated to modify the Config or Handlers. The Session is safe to
// be read concurrently, but it should not be written to concurrently.
//
// If the AWS_SDK_LOAD_CONFIG environment variable is set to a truthy value
// the shared config file (~/.aws/config) will also be loaded in addition to
// the shared credentials file (~/.aws/credentials). Values set in both the
// shared config, and shared credentials will be taken from the shared
// credentials file. Enabling the Shared Config will also allow the Session
// to be built with retrieving credentials with AssumeRole set in the config.
//
// See the NewSessionWithOptions func for information on how to override or
// control through code how the Session will be created. Such as specifying the
// config profile, and controlling if shared config is enabled or not.
func NewSession(cfgs ...*aws.Config) (*Session, error) {
opts := Options{}
opts.Config.MergeIn(cfgs...)
return NewSessionWithOptions(opts)
}
// SharedConfigState provides the ability to optionally override the state
// of the session's creation based on the shared config being enabled or
// disabled.
type SharedConfigState int
const (
// SharedConfigStateFromEnv does not override any state of the
// AWS_SDK_LOAD_CONFIG env var. It is the default value of the
// SharedConfigState type.
SharedConfigStateFromEnv SharedConfigState = iota
// SharedConfigDisable overrides the AWS_SDK_LOAD_CONFIG env var value
// and disables the shared config functionality.
SharedConfigDisable
// SharedConfigEnable overrides the AWS_SDK_LOAD_CONFIG env var value
// and enables the shared config functionality.
SharedConfigEnable
)
// Options provides the means to control how a Session is created and what
// configuration values will be loaded.
//
type Options struct {
// Provides config values for the SDK to use when creating service clients
// and making API requests to services. Any value set in with this field
// will override the associated value provided by the SDK defaults,
// environment or config files where relevant.
//
// If not set, configuration values from from SDK defaults, environment,
// config will be used.
Config aws.Config
// Overrides the config profile the Session should be created from. If not
// set the value of the environment variable will be loaded (AWS_PROFILE,
// or AWS_DEFAULT_PROFILE if the Shared Config is enabled).
//
// If not set and environment variables are not set the "default"
// (DefaultSharedConfigProfile) will be used as the profile to load the
// session config from.
Profile string
// Instructs how the Session will be created based on the AWS_SDK_LOAD_CONFIG
// environment variable. By default a Session will be created using the
// value provided by the AWS_SDK_LOAD_CONFIG environment variable.
//
// Setting this value to SharedConfigEnable or SharedConfigDisable
// will allow you to override the AWS_SDK_LOAD_CONFIG environment variable
// and enable or disable the shared config functionality.
SharedConfigState SharedConfigState
// Ordered list of files the session will load configuration from.
// It will override environment variable AWS_SHARED_CREDENTIALS_FILE, AWS_CONFIG_FILE.
SharedConfigFiles []string
// When the SDK's shared config is configured to assume a role with MFA
// this option is required in order to provide the mechanism that will
// retrieve the MFA token. There is no default value for this field. If
// it is not set an error will be returned when creating the session.
//
// This token provider will be called when ever the assumed role's
// credentials need to be refreshed. Within the context of service clients
// all sharing the same session the SDK will ensure calls to the token
// provider are atomic. When sharing a token provider across multiple
// sessions additional synchronization logic is needed to ensure the
// token providers do not introduce race conditions. It is recommend to
// share the session where possible.
//
// stscreds.StdinTokenProvider is a basic implementation that will prompt
// from stdin for the MFA token code.
//
// This field is only used if the shared configuration is enabled, and
// the config enables assume role wit MFA via the mfa_serial field.
AssumeRoleTokenProvider func() (string, error)
// Reader for a custom Credentials Authority (CA) bundle in PEM format that
// the SDK will use instead of the default system's root CA bundle. Use this
// only if you want to replace the CA bundle the SDK uses for TLS requests.
//
// Enabling this option will attempt to merge the Transport into the SDK's HTTP
// client. If the client's Transport is not a http.Transport an error will be
// returned. If the Transport's TLS config is set this option will cause the SDK
// to overwrite the Transport's TLS config's RootCAs value. If the CA
// bundle reader contains multiple certificates all of them will be loaded.
//
// The Session option CustomCABundle is also available when creating sessions
// to also enable this feature. CustomCABundle session option field has priority
// over the AWS_CA_BUNDLE environment variable, and will be used if both are set.
CustomCABundle io.Reader
}
// NewSessionWithOptions returns a new Session created from SDK defaults, config files,
// environment, and user provided config files. This func uses the Options
// values to configure how the Session is created.
//
// If the AWS_SDK_LOAD_CONFIG environment variable is set to a truthy value
// the shared config file (~/.aws/config) will also be loaded in addition to
// the shared credentials file (~/.aws/credentials). Values set in both the
// shared config, and shared credentials will be taken from the shared
// credentials file. Enabling the Shared Config will also allow the Session
// to be built with retrieving credentials with AssumeRole set in the config.
//
// // Equivalent to session.New
// sess := session.Must(session.NewSessionWithOptions(session.Options{}))
//
// // Specify profile to load for the session's config
// sess := session.Must(session.NewSessionWithOptions(session.Options{
// Profile: "profile_name",
// }))
//
// // Specify profile for config and region for requests
// sess := session.Must(session.NewSessionWithOptions(session.Options{
// Config: aws.Config{Region: aws.String("us-east-1")},
// Profile: "profile_name",
// }))
//
// // Force enable Shared Config support
// sess := session.Must(session.NewSessionWithOptions(session.Options{
// SharedConfigState: session.SharedConfigEnable,
// }))
func NewSessionWithOptions(opts Options) (*Session, error) {
var envCfg envConfig
if opts.SharedConfigState == SharedConfigEnable {
envCfg = loadSharedEnvConfig()
} else {
envCfg = loadEnvConfig()
}
if len(opts.Profile) > 0 {
envCfg.Profile = opts.Profile
}
switch opts.SharedConfigState {
case SharedConfigDisable:
envCfg.EnableSharedConfig = false
case SharedConfigEnable:
envCfg.EnableSharedConfig = true
}
if len(envCfg.SharedCredentialsFile) == 0 {
envCfg.SharedCredentialsFile = defaults.SharedCredentialsFilename()
}
if len(envCfg.SharedConfigFile) == 0 {
envCfg.SharedConfigFile = defaults.SharedConfigFilename()
}
// Only use AWS_CA_BUNDLE if session option is not provided.
if len(envCfg.CustomCABundle) != 0 && opts.CustomCABundle == nil {
f, err := os.Open(envCfg.CustomCABundle)
if err != nil {
return nil, awserr.New("LoadCustomCABundleError",
"failed to open custom CA bundle PEM file", err)
}
defer f.Close()
opts.CustomCABundle = f
}
return newSession(opts, envCfg, &opts.Config)
}
// Must is a helper function to ensure the Session is valid and there was no
// error when calling a NewSession function.
//
// This helper is intended to be used in variable initialization to load the
// Session and configuration at startup. Such as:
//
// var sess = session.Must(session.NewSession())
func Must(sess *Session, err error) *Session {
if err != nil {
panic(err)
}
return sess
}
func deprecatedNewSession(cfgs ...*aws.Config) *Session {
cfg := defaults.Config()
handlers := defaults.Handlers()
// Apply the passed in configs so the configuration can be applied to the
// default credential chain
cfg.MergeIn(cfgs...)
if cfg.EndpointResolver == nil {
// An endpoint resolver is required for a session to be able to provide
// endpoints for service client configurations.
cfg.EndpointResolver = endpoints.DefaultResolver()
}
cfg.Credentials = defaults.CredChain(cfg, handlers)
// Reapply any passed in configs to override credentials if set
cfg.MergeIn(cfgs...)
s := &Session{
Config: cfg,
Handlers: handlers,
}
initHandlers(s)
return s
}
func newSession(opts Options, envCfg envConfig, cfgs ...*aws.Config) (*Session, error) {
cfg := defaults.Config()
handlers := defaults.Handlers()
// Get a merged version of the user provided config to determine if
// credentials were.
userCfg := &aws.Config{}
userCfg.MergeIn(cfgs...)
// Ordered config files will be loaded in with later files overwriting
// previous config file values.
var cfgFiles []string
if opts.SharedConfigFiles != nil {
cfgFiles = opts.SharedConfigFiles
} else {
cfgFiles = []string{envCfg.SharedConfigFile, envCfg.SharedCredentialsFile}
if !envCfg.EnableSharedConfig {
// The shared config file (~/.aws/config) is only loaded if instructed
// to load via the envConfig.EnableSharedConfig (AWS_SDK_LOAD_CONFIG).
cfgFiles = cfgFiles[1:]
}
}
// Load additional config from file(s)
sharedCfg, err := loadSharedConfig(envCfg.Profile, cfgFiles)
if err != nil {
return nil, err
}
if err := mergeConfigSrcs(cfg, userCfg, envCfg, sharedCfg, handlers, opts); err != nil {
return nil, err
}
s := &Session{
Config: cfg,
Handlers: handlers,
}
initHandlers(s)
// Setup HTTP client with custom cert bundle if enabled
if opts.CustomCABundle != nil {
if err := loadCustomCABundle(s, opts.CustomCABundle); err != nil {
return nil, err
}
}
return s, nil
}
func loadCustomCABundle(s *Session, bundle io.Reader) error {
var t *http.Transport
switch v := s.Config.HTTPClient.Transport.(type) {
case *http.Transport:
t = v
default:
if s.Config.HTTPClient.Transport != nil {
return awserr.New("LoadCustomCABundleError",
"unable to load custom CA bundle, HTTPClient's transport unsupported type", nil)
}
}
if t == nil {
t = &http.Transport{}
}
p, err := loadCertPool(bundle)
if err != nil {
return err
}
if t.TLSClientConfig == nil {
t.TLSClientConfig = &tls.Config{}
}
t.TLSClientConfig.RootCAs = p
s.Config.HTTPClient.Transport = t
return nil
}
func loadCertPool(r io.Reader) (*x509.CertPool, error) {
b, err := ioutil.ReadAll(r)
if err != nil {
return nil, awserr.New("LoadCustomCABundleError",
"failed to read custom CA bundle PEM file", err)
}
p := x509.NewCertPool()
if !p.AppendCertsFromPEM(b) {
return nil, awserr.New("LoadCustomCABundleError",
"failed to load custom CA bundle PEM file", err)
}
return p, nil
}
func mergeConfigSrcs(cfg, userCfg *aws.Config, envCfg envConfig, sharedCfg sharedConfig, handlers request.Handlers, sessOpts Options) error {
// Merge in user provided configuration
cfg.MergeIn(userCfg)
// Region if not already set by user
if len(aws.StringValue(cfg.Region)) == 0 {
if len(envCfg.Region) > 0 {
cfg.WithRegion(envCfg.Region)
} else if envCfg.EnableSharedConfig && len(sharedCfg.Region) > 0 {
cfg.WithRegion(sharedCfg.Region)
}
}
// Configure credentials if not already set
if cfg.Credentials == credentials.AnonymousCredentials && userCfg.Credentials == nil {
if len(envCfg.Creds.AccessKeyID) > 0 {
cfg.Credentials = credentials.NewStaticCredentialsFromCreds(
envCfg.Creds,
)
} else if envCfg.EnableSharedConfig && len(sharedCfg.AssumeRole.RoleARN) > 0 && sharedCfg.AssumeRoleSource != nil {
cfgCp := *cfg
cfgCp.Credentials = credentials.NewStaticCredentialsFromCreds(
sharedCfg.AssumeRoleSource.Creds,
)
if len(sharedCfg.AssumeRole.MFASerial) > 0 && sessOpts.AssumeRoleTokenProvider == nil {
// AssumeRole Token provider is required if doing Assume Role
// with MFA.
return AssumeRoleTokenProviderNotSetError{}
}
cfg.Credentials = stscreds.NewCredentials(
&Session{
Config: &cfgCp,
Handlers: handlers.Copy(),
},
sharedCfg.AssumeRole.RoleARN,
func(opt *stscreds.AssumeRoleProvider) {
opt.RoleSessionName = sharedCfg.AssumeRole.RoleSessionName
// Assume role with external ID
if len(sharedCfg.AssumeRole.ExternalID) > 0 {
opt.ExternalID = aws.String(sharedCfg.AssumeRole.ExternalID)
}
// Assume role with MFA
if len(sharedCfg.AssumeRole.MFASerial) > 0 {
opt.SerialNumber = aws.String(sharedCfg.AssumeRole.MFASerial)
opt.TokenProvider = sessOpts.AssumeRoleTokenProvider
}
},
)
} else if len(sharedCfg.Creds.AccessKeyID) > 0 {
cfg.Credentials = credentials.NewStaticCredentialsFromCreds(
sharedCfg.Creds,
)
} else {
// Fallback to default credentials provider, include mock errors
// for the credential chain so user can identify why credentials
// failed to be retrieved.
cfg.Credentials = credentials.NewCredentials(&credentials.ChainProvider{
VerboseErrors: aws.BoolValue(cfg.CredentialsChainVerboseErrors),
Providers: []credentials.Provider{
&credProviderError{Err: awserr.New("EnvAccessKeyNotFound", "failed to find credentials in the environment.", nil)},
&credProviderError{Err: awserr.New("SharedCredsLoad", fmt.Sprintf("failed to load profile, %s.", envCfg.Profile), nil)},
defaults.RemoteCredProvider(*cfg, handlers),
},
})
}
}
return nil
}
// AssumeRoleTokenProviderNotSetError is an error returned when creating a session when the
// MFAToken option is not set when shared config is configured load assume a
// role with an MFA token.
type AssumeRoleTokenProviderNotSetError struct{}
// Code is the short id of the error.
func (e AssumeRoleTokenProviderNotSetError) Code() string {
return "AssumeRoleTokenProviderNotSetError"
}
// Message is the description of the error
func (e AssumeRoleTokenProviderNotSetError) Message() string {
return fmt.Sprintf("assume role with MFA enabled, but AssumeRoleTokenProvider session option not set.")
}
// OrigErr is the underlying error that caused the failure.
func (e AssumeRoleTokenProviderNotSetError) OrigErr() error {
return nil
}
// Error satisfies the error interface.
func (e AssumeRoleTokenProviderNotSetError) Error() string {
return awserr.SprintError(e.Code(), e.Message(), "", nil)
}
type credProviderError struct {
Err error
}
var emptyCreds = credentials.Value{}
func (c credProviderError) Retrieve() (credentials.Value, error) {
return credentials.Value{}, c.Err
}
func (c credProviderError) IsExpired() bool {
return true
}
func initHandlers(s *Session) {
// Add the Validate parameter handler if it is not disabled.
s.Handlers.Validate.Remove(corehandlers.ValidateParametersHandler)
if !aws.BoolValue(s.Config.DisableParamValidation) {
s.Handlers.Validate.PushBackNamed(corehandlers.ValidateParametersHandler)
}
}
// Copy creates and returns a copy of the current Session, coping the config
// and handlers. If any additional configs are provided they will be merged
// on top of the Session's copied config.
//
// // Create a copy of the current Session, configured for the us-west-2 region.
// sess.Copy(&aws.Config{Region: aws.String("us-west-2")})
func (s *Session) Copy(cfgs ...*aws.Config) *Session {
newSession := &Session{
Config: s.Config.Copy(cfgs...),
Handlers: s.Handlers.Copy(),
}
initHandlers(newSession)
return newSession
}
// ClientConfig satisfies the client.ConfigProvider interface and is used to
// configure the service client instances. Passing the Session to the service
// client's constructor (New) will use this method to configure the client.
func (s *Session) ClientConfig(serviceName string, cfgs ...*aws.Config) client.Config {
// Backwards compatibility, the error will be eaten if user calls ClientConfig
// directly. All SDK services will use ClientconfigWithError.
cfg, _ := s.clientConfigWithErr(serviceName, cfgs...)
return cfg
}
func (s *Session) clientConfigWithErr(serviceName string, cfgs ...*aws.Config) (client.Config, error) {
s = s.Copy(cfgs...)
var resolved endpoints.ResolvedEndpoint
var err error
region := aws.StringValue(s.Config.Region)
if endpoint := aws.StringValue(s.Config.Endpoint); len(endpoint) != 0 {
resolved.URL = endpoints.AddScheme(endpoint, aws.BoolValue(s.Config.DisableSSL))
resolved.SigningRegion = region
} else {
resolved, err = s.Config.EndpointResolver.EndpointFor(
serviceName, region,
func(opt *endpoints.Options) {
opt.DisableSSL = aws.BoolValue(s.Config.DisableSSL)
opt.UseDualStack = aws.BoolValue(s.Config.UseDualStack)
// Support the condition where the service is modeled but its
// endpoint metadata is not available.
opt.ResolveUnknownService = true
},
)
}
return client.Config{
Config: s.Config,
Handlers: s.Handlers,
Endpoint: resolved.URL,
SigningRegion: resolved.SigningRegion,
SigningName: resolved.SigningName,
}, err
}
// ClientConfigNoResolveEndpoint is the same as ClientConfig with the exception
// that the EndpointResolver will not be used to resolve the endpoint. The only
// endpoint set must come from the aws.Config.Endpoint field.
func (s *Session) ClientConfigNoResolveEndpoint(cfgs ...*aws.Config) client.Config {
s = s.Copy(cfgs...)
var resolved endpoints.ResolvedEndpoint
region := aws.StringValue(s.Config.Region)
if ep := aws.StringValue(s.Config.Endpoint); len(ep) > 0 {
resolved.URL = endpoints.AddScheme(ep, aws.BoolValue(s.Config.DisableSSL))
resolved.SigningRegion = region
}
return client.Config{
Config: s.Config,
Handlers: s.Handlers,
Endpoint: resolved.URL,
SigningRegion: resolved.SigningRegion,
SigningName: resolved.SigningName,
}
}
| vendor/github.com/aws/aws-sdk-go/aws/session/session.go | 0 | https://github.com/grafana/grafana/commit/34da0711abf93fa54376a21452660d2a9f4545df | [
0.004313213285058737,
0.00024170278629753739,
0.0001594558998476714,
0.00016767480701673776,
0.0005267164669930935
] |
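session.go above already documents its own usage in the comments; as a compact, hedged example (the us-east-1 region is an arbitrary placeholder), creating a session with shared config force-enabled looks like:

package main

import (
	"fmt"

	"github.com/aws/aws-sdk-go/aws"
	"github.com/aws/aws-sdk-go/aws/session"
)

func main() {
	// Force-enable shared config (~/.aws/config) regardless of AWS_SDK_LOAD_CONFIG,
	// as described in the Options documentation above.
	sess := session.Must(session.NewSessionWithOptions(session.Options{
		Config:            aws.Config{Region: aws.String("us-east-1")},
		SharedConfigState: session.SharedConfigEnable,
	}))
	fmt.Println("resolved region:", aws.StringValue(sess.Config.Region))
}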
{
"id": 6,
"code_window": [
"\t\t\treturn \"\", fmt.Errorf(\"macro %v needs time column and interval\", name)\n",
"\t\t}\n",
"\t\treturn fmt.Sprintf(\"(extract(epoch from \\\"%s\\\")/extract(epoch from %s::interval))::int*extract(epoch from %s::interval)\", args[0], args[1], args[1]), nil\n",
"\tcase \"__unixEpochFilter\":\n",
"\t\tif len(args) == 0 {\n",
"\t\t\treturn \"\", fmt.Errorf(\"missing time column argument for macro %v\", name)\n",
"\t\t}\n",
"\t\treturn fmt.Sprintf(\"%s >= %d AND %s <= %d\", args[0], uint64(m.TimeRange.GetFromAsMsEpoch()/1000), args[0], uint64(m.TimeRange.GetToAsMsEpoch()/1000)), nil\n"
],
"labels": [
"keep",
"keep",
"replace",
"keep",
"keep",
"keep",
"keep",
"keep"
],
"after_edit": [
"\t\tinterval, err := time.ParseDuration(strings.Trim(args[1], `' `))\n",
"\t\tif err != nil {\n",
"\t\t\treturn \"\", fmt.Errorf(\"error parsing interval %v\", args[1])\n",
"\t\t}\n",
"\t\treturn fmt.Sprintf(\"(extract(epoch from \\\"%s\\\")/%v)::bigint*%v\", args[0], interval.Seconds(), interval.Seconds()), nil\n"
],
"file_path": "pkg/tsdb/postgres/macros.go",
"type": "replace",
"edit_start_line_idx": 85
} | // +build ignore
package main
import (
"bytes"
"fmt"
"go/ast"
"go/parser"
"go/printer"
"go/token"
"io"
"io/ioutil"
"log"
"os"
"reflect"
"strings"
"unicode"
"unicode/utf8"
)
var inFiles = []string{"cpuid.go", "cpuid_test.go"}
var copyFiles = []string{"cpuid_amd64.s", "cpuid_386.s", "detect_ref.go", "detect_intel.go"}
var fileSet = token.NewFileSet()
var reWrites = []rewrite{
initRewrite("CPUInfo -> cpuInfo"),
initRewrite("Vendor -> vendor"),
initRewrite("Flags -> flags"),
initRewrite("Detect -> detect"),
initRewrite("CPU -> cpu"),
}
var excludeNames = map[string]bool{"string": true, "join": true, "trim": true,
// cpuid_test.go
"t": true, "println": true, "logf": true, "log": true, "fatalf": true, "fatal": true,
}
var excludePrefixes = []string{"test", "benchmark"}
func main() {
Package := "private"
parserMode := parser.ParseComments
exported := make(map[string]rewrite)
for _, file := range inFiles {
in, err := os.Open(file)
if err != nil {
log.Fatalf("opening input", err)
}
src, err := ioutil.ReadAll(in)
if err != nil {
log.Fatalf("reading input", err)
}
astfile, err := parser.ParseFile(fileSet, file, src, parserMode)
if err != nil {
log.Fatalf("parsing input", err)
}
for _, rw := range reWrites {
astfile = rw(astfile)
}
// Inspect the AST and print all identifiers and literals.
var startDecl token.Pos
var endDecl token.Pos
ast.Inspect(astfile, func(n ast.Node) bool {
var s string
switch x := n.(type) {
case *ast.Ident:
if x.IsExported() {
t := strings.ToLower(x.Name)
for _, pre := range excludePrefixes {
if strings.HasPrefix(t, pre) {
return true
}
}
if excludeNames[t] != true {
//if x.Pos() > startDecl && x.Pos() < endDecl {
exported[x.Name] = initRewrite(x.Name + " -> " + t)
}
}
case *ast.GenDecl:
if x.Tok == token.CONST && x.Lparen > 0 {
startDecl = x.Lparen
endDecl = x.Rparen
// fmt.Printf("Decl:%s -> %s\n", fileSet.Position(startDecl), fileSet.Position(endDecl))
}
}
if s != "" {
fmt.Printf("%s:\t%s\n", fileSet.Position(n.Pos()), s)
}
return true
})
for _, rw := range exported {
astfile = rw(astfile)
}
var buf bytes.Buffer
printer.Fprint(&buf, fileSet, astfile)
// Remove package documentation and insert information
s := buf.String()
ind := strings.Index(buf.String(), "\npackage cpuid")
s = s[ind:]
s = "// Generated, DO NOT EDIT,\n" +
"// but copy it to your own project and rename the package.\n" +
"// See more at http://github.com/klauspost/cpuid\n" +
s
outputName := Package + string(os.PathSeparator) + file
err = ioutil.WriteFile(outputName, []byte(s), 0644)
if err != nil {
log.Fatalf("writing output: %s", err)
}
log.Println("Generated", outputName)
}
for _, file := range copyFiles {
dst := ""
if strings.HasPrefix(file, "cpuid") {
dst = Package + string(os.PathSeparator) + file
} else {
dst = Package + string(os.PathSeparator) + "cpuid_" + file
}
err := copyFile(file, dst)
if err != nil {
log.Fatalf("copying file: %s", err)
}
log.Println("Copied", dst)
}
}
// CopyFile copies a file from src to dst. If src and dst files exist, and are
// the same, then return success. Copy the file contents from src to dst.
func copyFile(src, dst string) (err error) {
sfi, err := os.Stat(src)
if err != nil {
return
}
if !sfi.Mode().IsRegular() {
// cannot copy non-regular files (e.g., directories,
// symlinks, devices, etc.)
return fmt.Errorf("CopyFile: non-regular source file %s (%q)", sfi.Name(), sfi.Mode().String())
}
dfi, err := os.Stat(dst)
if err != nil {
if !os.IsNotExist(err) {
return
}
} else {
if !(dfi.Mode().IsRegular()) {
return fmt.Errorf("CopyFile: non-regular destination file %s (%q)", dfi.Name(), dfi.Mode().String())
}
if os.SameFile(sfi, dfi) {
return
}
}
err = copyFileContents(src, dst)
return
}
// copyFileContents copies the contents of the file named src to the file named
// by dst. The file will be created if it does not already exist. If the
// destination file exists, all it's contents will be replaced by the contents
// of the source file.
func copyFileContents(src, dst string) (err error) {
in, err := os.Open(src)
if err != nil {
return
}
defer in.Close()
out, err := os.Create(dst)
if err != nil {
return
}
defer func() {
cerr := out.Close()
if err == nil {
err = cerr
}
}()
if _, err = io.Copy(out, in); err != nil {
return
}
err = out.Sync()
return
}
type rewrite func(*ast.File) *ast.File
// Mostly copied from gofmt
func initRewrite(rewriteRule string) rewrite {
f := strings.Split(rewriteRule, "->")
if len(f) != 2 {
fmt.Fprintf(os.Stderr, "rewrite rule must be of the form 'pattern -> replacement'\n")
os.Exit(2)
}
pattern := parseExpr(f[0], "pattern")
replace := parseExpr(f[1], "replacement")
return func(p *ast.File) *ast.File { return rewriteFile(pattern, replace, p) }
}
// parseExpr parses s as an expression.
// It might make sense to expand this to allow statement patterns,
// but there are problems with preserving formatting and also
// with what a wildcard for a statement looks like.
func parseExpr(s, what string) ast.Expr {
x, err := parser.ParseExpr(s)
if err != nil {
fmt.Fprintf(os.Stderr, "parsing %s %s at %s\n", what, s, err)
os.Exit(2)
}
return x
}
// Keep this function for debugging.
/*
func dump(msg string, val reflect.Value) {
fmt.Printf("%s:\n", msg)
ast.Print(fileSet, val.Interface())
fmt.Println()
}
*/
// rewriteFile applies the rewrite rule 'pattern -> replace' to an entire file.
func rewriteFile(pattern, replace ast.Expr, p *ast.File) *ast.File {
cmap := ast.NewCommentMap(fileSet, p, p.Comments)
m := make(map[string]reflect.Value)
pat := reflect.ValueOf(pattern)
repl := reflect.ValueOf(replace)
var rewriteVal func(val reflect.Value) reflect.Value
rewriteVal = func(val reflect.Value) reflect.Value {
// don't bother if val is invalid to start with
if !val.IsValid() {
return reflect.Value{}
}
for k := range m {
delete(m, k)
}
val = apply(rewriteVal, val)
if match(m, pat, val) {
val = subst(m, repl, reflect.ValueOf(val.Interface().(ast.Node).Pos()))
}
return val
}
r := apply(rewriteVal, reflect.ValueOf(p)).Interface().(*ast.File)
r.Comments = cmap.Filter(r).Comments() // recreate comments list
return r
}
// set is a wrapper for x.Set(y); it protects the caller from panics if x cannot be changed to y.
func set(x, y reflect.Value) {
// don't bother if x cannot be set or y is invalid
if !x.CanSet() || !y.IsValid() {
return
}
defer func() {
if x := recover(); x != nil {
if s, ok := x.(string); ok &&
(strings.Contains(s, "type mismatch") || strings.Contains(s, "not assignable")) {
// x cannot be set to y - ignore this rewrite
return
}
panic(x)
}
}()
x.Set(y)
}
// Values/types for special cases.
var (
objectPtrNil = reflect.ValueOf((*ast.Object)(nil))
scopePtrNil = reflect.ValueOf((*ast.Scope)(nil))
identType = reflect.TypeOf((*ast.Ident)(nil))
objectPtrType = reflect.TypeOf((*ast.Object)(nil))
positionType = reflect.TypeOf(token.NoPos)
callExprType = reflect.TypeOf((*ast.CallExpr)(nil))
scopePtrType = reflect.TypeOf((*ast.Scope)(nil))
)
// apply replaces each AST field x in val with f(x), returning val.
// To avoid extra conversions, f operates on the reflect.Value form.
func apply(f func(reflect.Value) reflect.Value, val reflect.Value) reflect.Value {
if !val.IsValid() {
return reflect.Value{}
}
// *ast.Objects introduce cycles and are likely incorrect after
// rewrite; don't follow them but replace with nil instead
if val.Type() == objectPtrType {
return objectPtrNil
}
// similarly for scopes: they are likely incorrect after a rewrite;
// replace them with nil
if val.Type() == scopePtrType {
return scopePtrNil
}
switch v := reflect.Indirect(val); v.Kind() {
case reflect.Slice:
for i := 0; i < v.Len(); i++ {
e := v.Index(i)
set(e, f(e))
}
case reflect.Struct:
for i := 0; i < v.NumField(); i++ {
e := v.Field(i)
set(e, f(e))
}
case reflect.Interface:
e := v.Elem()
set(v, f(e))
}
return val
}
func isWildcard(s string) bool {
rune, size := utf8.DecodeRuneInString(s)
return size == len(s) && unicode.IsLower(rune)
}
// match returns true if pattern matches val,
// recording wildcard submatches in m.
// If m == nil, match checks whether pattern == val.
func match(m map[string]reflect.Value, pattern, val reflect.Value) bool {
// Wildcard matches any expression. If it appears multiple
// times in the pattern, it must match the same expression
// each time.
if m != nil && pattern.IsValid() && pattern.Type() == identType {
name := pattern.Interface().(*ast.Ident).Name
if isWildcard(name) && val.IsValid() {
// wildcards only match valid (non-nil) expressions.
if _, ok := val.Interface().(ast.Expr); ok && !val.IsNil() {
if old, ok := m[name]; ok {
return match(nil, old, val)
}
m[name] = val
return true
}
}
}
// Otherwise, pattern and val must match recursively.
if !pattern.IsValid() || !val.IsValid() {
return !pattern.IsValid() && !val.IsValid()
}
if pattern.Type() != val.Type() {
return false
}
// Special cases.
switch pattern.Type() {
case identType:
// For identifiers, only the names need to match
// (and none of the other *ast.Object information).
// This is a common case, handle it all here instead
// of recursing down any further via reflection.
p := pattern.Interface().(*ast.Ident)
v := val.Interface().(*ast.Ident)
return p == nil && v == nil || p != nil && v != nil && p.Name == v.Name
case objectPtrType, positionType:
// object pointers and token positions always match
return true
case callExprType:
// For calls, the Ellipsis fields (token.Position) must
// match since that is how f(x) and f(x...) are different.
// Check them here but fall through for the remaining fields.
p := pattern.Interface().(*ast.CallExpr)
v := val.Interface().(*ast.CallExpr)
if p.Ellipsis.IsValid() != v.Ellipsis.IsValid() {
return false
}
}
p := reflect.Indirect(pattern)
v := reflect.Indirect(val)
if !p.IsValid() || !v.IsValid() {
return !p.IsValid() && !v.IsValid()
}
switch p.Kind() {
case reflect.Slice:
if p.Len() != v.Len() {
return false
}
for i := 0; i < p.Len(); i++ {
if !match(m, p.Index(i), v.Index(i)) {
return false
}
}
return true
case reflect.Struct:
for i := 0; i < p.NumField(); i++ {
if !match(m, p.Field(i), v.Field(i)) {
return false
}
}
return true
case reflect.Interface:
return match(m, p.Elem(), v.Elem())
}
// Handle token integers, etc.
return p.Interface() == v.Interface()
}
// subst returns a copy of pattern with values from m substituted in place
// of wildcards and pos used as the position of tokens from the pattern.
// if m == nil, subst returns a copy of pattern and doesn't change the line
// number information.
func subst(m map[string]reflect.Value, pattern reflect.Value, pos reflect.Value) reflect.Value {
if !pattern.IsValid() {
return reflect.Value{}
}
// Wildcard gets replaced with map value.
if m != nil && pattern.Type() == identType {
name := pattern.Interface().(*ast.Ident).Name
if isWildcard(name) {
if old, ok := m[name]; ok {
return subst(nil, old, reflect.Value{})
}
}
}
if pos.IsValid() && pattern.Type() == positionType {
// use new position only if old position was valid in the first place
if old := pattern.Interface().(token.Pos); !old.IsValid() {
return pattern
}
return pos
}
// Otherwise copy.
switch p := pattern; p.Kind() {
case reflect.Slice:
v := reflect.MakeSlice(p.Type(), p.Len(), p.Len())
for i := 0; i < p.Len(); i++ {
v.Index(i).Set(subst(m, p.Index(i), pos))
}
return v
case reflect.Struct:
v := reflect.New(p.Type()).Elem()
for i := 0; i < p.NumField(); i++ {
v.Field(i).Set(subst(m, p.Field(i), pos))
}
return v
case reflect.Ptr:
v := reflect.New(p.Type()).Elem()
if elem := p.Elem(); elem.IsValid() {
v.Set(subst(m, elem, pos).Addr())
}
return v
case reflect.Interface:
v := reflect.New(p.Type()).Elem()
if elem := p.Elem(); elem.IsValid() {
v.Set(subst(m, elem, pos))
}
return v
}
return pattern
}
| vendor/github.com/klauspost/cpuid/private-gen.go | 0 | https://github.com/grafana/grafana/commit/34da0711abf93fa54376a21452660d2a9f4545df | [
0.0023581862915307283,
0.00026728471857495606,
0.00016114948084577918,
0.00017004783148877323,
0.0003965490614064038
] |
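private-gen.go above drives its identifier renames through rules like "CPUInfo -> cpuInfo", parsed on both sides with parser.ParseExpr. A small, hedged sketch of just that rule-parsing step (standalone, without the AST rewriting):

package main

import (
	"fmt"
	"go/parser"
	"strings"
)

// parseRule splits a "pattern -> replacement" rule the same way initRewrite above
// does, and checks that both sides parse as Go expressions.
func parseRule(rule string) error {
	parts := strings.Split(rule, "->")
	if len(parts) != 2 {
		return fmt.Errorf("rule must be of the form 'pattern -> replacement'")
	}
	for _, side := range parts {
		if _, err := parser.ParseExpr(strings.TrimSpace(side)); err != nil {
			return err
		}
	}
	return nil
}

func main() {
	for _, rule := range []string{"CPUInfo -> cpuInfo", "Detect -> detect", "broken ->"} {
		fmt.Println(rule, "valid:", parseRule(rule) == nil)
	}
}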
{
"id": 7,
"code_window": [
"\t\tConvey(\"interpolate __timeGroup function\", func() {\n",
"\n",
"\t\t\tsql, err := engine.Interpolate(timeRange, \"GROUP BY $__timeGroup(time_column,'5m')\")\n",
"\t\t\tSo(err, ShouldBeNil)\n",
"\n",
"\t\t\tSo(sql, ShouldEqual, \"GROUP BY (extract(epoch from \\\"time_column\\\")/extract(epoch from '5m'::interval))::int*extract(epoch from '5m'::interval)\")\n",
"\t\t})\n",
"\n",
"\t\tConvey(\"interpolate __timeTo function\", func() {\n",
"\t\t\tsql, err := engine.Interpolate(timeRange, \"select $__timeTo(time_column)\")\n"
],
"labels": [
"keep",
"keep",
"keep",
"keep",
"keep",
"replace",
"keep",
"keep",
"keep",
"keep"
],
"after_edit": [
"\t\t\tSo(sql, ShouldEqual, \"GROUP BY (extract(epoch from \\\"time_column\\\")/300)::bigint*300\")\n"
],
"file_path": "pkg/tsdb/postgres/macros_test.go",
"type": "replace",
"edit_start_line_idx": 47
} | package postgres
import (
"fmt"
"regexp"
"strings"
"github.com/grafana/grafana/pkg/tsdb"
)
//const rsString = `(?:"([^"]*)")`;
const rsIdentifier = `([_a-zA-Z0-9]+)`
const sExpr = `\$` + rsIdentifier + `\(([^\)]*)\)`
type PostgresMacroEngine struct {
TimeRange *tsdb.TimeRange
}
func NewPostgresMacroEngine() tsdb.SqlMacroEngine {
return &PostgresMacroEngine{}
}
func (m *PostgresMacroEngine) Interpolate(timeRange *tsdb.TimeRange, sql string) (string, error) {
m.TimeRange = timeRange
rExp, _ := regexp.Compile(sExpr)
var macroError error
sql = replaceAllStringSubmatchFunc(rExp, sql, func(groups []string) string {
res, err := m.evaluateMacro(groups[1], strings.Split(groups[2], ","))
if err != nil && macroError == nil {
macroError = err
return "macro_error()"
}
return res
})
if macroError != nil {
return "", macroError
}
return sql, nil
}
func replaceAllStringSubmatchFunc(re *regexp.Regexp, str string, repl func([]string) string) string {
result := ""
lastIndex := 0
for _, v := range re.FindAllSubmatchIndex([]byte(str), -1) {
groups := []string{}
for i := 0; i < len(v); i += 2 {
groups = append(groups, str[v[i]:v[i+1]])
}
result += str[lastIndex:v[0]] + repl(groups)
lastIndex = v[1]
}
return result + str[lastIndex:]
}
func (m *PostgresMacroEngine) evaluateMacro(name string, args []string) (string, error) {
switch name {
case "__time":
if len(args) == 0 {
return "", fmt.Errorf("missing time column argument for macro %v", name)
}
return fmt.Sprintf("%s AS \"time\"", args[0]), nil
case "__timeEpoch":
if len(args) == 0 {
return "", fmt.Errorf("missing time column argument for macro %v", name)
}
return fmt.Sprintf("extract(epoch from %s) as \"time\"", args[0]), nil
case "__timeFilter":
if len(args) == 0 {
return "", fmt.Errorf("missing time column argument for macro %v", name)
}
return fmt.Sprintf("extract(epoch from %s) BETWEEN %d AND %d", args[0], uint64(m.TimeRange.GetFromAsMsEpoch()/1000), uint64(m.TimeRange.GetToAsMsEpoch()/1000)), nil
case "__timeFrom":
return fmt.Sprintf("to_timestamp(%d)", uint64(m.TimeRange.GetFromAsMsEpoch()/1000)), nil
case "__timeTo":
return fmt.Sprintf("to_timestamp(%d)", uint64(m.TimeRange.GetToAsMsEpoch()/1000)), nil
case "__timeGroup":
if len(args) < 2 {
return "", fmt.Errorf("macro %v needs time column and interval", name)
}
return fmt.Sprintf("(extract(epoch from \"%s\")/extract(epoch from %s::interval))::int*extract(epoch from %s::interval)", args[0], args[1], args[1]), nil
case "__unixEpochFilter":
if len(args) == 0 {
return "", fmt.Errorf("missing time column argument for macro %v", name)
}
return fmt.Sprintf("%s >= %d AND %s <= %d", args[0], uint64(m.TimeRange.GetFromAsMsEpoch()/1000), args[0], uint64(m.TimeRange.GetToAsMsEpoch()/1000)), nil
case "__unixEpochFrom":
return fmt.Sprintf("%d", uint64(m.TimeRange.GetFromAsMsEpoch()/1000)), nil
case "__unixEpochTo":
return fmt.Sprintf("%d", uint64(m.TimeRange.GetToAsMsEpoch()/1000)), nil
default:
return "", fmt.Errorf("Unknown macro %v", name)
}
}
| pkg/tsdb/postgres/macros.go | 1 | https://github.com/grafana/grafana/commit/34da0711abf93fa54376a21452660d2a9f4545df | [
0.1462128907442093,
0.014934380538761616,
0.00016271746426355094,
0.00017143343575298786,
0.04376167431473732
] |
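For context on how macros.go above applies these expansions: Interpolate walks the SQL with the $name(args) regular expression and hands each match to evaluateMacro. A small, hedged usage sketch of that mechanism (a standalone copy of the helper with a dummy replacement, instead of the Grafana tsdb types):

package main

import (
	"fmt"
	"regexp"
)

// Same convention as macros.go above: $identifier(args).
const sExpr = `\$([_a-zA-Z0-9]+)\(([^\)]*)\)`

// replaceAllStringSubmatchFunc follows the helper in the file above: it runs the
// replacement callback once per match, handing it the full match plus its groups.
func replaceAllStringSubmatchFunc(re *regexp.Regexp, str string, repl func([]string) string) string {
	result := ""
	lastIndex := 0
	for _, v := range re.FindAllSubmatchIndex([]byte(str), -1) {
		groups := []string{}
		for i := 0; i < len(v); i += 2 {
			groups = append(groups, str[v[i]:v[i+1]])
		}
		result += str[lastIndex:v[0]] + repl(groups)
		lastIndex = v[1]
	}
	return result + str[lastIndex:]
}

func main() {
	re := regexp.MustCompile(sExpr)
	sql := "SELECT $__time(created_at) FROM logs WHERE $__unixEpochFilter(created_at)"
	out := replaceAllStringSubmatchFunc(re, sql, func(groups []string) string {
		// groups[0] is the whole match, groups[1] the macro name, groups[2] the args.
		return fmt.Sprintf("/* macro %s with args %q */", groups[1], groups[2])
	})
	fmt.Println(out)
}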
{
"id": 7,
"code_window": [
"\t\tConvey(\"interpolate __timeGroup function\", func() {\n",
"\n",
"\t\t\tsql, err := engine.Interpolate(timeRange, \"GROUP BY $__timeGroup(time_column,'5m')\")\n",
"\t\t\tSo(err, ShouldBeNil)\n",
"\n",
"\t\t\tSo(sql, ShouldEqual, \"GROUP BY (extract(epoch from \\\"time_column\\\")/extract(epoch from '5m'::interval))::int*extract(epoch from '5m'::interval)\")\n",
"\t\t})\n",
"\n",
"\t\tConvey(\"interpolate __timeTo function\", func() {\n",
"\t\t\tsql, err := engine.Interpolate(timeRange, \"select $__timeTo(time_column)\")\n"
],
"labels": [
"keep",
"keep",
"keep",
"keep",
"keep",
"replace",
"keep",
"keep",
"keep",
"keep"
],
"after_edit": [
"\t\t\tSo(sql, ShouldEqual, \"GROUP BY (extract(epoch from \\\"time_column\\\")/300)::bigint*300\")\n"
],
"file_path": "pkg/tsdb/postgres/macros_test.go",
"type": "replace",
"edit_start_line_idx": 47
} | // Copyright (c) 2016 Uber Technologies, Inc.
//
// Permission is hereby granted, free of charge, to any person obtaining a copy
// of this software and associated documentation files (the "Software"), to deal
// in the Software without restriction, including without limitation the rights
// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
// copies of the Software, and to permit persons to whom the Software is
// furnished to do so, subject to the following conditions:
//
// The above copyright notice and this permission notice shall be included in
// all copies or substantial portions of the Software.
//
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
// THE SOFTWARE.
package jaeger
import (
"sync"
"sync/atomic"
"time"
"github.com/opentracing/opentracing-go"
"github.com/uber/jaeger-client-go/log"
)
// Reporter is called by the tracer when a span is completed to report the span to the tracing collector.
type Reporter interface {
// Report submits a new span to collectors, possibly asynchronously and/or with buffering.
Report(span *Span)
// Close does a clean shutdown of the reporter, flushing any traces that may be buffered in memory.
Close()
}
// ------------------------------
type nullReporter struct{}
// NewNullReporter creates a no-op reporter that ignores all reported spans.
func NewNullReporter() Reporter {
return &nullReporter{}
}
// Report implements Report() method of Reporter by doing nothing.
func (r *nullReporter) Report(span *Span) {
// no-op
}
// Close implements Close() method of Reporter by doing nothing.
func (r *nullReporter) Close() {
// no-op
}
// ------------------------------
type loggingReporter struct {
logger Logger
}
// NewLoggingReporter creates a reporter that logs all reported spans to provided logger.
func NewLoggingReporter(logger Logger) Reporter {
return &loggingReporter{logger}
}
// Report implements Report() method of Reporter by logging the span to the logger.
func (r *loggingReporter) Report(span *Span) {
r.logger.Infof("Reporting span %+v", span)
}
// Close implements Close() method of Reporter by doing nothing.
func (r *loggingReporter) Close() {
// no-op
}
// ------------------------------
// InMemoryReporter is used for testing, and simply collects spans in memory.
type InMemoryReporter struct {
spans []opentracing.Span
lock sync.Mutex
}
// NewInMemoryReporter creates a reporter that stores spans in memory.
// NOTE: the Tracer should be created with options.PoolSpans = false.
func NewInMemoryReporter() *InMemoryReporter {
return &InMemoryReporter{
spans: make([]opentracing.Span, 0, 10),
}
}
// Report implements Report() method of Reporter by storing the span in the buffer.
func (r *InMemoryReporter) Report(span *Span) {
r.lock.Lock()
r.spans = append(r.spans, span)
r.lock.Unlock()
}
// Close implements Close() method of Reporter by doing nothing.
func (r *InMemoryReporter) Close() {
// no-op
}
// SpansSubmitted returns the number of spans accumulated in the buffer.
func (r *InMemoryReporter) SpansSubmitted() int {
r.lock.Lock()
defer r.lock.Unlock()
return len(r.spans)
}
// GetSpans returns accumulated spans as a copy of the buffer.
func (r *InMemoryReporter) GetSpans() []opentracing.Span {
r.lock.Lock()
defer r.lock.Unlock()
copied := make([]opentracing.Span, len(r.spans))
copy(copied, r.spans)
return copied
}
// Reset clears all accumulated spans.
func (r *InMemoryReporter) Reset() {
r.lock.Lock()
defer r.lock.Unlock()
r.spans = nil
}
// ------------------------------
type compositeReporter struct {
reporters []Reporter
}
// NewCompositeReporter creates a reporter that ignores all reported spans.
func NewCompositeReporter(reporters ...Reporter) Reporter {
return &compositeReporter{reporters: reporters}
}
// Report implements Report() method of Reporter by delegating to each underlying reporter.
func (r *compositeReporter) Report(span *Span) {
for _, reporter := range r.reporters {
reporter.Report(span)
}
}
// Close implements Close() method of Reporter by closing each underlying reporter.
func (r *compositeReporter) Close() {
for _, reporter := range r.reporters {
reporter.Close()
}
}
// ------------------------------
const (
defaultQueueSize = 100
defaultBufferFlushInterval = 10 * time.Second
)
type remoteReporter struct {
// must be first in the struct because `sync/atomic` expects 64-bit alignment.
// Cf. https://github.com/uber/jaeger-client-go/issues/155, https://goo.gl/zW7dgq
queueLength int64
reporterOptions
sender Transport
queue chan *Span
queueDrained sync.WaitGroup
flushSignal chan *sync.WaitGroup
}
// NewRemoteReporter creates a new reporter that sends spans out of process by means of Sender
func NewRemoteReporter(sender Transport, opts ...ReporterOption) Reporter {
options := reporterOptions{}
for _, option := range opts {
option(&options)
}
if options.bufferFlushInterval <= 0 {
options.bufferFlushInterval = defaultBufferFlushInterval
}
if options.logger == nil {
options.logger = log.NullLogger
}
if options.metrics == nil {
options.metrics = NewNullMetrics()
}
if options.queueSize <= 0 {
options.queueSize = defaultQueueSize
}
reporter := &remoteReporter{
reporterOptions: options,
sender: sender,
flushSignal: make(chan *sync.WaitGroup),
queue: make(chan *Span, options.queueSize),
}
go reporter.processQueue()
return reporter
}
// Report implements Report() method of Reporter.
// It passes the span to a background go-routine for submission to Jaeger.
func (r *remoteReporter) Report(span *Span) {
select {
case r.queue <- span:
atomic.AddInt64(&r.queueLength, 1)
default:
r.metrics.ReporterDropped.Inc(1)
}
}
// Close implements Close() method of Reporter by waiting for the queue to be drained.
func (r *remoteReporter) Close() {
r.queueDrained.Add(1)
close(r.queue)
r.queueDrained.Wait()
r.sender.Close()
}
// processQueue reads spans from the queue, converts them to Thrift, and stores them in an internal buffer.
// When the buffer length reaches batchSize, it is flushed by submitting the accumulated spans to Jaeger.
// Buffer also gets flushed automatically every batchFlushInterval seconds, just in case the tracer stopped
// reporting new spans.
func (r *remoteReporter) processQueue() {
timer := time.NewTicker(r.bufferFlushInterval)
for {
select {
case span, ok := <-r.queue:
if ok {
atomic.AddInt64(&r.queueLength, -1)
if flushed, err := r.sender.Append(span); err != nil {
r.metrics.ReporterFailure.Inc(int64(flushed))
r.logger.Error(err.Error())
} else if flushed > 0 {
r.metrics.ReporterSuccess.Inc(int64(flushed))
// to reduce the number of gauge stats, we only emit queue length on flush
r.metrics.ReporterQueueLength.Update(atomic.LoadInt64(&r.queueLength))
}
} else {
// queue closed
timer.Stop()
r.flush()
r.queueDrained.Done()
return
}
case <-timer.C:
r.flush()
case wg := <-r.flushSignal: // for testing
r.flush()
wg.Done()
}
}
}
// flush causes the Sender to flush its accumulated spans and clear the buffer
func (r *remoteReporter) flush() {
if flushed, err := r.sender.Flush(); err != nil {
r.metrics.ReporterFailure.Inc(int64(flushed))
r.logger.Error(err.Error())
} else if flushed > 0 {
r.metrics.ReporterSuccess.Inc(int64(flushed))
}
}
| vendor/github.com/uber/jaeger-client-go/reporter.go | 0 | https://github.com/grafana/grafana/commit/34da0711abf93fa54376a21452660d2a9f4545df | [
0.00040734821232035756,
0.00017778470646589994,
0.00016010140825528651,
0.00017102093261200935,
0.000045218435843707994
] |
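reporter.go above never blocks the caller: Report drops the span when the buffered queue is full. A standalone sketch of that select-with-default pattern, using plain channels instead of the Jaeger types:

package main

import "fmt"

// enqueue mirrors remoteReporter.Report above: try a buffered channel, and if it
// is full, drop the item and count the drop instead of blocking the caller.
func enqueue(queue chan int, item int, dropped *int) {
	select {
	case queue <- item:
	default:
		*dropped++
	}
}

func main() {
	queue := make(chan int, 2) // small buffer to force drops
	dropped := 0
	for i := 0; i < 5; i++ {
		enqueue(queue, i, &dropped)
	}
	fmt.Printf("queued=%d dropped=%d\n", len(queue), dropped)
}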
{
"id": 7,
"code_window": [
"\t\tConvey(\"interpolate __timeGroup function\", func() {\n",
"\n",
"\t\t\tsql, err := engine.Interpolate(timeRange, \"GROUP BY $__timeGroup(time_column,'5m')\")\n",
"\t\t\tSo(err, ShouldBeNil)\n",
"\n",
"\t\t\tSo(sql, ShouldEqual, \"GROUP BY (extract(epoch from \\\"time_column\\\")/extract(epoch from '5m'::interval))::int*extract(epoch from '5m'::interval)\")\n",
"\t\t})\n",
"\n",
"\t\tConvey(\"interpolate __timeTo function\", func() {\n",
"\t\t\tsql, err := engine.Interpolate(timeRange, \"select $__timeTo(time_column)\")\n"
],
"labels": [
"keep",
"keep",
"keep",
"keep",
"keep",
"replace",
"keep",
"keep",
"keep",
"keep"
],
"after_edit": [
"\t\t\tSo(sql, ShouldEqual, \"GROUP BY (extract(epoch from \\\"time_column\\\")/300)::bigint*300\")\n"
],
"file_path": "pkg/tsdb/postgres/macros_test.go",
"type": "replace",
"edit_start_line_idx": 47
} | package procfs
import (
"fmt"
"io/ioutil"
"os"
"strconv"
"strings"
)
// Proc provides information about a running process.
type Proc struct {
// The process ID.
PID int
fs FS
}
// Procs represents a list of Proc structs.
type Procs []Proc
func (p Procs) Len() int { return len(p) }
func (p Procs) Swap(i, j int) { p[i], p[j] = p[j], p[i] }
func (p Procs) Less(i, j int) bool { return p[i].PID < p[j].PID }
// Self returns a process for the current process read via /proc/self.
func Self() (Proc, error) {
fs, err := NewFS(DefaultMountPoint)
if err != nil {
return Proc{}, err
}
return fs.Self()
}
// NewProc returns a process for the given pid under /proc.
func NewProc(pid int) (Proc, error) {
fs, err := NewFS(DefaultMountPoint)
if err != nil {
return Proc{}, err
}
return fs.NewProc(pid)
}
// AllProcs returns a list of all currently available processes under /proc.
func AllProcs() (Procs, error) {
fs, err := NewFS(DefaultMountPoint)
if err != nil {
return Procs{}, err
}
return fs.AllProcs()
}
// Self returns a process for the current process.
func (fs FS) Self() (Proc, error) {
p, err := os.Readlink(fs.Path("self"))
if err != nil {
return Proc{}, err
}
pid, err := strconv.Atoi(strings.Replace(p, string(fs), "", -1))
if err != nil {
return Proc{}, err
}
return fs.NewProc(pid)
}
// NewProc returns a process for the given pid.
func (fs FS) NewProc(pid int) (Proc, error) {
if _, err := os.Stat(fs.Path(strconv.Itoa(pid))); err != nil {
return Proc{}, err
}
return Proc{PID: pid, fs: fs}, nil
}
// AllProcs returns a list of all currently available processes.
func (fs FS) AllProcs() (Procs, error) {
d, err := os.Open(fs.Path())
if err != nil {
return Procs{}, err
}
defer d.Close()
names, err := d.Readdirnames(-1)
if err != nil {
return Procs{}, fmt.Errorf("could not read %s: %s", d.Name(), err)
}
p := Procs{}
for _, n := range names {
pid, err := strconv.ParseInt(n, 10, 64)
if err != nil {
continue
}
p = append(p, Proc{PID: int(pid), fs: fs})
}
return p, nil
}
// CmdLine returns the command line of a process.
func (p Proc) CmdLine() ([]string, error) {
f, err := os.Open(p.path("cmdline"))
if err != nil {
return nil, err
}
defer f.Close()
data, err := ioutil.ReadAll(f)
if err != nil {
return nil, err
}
if len(data) < 1 {
return []string{}, nil
}
return strings.Split(string(data[:len(data)-1]), string(byte(0))), nil
}
// Comm returns the command name of a process.
func (p Proc) Comm() (string, error) {
f, err := os.Open(p.path("comm"))
if err != nil {
return "", err
}
defer f.Close()
data, err := ioutil.ReadAll(f)
if err != nil {
return "", err
}
return strings.TrimSpace(string(data)), nil
}
// Executable returns the absolute path of the executable command of a process.
func (p Proc) Executable() (string, error) {
exe, err := os.Readlink(p.path("exe"))
if os.IsNotExist(err) {
return "", nil
}
return exe, err
}
// FileDescriptors returns the currently open file descriptors of a process.
func (p Proc) FileDescriptors() ([]uintptr, error) {
names, err := p.fileDescriptors()
if err != nil {
return nil, err
}
fds := make([]uintptr, len(names))
for i, n := range names {
fd, err := strconv.ParseInt(n, 10, 32)
if err != nil {
return nil, fmt.Errorf("could not parse fd %s: %s", n, err)
}
fds[i] = uintptr(fd)
}
return fds, nil
}
// FileDescriptorTargets returns the targets of all file descriptors of a process.
// If a file descriptor is not a symlink to a file (like a socket), that value will be the empty string.
func (p Proc) FileDescriptorTargets() ([]string, error) {
names, err := p.fileDescriptors()
if err != nil {
return nil, err
}
targets := make([]string, len(names))
for i, name := range names {
target, err := os.Readlink(p.path("fd", name))
if err == nil {
targets[i] = target
}
}
return targets, nil
}
// FileDescriptorsLen returns the number of currently open file descriptors of
// a process.
func (p Proc) FileDescriptorsLen() (int, error) {
fds, err := p.fileDescriptors()
if err != nil {
return 0, err
}
return len(fds), nil
}
// MountStats retrieves statistics and configuration for mount points in a
// process's namespace.
func (p Proc) MountStats() ([]*Mount, error) {
f, err := os.Open(p.path("mountstats"))
if err != nil {
return nil, err
}
defer f.Close()
return parseMountStats(f)
}
func (p Proc) fileDescriptors() ([]string, error) {
d, err := os.Open(p.path("fd"))
if err != nil {
return nil, err
}
defer d.Close()
names, err := d.Readdirnames(-1)
if err != nil {
return nil, fmt.Errorf("could not read %s: %s", d.Name(), err)
}
return names, nil
}
func (p Proc) path(pa ...string) string {
return p.fs.Path(append([]string{strconv.Itoa(p.PID)}, pa...)...)
}
| vendor/github.com/prometheus/procfs/proc.go | 0 | https://github.com/grafana/grafana/commit/34da0711abf93fa54376a21452660d2a9f4545df | [
0.00017592430231161416,
0.0001674591621849686,
0.0001571493485243991,
0.00016833128756843507,
0.000004387818080431316
] |
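proc.go above is the vendored procfs accessor; a minimal usage sketch (assumes a Linux host with /proc mounted, and uses only the functions shown in the file):

package main

import (
	"fmt"
	"log"

	"github.com/prometheus/procfs"
)

func main() {
	p, err := procfs.Self() // current process, as defined in proc.go above
	if err != nil {
		log.Fatal(err)
	}
	comm, err := p.Comm()
	if err != nil {
		log.Fatal(err)
	}
	fds, err := p.FileDescriptorsLen()
	if err != nil {
		log.Fatal(err)
	}
	fmt.Printf("pid=%d comm=%s open_fds=%d\n", p.PID, comm, fds)
}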
{
"id": 7,
"code_window": [
"\t\tConvey(\"interpolate __timeGroup function\", func() {\n",
"\n",
"\t\t\tsql, err := engine.Interpolate(timeRange, \"GROUP BY $__timeGroup(time_column,'5m')\")\n",
"\t\t\tSo(err, ShouldBeNil)\n",
"\n",
"\t\t\tSo(sql, ShouldEqual, \"GROUP BY (extract(epoch from \\\"time_column\\\")/extract(epoch from '5m'::interval))::int*extract(epoch from '5m'::interval)\")\n",
"\t\t})\n",
"\n",
"\t\tConvey(\"interpolate __timeTo function\", func() {\n",
"\t\t\tsql, err := engine.Interpolate(timeRange, \"select $__timeTo(time_column)\")\n"
],
"labels": [
"keep",
"keep",
"keep",
"keep",
"keep",
"replace",
"keep",
"keep",
"keep",
"keep"
],
"after_edit": [
"\t\t\tSo(sql, ShouldEqual, \"GROUP BY (extract(epoch from \\\"time_column\\\")/300)::bigint*300\")\n"
],
"file_path": "pkg/tsdb/postgres/macros_test.go",
"type": "replace",
"edit_start_line_idx": 47
} | // Copyright (c) 2017 Uber Technologies, Inc.
//
// Permission is hereby granted, free of charge, to any person obtaining a copy
// of this software and associated documentation files (the "Software"), to deal
// in the Software without restriction, including without limitation the rights
// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
// copies of the Software, and to permit persons to whom the Software is
// furnished to do so, subject to the following conditions:
//
// The above copyright notice and this permission notice shall be included in
// all copies or substantial portions of the Software.
//
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
// THE SOFTWARE.
package jaeger
import (
"github.com/opentracing/opentracing-go/log"
"github.com/uber/jaeger-client-go/internal/baggage"
)
// baggageSetter is an actor that can set a baggage value on a Span given certain
// restrictions (eg. maxValueLength).
type baggageSetter struct {
restrictionManager baggage.RestrictionManager
metrics *Metrics
}
func newBaggageSetter(restrictionManager baggage.RestrictionManager, metrics *Metrics) *baggageSetter {
return &baggageSetter{
restrictionManager: restrictionManager,
metrics: metrics,
}
}
// (NB) span should hold the lock before making this call
func (s *baggageSetter) setBaggage(span *Span, key, value string) {
var truncated bool
var prevItem string
restriction := s.restrictionManager.GetRestriction(key)
if !restriction.KeyAllowed() {
s.logFields(span, key, value, prevItem, truncated, restriction.KeyAllowed())
s.metrics.BaggageUpdateFailure.Inc(1)
return
}
if len(value) > restriction.MaxValueLength() {
truncated = true
value = value[:restriction.MaxValueLength()]
s.metrics.BaggageTruncate.Inc(1)
}
prevItem = span.context.baggage[key]
s.logFields(span, key, value, prevItem, truncated, restriction.KeyAllowed())
span.context = span.context.WithBaggageItem(key, value)
s.metrics.BaggageUpdateSuccess.Inc(1)
}
func (s *baggageSetter) logFields(span *Span, key, value, prevItem string, truncated, valid bool) {
if !span.context.IsSampled() {
return
}
fields := []log.Field{
log.String("event", "baggage"),
log.String("key", key),
log.String("value", value),
}
if prevItem != "" {
fields = append(fields, log.String("override", "true"))
}
if truncated {
fields = append(fields, log.String("truncated", "true"))
}
if !valid {
fields = append(fields, log.String("invalid", "true"))
}
span.logFieldsNoLocking(fields...)
}
| vendor/github.com/uber/jaeger-client-go/baggage_setter.go | 0 | https://github.com/grafana/grafana/commit/34da0711abf93fa54376a21452660d2a9f4545df | [
0.00017613981617614627,
0.00017046571883838624,
0.0001661640708334744,
0.00016975452308543026,
0.0000033093929232563823
] |
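The unexported baggageSetter above is only reached through the public OpenTracing surface. A small sketch of that call path, assuming a jaeger-backed tracer has been constructed elsewhere (tracer construction is outside this excerpt):

```go
package demo

import (
	opentracing "github.com/opentracing/opentracing-go"
)

// annotate shows the public entry point that ends in baggageSetter.setBaggage:
// SetBaggageItem is checked against the restriction manager (key allowed,
// max value length) before the item lands on the span context.
func annotate(tracer opentracing.Tracer) {
	span := tracer.StartSpan("checkout")
	defer span.Finish()
	span.SetBaggageItem("customer.id", "42")
}
```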
{
"id": 8,
"code_window": [
"\n",
"Macros:\n",
"- $__time(column) -> UNIX_TIMESTAMP(column) as time_sec\n",
"- $__timeFilter(column) -> UNIX_TIMESTAMP(time_date_time) ≥ 1492750877 AND UNIX_TIMESTAMP(time_date_time) ≤ 1492750877\n",
"- $__unixEpochFilter(column) -> time_unix_epoch > 1492750877 AND time_unix_epoch < 1492750877\n",
"- $__timeGroup(column,'5m') -> (extract(epoch from \"dateColumn\")/extract(epoch from '5m'::interval))::int\n",
"\n",
"Or build your own conditionals using these macros which just return the values:\n",
"- $__timeFrom() -> FROM_UNIXTIME(1492750877)\n"
],
"labels": [
"keep",
"keep",
"keep",
"keep",
"keep",
"replace",
"keep",
"keep",
"keep"
],
"after_edit": [
"- $__timeGroup(column,'5m') -> cast(cast(UNIX_TIMESTAMP(column)/(300) as signed)*300 as signed)\n",
"\n",
"Example of group by and order by with $__timeGroup:\n",
"SELECT\n",
" $__timeGroup(timestamp_col, '1h') AS time,\n",
" sum(value_double) as value\n",
"FROM yourtable\n",
"GROUP BY 1\n",
"ORDER BY 1\n"
],
"file_path": "public/app/plugins/datasource/mysql/partials/query.editor.html",
"type": "replace",
"edit_start_line_idx": 51
} | package mysql
import (
"fmt"
"regexp"
"github.com/grafana/grafana/pkg/tsdb"
)
//const rsString = `(?:"([^"]*)")`;
const rsIdentifier = `([_a-zA-Z0-9]+)`
const sExpr = `\$` + rsIdentifier + `\(([^\)]*)\)`
type MySqlMacroEngine struct {
TimeRange *tsdb.TimeRange
}
func NewMysqlMacroEngine() tsdb.SqlMacroEngine {
return &MySqlMacroEngine{}
}
func (m *MySqlMacroEngine) Interpolate(timeRange *tsdb.TimeRange, sql string) (string, error) {
m.TimeRange = timeRange
rExp, _ := regexp.Compile(sExpr)
var macroError error
sql = replaceAllStringSubmatchFunc(rExp, sql, func(groups []string) string {
res, err := m.evaluateMacro(groups[1], groups[2:])
if err != nil && macroError == nil {
macroError = err
return "macro_error()"
}
return res
})
if macroError != nil {
return "", macroError
}
return sql, nil
}
func replaceAllStringSubmatchFunc(re *regexp.Regexp, str string, repl func([]string) string) string {
result := ""
lastIndex := 0
for _, v := range re.FindAllSubmatchIndex([]byte(str), -1) {
groups := []string{}
for i := 0; i < len(v); i += 2 {
groups = append(groups, str[v[i]:v[i+1]])
}
result += str[lastIndex:v[0]] + repl(groups)
lastIndex = v[1]
}
return result + str[lastIndex:]
}
func (m *MySqlMacroEngine) evaluateMacro(name string, args []string) (string, error) {
switch name {
case "__time":
if len(args) == 0 {
return "", fmt.Errorf("missing time column argument for macro %v", name)
}
return fmt.Sprintf("UNIX_TIMESTAMP(%s) as time_sec", args[0]), nil
case "__timeFilter":
if len(args) == 0 {
return "", fmt.Errorf("missing time column argument for macro %v", name)
}
return fmt.Sprintf("%s >= FROM_UNIXTIME(%d) AND %s <= FROM_UNIXTIME(%d)", args[0], uint64(m.TimeRange.GetFromAsMsEpoch()/1000), args[0], uint64(m.TimeRange.GetToAsMsEpoch()/1000)), nil
case "__timeFrom":
return fmt.Sprintf("FROM_UNIXTIME(%d)", uint64(m.TimeRange.GetFromAsMsEpoch()/1000)), nil
case "__timeTo":
return fmt.Sprintf("FROM_UNIXTIME(%d)", uint64(m.TimeRange.GetToAsMsEpoch()/1000)), nil
case "__unixEpochFilter":
if len(args) == 0 {
return "", fmt.Errorf("missing time column argument for macro %v", name)
}
return fmt.Sprintf("%s >= %d AND %s <= %d", args[0], uint64(m.TimeRange.GetFromAsMsEpoch()/1000), args[0], uint64(m.TimeRange.GetToAsMsEpoch()/1000)), nil
case "__unixEpochFrom":
return fmt.Sprintf("%d", uint64(m.TimeRange.GetFromAsMsEpoch()/1000)), nil
case "__unixEpochTo":
return fmt.Sprintf("%d", uint64(m.TimeRange.GetToAsMsEpoch()/1000)), nil
default:
return "", fmt.Errorf("Unknown macro %v", name)
}
}
| pkg/tsdb/mysql/macros.go | 1 | https://github.com/grafana/grafana/commit/34da0711abf93fa54376a21452660d2a9f4545df | [
0.1863841861486435,
0.02820230834186077,
0.00016682971909176558,
0.0017156293615698814,
0.057944219559431076
] |
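A short sketch of driving the macro engine defined above; the TimeRange literal mirrors the one used in macros_test.go further down, and the calls are exactly the exported ones shown in macros.go:

```go
package main

import (
	"fmt"
	"log"

	"github.com/grafana/grafana/pkg/tsdb"
	"github.com/grafana/grafana/pkg/tsdb/mysql"
)

func main() {
	engine := mysql.NewMysqlMacroEngine()
	timeRange := &tsdb.TimeRange{From: "5m", To: "now"}

	// Every $__name(args) occurrence is matched by sExpr and rewritten by evaluateMacro.
	sql, err := engine.Interpolate(timeRange, "SELECT value FROM metrics WHERE $__timeFilter(time_col)")
	if err != nil {
		log.Fatal(err)
	}
	fmt.Println(sql)
}
```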
{
"id": 8,
"code_window": [
"\n",
"Macros:\n",
"- $__time(column) -> UNIX_TIMESTAMP(column) as time_sec\n",
"- $__timeFilter(column) -> UNIX_TIMESTAMP(time_date_time) ≥ 1492750877 AND UNIX_TIMESTAMP(time_date_time) ≤ 1492750877\n",
"- $__unixEpochFilter(column) -> time_unix_epoch > 1492750877 AND time_unix_epoch < 1492750877\n",
"- $__timeGroup(column,'5m') -> (extract(epoch from \"dateColumn\")/extract(epoch from '5m'::interval))::int\n",
"\n",
"Or build your own conditionals using these macros which just return the values:\n",
"- $__timeFrom() -> FROM_UNIXTIME(1492750877)\n"
],
"labels": [
"keep",
"keep",
"keep",
"keep",
"keep",
"replace",
"keep",
"keep",
"keep"
],
"after_edit": [
"- $__timeGroup(column,'5m') -> cast(cast(UNIX_TIMESTAMP(column)/(300) as signed)*300 as signed)\n",
"\n",
"Example of group by and order by with $__timeGroup:\n",
"SELECT\n",
" $__timeGroup(timestamp_col, '1h') AS time,\n",
" sum(value_double) as value\n",
"FROM yourtable\n",
"GROUP BY 1\n",
"ORDER BY 1\n"
],
"file_path": "public/app/plugins/datasource/mysql/partials/query.editor.html",
"type": "replace",
"edit_start_line_idx": 51
} | // mksyscall.pl syscall_linux.go syscall_linux_amd64.go
// MACHINE GENERATED BY THE COMMAND ABOVE; DO NOT EDIT
// +build amd64,linux
package unix
import (
"syscall"
"unsafe"
)
var _ syscall.Errno
// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT
func linkat(olddirfd int, oldpath string, newdirfd int, newpath string, flags int) (err error) {
var _p0 *byte
_p0, err = BytePtrFromString(oldpath)
if err != nil {
return
}
var _p1 *byte
_p1, err = BytePtrFromString(newpath)
if err != nil {
return
}
_, _, e1 := Syscall6(SYS_LINKAT, uintptr(olddirfd), uintptr(unsafe.Pointer(_p0)), uintptr(newdirfd), uintptr(unsafe.Pointer(_p1)), uintptr(flags), 0)
use(unsafe.Pointer(_p0))
use(unsafe.Pointer(_p1))
if e1 != 0 {
err = errnoErr(e1)
}
return
}
// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT
func openat(dirfd int, path string, flags int, mode uint32) (fd int, err error) {
var _p0 *byte
_p0, err = BytePtrFromString(path)
if err != nil {
return
}
r0, _, e1 := Syscall6(SYS_OPENAT, uintptr(dirfd), uintptr(unsafe.Pointer(_p0)), uintptr(flags), uintptr(mode), 0, 0)
use(unsafe.Pointer(_p0))
fd = int(r0)
if e1 != 0 {
err = errnoErr(e1)
}
return
}
// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT
func readlinkat(dirfd int, path string, buf []byte) (n int, err error) {
var _p0 *byte
_p0, err = BytePtrFromString(path)
if err != nil {
return
}
var _p1 unsafe.Pointer
if len(buf) > 0 {
_p1 = unsafe.Pointer(&buf[0])
} else {
_p1 = unsafe.Pointer(&_zero)
}
r0, _, e1 := Syscall6(SYS_READLINKAT, uintptr(dirfd), uintptr(unsafe.Pointer(_p0)), uintptr(_p1), uintptr(len(buf)), 0, 0)
use(unsafe.Pointer(_p0))
n = int(r0)
if e1 != 0 {
err = errnoErr(e1)
}
return
}
// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT
func symlinkat(oldpath string, newdirfd int, newpath string) (err error) {
var _p0 *byte
_p0, err = BytePtrFromString(oldpath)
if err != nil {
return
}
var _p1 *byte
_p1, err = BytePtrFromString(newpath)
if err != nil {
return
}
_, _, e1 := Syscall(SYS_SYMLINKAT, uintptr(unsafe.Pointer(_p0)), uintptr(newdirfd), uintptr(unsafe.Pointer(_p1)))
use(unsafe.Pointer(_p0))
use(unsafe.Pointer(_p1))
if e1 != 0 {
err = errnoErr(e1)
}
return
}
// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT
func unlinkat(dirfd int, path string, flags int) (err error) {
var _p0 *byte
_p0, err = BytePtrFromString(path)
if err != nil {
return
}
_, _, e1 := Syscall(SYS_UNLINKAT, uintptr(dirfd), uintptr(unsafe.Pointer(_p0)), uintptr(flags))
use(unsafe.Pointer(_p0))
if e1 != 0 {
err = errnoErr(e1)
}
return
}
// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT
func utimes(path string, times *[2]Timeval) (err error) {
var _p0 *byte
_p0, err = BytePtrFromString(path)
if err != nil {
return
}
_, _, e1 := Syscall(SYS_UTIMES, uintptr(unsafe.Pointer(_p0)), uintptr(unsafe.Pointer(times)), 0)
use(unsafe.Pointer(_p0))
if e1 != 0 {
err = errnoErr(e1)
}
return
}
// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT
func utimensat(dirfd int, path string, times *[2]Timespec, flags int) (err error) {
var _p0 *byte
_p0, err = BytePtrFromString(path)
if err != nil {
return
}
_, _, e1 := Syscall6(SYS_UTIMENSAT, uintptr(dirfd), uintptr(unsafe.Pointer(_p0)), uintptr(unsafe.Pointer(times)), uintptr(flags), 0, 0)
use(unsafe.Pointer(_p0))
if e1 != 0 {
err = errnoErr(e1)
}
return
}
// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT
func futimesat(dirfd int, path *byte, times *[2]Timeval) (err error) {
_, _, e1 := Syscall(SYS_FUTIMESAT, uintptr(dirfd), uintptr(unsafe.Pointer(path)), uintptr(unsafe.Pointer(times)))
if e1 != 0 {
err = errnoErr(e1)
}
return
}
// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT
func Getcwd(buf []byte) (n int, err error) {
var _p0 unsafe.Pointer
if len(buf) > 0 {
_p0 = unsafe.Pointer(&buf[0])
} else {
_p0 = unsafe.Pointer(&_zero)
}
r0, _, e1 := Syscall(SYS_GETCWD, uintptr(_p0), uintptr(len(buf)), 0)
n = int(r0)
if e1 != 0 {
err = errnoErr(e1)
}
return
}
// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT
func wait4(pid int, wstatus *_C_int, options int, rusage *Rusage) (wpid int, err error) {
r0, _, e1 := Syscall6(SYS_WAIT4, uintptr(pid), uintptr(unsafe.Pointer(wstatus)), uintptr(options), uintptr(unsafe.Pointer(rusage)), 0, 0)
wpid = int(r0)
if e1 != 0 {
err = errnoErr(e1)
}
return
}
// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT
func ptrace(request int, pid int, addr uintptr, data uintptr) (err error) {
_, _, e1 := Syscall6(SYS_PTRACE, uintptr(request), uintptr(pid), uintptr(addr), uintptr(data), 0, 0)
if e1 != 0 {
err = errnoErr(e1)
}
return
}
// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT
func reboot(magic1 uint, magic2 uint, cmd int, arg string) (err error) {
var _p0 *byte
_p0, err = BytePtrFromString(arg)
if err != nil {
return
}
_, _, e1 := Syscall6(SYS_REBOOT, uintptr(magic1), uintptr(magic2), uintptr(cmd), uintptr(unsafe.Pointer(_p0)), 0, 0)
use(unsafe.Pointer(_p0))
if e1 != 0 {
err = errnoErr(e1)
}
return
}
// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT
func mount(source string, target string, fstype string, flags uintptr, data *byte) (err error) {
var _p0 *byte
_p0, err = BytePtrFromString(source)
if err != nil {
return
}
var _p1 *byte
_p1, err = BytePtrFromString(target)
if err != nil {
return
}
var _p2 *byte
_p2, err = BytePtrFromString(fstype)
if err != nil {
return
}
_, _, e1 := Syscall6(SYS_MOUNT, uintptr(unsafe.Pointer(_p0)), uintptr(unsafe.Pointer(_p1)), uintptr(unsafe.Pointer(_p2)), uintptr(flags), uintptr(unsafe.Pointer(data)), 0)
use(unsafe.Pointer(_p0))
use(unsafe.Pointer(_p1))
use(unsafe.Pointer(_p2))
if e1 != 0 {
err = errnoErr(e1)
}
return
}
// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT
func Acct(path string) (err error) {
var _p0 *byte
_p0, err = BytePtrFromString(path)
if err != nil {
return
}
_, _, e1 := Syscall(SYS_ACCT, uintptr(unsafe.Pointer(_p0)), 0, 0)
use(unsafe.Pointer(_p0))
if e1 != 0 {
err = errnoErr(e1)
}
return
}
// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT
func Adjtimex(buf *Timex) (state int, err error) {
r0, _, e1 := Syscall(SYS_ADJTIMEX, uintptr(unsafe.Pointer(buf)), 0, 0)
state = int(r0)
if e1 != 0 {
err = errnoErr(e1)
}
return
}
// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT
func Chdir(path string) (err error) {
var _p0 *byte
_p0, err = BytePtrFromString(path)
if err != nil {
return
}
_, _, e1 := Syscall(SYS_CHDIR, uintptr(unsafe.Pointer(_p0)), 0, 0)
use(unsafe.Pointer(_p0))
if e1 != 0 {
err = errnoErr(e1)
}
return
}
// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT
func Chroot(path string) (err error) {
var _p0 *byte
_p0, err = BytePtrFromString(path)
if err != nil {
return
}
_, _, e1 := Syscall(SYS_CHROOT, uintptr(unsafe.Pointer(_p0)), 0, 0)
use(unsafe.Pointer(_p0))
if e1 != 0 {
err = errnoErr(e1)
}
return
}
// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT
func ClockGettime(clockid int32, time *Timespec) (err error) {
_, _, e1 := Syscall(SYS_CLOCK_GETTIME, uintptr(clockid), uintptr(unsafe.Pointer(time)), 0)
if e1 != 0 {
err = errnoErr(e1)
}
return
}
// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT
func Close(fd int) (err error) {
_, _, e1 := Syscall(SYS_CLOSE, uintptr(fd), 0, 0)
if e1 != 0 {
err = errnoErr(e1)
}
return
}
// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT
func Dup(oldfd int) (fd int, err error) {
r0, _, e1 := Syscall(SYS_DUP, uintptr(oldfd), 0, 0)
fd = int(r0)
if e1 != 0 {
err = errnoErr(e1)
}
return
}
// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT
func Dup3(oldfd int, newfd int, flags int) (err error) {
_, _, e1 := Syscall(SYS_DUP3, uintptr(oldfd), uintptr(newfd), uintptr(flags))
if e1 != 0 {
err = errnoErr(e1)
}
return
}
// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT
func EpollCreate(size int) (fd int, err error) {
r0, _, e1 := RawSyscall(SYS_EPOLL_CREATE, uintptr(size), 0, 0)
fd = int(r0)
if e1 != 0 {
err = errnoErr(e1)
}
return
}
// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT
func EpollCreate1(flag int) (fd int, err error) {
r0, _, e1 := RawSyscall(SYS_EPOLL_CREATE1, uintptr(flag), 0, 0)
fd = int(r0)
if e1 != 0 {
err = errnoErr(e1)
}
return
}
// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT
func EpollCtl(epfd int, op int, fd int, event *EpollEvent) (err error) {
_, _, e1 := RawSyscall6(SYS_EPOLL_CTL, uintptr(epfd), uintptr(op), uintptr(fd), uintptr(unsafe.Pointer(event)), 0, 0)
if e1 != 0 {
err = errnoErr(e1)
}
return
}
// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT
func EpollWait(epfd int, events []EpollEvent, msec int) (n int, err error) {
var _p0 unsafe.Pointer
if len(events) > 0 {
_p0 = unsafe.Pointer(&events[0])
} else {
_p0 = unsafe.Pointer(&_zero)
}
r0, _, e1 := Syscall6(SYS_EPOLL_WAIT, uintptr(epfd), uintptr(_p0), uintptr(len(events)), uintptr(msec), 0, 0)
n = int(r0)
if e1 != 0 {
err = errnoErr(e1)
}
return
}
// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT
func Exit(code int) {
Syscall(SYS_EXIT_GROUP, uintptr(code), 0, 0)
return
}
// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT
func Faccessat(dirfd int, path string, mode uint32, flags int) (err error) {
var _p0 *byte
_p0, err = BytePtrFromString(path)
if err != nil {
return
}
_, _, e1 := Syscall6(SYS_FACCESSAT, uintptr(dirfd), uintptr(unsafe.Pointer(_p0)), uintptr(mode), uintptr(flags), 0, 0)
use(unsafe.Pointer(_p0))
if e1 != 0 {
err = errnoErr(e1)
}
return
}
// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT
func Fallocate(fd int, mode uint32, off int64, len int64) (err error) {
_, _, e1 := Syscall6(SYS_FALLOCATE, uintptr(fd), uintptr(mode), uintptr(off), uintptr(len), 0, 0)
if e1 != 0 {
err = errnoErr(e1)
}
return
}
// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT
func Fchdir(fd int) (err error) {
_, _, e1 := Syscall(SYS_FCHDIR, uintptr(fd), 0, 0)
if e1 != 0 {
err = errnoErr(e1)
}
return
}
// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT
func Fchmod(fd int, mode uint32) (err error) {
_, _, e1 := Syscall(SYS_FCHMOD, uintptr(fd), uintptr(mode), 0)
if e1 != 0 {
err = errnoErr(e1)
}
return
}
// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT
func Fchmodat(dirfd int, path string, mode uint32, flags int) (err error) {
var _p0 *byte
_p0, err = BytePtrFromString(path)
if err != nil {
return
}
_, _, e1 := Syscall6(SYS_FCHMODAT, uintptr(dirfd), uintptr(unsafe.Pointer(_p0)), uintptr(mode), uintptr(flags), 0, 0)
use(unsafe.Pointer(_p0))
if e1 != 0 {
err = errnoErr(e1)
}
return
}
// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT
func Fchownat(dirfd int, path string, uid int, gid int, flags int) (err error) {
var _p0 *byte
_p0, err = BytePtrFromString(path)
if err != nil {
return
}
_, _, e1 := Syscall6(SYS_FCHOWNAT, uintptr(dirfd), uintptr(unsafe.Pointer(_p0)), uintptr(uid), uintptr(gid), uintptr(flags), 0)
use(unsafe.Pointer(_p0))
if e1 != 0 {
err = errnoErr(e1)
}
return
}
// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT
func fcntl(fd int, cmd int, arg int) (val int, err error) {
r0, _, e1 := Syscall(SYS_FCNTL, uintptr(fd), uintptr(cmd), uintptr(arg))
val = int(r0)
if e1 != 0 {
err = errnoErr(e1)
}
return
}
// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT
func Fdatasync(fd int) (err error) {
_, _, e1 := Syscall(SYS_FDATASYNC, uintptr(fd), 0, 0)
if e1 != 0 {
err = errnoErr(e1)
}
return
}
// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT
func Flock(fd int, how int) (err error) {
_, _, e1 := Syscall(SYS_FLOCK, uintptr(fd), uintptr(how), 0)
if e1 != 0 {
err = errnoErr(e1)
}
return
}
// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT
func Fsync(fd int) (err error) {
_, _, e1 := Syscall(SYS_FSYNC, uintptr(fd), 0, 0)
if e1 != 0 {
err = errnoErr(e1)
}
return
}
// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT
func Getdents(fd int, buf []byte) (n int, err error) {
var _p0 unsafe.Pointer
if len(buf) > 0 {
_p0 = unsafe.Pointer(&buf[0])
} else {
_p0 = unsafe.Pointer(&_zero)
}
r0, _, e1 := Syscall(SYS_GETDENTS64, uintptr(fd), uintptr(_p0), uintptr(len(buf)))
n = int(r0)
if e1 != 0 {
err = errnoErr(e1)
}
return
}
// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT
func Getpgid(pid int) (pgid int, err error) {
r0, _, e1 := RawSyscall(SYS_GETPGID, uintptr(pid), 0, 0)
pgid = int(r0)
if e1 != 0 {
err = errnoErr(e1)
}
return
}
// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT
func Getpid() (pid int) {
r0, _, _ := RawSyscall(SYS_GETPID, 0, 0, 0)
pid = int(r0)
return
}
// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT
func Getppid() (ppid int) {
r0, _, _ := RawSyscall(SYS_GETPPID, 0, 0, 0)
ppid = int(r0)
return
}
// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT
func Getpriority(which int, who int) (prio int, err error) {
r0, _, e1 := Syscall(SYS_GETPRIORITY, uintptr(which), uintptr(who), 0)
prio = int(r0)
if e1 != 0 {
err = errnoErr(e1)
}
return
}
// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT
func Getrusage(who int, rusage *Rusage) (err error) {
_, _, e1 := RawSyscall(SYS_GETRUSAGE, uintptr(who), uintptr(unsafe.Pointer(rusage)), 0)
if e1 != 0 {
err = errnoErr(e1)
}
return
}
// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT
func Gettid() (tid int) {
r0, _, _ := RawSyscall(SYS_GETTID, 0, 0, 0)
tid = int(r0)
return
}
// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT
func Getxattr(path string, attr string, dest []byte) (sz int, err error) {
var _p0 *byte
_p0, err = BytePtrFromString(path)
if err != nil {
return
}
var _p1 *byte
_p1, err = BytePtrFromString(attr)
if err != nil {
return
}
var _p2 unsafe.Pointer
if len(dest) > 0 {
_p2 = unsafe.Pointer(&dest[0])
} else {
_p2 = unsafe.Pointer(&_zero)
}
r0, _, e1 := Syscall6(SYS_GETXATTR, uintptr(unsafe.Pointer(_p0)), uintptr(unsafe.Pointer(_p1)), uintptr(_p2), uintptr(len(dest)), 0, 0)
use(unsafe.Pointer(_p0))
use(unsafe.Pointer(_p1))
sz = int(r0)
if e1 != 0 {
err = errnoErr(e1)
}
return
}
// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT
func InotifyAddWatch(fd int, pathname string, mask uint32) (watchdesc int, err error) {
var _p0 *byte
_p0, err = BytePtrFromString(pathname)
if err != nil {
return
}
r0, _, e1 := Syscall(SYS_INOTIFY_ADD_WATCH, uintptr(fd), uintptr(unsafe.Pointer(_p0)), uintptr(mask))
use(unsafe.Pointer(_p0))
watchdesc = int(r0)
if e1 != 0 {
err = errnoErr(e1)
}
return
}
// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT
func InotifyInit1(flags int) (fd int, err error) {
r0, _, e1 := RawSyscall(SYS_INOTIFY_INIT1, uintptr(flags), 0, 0)
fd = int(r0)
if e1 != 0 {
err = errnoErr(e1)
}
return
}
// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT
func InotifyRmWatch(fd int, watchdesc uint32) (success int, err error) {
r0, _, e1 := RawSyscall(SYS_INOTIFY_RM_WATCH, uintptr(fd), uintptr(watchdesc), 0)
success = int(r0)
if e1 != 0 {
err = errnoErr(e1)
}
return
}
// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT
func Kill(pid int, sig syscall.Signal) (err error) {
_, _, e1 := RawSyscall(SYS_KILL, uintptr(pid), uintptr(sig), 0)
if e1 != 0 {
err = errnoErr(e1)
}
return
}
// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT
func Klogctl(typ int, buf []byte) (n int, err error) {
var _p0 unsafe.Pointer
if len(buf) > 0 {
_p0 = unsafe.Pointer(&buf[0])
} else {
_p0 = unsafe.Pointer(&_zero)
}
r0, _, e1 := Syscall(SYS_SYSLOG, uintptr(typ), uintptr(_p0), uintptr(len(buf)))
n = int(r0)
if e1 != 0 {
err = errnoErr(e1)
}
return
}
// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT
func Listxattr(path string, dest []byte) (sz int, err error) {
var _p0 *byte
_p0, err = BytePtrFromString(path)
if err != nil {
return
}
var _p1 unsafe.Pointer
if len(dest) > 0 {
_p1 = unsafe.Pointer(&dest[0])
} else {
_p1 = unsafe.Pointer(&_zero)
}
r0, _, e1 := Syscall(SYS_LISTXATTR, uintptr(unsafe.Pointer(_p0)), uintptr(_p1), uintptr(len(dest)))
use(unsafe.Pointer(_p0))
sz = int(r0)
if e1 != 0 {
err = errnoErr(e1)
}
return
}
// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT
func Mkdirat(dirfd int, path string, mode uint32) (err error) {
var _p0 *byte
_p0, err = BytePtrFromString(path)
if err != nil {
return
}
_, _, e1 := Syscall(SYS_MKDIRAT, uintptr(dirfd), uintptr(unsafe.Pointer(_p0)), uintptr(mode))
use(unsafe.Pointer(_p0))
if e1 != 0 {
err = errnoErr(e1)
}
return
}
// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT
func Mknodat(dirfd int, path string, mode uint32, dev int) (err error) {
var _p0 *byte
_p0, err = BytePtrFromString(path)
if err != nil {
return
}
_, _, e1 := Syscall6(SYS_MKNODAT, uintptr(dirfd), uintptr(unsafe.Pointer(_p0)), uintptr(mode), uintptr(dev), 0, 0)
use(unsafe.Pointer(_p0))
if e1 != 0 {
err = errnoErr(e1)
}
return
}
// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT
func Nanosleep(time *Timespec, leftover *Timespec) (err error) {
_, _, e1 := Syscall(SYS_NANOSLEEP, uintptr(unsafe.Pointer(time)), uintptr(unsafe.Pointer(leftover)), 0)
if e1 != 0 {
err = errnoErr(e1)
}
return
}
// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT
func Pause() (err error) {
_, _, e1 := Syscall(SYS_PAUSE, 0, 0, 0)
if e1 != 0 {
err = errnoErr(e1)
}
return
}
// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT
func PivotRoot(newroot string, putold string) (err error) {
var _p0 *byte
_p0, err = BytePtrFromString(newroot)
if err != nil {
return
}
var _p1 *byte
_p1, err = BytePtrFromString(putold)
if err != nil {
return
}
_, _, e1 := Syscall(SYS_PIVOT_ROOT, uintptr(unsafe.Pointer(_p0)), uintptr(unsafe.Pointer(_p1)), 0)
use(unsafe.Pointer(_p0))
use(unsafe.Pointer(_p1))
if e1 != 0 {
err = errnoErr(e1)
}
return
}
// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT
func prlimit(pid int, resource int, old *Rlimit, newlimit *Rlimit) (err error) {
_, _, e1 := RawSyscall6(SYS_PRLIMIT64, uintptr(pid), uintptr(resource), uintptr(unsafe.Pointer(old)), uintptr(unsafe.Pointer(newlimit)), 0, 0)
if e1 != 0 {
err = errnoErr(e1)
}
return
}
// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT
func Prctl(option int, arg2 uintptr, arg3 uintptr, arg4 uintptr, arg5 uintptr) (err error) {
_, _, e1 := Syscall6(SYS_PRCTL, uintptr(option), uintptr(arg2), uintptr(arg3), uintptr(arg4), uintptr(arg5), 0)
if e1 != 0 {
err = errnoErr(e1)
}
return
}
// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT
func read(fd int, p []byte) (n int, err error) {
var _p0 unsafe.Pointer
if len(p) > 0 {
_p0 = unsafe.Pointer(&p[0])
} else {
_p0 = unsafe.Pointer(&_zero)
}
r0, _, e1 := Syscall(SYS_READ, uintptr(fd), uintptr(_p0), uintptr(len(p)))
n = int(r0)
if e1 != 0 {
err = errnoErr(e1)
}
return
}
// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT
func Removexattr(path string, attr string) (err error) {
var _p0 *byte
_p0, err = BytePtrFromString(path)
if err != nil {
return
}
var _p1 *byte
_p1, err = BytePtrFromString(attr)
if err != nil {
return
}
_, _, e1 := Syscall(SYS_REMOVEXATTR, uintptr(unsafe.Pointer(_p0)), uintptr(unsafe.Pointer(_p1)), 0)
use(unsafe.Pointer(_p0))
use(unsafe.Pointer(_p1))
if e1 != 0 {
err = errnoErr(e1)
}
return
}
// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT
func Renameat(olddirfd int, oldpath string, newdirfd int, newpath string) (err error) {
var _p0 *byte
_p0, err = BytePtrFromString(oldpath)
if err != nil {
return
}
var _p1 *byte
_p1, err = BytePtrFromString(newpath)
if err != nil {
return
}
_, _, e1 := Syscall6(SYS_RENAMEAT, uintptr(olddirfd), uintptr(unsafe.Pointer(_p0)), uintptr(newdirfd), uintptr(unsafe.Pointer(_p1)), 0, 0)
use(unsafe.Pointer(_p0))
use(unsafe.Pointer(_p1))
if e1 != 0 {
err = errnoErr(e1)
}
return
}
// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT
func Setdomainname(p []byte) (err error) {
var _p0 unsafe.Pointer
if len(p) > 0 {
_p0 = unsafe.Pointer(&p[0])
} else {
_p0 = unsafe.Pointer(&_zero)
}
_, _, e1 := Syscall(SYS_SETDOMAINNAME, uintptr(_p0), uintptr(len(p)), 0)
if e1 != 0 {
err = errnoErr(e1)
}
return
}
// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT
func Sethostname(p []byte) (err error) {
var _p0 unsafe.Pointer
if len(p) > 0 {
_p0 = unsafe.Pointer(&p[0])
} else {
_p0 = unsafe.Pointer(&_zero)
}
_, _, e1 := Syscall(SYS_SETHOSTNAME, uintptr(_p0), uintptr(len(p)), 0)
if e1 != 0 {
err = errnoErr(e1)
}
return
}
// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT
func Setpgid(pid int, pgid int) (err error) {
_, _, e1 := RawSyscall(SYS_SETPGID, uintptr(pid), uintptr(pgid), 0)
if e1 != 0 {
err = errnoErr(e1)
}
return
}
// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT
func Setsid() (pid int, err error) {
r0, _, e1 := RawSyscall(SYS_SETSID, 0, 0, 0)
pid = int(r0)
if e1 != 0 {
err = errnoErr(e1)
}
return
}
// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT
func Settimeofday(tv *Timeval) (err error) {
_, _, e1 := RawSyscall(SYS_SETTIMEOFDAY, uintptr(unsafe.Pointer(tv)), 0, 0)
if e1 != 0 {
err = errnoErr(e1)
}
return
}
// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT
func Setns(fd int, nstype int) (err error) {
_, _, e1 := Syscall(SYS_SETNS, uintptr(fd), uintptr(nstype), 0)
if e1 != 0 {
err = errnoErr(e1)
}
return
}
// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT
func Setpriority(which int, who int, prio int) (err error) {
_, _, e1 := Syscall(SYS_SETPRIORITY, uintptr(which), uintptr(who), uintptr(prio))
if e1 != 0 {
err = errnoErr(e1)
}
return
}
// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT
func Setxattr(path string, attr string, data []byte, flags int) (err error) {
var _p0 *byte
_p0, err = BytePtrFromString(path)
if err != nil {
return
}
var _p1 *byte
_p1, err = BytePtrFromString(attr)
if err != nil {
return
}
var _p2 unsafe.Pointer
if len(data) > 0 {
_p2 = unsafe.Pointer(&data[0])
} else {
_p2 = unsafe.Pointer(&_zero)
}
_, _, e1 := Syscall6(SYS_SETXATTR, uintptr(unsafe.Pointer(_p0)), uintptr(unsafe.Pointer(_p1)), uintptr(_p2), uintptr(len(data)), uintptr(flags), 0)
use(unsafe.Pointer(_p0))
use(unsafe.Pointer(_p1))
if e1 != 0 {
err = errnoErr(e1)
}
return
}
// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT
func Sync() {
Syscall(SYS_SYNC, 0, 0, 0)
return
}
// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT
func Sysinfo(info *Sysinfo_t) (err error) {
_, _, e1 := RawSyscall(SYS_SYSINFO, uintptr(unsafe.Pointer(info)), 0, 0)
if e1 != 0 {
err = errnoErr(e1)
}
return
}
// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT
func Tee(rfd int, wfd int, len int, flags int) (n int64, err error) {
r0, _, e1 := Syscall6(SYS_TEE, uintptr(rfd), uintptr(wfd), uintptr(len), uintptr(flags), 0, 0)
n = int64(r0)
if e1 != 0 {
err = errnoErr(e1)
}
return
}
// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT
func Tgkill(tgid int, tid int, sig syscall.Signal) (err error) {
_, _, e1 := RawSyscall(SYS_TGKILL, uintptr(tgid), uintptr(tid), uintptr(sig))
if e1 != 0 {
err = errnoErr(e1)
}
return
}
// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT
func Times(tms *Tms) (ticks uintptr, err error) {
r0, _, e1 := RawSyscall(SYS_TIMES, uintptr(unsafe.Pointer(tms)), 0, 0)
ticks = uintptr(r0)
if e1 != 0 {
err = errnoErr(e1)
}
return
}
// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT
func Umask(mask int) (oldmask int) {
r0, _, _ := RawSyscall(SYS_UMASK, uintptr(mask), 0, 0)
oldmask = int(r0)
return
}
// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT
func Uname(buf *Utsname) (err error) {
_, _, e1 := RawSyscall(SYS_UNAME, uintptr(unsafe.Pointer(buf)), 0, 0)
if e1 != 0 {
err = errnoErr(e1)
}
return
}
// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT
func Unmount(target string, flags int) (err error) {
var _p0 *byte
_p0, err = BytePtrFromString(target)
if err != nil {
return
}
_, _, e1 := Syscall(SYS_UMOUNT2, uintptr(unsafe.Pointer(_p0)), uintptr(flags), 0)
use(unsafe.Pointer(_p0))
if e1 != 0 {
err = errnoErr(e1)
}
return
}
// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT
func Unshare(flags int) (err error) {
_, _, e1 := Syscall(SYS_UNSHARE, uintptr(flags), 0, 0)
if e1 != 0 {
err = errnoErr(e1)
}
return
}
// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT
func Ustat(dev int, ubuf *Ustat_t) (err error) {
_, _, e1 := Syscall(SYS_USTAT, uintptr(dev), uintptr(unsafe.Pointer(ubuf)), 0)
if e1 != 0 {
err = errnoErr(e1)
}
return
}
// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT
func write(fd int, p []byte) (n int, err error) {
var _p0 unsafe.Pointer
if len(p) > 0 {
_p0 = unsafe.Pointer(&p[0])
} else {
_p0 = unsafe.Pointer(&_zero)
}
r0, _, e1 := Syscall(SYS_WRITE, uintptr(fd), uintptr(_p0), uintptr(len(p)))
n = int(r0)
if e1 != 0 {
err = errnoErr(e1)
}
return
}
// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT
func exitThread(code int) (err error) {
_, _, e1 := Syscall(SYS_EXIT, uintptr(code), 0, 0)
if e1 != 0 {
err = errnoErr(e1)
}
return
}
// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT
func readlen(fd int, p *byte, np int) (n int, err error) {
r0, _, e1 := Syscall(SYS_READ, uintptr(fd), uintptr(unsafe.Pointer(p)), uintptr(np))
n = int(r0)
if e1 != 0 {
err = errnoErr(e1)
}
return
}
// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT
func writelen(fd int, p *byte, np int) (n int, err error) {
r0, _, e1 := Syscall(SYS_WRITE, uintptr(fd), uintptr(unsafe.Pointer(p)), uintptr(np))
n = int(r0)
if e1 != 0 {
err = errnoErr(e1)
}
return
}
// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT
func munmap(addr uintptr, length uintptr) (err error) {
_, _, e1 := Syscall(SYS_MUNMAP, uintptr(addr), uintptr(length), 0)
if e1 != 0 {
err = errnoErr(e1)
}
return
}
// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT
func Madvise(b []byte, advice int) (err error) {
var _p0 unsafe.Pointer
if len(b) > 0 {
_p0 = unsafe.Pointer(&b[0])
} else {
_p0 = unsafe.Pointer(&_zero)
}
_, _, e1 := Syscall(SYS_MADVISE, uintptr(_p0), uintptr(len(b)), uintptr(advice))
if e1 != 0 {
err = errnoErr(e1)
}
return
}
// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT
func Mprotect(b []byte, prot int) (err error) {
var _p0 unsafe.Pointer
if len(b) > 0 {
_p0 = unsafe.Pointer(&b[0])
} else {
_p0 = unsafe.Pointer(&_zero)
}
_, _, e1 := Syscall(SYS_MPROTECT, uintptr(_p0), uintptr(len(b)), uintptr(prot))
if e1 != 0 {
err = errnoErr(e1)
}
return
}
// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT
func Mlock(b []byte) (err error) {
var _p0 unsafe.Pointer
if len(b) > 0 {
_p0 = unsafe.Pointer(&b[0])
} else {
_p0 = unsafe.Pointer(&_zero)
}
_, _, e1 := Syscall(SYS_MLOCK, uintptr(_p0), uintptr(len(b)), 0)
if e1 != 0 {
err = errnoErr(e1)
}
return
}
// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT
func Munlock(b []byte) (err error) {
var _p0 unsafe.Pointer
if len(b) > 0 {
_p0 = unsafe.Pointer(&b[0])
} else {
_p0 = unsafe.Pointer(&_zero)
}
_, _, e1 := Syscall(SYS_MUNLOCK, uintptr(_p0), uintptr(len(b)), 0)
if e1 != 0 {
err = errnoErr(e1)
}
return
}
// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT
func Mlockall(flags int) (err error) {
_, _, e1 := Syscall(SYS_MLOCKALL, uintptr(flags), 0, 0)
if e1 != 0 {
err = errnoErr(e1)
}
return
}
// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT
func Munlockall() (err error) {
_, _, e1 := Syscall(SYS_MUNLOCKALL, 0, 0, 0)
if e1 != 0 {
err = errnoErr(e1)
}
return
}
// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT
func Dup2(oldfd int, newfd int) (err error) {
_, _, e1 := Syscall(SYS_DUP2, uintptr(oldfd), uintptr(newfd), 0)
if e1 != 0 {
err = errnoErr(e1)
}
return
}
// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT
func Fadvise(fd int, offset int64, length int64, advice int) (err error) {
_, _, e1 := Syscall6(SYS_FADVISE64, uintptr(fd), uintptr(offset), uintptr(length), uintptr(advice), 0, 0)
if e1 != 0 {
err = errnoErr(e1)
}
return
}
// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT
func Fchown(fd int, uid int, gid int) (err error) {
_, _, e1 := Syscall(SYS_FCHOWN, uintptr(fd), uintptr(uid), uintptr(gid))
if e1 != 0 {
err = errnoErr(e1)
}
return
}
// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT
func Fstat(fd int, stat *Stat_t) (err error) {
_, _, e1 := Syscall(SYS_FSTAT, uintptr(fd), uintptr(unsafe.Pointer(stat)), 0)
if e1 != 0 {
err = errnoErr(e1)
}
return
}
// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT
func Fstatfs(fd int, buf *Statfs_t) (err error) {
_, _, e1 := Syscall(SYS_FSTATFS, uintptr(fd), uintptr(unsafe.Pointer(buf)), 0)
if e1 != 0 {
err = errnoErr(e1)
}
return
}
// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT
func Ftruncate(fd int, length int64) (err error) {
_, _, e1 := Syscall(SYS_FTRUNCATE, uintptr(fd), uintptr(length), 0)
if e1 != 0 {
err = errnoErr(e1)
}
return
}
// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT
func Getegid() (egid int) {
r0, _, _ := RawSyscall(SYS_GETEGID, 0, 0, 0)
egid = int(r0)
return
}
// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT
func Geteuid() (euid int) {
r0, _, _ := RawSyscall(SYS_GETEUID, 0, 0, 0)
euid = int(r0)
return
}
// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT
func Getgid() (gid int) {
r0, _, _ := RawSyscall(SYS_GETGID, 0, 0, 0)
gid = int(r0)
return
}
// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT
func Getrlimit(resource int, rlim *Rlimit) (err error) {
_, _, e1 := RawSyscall(SYS_GETRLIMIT, uintptr(resource), uintptr(unsafe.Pointer(rlim)), 0)
if e1 != 0 {
err = errnoErr(e1)
}
return
}
// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT
func Getuid() (uid int) {
r0, _, _ := RawSyscall(SYS_GETUID, 0, 0, 0)
uid = int(r0)
return
}
// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT
func InotifyInit() (fd int, err error) {
r0, _, e1 := RawSyscall(SYS_INOTIFY_INIT, 0, 0, 0)
fd = int(r0)
if e1 != 0 {
err = errnoErr(e1)
}
return
}
// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT
func Ioperm(from int, num int, on int) (err error) {
_, _, e1 := Syscall(SYS_IOPERM, uintptr(from), uintptr(num), uintptr(on))
if e1 != 0 {
err = errnoErr(e1)
}
return
}
// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT
func Iopl(level int) (err error) {
_, _, e1 := Syscall(SYS_IOPL, uintptr(level), 0, 0)
if e1 != 0 {
err = errnoErr(e1)
}
return
}
// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT
func Lchown(path string, uid int, gid int) (err error) {
var _p0 *byte
_p0, err = BytePtrFromString(path)
if err != nil {
return
}
_, _, e1 := Syscall(SYS_LCHOWN, uintptr(unsafe.Pointer(_p0)), uintptr(uid), uintptr(gid))
use(unsafe.Pointer(_p0))
if e1 != 0 {
err = errnoErr(e1)
}
return
}
// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT
func Listen(s int, n int) (err error) {
_, _, e1 := Syscall(SYS_LISTEN, uintptr(s), uintptr(n), 0)
if e1 != 0 {
err = errnoErr(e1)
}
return
}
// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT
func Lstat(path string, stat *Stat_t) (err error) {
var _p0 *byte
_p0, err = BytePtrFromString(path)
if err != nil {
return
}
_, _, e1 := Syscall(SYS_LSTAT, uintptr(unsafe.Pointer(_p0)), uintptr(unsafe.Pointer(stat)), 0)
use(unsafe.Pointer(_p0))
if e1 != 0 {
err = errnoErr(e1)
}
return
}
// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT
func Pread(fd int, p []byte, offset int64) (n int, err error) {
var _p0 unsafe.Pointer
if len(p) > 0 {
_p0 = unsafe.Pointer(&p[0])
} else {
_p0 = unsafe.Pointer(&_zero)
}
r0, _, e1 := Syscall6(SYS_PREAD64, uintptr(fd), uintptr(_p0), uintptr(len(p)), uintptr(offset), 0, 0)
n = int(r0)
if e1 != 0 {
err = errnoErr(e1)
}
return
}
// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT
func Pwrite(fd int, p []byte, offset int64) (n int, err error) {
var _p0 unsafe.Pointer
if len(p) > 0 {
_p0 = unsafe.Pointer(&p[0])
} else {
_p0 = unsafe.Pointer(&_zero)
}
r0, _, e1 := Syscall6(SYS_PWRITE64, uintptr(fd), uintptr(_p0), uintptr(len(p)), uintptr(offset), 0, 0)
n = int(r0)
if e1 != 0 {
err = errnoErr(e1)
}
return
}
// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT
func Seek(fd int, offset int64, whence int) (off int64, err error) {
r0, _, e1 := Syscall(SYS_LSEEK, uintptr(fd), uintptr(offset), uintptr(whence))
off = int64(r0)
if e1 != 0 {
err = errnoErr(e1)
}
return
}
// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT
func Select(nfd int, r *FdSet, w *FdSet, e *FdSet, timeout *Timeval) (n int, err error) {
r0, _, e1 := Syscall6(SYS_SELECT, uintptr(nfd), uintptr(unsafe.Pointer(r)), uintptr(unsafe.Pointer(w)), uintptr(unsafe.Pointer(e)), uintptr(unsafe.Pointer(timeout)), 0)
n = int(r0)
if e1 != 0 {
err = errnoErr(e1)
}
return
}
// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT
func sendfile(outfd int, infd int, offset *int64, count int) (written int, err error) {
r0, _, e1 := Syscall6(SYS_SENDFILE, uintptr(outfd), uintptr(infd), uintptr(unsafe.Pointer(offset)), uintptr(count), 0, 0)
written = int(r0)
if e1 != 0 {
err = errnoErr(e1)
}
return
}
// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT
func Setfsgid(gid int) (err error) {
_, _, e1 := Syscall(SYS_SETFSGID, uintptr(gid), 0, 0)
if e1 != 0 {
err = errnoErr(e1)
}
return
}
// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT
func Setfsuid(uid int) (err error) {
_, _, e1 := Syscall(SYS_SETFSUID, uintptr(uid), 0, 0)
if e1 != 0 {
err = errnoErr(e1)
}
return
}
// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT
func Setregid(rgid int, egid int) (err error) {
_, _, e1 := RawSyscall(SYS_SETREGID, uintptr(rgid), uintptr(egid), 0)
if e1 != 0 {
err = errnoErr(e1)
}
return
}
// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT
func Setresgid(rgid int, egid int, sgid int) (err error) {
_, _, e1 := RawSyscall(SYS_SETRESGID, uintptr(rgid), uintptr(egid), uintptr(sgid))
if e1 != 0 {
err = errnoErr(e1)
}
return
}
// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT
func Setresuid(ruid int, euid int, suid int) (err error) {
_, _, e1 := RawSyscall(SYS_SETRESUID, uintptr(ruid), uintptr(euid), uintptr(suid))
if e1 != 0 {
err = errnoErr(e1)
}
return
}
// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT
func Setrlimit(resource int, rlim *Rlimit) (err error) {
_, _, e1 := RawSyscall(SYS_SETRLIMIT, uintptr(resource), uintptr(unsafe.Pointer(rlim)), 0)
if e1 != 0 {
err = errnoErr(e1)
}
return
}
// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT
func Setreuid(ruid int, euid int) (err error) {
_, _, e1 := RawSyscall(SYS_SETREUID, uintptr(ruid), uintptr(euid), 0)
if e1 != 0 {
err = errnoErr(e1)
}
return
}
// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT
func Shutdown(fd int, how int) (err error) {
_, _, e1 := Syscall(SYS_SHUTDOWN, uintptr(fd), uintptr(how), 0)
if e1 != 0 {
err = errnoErr(e1)
}
return
}
// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT
func Splice(rfd int, roff *int64, wfd int, woff *int64, len int, flags int) (n int64, err error) {
r0, _, e1 := Syscall6(SYS_SPLICE, uintptr(rfd), uintptr(unsafe.Pointer(roff)), uintptr(wfd), uintptr(unsafe.Pointer(woff)), uintptr(len), uintptr(flags))
n = int64(r0)
if e1 != 0 {
err = errnoErr(e1)
}
return
}
// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT
func Stat(path string, stat *Stat_t) (err error) {
var _p0 *byte
_p0, err = BytePtrFromString(path)
if err != nil {
return
}
_, _, e1 := Syscall(SYS_STAT, uintptr(unsafe.Pointer(_p0)), uintptr(unsafe.Pointer(stat)), 0)
use(unsafe.Pointer(_p0))
if e1 != 0 {
err = errnoErr(e1)
}
return
}
// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT
func Statfs(path string, buf *Statfs_t) (err error) {
var _p0 *byte
_p0, err = BytePtrFromString(path)
if err != nil {
return
}
_, _, e1 := Syscall(SYS_STATFS, uintptr(unsafe.Pointer(_p0)), uintptr(unsafe.Pointer(buf)), 0)
use(unsafe.Pointer(_p0))
if e1 != 0 {
err = errnoErr(e1)
}
return
}
// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT
func SyncFileRange(fd int, off int64, n int64, flags int) (err error) {
_, _, e1 := Syscall6(SYS_SYNC_FILE_RANGE, uintptr(fd), uintptr(off), uintptr(n), uintptr(flags), 0, 0)
if e1 != 0 {
err = errnoErr(e1)
}
return
}
// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT
func Truncate(path string, length int64) (err error) {
var _p0 *byte
_p0, err = BytePtrFromString(path)
if err != nil {
return
}
_, _, e1 := Syscall(SYS_TRUNCATE, uintptr(unsafe.Pointer(_p0)), uintptr(length), 0)
use(unsafe.Pointer(_p0))
if e1 != 0 {
err = errnoErr(e1)
}
return
}
// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT
func accept(s int, rsa *RawSockaddrAny, addrlen *_Socklen) (fd int, err error) {
r0, _, e1 := Syscall(SYS_ACCEPT, uintptr(s), uintptr(unsafe.Pointer(rsa)), uintptr(unsafe.Pointer(addrlen)))
fd = int(r0)
if e1 != 0 {
err = errnoErr(e1)
}
return
}
// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT
func accept4(s int, rsa *RawSockaddrAny, addrlen *_Socklen, flags int) (fd int, err error) {
r0, _, e1 := Syscall6(SYS_ACCEPT4, uintptr(s), uintptr(unsafe.Pointer(rsa)), uintptr(unsafe.Pointer(addrlen)), uintptr(flags), 0, 0)
fd = int(r0)
if e1 != 0 {
err = errnoErr(e1)
}
return
}
// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT
func bind(s int, addr unsafe.Pointer, addrlen _Socklen) (err error) {
_, _, e1 := Syscall(SYS_BIND, uintptr(s), uintptr(addr), uintptr(addrlen))
if e1 != 0 {
err = errnoErr(e1)
}
return
}
// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT
func connect(s int, addr unsafe.Pointer, addrlen _Socklen) (err error) {
_, _, e1 := Syscall(SYS_CONNECT, uintptr(s), uintptr(addr), uintptr(addrlen))
if e1 != 0 {
err = errnoErr(e1)
}
return
}
// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT
func getgroups(n int, list *_Gid_t) (nn int, err error) {
r0, _, e1 := RawSyscall(SYS_GETGROUPS, uintptr(n), uintptr(unsafe.Pointer(list)), 0)
nn = int(r0)
if e1 != 0 {
err = errnoErr(e1)
}
return
}
// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT
func setgroups(n int, list *_Gid_t) (err error) {
_, _, e1 := RawSyscall(SYS_SETGROUPS, uintptr(n), uintptr(unsafe.Pointer(list)), 0)
if e1 != 0 {
err = errnoErr(e1)
}
return
}
// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT
func getsockopt(s int, level int, name int, val unsafe.Pointer, vallen *_Socklen) (err error) {
_, _, e1 := Syscall6(SYS_GETSOCKOPT, uintptr(s), uintptr(level), uintptr(name), uintptr(val), uintptr(unsafe.Pointer(vallen)), 0)
if e1 != 0 {
err = errnoErr(e1)
}
return
}
// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT
func setsockopt(s int, level int, name int, val unsafe.Pointer, vallen uintptr) (err error) {
_, _, e1 := Syscall6(SYS_SETSOCKOPT, uintptr(s), uintptr(level), uintptr(name), uintptr(val), uintptr(vallen), 0)
if e1 != 0 {
err = errnoErr(e1)
}
return
}
// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT
func socket(domain int, typ int, proto int) (fd int, err error) {
r0, _, e1 := RawSyscall(SYS_SOCKET, uintptr(domain), uintptr(typ), uintptr(proto))
fd = int(r0)
if e1 != 0 {
err = errnoErr(e1)
}
return
}
// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT
func socketpair(domain int, typ int, proto int, fd *[2]int32) (err error) {
_, _, e1 := RawSyscall6(SYS_SOCKETPAIR, uintptr(domain), uintptr(typ), uintptr(proto), uintptr(unsafe.Pointer(fd)), 0, 0)
if e1 != 0 {
err = errnoErr(e1)
}
return
}
// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT
func getpeername(fd int, rsa *RawSockaddrAny, addrlen *_Socklen) (err error) {
_, _, e1 := RawSyscall(SYS_GETPEERNAME, uintptr(fd), uintptr(unsafe.Pointer(rsa)), uintptr(unsafe.Pointer(addrlen)))
if e1 != 0 {
err = errnoErr(e1)
}
return
}
// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT
func getsockname(fd int, rsa *RawSockaddrAny, addrlen *_Socklen) (err error) {
_, _, e1 := RawSyscall(SYS_GETSOCKNAME, uintptr(fd), uintptr(unsafe.Pointer(rsa)), uintptr(unsafe.Pointer(addrlen)))
if e1 != 0 {
err = errnoErr(e1)
}
return
}
// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT
func recvfrom(fd int, p []byte, flags int, from *RawSockaddrAny, fromlen *_Socklen) (n int, err error) {
var _p0 unsafe.Pointer
if len(p) > 0 {
_p0 = unsafe.Pointer(&p[0])
} else {
_p0 = unsafe.Pointer(&_zero)
}
r0, _, e1 := Syscall6(SYS_RECVFROM, uintptr(fd), uintptr(_p0), uintptr(len(p)), uintptr(flags), uintptr(unsafe.Pointer(from)), uintptr(unsafe.Pointer(fromlen)))
n = int(r0)
if e1 != 0 {
err = errnoErr(e1)
}
return
}
// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT
func sendto(s int, buf []byte, flags int, to unsafe.Pointer, addrlen _Socklen) (err error) {
var _p0 unsafe.Pointer
if len(buf) > 0 {
_p0 = unsafe.Pointer(&buf[0])
} else {
_p0 = unsafe.Pointer(&_zero)
}
_, _, e1 := Syscall6(SYS_SENDTO, uintptr(s), uintptr(_p0), uintptr(len(buf)), uintptr(flags), uintptr(to), uintptr(addrlen))
if e1 != 0 {
err = errnoErr(e1)
}
return
}
// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT
func recvmsg(s int, msg *Msghdr, flags int) (n int, err error) {
r0, _, e1 := Syscall(SYS_RECVMSG, uintptr(s), uintptr(unsafe.Pointer(msg)), uintptr(flags))
n = int(r0)
if e1 != 0 {
err = errnoErr(e1)
}
return
}
// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT
func sendmsg(s int, msg *Msghdr, flags int) (n int, err error) {
r0, _, e1 := Syscall(SYS_SENDMSG, uintptr(s), uintptr(unsafe.Pointer(msg)), uintptr(flags))
n = int(r0)
if e1 != 0 {
err = errnoErr(e1)
}
return
}
// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT
func mmap(addr uintptr, length uintptr, prot int, flags int, fd int, offset int64) (xaddr uintptr, err error) {
r0, _, e1 := Syscall6(SYS_MMAP, uintptr(addr), uintptr(length), uintptr(prot), uintptr(flags), uintptr(fd), uintptr(offset))
xaddr = uintptr(r0)
if e1 != 0 {
err = errnoErr(e1)
}
return
}
// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT
func Utime(path string, buf *Utimbuf) (err error) {
var _p0 *byte
_p0, err = BytePtrFromString(path)
if err != nil {
return
}
_, _, e1 := Syscall(SYS_UTIME, uintptr(unsafe.Pointer(_p0)), uintptr(unsafe.Pointer(buf)), 0)
use(unsafe.Pointer(_p0))
if e1 != 0 {
err = errnoErr(e1)
}
return
}
// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT
func pipe(p *[2]_C_int) (err error) {
_, _, e1 := RawSyscall(SYS_PIPE, uintptr(unsafe.Pointer(p)), 0, 0)
if e1 != 0 {
err = errnoErr(e1)
}
return
}
// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT
func pipe2(p *[2]_C_int, flags int) (err error) {
_, _, e1 := RawSyscall(SYS_PIPE2, uintptr(unsafe.Pointer(p)), uintptr(flags), 0)
if e1 != 0 {
err = errnoErr(e1)
}
return
}
| vendor/golang.org/x/sys/unix/zsyscall_linux_amd64.go | 0 | https://github.com/grafana/grafana/commit/34da0711abf93fa54376a21452660d2a9f4545df | [
0.0008881217800080776,
0.00018074677791446447,
0.00016279648116324097,
0.00016785731713753194,
0.00007808055670466274
] |
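The generated stubs above are reached through the exported helpers of golang.org/x/sys/unix; a tiny sketch calling two wrappers that appear in this file:

```go
package main

import (
	"fmt"
	"log"

	"golang.org/x/sys/unix"
)

func main() {
	// Getpid is the thin wrapper over SYS_GETPID generated above.
	fmt.Println("pid:", unix.Getpid())

	// Uname wraps SYS_UNAME and fills in the Utsname struct.
	var uts unix.Utsname
	if err := unix.Uname(&uts); err != nil {
		log.Fatal(err)
	}
}
```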
{
"id": 8,
"code_window": [
"\n",
"Macros:\n",
"- $__time(column) -> UNIX_TIMESTAMP(column) as time_sec\n",
"- $__timeFilter(column) -> UNIX_TIMESTAMP(time_date_time) ≥ 1492750877 AND UNIX_TIMESTAMP(time_date_time) ≤ 1492750877\n",
"- $__unixEpochFilter(column) -> time_unix_epoch > 1492750877 AND time_unix_epoch < 1492750877\n",
"- $__timeGroup(column,'5m') -> (extract(epoch from \"dateColumn\")/extract(epoch from '5m'::interval))::int\n",
"\n",
"Or build your own conditionals using these macros which just return the values:\n",
"- $__timeFrom() -> FROM_UNIXTIME(1492750877)\n"
],
"labels": [
"keep",
"keep",
"keep",
"keep",
"keep",
"replace",
"keep",
"keep",
"keep"
],
"after_edit": [
"- $__timeGroup(column,'5m') -> cast(cast(UNIX_TIMESTAMP(column)/(300) as signed)*300 as signed)\n",
"\n",
"Example of group by and order by with $__timeGroup:\n",
"SELECT\n",
" $__timeGroup(timestamp_col, '1h') AS time,\n",
" sum(value_double) as value\n",
"FROM yourtable\n",
"GROUP BY 1\n",
"ORDER BY 1\n"
],
"file_path": "public/app/plugins/datasource/mysql/partials/query.editor.html",
"type": "replace",
"edit_start_line_idx": 51
} | // +build go1.2
package toml
// In order to support Go 1.1, we define our own TextMarshaler and
// TextUnmarshaler types. For Go 1.2+, we just alias them with the
// standard library interfaces.
import (
"encoding"
)
// TextMarshaler is a synonym for encoding.TextMarshaler. It is defined here
// so that Go 1.1 can be supported.
type TextMarshaler encoding.TextMarshaler
// TextUnmarshaler is a synonym for encoding.TextUnmarshaler. It is defined
// here so that Go 1.1 can be supported.
type TextUnmarshaler encoding.TextUnmarshaler
| vendor/github.com/BurntSushi/toml/encoding_types.go | 0 | https://github.com/grafana/grafana/commit/34da0711abf93fa54376a21452660d2a9f4545df | [
0.00016719313862267882,
0.00016495073214173317,
0.0001627083111088723,
0.00016495073214173317,
0.000002242413756903261
] |
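The aliases above let the toml package refer to encoding.TextMarshaler/TextUnmarshaler while still building on Go 1.1; user types that implement UnmarshalText hook into decoding through them. A minimal sketch (the duration and config types are invented for illustration):

```go
package main

import (
	"fmt"
	"log"
	"time"

	"github.com/BurntSushi/toml"
)

// duration satisfies encoding.TextUnmarshaler (aliased as toml.TextUnmarshaler),
// so the decoder hands it the raw string "30s" instead of rejecting the field.
type duration struct{ time.Duration }

func (d *duration) UnmarshalText(text []byte) (err error) {
	d.Duration, err = time.ParseDuration(string(text))
	return err
}

type config struct {
	Timeout duration `toml:"timeout"`
}

func main() {
	var cfg config
	if _, err := toml.Decode(`timeout = "30s"`, &cfg); err != nil {
		log.Fatal(err)
	}
	fmt.Println(cfg.Timeout.Duration)
}
```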
{
"id": 8,
"code_window": [
"\n",
"Macros:\n",
"- $__time(column) -> UNIX_TIMESTAMP(column) as time_sec\n",
"- $__timeFilter(column) -> UNIX_TIMESTAMP(time_date_time) ≥ 1492750877 AND UNIX_TIMESTAMP(time_date_time) ≤ 1492750877\n",
"- $__unixEpochFilter(column) -> time_unix_epoch > 1492750877 AND time_unix_epoch < 1492750877\n",
"- $__timeGroup(column,'5m') -> (extract(epoch from \"dateColumn\")/extract(epoch from '5m'::interval))::int\n",
"\n",
"Or build your own conditionals using these macros which just return the values:\n",
"- $__timeFrom() -> FROM_UNIXTIME(1492750877)\n"
],
"labels": [
"keep",
"keep",
"keep",
"keep",
"keep",
"replace",
"keep",
"keep",
"keep"
],
"after_edit": [
"- $__timeGroup(column,'5m') -> cast(cast(UNIX_TIMESTAMP(column)/(300) as signed)*300 as signed)\n",
"\n",
"Example of group by and order by with $__timeGroup:\n",
"SELECT\n",
" $__timeGroup(timestamp_col, '1h') AS time,\n",
" sum(value_double) as value\n",
"FROM yourtable\n",
"GROUP BY 1\n",
"ORDER BY 1\n"
],
"file_path": "public/app/plugins/datasource/mysql/partials/query.editor.html",
"type": "replace",
"edit_start_line_idx": 51
} | "6" | vendor/github.com/jmespath/go-jmespath/fuzz/corpus/expr-309 | 0 | https://github.com/grafana/grafana/commit/34da0711abf93fa54376a21452660d2a9f4545df | [
0.00017127861792687327,
0.00017127861792687327,
0.00017127861792687327,
0.00017127861792687327,
0
] |
{
"id": 9,
"code_window": [
"Macros:\n",
"- $__time(column) -> column as \"time\"\n",
"- $__timeEpoch -> extract(epoch from column) as \"time\"\n",
"- $__timeFilter(column) -> column ≥ to_timestamp(1492750877) AND column ≤ to_timestamp(1492750877)\n",
"- $__unixEpochFilter(column) -> column > 1492750877 AND column < 1492750877\n",
"\n",
"To group by time use $__timeGroup:\n",
"-> (extract(epoch from column)/extract(epoch from column::interval))::int\n",
"\n",
"Example of group by and order by with $__timeGroup:\n",
"SELECT\n"
],
"labels": [
"keep",
"keep",
"keep",
"keep",
"keep",
"replace",
"replace",
"replace",
"keep",
"keep",
"keep"
],
"after_edit": [
"- $__timeGroup(column,'5m') -> (extract(epoch from \"dateColumn\")/extract(epoch from '5m'::interval))::int\n"
],
"file_path": "public/app/plugins/datasource/postgres/partials/query.editor.html",
"type": "replace",
"edit_start_line_idx": 52
} | package mysql
import (
"testing"
"github.com/grafana/grafana/pkg/tsdb"
. "github.com/smartystreets/goconvey/convey"
)
func TestMacroEngine(t *testing.T) {
Convey("MacroEngine", t, func() {
engine := &MySqlMacroEngine{}
timeRange := &tsdb.TimeRange{From: "5m", To: "now"}
Convey("interpolate __time function", func() {
sql, err := engine.Interpolate(nil, "select $__time(time_column)")
So(err, ShouldBeNil)
So(sql, ShouldEqual, "select UNIX_TIMESTAMP(time_column) as time_sec")
})
Convey("interpolate __time function wrapped in aggregation", func() {
sql, err := engine.Interpolate(nil, "select min($__time(time_column))")
So(err, ShouldBeNil)
So(sql, ShouldEqual, "select min(UNIX_TIMESTAMP(time_column) as time_sec)")
})
Convey("interpolate __timeFilter function", func() {
sql, err := engine.Interpolate(timeRange, "WHERE $__timeFilter(time_column)")
So(err, ShouldBeNil)
So(sql, ShouldEqual, "WHERE time_column >= FROM_UNIXTIME(18446744066914186738) AND time_column <= FROM_UNIXTIME(18446744066914187038)")
})
Convey("interpolate __timeFrom function", func() {
sql, err := engine.Interpolate(timeRange, "select $__timeFrom(time_column)")
So(err, ShouldBeNil)
So(sql, ShouldEqual, "select FROM_UNIXTIME(18446744066914186738)")
})
Convey("interpolate __timeTo function", func() {
sql, err := engine.Interpolate(timeRange, "select $__timeTo(time_column)")
So(err, ShouldBeNil)
So(sql, ShouldEqual, "select FROM_UNIXTIME(18446744066914187038)")
})
Convey("interpolate __unixEpochFilter function", func() {
sql, err := engine.Interpolate(timeRange, "select $__unixEpochFilter(18446744066914186738)")
So(err, ShouldBeNil)
So(sql, ShouldEqual, "select 18446744066914186738 >= 18446744066914186738 AND 18446744066914186738 <= 18446744066914187038")
})
Convey("interpolate __unixEpochFrom function", func() {
sql, err := engine.Interpolate(timeRange, "select $__unixEpochFrom()")
So(err, ShouldBeNil)
So(sql, ShouldEqual, "select 18446744066914186738")
})
Convey("interpolate __unixEpochTo function", func() {
sql, err := engine.Interpolate(timeRange, "select $__unixEpochTo()")
So(err, ShouldBeNil)
So(sql, ShouldEqual, "select 18446744066914187038")
})
})
}
| pkg/tsdb/mysql/macros_test.go | 1 | https://github.com/grafana/grafana/commit/34da0711abf93fa54376a21452660d2a9f4545df | [
0.9885453581809998,
0.16123536229133606,
0.00016811760724522173,
0.0021410249173641205,
0.32567310333251953
] |
{
"id": 9,
"code_window": [
"Macros:\n",
"- $__time(column) -> column as \"time\"\n",
"- $__timeEpoch -> extract(epoch from column) as \"time\"\n",
"- $__timeFilter(column) -> column ≥ to_timestamp(1492750877) AND column ≤ to_timestamp(1492750877)\n",
"- $__unixEpochFilter(column) -> column > 1492750877 AND column < 1492750877\n",
"\n",
"To group by time use $__timeGroup:\n",
"-> (extract(epoch from column)/extract(epoch from column::interval))::int\n",
"\n",
"Example of group by and order by with $__timeGroup:\n",
"SELECT\n"
],
"labels": [
"keep",
"keep",
"keep",
"keep",
"keep",
"replace",
"replace",
"replace",
"keep",
"keep",
"keep"
],
"after_edit": [
"- $__timeGroup(column,'5m') -> (extract(epoch from \"dateColumn\")/extract(epoch from '5m'::interval))::int\n"
],
"file_path": "public/app/plugins/datasource/postgres/partials/query.editor.html",
"type": "replace",
"edit_start_line_idx": 52
} | "!\r" | vendor/github.com/jmespath/go-jmespath/fuzz/corpus/expr-199 | 0 | https://github.com/grafana/grafana/commit/34da0711abf93fa54376a21452660d2a9f4545df | [
0.00019901605264749378,
0.00019901605264749378,
0.00019901605264749378,
0.00019901605264749378,
0
] |
{
"id": 9,
"code_window": [
"Macros:\n",
"- $__time(column) -> column as \"time\"\n",
"- $__timeEpoch -> extract(epoch from column) as \"time\"\n",
"- $__timeFilter(column) -> column ≥ to_timestamp(1492750877) AND column ≤ to_timestamp(1492750877)\n",
"- $__unixEpochFilter(column) -> column > 1492750877 AND column < 1492750877\n",
"\n",
"To group by time use $__timeGroup:\n",
"-> (extract(epoch from column)/extract(epoch from column::interval))::int\n",
"\n",
"Example of group by and order by with $__timeGroup:\n",
"SELECT\n"
],
"labels": [
"keep",
"keep",
"keep",
"keep",
"keep",
"replace",
"replace",
"replace",
"keep",
"keep",
"keep"
],
"after_edit": [
"- $__timeGroup(column,'5m') -> (extract(epoch from \"dateColumn\")/extract(epoch from '5m'::interval))::int\n"
],
"file_path": "public/app/plugins/datasource/postgres/partials/query.editor.html",
"type": "replace",
"edit_start_line_idx": 52
} | // Go support for Protocol Buffers - Google's data interchange format
//
// Copyright 2010 The Go Authors. All rights reserved.
// https://github.com/golang/protobuf
//
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
//
// * Redistributions of source code must retain the above copyright
// notice, this list of conditions and the following disclaimer.
// * Redistributions in binary form must reproduce the above
// copyright notice, this list of conditions and the following disclaimer
// in the documentation and/or other materials provided with the
// distribution.
// * Neither the name of Google Inc. nor the names of its
// contributors may be used to endorse or promote products derived from
// this software without specific prior written permission.
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
package proto
// Functions for parsing the Text protocol buffer format.
// TODO: message sets.
import (
"encoding"
"errors"
"fmt"
"reflect"
"strconv"
"strings"
"unicode/utf8"
)
// Error string emitted when deserializing Any and fields are already set
const anyRepeatedlyUnpacked = "Any message unpacked multiple times, or %q already set"
type ParseError struct {
Message string
Line int // 1-based line number
Offset int // 0-based byte offset from start of input
}
func (p *ParseError) Error() string {
if p.Line == 1 {
// show offset only for first line
return fmt.Sprintf("line 1.%d: %v", p.Offset, p.Message)
}
return fmt.Sprintf("line %d: %v", p.Line, p.Message)
}
type token struct {
value string
err *ParseError
line int // line number
offset int // byte number from start of input, not start of line
unquoted string // the unquoted version of value, if it was a quoted string
}
func (t *token) String() string {
if t.err == nil {
return fmt.Sprintf("%q (line=%d, offset=%d)", t.value, t.line, t.offset)
}
return fmt.Sprintf("parse error: %v", t.err)
}
type textParser struct {
s string // remaining input
done bool // whether the parsing is finished (success or error)
backed bool // whether back() was called
offset, line int
cur token
}
func newTextParser(s string) *textParser {
p := new(textParser)
p.s = s
p.line = 1
p.cur.line = 1
return p
}
func (p *textParser) errorf(format string, a ...interface{}) *ParseError {
pe := &ParseError{fmt.Sprintf(format, a...), p.cur.line, p.cur.offset}
p.cur.err = pe
p.done = true
return pe
}
// Numbers and identifiers are matched by [-+._A-Za-z0-9]
func isIdentOrNumberChar(c byte) bool {
switch {
case 'A' <= c && c <= 'Z', 'a' <= c && c <= 'z':
return true
case '0' <= c && c <= '9':
return true
}
switch c {
case '-', '+', '.', '_':
return true
}
return false
}
func isWhitespace(c byte) bool {
switch c {
case ' ', '\t', '\n', '\r':
return true
}
return false
}
func isQuote(c byte) bool {
switch c {
case '"', '\'':
return true
}
return false
}
func (p *textParser) skipWhitespace() {
i := 0
for i < len(p.s) && (isWhitespace(p.s[i]) || p.s[i] == '#') {
if p.s[i] == '#' {
// comment; skip to end of line or input
for i < len(p.s) && p.s[i] != '\n' {
i++
}
if i == len(p.s) {
break
}
}
if p.s[i] == '\n' {
p.line++
}
i++
}
p.offset += i
p.s = p.s[i:len(p.s)]
if len(p.s) == 0 {
p.done = true
}
}
func (p *textParser) advance() {
// Skip whitespace
p.skipWhitespace()
if p.done {
return
}
// Start of non-whitespace
p.cur.err = nil
p.cur.offset, p.cur.line = p.offset, p.line
p.cur.unquoted = ""
switch p.s[0] {
case '<', '>', '{', '}', ':', '[', ']', ';', ',', '/':
// Single symbol
p.cur.value, p.s = p.s[0:1], p.s[1:len(p.s)]
case '"', '\'':
// Quoted string
i := 1
for i < len(p.s) && p.s[i] != p.s[0] && p.s[i] != '\n' {
if p.s[i] == '\\' && i+1 < len(p.s) {
// skip escaped char
i++
}
i++
}
if i >= len(p.s) || p.s[i] != p.s[0] {
p.errorf("unmatched quote")
return
}
unq, err := unquoteC(p.s[1:i], rune(p.s[0]))
if err != nil {
p.errorf("invalid quoted string %s: %v", p.s[0:i+1], err)
return
}
p.cur.value, p.s = p.s[0:i+1], p.s[i+1:len(p.s)]
p.cur.unquoted = unq
default:
i := 0
for i < len(p.s) && isIdentOrNumberChar(p.s[i]) {
i++
}
if i == 0 {
p.errorf("unexpected byte %#x", p.s[0])
return
}
p.cur.value, p.s = p.s[0:i], p.s[i:len(p.s)]
}
p.offset += len(p.cur.value)
}
var (
errBadUTF8 = errors.New("proto: bad UTF-8")
errBadHex = errors.New("proto: bad hexadecimal")
)
func unquoteC(s string, quote rune) (string, error) {
// This is based on C++'s tokenizer.cc.
// Despite its name, this is *not* parsing C syntax.
// For instance, "\0" is an invalid quoted string.
// Avoid allocation in trivial cases.
simple := true
for _, r := range s {
if r == '\\' || r == quote {
simple = false
break
}
}
if simple {
return s, nil
}
buf := make([]byte, 0, 3*len(s)/2)
for len(s) > 0 {
r, n := utf8.DecodeRuneInString(s)
if r == utf8.RuneError && n == 1 {
return "", errBadUTF8
}
s = s[n:]
if r != '\\' {
if r < utf8.RuneSelf {
buf = append(buf, byte(r))
} else {
buf = append(buf, string(r)...)
}
continue
}
ch, tail, err := unescape(s)
if err != nil {
return "", err
}
buf = append(buf, ch...)
s = tail
}
return string(buf), nil
}
func unescape(s string) (ch string, tail string, err error) {
r, n := utf8.DecodeRuneInString(s)
if r == utf8.RuneError && n == 1 {
return "", "", errBadUTF8
}
s = s[n:]
switch r {
case 'a':
return "\a", s, nil
case 'b':
return "\b", s, nil
case 'f':
return "\f", s, nil
case 'n':
return "\n", s, nil
case 'r':
return "\r", s, nil
case 't':
return "\t", s, nil
case 'v':
return "\v", s, nil
case '?':
return "?", s, nil // trigraph workaround
case '\'', '"', '\\':
return string(r), s, nil
case '0', '1', '2', '3', '4', '5', '6', '7', 'x', 'X':
if len(s) < 2 {
return "", "", fmt.Errorf(`\%c requires 2 following digits`, r)
}
base := 8
ss := s[:2]
s = s[2:]
if r == 'x' || r == 'X' {
base = 16
} else {
ss = string(r) + ss
}
i, err := strconv.ParseUint(ss, base, 8)
if err != nil {
return "", "", err
}
return string([]byte{byte(i)}), s, nil
case 'u', 'U':
n := 4
if r == 'U' {
n = 8
}
if len(s) < n {
return "", "", fmt.Errorf(`\%c requires %d digits`, r, n)
}
bs := make([]byte, n/2)
for i := 0; i < n; i += 2 {
a, ok1 := unhex(s[i])
b, ok2 := unhex(s[i+1])
if !ok1 || !ok2 {
return "", "", errBadHex
}
bs[i/2] = a<<4 | b
}
s = s[n:]
return string(bs), s, nil
}
return "", "", fmt.Errorf(`unknown escape \%c`, r)
}
// Adapted from src/pkg/strconv/quote.go.
func unhex(b byte) (v byte, ok bool) {
switch {
case '0' <= b && b <= '9':
return b - '0', true
case 'a' <= b && b <= 'f':
return b - 'a' + 10, true
case 'A' <= b && b <= 'F':
return b - 'A' + 10, true
}
return 0, false
}
// Back off the parser by one token. Can only be done between calls to next().
// It makes the next advance() a no-op.
func (p *textParser) back() { p.backed = true }
// Advances the parser and returns the new current token.
func (p *textParser) next() *token {
if p.backed || p.done {
p.backed = false
return &p.cur
}
p.advance()
if p.done {
p.cur.value = ""
} else if len(p.cur.value) > 0 && isQuote(p.cur.value[0]) {
// Look for multiple quoted strings separated by whitespace,
// and concatenate them.
cat := p.cur
for {
p.skipWhitespace()
if p.done || !isQuote(p.s[0]) {
break
}
p.advance()
if p.cur.err != nil {
return &p.cur
}
cat.value += " " + p.cur.value
cat.unquoted += p.cur.unquoted
}
p.done = false // parser may have seen EOF, but we want to return cat
p.cur = cat
}
return &p.cur
}
func (p *textParser) consumeToken(s string) error {
tok := p.next()
if tok.err != nil {
return tok.err
}
if tok.value != s {
p.back()
return p.errorf("expected %q, found %q", s, tok.value)
}
return nil
}
// Return a RequiredNotSetError indicating which required field was not set.
func (p *textParser) missingRequiredFieldError(sv reflect.Value) *RequiredNotSetError {
st := sv.Type()
sprops := GetProperties(st)
for i := 0; i < st.NumField(); i++ {
if !isNil(sv.Field(i)) {
continue
}
props := sprops.Prop[i]
if props.Required {
return &RequiredNotSetError{fmt.Sprintf("%v.%v", st, props.OrigName)}
}
}
return &RequiredNotSetError{fmt.Sprintf("%v.<unknown field name>", st)} // should not happen
}
// Returns the index in the struct for the named field, as well as the parsed tag properties.
func structFieldByName(sprops *StructProperties, name string) (int, *Properties, bool) {
i, ok := sprops.decoderOrigNames[name]
if ok {
return i, sprops.Prop[i], true
}
return -1, nil, false
}
// Consume a ':' from the input stream (if the next token is a colon),
// returning an error if a colon is needed but not present.
func (p *textParser) checkForColon(props *Properties, typ reflect.Type) *ParseError {
tok := p.next()
if tok.err != nil {
return tok.err
}
if tok.value != ":" {
// Colon is optional when the field is a group or message.
needColon := true
switch props.Wire {
case "group":
needColon = false
case "bytes":
// A "bytes" field is either a message, a string, or a repeated field;
// those three become *T, *string and []T respectively, so we can check for
// this field being a pointer to a non-string.
if typ.Kind() == reflect.Ptr {
// *T or *string
if typ.Elem().Kind() == reflect.String {
break
}
} else if typ.Kind() == reflect.Slice {
// []T or []*T
if typ.Elem().Kind() != reflect.Ptr {
break
}
} else if typ.Kind() == reflect.String {
// The proto3 exception is for a string field,
// which requires a colon.
break
}
needColon = false
}
if needColon {
return p.errorf("expected ':', found %q", tok.value)
}
p.back()
}
return nil
}
func (p *textParser) readStruct(sv reflect.Value, terminator string) error {
st := sv.Type()
sprops := GetProperties(st)
reqCount := sprops.reqCount
var reqFieldErr error
fieldSet := make(map[string]bool)
// A struct is a sequence of "name: value", terminated by one of
// '>' or '}', or the end of the input. A name may also be
// "[extension]" or "[type/url]".
//
// The whole struct can also be an expanded Any message, like:
// [type/url] < ... struct contents ... >
for {
tok := p.next()
if tok.err != nil {
return tok.err
}
if tok.value == terminator {
break
}
if tok.value == "[" {
// Looks like an extension or an Any.
//
// TODO: Check whether we need to handle
// namespace rooted names (e.g. ".something.Foo").
extName, err := p.consumeExtName()
if err != nil {
return err
}
if s := strings.LastIndex(extName, "/"); s >= 0 {
// If it contains a slash, it's an Any type URL.
messageName := extName[s+1:]
mt := MessageType(messageName)
if mt == nil {
return p.errorf("unrecognized message %q in google.protobuf.Any", messageName)
}
tok = p.next()
if tok.err != nil {
return tok.err
}
// consume an optional colon
if tok.value == ":" {
tok = p.next()
if tok.err != nil {
return tok.err
}
}
var terminator string
switch tok.value {
case "<":
terminator = ">"
case "{":
terminator = "}"
default:
return p.errorf("expected '{' or '<', found %q", tok.value)
}
v := reflect.New(mt.Elem())
if pe := p.readStruct(v.Elem(), terminator); pe != nil {
return pe
}
b, err := Marshal(v.Interface().(Message))
if err != nil {
return p.errorf("failed to marshal message of type %q: %v", messageName, err)
}
if fieldSet["type_url"] {
return p.errorf(anyRepeatedlyUnpacked, "type_url")
}
if fieldSet["value"] {
return p.errorf(anyRepeatedlyUnpacked, "value")
}
sv.FieldByName("TypeUrl").SetString(extName)
sv.FieldByName("Value").SetBytes(b)
fieldSet["type_url"] = true
fieldSet["value"] = true
continue
}
var desc *ExtensionDesc
// This could be faster, but it's functional.
// TODO: Do something smarter than a linear scan.
for _, d := range RegisteredExtensions(reflect.New(st).Interface().(Message)) {
if d.Name == extName {
desc = d
break
}
}
if desc == nil {
return p.errorf("unrecognized extension %q", extName)
}
props := &Properties{}
props.Parse(desc.Tag)
typ := reflect.TypeOf(desc.ExtensionType)
if err := p.checkForColon(props, typ); err != nil {
return err
}
rep := desc.repeated()
// Read the extension structure, and set it in
// the value we're constructing.
var ext reflect.Value
if !rep {
ext = reflect.New(typ).Elem()
} else {
ext = reflect.New(typ.Elem()).Elem()
}
if err := p.readAny(ext, props); err != nil {
if _, ok := err.(*RequiredNotSetError); !ok {
return err
}
reqFieldErr = err
}
ep := sv.Addr().Interface().(Message)
if !rep {
SetExtension(ep, desc, ext.Interface())
} else {
old, err := GetExtension(ep, desc)
var sl reflect.Value
if err == nil {
sl = reflect.ValueOf(old) // existing slice
} else {
sl = reflect.MakeSlice(typ, 0, 1)
}
sl = reflect.Append(sl, ext)
SetExtension(ep, desc, sl.Interface())
}
if err := p.consumeOptionalSeparator(); err != nil {
return err
}
continue
}
// This is a normal, non-extension field.
name := tok.value
var dst reflect.Value
fi, props, ok := structFieldByName(sprops, name)
if ok {
dst = sv.Field(fi)
} else if oop, ok := sprops.OneofTypes[name]; ok {
// It is a oneof.
props = oop.Prop
nv := reflect.New(oop.Type.Elem())
dst = nv.Elem().Field(0)
field := sv.Field(oop.Field)
if !field.IsNil() {
return p.errorf("field '%s' would overwrite already parsed oneof '%s'", name, sv.Type().Field(oop.Field).Name)
}
field.Set(nv)
}
if !dst.IsValid() {
return p.errorf("unknown field name %q in %v", name, st)
}
if dst.Kind() == reflect.Map {
// Consume any colon.
if err := p.checkForColon(props, dst.Type()); err != nil {
return err
}
// Construct the map if it doesn't already exist.
if dst.IsNil() {
dst.Set(reflect.MakeMap(dst.Type()))
}
key := reflect.New(dst.Type().Key()).Elem()
val := reflect.New(dst.Type().Elem()).Elem()
// The map entry should be this sequence of tokens:
// < key : KEY value : VALUE >
// However, implementations may omit key or value, and technically
// we should support them in any order. See b/28924776 for a time
// this went wrong.
tok := p.next()
var terminator string
switch tok.value {
case "<":
terminator = ">"
case "{":
terminator = "}"
default:
return p.errorf("expected '{' or '<', found %q", tok.value)
}
for {
tok := p.next()
if tok.err != nil {
return tok.err
}
if tok.value == terminator {
break
}
switch tok.value {
case "key":
if err := p.consumeToken(":"); err != nil {
return err
}
if err := p.readAny(key, props.mkeyprop); err != nil {
return err
}
if err := p.consumeOptionalSeparator(); err != nil {
return err
}
case "value":
if err := p.checkForColon(props.mvalprop, dst.Type().Elem()); err != nil {
return err
}
if err := p.readAny(val, props.mvalprop); err != nil {
return err
}
if err := p.consumeOptionalSeparator(); err != nil {
return err
}
default:
p.back()
return p.errorf(`expected "key", "value", or %q, found %q`, terminator, tok.value)
}
}
dst.SetMapIndex(key, val)
continue
}
// Check that it's not already set if it's not a repeated field.
if !props.Repeated && fieldSet[name] {
return p.errorf("non-repeated field %q was repeated", name)
}
if err := p.checkForColon(props, dst.Type()); err != nil {
return err
}
// Parse into the field.
fieldSet[name] = true
if err := p.readAny(dst, props); err != nil {
if _, ok := err.(*RequiredNotSetError); !ok {
return err
}
reqFieldErr = err
}
if props.Required {
reqCount--
}
if err := p.consumeOptionalSeparator(); err != nil {
return err
}
}
if reqCount > 0 {
return p.missingRequiredFieldError(sv)
}
return reqFieldErr
}
// consumeExtName consumes extension name or expanded Any type URL and the
// following ']'. It returns the name or URL consumed.
func (p *textParser) consumeExtName() (string, error) {
tok := p.next()
if tok.err != nil {
return "", tok.err
}
// If extension name or type url is quoted, it's a single token.
if len(tok.value) > 2 && isQuote(tok.value[0]) && tok.value[len(tok.value)-1] == tok.value[0] {
name, err := unquoteC(tok.value[1:len(tok.value)-1], rune(tok.value[0]))
if err != nil {
return "", err
}
return name, p.consumeToken("]")
}
// Consume everything up to "]"
var parts []string
for tok.value != "]" {
parts = append(parts, tok.value)
tok = p.next()
if tok.err != nil {
return "", p.errorf("unrecognized type_url or extension name: %s", tok.err)
}
}
return strings.Join(parts, ""), nil
}
// consumeOptionalSeparator consumes an optional semicolon or comma.
// It is used in readStruct to provide backward compatibility.
func (p *textParser) consumeOptionalSeparator() error {
tok := p.next()
if tok.err != nil {
return tok.err
}
if tok.value != ";" && tok.value != "," {
p.back()
}
return nil
}
func (p *textParser) readAny(v reflect.Value, props *Properties) error {
tok := p.next()
if tok.err != nil {
return tok.err
}
if tok.value == "" {
return p.errorf("unexpected EOF")
}
switch fv := v; fv.Kind() {
case reflect.Slice:
at := v.Type()
if at.Elem().Kind() == reflect.Uint8 {
// Special case for []byte
if tok.value[0] != '"' && tok.value[0] != '\'' {
// Deliberately written out here, as the error after
// this switch statement would write "invalid []byte: ...",
// which is not as user-friendly.
return p.errorf("invalid string: %v", tok.value)
}
bytes := []byte(tok.unquoted)
fv.Set(reflect.ValueOf(bytes))
return nil
}
// Repeated field.
if tok.value == "[" {
// Repeated field with list notation, like [1,2,3].
for {
fv.Set(reflect.Append(fv, reflect.New(at.Elem()).Elem()))
err := p.readAny(fv.Index(fv.Len()-1), props)
if err != nil {
return err
}
tok := p.next()
if tok.err != nil {
return tok.err
}
if tok.value == "]" {
break
}
if tok.value != "," {
return p.errorf("Expected ']' or ',' found %q", tok.value)
}
}
return nil
}
// One value of the repeated field.
p.back()
fv.Set(reflect.Append(fv, reflect.New(at.Elem()).Elem()))
return p.readAny(fv.Index(fv.Len()-1), props)
case reflect.Bool:
// true/1/t/True or false/f/0/False.
switch tok.value {
case "true", "1", "t", "True":
fv.SetBool(true)
return nil
case "false", "0", "f", "False":
fv.SetBool(false)
return nil
}
case reflect.Float32, reflect.Float64:
v := tok.value
// Ignore 'f' for compatibility with output generated by C++, but don't
// remove 'f' when the value is "-inf" or "inf".
if strings.HasSuffix(v, "f") && tok.value != "-inf" && tok.value != "inf" {
v = v[:len(v)-1]
}
if f, err := strconv.ParseFloat(v, fv.Type().Bits()); err == nil {
fv.SetFloat(f)
return nil
}
case reflect.Int32:
if x, err := strconv.ParseInt(tok.value, 0, 32); err == nil {
fv.SetInt(x)
return nil
}
if len(props.Enum) == 0 {
break
}
m, ok := enumValueMaps[props.Enum]
if !ok {
break
}
x, ok := m[tok.value]
if !ok {
break
}
fv.SetInt(int64(x))
return nil
case reflect.Int64:
if x, err := strconv.ParseInt(tok.value, 0, 64); err == nil {
fv.SetInt(x)
return nil
}
case reflect.Ptr:
// A basic field (indirected through pointer), or a repeated message/group
p.back()
fv.Set(reflect.New(fv.Type().Elem()))
return p.readAny(fv.Elem(), props)
case reflect.String:
if tok.value[0] == '"' || tok.value[0] == '\'' {
fv.SetString(tok.unquoted)
return nil
}
case reflect.Struct:
var terminator string
switch tok.value {
case "{":
terminator = "}"
case "<":
terminator = ">"
default:
return p.errorf("expected '{' or '<', found %q", tok.value)
}
// TODO: Handle nested messages which implement encoding.TextUnmarshaler.
return p.readStruct(fv, terminator)
case reflect.Uint32:
if x, err := strconv.ParseUint(tok.value, 0, 32); err == nil {
fv.SetUint(x)
return nil
}
case reflect.Uint64:
if x, err := strconv.ParseUint(tok.value, 0, 64); err == nil {
fv.SetUint(x)
return nil
}
}
return p.errorf("invalid %v: %v", v.Type(), tok.value)
}
// UnmarshalText reads a protocol buffer in Text format. UnmarshalText resets pb
// before starting to unmarshal, so any existing data in pb is always removed.
// If a required field is not set and no other error occurs,
// UnmarshalText returns *RequiredNotSetError.
func UnmarshalText(s string, pb Message) error {
if um, ok := pb.(encoding.TextUnmarshaler); ok {
err := um.UnmarshalText([]byte(s))
return err
}
pb.Reset()
v := reflect.ValueOf(pb)
if pe := newTextParser(s).readStruct(v.Elem(), ""); pe != nil {
return pe
}
return nil
}
| vendor/github.com/golang/protobuf/proto/text_parser.go | 0 | https://github.com/grafana/grafana/commit/34da0711abf93fa54376a21452660d2a9f4545df | [
0.00022559048375114799,
0.000172094238223508,
0.00016552029410377145,
0.00016987741400953382,
0.000008353230441571213
] |
{
"id": 9,
"code_window": [
"Macros:\n",
"- $__time(column) -> column as \"time\"\n",
"- $__timeEpoch -> extract(epoch from column) as \"time\"\n",
"- $__timeFilter(column) -> column ≥ to_timestamp(1492750877) AND column ≤ to_timestamp(1492750877)\n",
"- $__unixEpochFilter(column) -> column > 1492750877 AND column < 1492750877\n",
"\n",
"To group by time use $__timeGroup:\n",
"-> (extract(epoch from column)/extract(epoch from column::interval))::int\n",
"\n",
"Example of group by and order by with $__timeGroup:\n",
"SELECT\n"
],
"labels": [
"keep",
"keep",
"keep",
"keep",
"keep",
"replace",
"replace",
"replace",
"keep",
"keep",
"keep"
],
"after_edit": [
"- $__timeGroup(column,'5m') -> (extract(epoch from \"dateColumn\")/extract(epoch from '5m'::interval))::int\n"
],
"file_path": "public/app/plugins/datasource/postgres/partials/query.editor.html",
"type": "replace",
"edit_start_line_idx": 52
} | // +build !go1.3
package quotedprintable
import "bytes"
var ch = make(chan *bytes.Buffer, 32)
func getBuffer() *bytes.Buffer {
select {
case buf := <-ch:
return buf
default:
}
return new(bytes.Buffer)
}
func putBuffer(buf *bytes.Buffer) {
buf.Reset()
select {
case ch <- buf:
default:
}
}
| vendor/gopkg.in/alexcesaro/quotedprintable.v3/pool_go12.go | 0 | https://github.com/grafana/grafana/commit/34da0711abf93fa54376a21452660d2a9f4545df | [
0.00020968486205674708,
0.00018603267380967736,
0.00016802210302557796,
0.00018039107089862227,
0.000017470301827415824
] |
{
"id": 10,
"code_window": [
"\n",
"Example of group by and order by with $__timeGroup:\n",
"SELECT\n",
" min(date_time_col) AS time_sec,\n",
" sum(value_double) as value\n",
"FROM yourtable\n"
],
"labels": [
"keep",
"keep",
"keep",
"replace",
"replace",
"keep"
],
"after_edit": [
" $__timeGroup(date_time_col, '1h') AS time,\n",
" sum(value) as value\n"
],
"file_path": "public/app/plugins/datasource/postgres/partials/query.editor.html",
"type": "replace",
"edit_start_line_idx": 58
} | <query-editor-row query-ctrl="ctrl" can-collapse="false">
<div class="gf-form-inline">
<div class="gf-form gf-form--grow">
<code-editor content="ctrl.target.rawSql" datasource="ctrl.datasource" on-change="ctrl.panelCtrl.refresh()" data-mode="sql">
</code-editor>
</div>
</div>
<div class="gf-form-inline">
<div class="gf-form">
<label class="gf-form-label query-keyword">Format as</label>
<div class="gf-form-select-wrapper">
<select class="gf-form-input gf-size-auto" ng-model="ctrl.target.format" ng-options="f.value as f.text for f in ctrl.formats" ng-change="ctrl.refresh()"></select>
</div>
</div>
<div class="gf-form">
<label class="gf-form-label query-keyword" ng-click="ctrl.showHelp = !ctrl.showHelp">
Show Help
<i class="fa fa-caret-down" ng-show="ctrl.showHelp"></i>
<i class="fa fa-caret-right" ng-hide="ctrl.showHelp"></i>
</label>
</div>
<div class="gf-form" ng-show="ctrl.lastQueryMeta">
<label class="gf-form-label query-keyword" ng-click="ctrl.showLastQuerySQL = !ctrl.showLastQuerySQL">
Generated SQL
<i class="fa fa-caret-down" ng-show="ctrl.showLastQuerySQL"></i>
<i class="fa fa-caret-right" ng-hide="ctrl.showLastQuerySQL"></i>
</label>
</div>
<div class="gf-form gf-form--grow">
<div class="gf-form-label gf-form-label--grow"></div>
</div>
</div>
<div class="gf-form" ng-show="ctrl.showLastQuerySQL">
<pre class="gf-form-pre">{{ctrl.lastQueryMeta.sql}}</pre>
</div>
<div class="gf-form" ng-show="ctrl.showHelp">
<pre class="gf-form-pre alert alert-info">Time series:
- return column named <i>time</i> (UTC in seconds or timestamp)
- return column(s) with numeric datatype as values
- (Optional: return column named <i>metric</i> to represent the series name. If no column named metric is found the column name of the value column is used as series name)
Table:
- return any set of columns
Macros:
- $__time(column) -> column as "time"
- $__timeEpoch -> extract(epoch from column) as "time"
- $__timeFilter(column) -> column ≥ to_timestamp(1492750877) AND column ≤ to_timestamp(1492750877)
- $__unixEpochFilter(column) -> column > 1492750877 AND column < 1492750877
To group by time use $__timeGroup:
-> (extract(epoch from column)/extract(epoch from column::interval))::int
Example of group by and order by with $__timeGroup:
SELECT
min(date_time_col) AS time_sec,
sum(value_double) as value
FROM yourtable
group by $__timeGroup(date_time_col, '1h')
order by $__timeGroup(date_time_col, '1h') ASC
Or build your own conditionals using these macros which just return the values:
- $__timeFrom() -> to_timestamp(1492750877)
- $__timeTo() -> to_timestamp(1492750877)
- $__unixEpochFrom() -> 1492750877
- $__unixEpochTo() -> 1492750877
</pre>
</div>
</div>
<div class="gf-form" ng-show="ctrl.lastQueryError">
<pre class="gf-form-pre alert alert-error">{{ctrl.lastQueryError}}</pre>
</div>
</query-editor-row>
| public/app/plugins/datasource/postgres/partials/query.editor.html | 1 | https://github.com/grafana/grafana/commit/34da0711abf93fa54376a21452660d2a9f4545df | [
0.9903656244277954,
0.1425675004720688,
0.0001641951675992459,
0.0001683792215771973,
0.3240814208984375
] |
{
"id": 10,
"code_window": [
"\n",
"Example of group by and order by with $__timeGroup:\n",
"SELECT\n",
" min(date_time_col) AS time_sec,\n",
" sum(value_double) as value\n",
"FROM yourtable\n"
],
"labels": [
"keep",
"keep",
"keep",
"replace",
"replace",
"keep"
],
"after_edit": [
" $__timeGroup(date_time_col, '1h') AS time,\n",
" sum(value) as value\n"
],
"file_path": "public/app/plugins/datasource/postgres/partials/query.editor.html",
"type": "replace",
"edit_start_line_idx": 58
} | package notifiers
import (
"testing"
"github.com/grafana/grafana/pkg/components/simplejson"
m "github.com/grafana/grafana/pkg/models"
"github.com/grafana/grafana/pkg/services/alerting"
. "github.com/smartystreets/goconvey/convey"
)
func TestThreemaNotifier(t *testing.T) {
Convey("Threema notifier tests", t, func() {
Convey("Parsing alert notification from settings", func() {
Convey("empty settings should return error", func() {
json := `{ }`
settingsJSON, _ := simplejson.NewJson([]byte(json))
model := &m.AlertNotification{
Name: "threema_testing",
Type: "threema",
Settings: settingsJSON,
}
_, err := NewThreemaNotifier(model)
So(err, ShouldNotBeNil)
})
Convey("valid settings should be parsed successfully", func() {
json := `
{
"gateway_id": "*3MAGWID",
"recipient_id": "ECHOECHO",
"api_secret": "1234"
}`
settingsJSON, _ := simplejson.NewJson([]byte(json))
model := &m.AlertNotification{
Name: "threema_testing",
Type: "threema",
Settings: settingsJSON,
}
not, err := NewThreemaNotifier(model)
So(err, ShouldBeNil)
threemaNotifier := not.(*ThreemaNotifier)
So(err, ShouldBeNil)
So(threemaNotifier.Name, ShouldEqual, "threema_testing")
So(threemaNotifier.Type, ShouldEqual, "threema")
So(threemaNotifier.GatewayID, ShouldEqual, "*3MAGWID")
So(threemaNotifier.RecipientID, ShouldEqual, "ECHOECHO")
So(threemaNotifier.APISecret, ShouldEqual, "1234")
})
Convey("invalid Threema Gateway IDs should be rejected (prefix)", func() {
json := `
{
"gateway_id": "ECHOECHO",
"recipient_id": "ECHOECHO",
"api_secret": "1234"
}`
settingsJSON, _ := simplejson.NewJson([]byte(json))
model := &m.AlertNotification{
Name: "threema_testing",
Type: "threema",
Settings: settingsJSON,
}
not, err := NewThreemaNotifier(model)
So(not, ShouldBeNil)
So(err.(alerting.ValidationError).Reason, ShouldEqual, "Invalid Threema Gateway ID: Must start with a *")
})
Convey("invalid Threema Gateway IDs should be rejected (length)", func() {
json := `
{
"gateway_id": "*ECHOECHO",
"recipient_id": "ECHOECHO",
"api_secret": "1234"
}`
settingsJSON, _ := simplejson.NewJson([]byte(json))
model := &m.AlertNotification{
Name: "threema_testing",
Type: "threema",
Settings: settingsJSON,
}
not, err := NewThreemaNotifier(model)
So(not, ShouldBeNil)
So(err.(alerting.ValidationError).Reason, ShouldEqual, "Invalid Threema Gateway ID: Must be 8 characters long")
})
Convey("invalid Threema Recipient IDs should be rejected (length)", func() {
json := `
{
"gateway_id": "*3MAGWID",
"recipient_id": "ECHOECH",
"api_secret": "1234"
}`
settingsJSON, _ := simplejson.NewJson([]byte(json))
model := &m.AlertNotification{
Name: "threema_testing",
Type: "threema",
Settings: settingsJSON,
}
not, err := NewThreemaNotifier(model)
So(not, ShouldBeNil)
So(err.(alerting.ValidationError).Reason, ShouldEqual, "Invalid Threema Recipient ID: Must be 8 characters long")
})
})
})
}
| pkg/services/alerting/notifiers/threema_test.go | 0 | https://github.com/grafana/grafana/commit/34da0711abf93fa54376a21452660d2a9f4545df | [
0.00017145232413895428,
0.00016948790289461613,
0.00016675624647177756,
0.0001696692779660225,
0.0000012080747637810418
] |
{
"id": 10,
"code_window": [
"\n",
"Example of group by and order by with $__timeGroup:\n",
"SELECT\n",
" min(date_time_col) AS time_sec,\n",
" sum(value_double) as value\n",
"FROM yourtable\n"
],
"labels": [
"keep",
"keep",
"keep",
"replace",
"replace",
"keep"
],
"after_edit": [
" $__timeGroup(date_time_col, '1h') AS time,\n",
" sum(value) as value\n"
],
"file_path": "public/app/plugins/datasource/postgres/partials/query.editor.html",
"type": "replace",
"edit_start_line_idx": 58
} | package models
import (
"errors"
"time"
)
// Typed errors
var (
ErrTempUserNotFound = errors.New("User not found")
)
type TempUserStatus string
const (
TmpUserSignUpStarted TempUserStatus = "SignUpStarted"
TmpUserInvitePending TempUserStatus = "InvitePending"
TmpUserCompleted TempUserStatus = "Completed"
TmpUserRevoked TempUserStatus = "Revoked"
)
// TempUser holds data for org invites and unconfirmed sign ups
type TempUser struct {
Id int64
OrgId int64
Version int
Email string
Name string
Role RoleType
InvitedByUserId int64
Status TempUserStatus
EmailSent bool
EmailSentOn time.Time
Code string
RemoteAddr string
Created time.Time
Updated time.Time
}
// ---------------------
// COMMANDS
type CreateTempUserCommand struct {
Email string
Name string
OrgId int64
InvitedByUserId int64
Status TempUserStatus
Code string
Role RoleType
RemoteAddr string
Result *TempUser
}
type UpdateTempUserStatusCommand struct {
Code string
Status TempUserStatus
}
type UpdateTempUserWithEmailSentCommand struct {
Code string
}
type GetTempUsersQuery struct {
OrgId int64
Email string
Status TempUserStatus
Result []*TempUserDTO
}
type GetTempUserByCodeQuery struct {
Code string
Result *TempUserDTO
}
type TempUserDTO struct {
Id int64 `json:"id"`
OrgId int64 `json:"orgId"`
Name string `json:"name"`
Email string `json:"email"`
Role RoleType `json:"role"`
InvitedByLogin string `json:"invitedByLogin"`
InvitedByEmail string `json:"invitedByEmail"`
InvitedByName string `json:"invitedByName"`
Code string `json:"code"`
Status TempUserStatus `json:"status"`
Url string `json:"url"`
EmailSent bool `json:"emailSent"`
EmailSentOn time.Time `json:"emailSentOn"`
Created time.Time `json:"createdOn"`
}
| pkg/models/temp_user.go | 0 | https://github.com/grafana/grafana/commit/34da0711abf93fa54376a21452660d2a9f4545df | [
0.00026180114946328104,
0.00017851483426056802,
0.00016637842054478824,
0.00016857226728461683,
0.000027985139240627177
] |
{
"id": 10,
"code_window": [
"\n",
"Example of group by and order by with $__timeGroup:\n",
"SELECT\n",
" min(date_time_col) AS time_sec,\n",
" sum(value_double) as value\n",
"FROM yourtable\n"
],
"labels": [
"keep",
"keep",
"keep",
"replace",
"replace",
"keep"
],
"after_edit": [
" $__timeGroup(date_time_col, '1h') AS time,\n",
" sum(value) as value\n"
],
"file_path": "public/app/plugins/datasource/postgres/partials/query.editor.html",
"type": "replace",
"edit_start_line_idx": 58
} | ## Version 1.3 (2016-12-01)
Changes:
- Go 1.1 is no longer supported
- Use decimals fields in MySQL to format time types (#249)
- Buffer optimizations (#269)
- TLS ServerName defaults to the host (#283)
- Refactoring (#400, #410, #437)
- Adjusted documentation for second generation CloudSQL (#485)
- Documented DSN system var quoting rules (#502)
- Made statement.Close() calls idempotent to avoid errors in Go 1.6+ (#512)
New Features:
- Enable microsecond resolution on TIME, DATETIME and TIMESTAMP (#249)
- Support for returning table alias on Columns() (#289, #359, #382)
- Placeholder interpolation, can be actived with the DSN parameter `interpolateParams=true` (#309, #318, #490)
- Support for uint64 parameters with high bit set (#332, #345)
- Cleartext authentication plugin support (#327)
- Exported ParseDSN function and the Config struct (#403, #419, #429)
- Read / Write timeouts (#401)
- Support for JSON field type (#414)
- Support for multi-statements and multi-results (#411, #431)
- DSN parameter to set the driver-side max_allowed_packet value manually (#489)
- Native password authentication plugin support (#494, #524)
Bugfixes:
- Fixed handling of queries without columns and rows (#255)
- Fixed a panic when SetKeepAlive() failed (#298)
- Handle ERR packets while reading rows (#321)
- Fixed reading NULL length-encoded integers in MySQL 5.6+ (#349)
- Fixed absolute paths support in LOAD LOCAL DATA INFILE (#356)
- Actually zero out bytes in handshake response (#378)
- Fixed race condition in registering LOAD DATA INFILE handler (#383)
- Fixed tests with MySQL 5.7.9+ (#380)
- QueryUnescape TLS config names (#397)
- Fixed "broken pipe" error by writing to closed socket (#390)
- Fixed LOAD LOCAL DATA INFILE buffering (#424)
- Fixed parsing of floats into float64 when placeholders are used (#434)
- Fixed DSN tests with Go 1.7+ (#459)
- Handle ERR packets while waiting for EOF (#473)
- Invalidate connection on error while discarding additional results (#513)
- Allow terminating packets of length 0 (#516)
## Version 1.2 (2014-06-03)
Changes:
- We switched back to a "rolling release". `go get` installs the current master branch again
- Version v1 of the driver will not be maintained anymore. Go 1.0 is no longer supported by this driver
- Exported errors to allow easy checking from application code
- Enabled TCP Keepalives on TCP connections
- Optimized INFILE handling (better buffer size calculation, lazy init, ...)
- The DSN parser also checks for a missing separating slash
- Faster binary date / datetime to string formatting
- Also exported the MySQLWarning type
- mysqlConn.Close returns the first error encountered instead of ignoring all errors
- writePacket() automatically writes the packet size to the header
- readPacket() uses an iterative approach instead of the recursive approach to merge splitted packets
New Features:
- `RegisterDial` allows the usage of a custom dial function to establish the network connection
- Setting the connection collation is possible with the `collation` DSN parameter. This parameter should be preferred over the `charset` parameter
- Logging of critical errors is configurable with `SetLogger`
- Google CloudSQL support
Bugfixes:
- Allow more than 32 parameters in prepared statements
- Various old_password fixes
- Fixed TestConcurrent test to pass Go's race detection
- Fixed appendLengthEncodedInteger for large numbers
- Renamed readLengthEnodedString to readLengthEncodedString and skipLengthEnodedString to skipLengthEncodedString (fixed typo)
## Version 1.1 (2013-11-02)
Changes:
- Go-MySQL-Driver now requires Go 1.1
- Connections now use the collation `utf8_general_ci` by default. Adding `&charset=UTF8` to the DSN should not be necessary anymore
- Made closing rows and connections error tolerant. This allows for example deferring rows.Close() without checking for errors
- `[]byte(nil)` is now treated as a NULL value. Before, it was treated like an empty string / `[]byte("")`
- DSN parameter values must now be url.QueryEscape'ed. This allows text values to contain special characters, such as '&'.
- Use the IO buffer also for writing. This results in zero allocations (by the driver) for most queries
- Optimized the buffer for reading
- stmt.Query now caches column metadata
- New Logo
- Changed the copyright header to include all contributors
- Improved the LOAD INFILE documentation
- The driver struct is now exported to make the driver directly accessible
- Refactored the driver tests
- Added more benchmarks and moved all to a separate file
- Other small refactoring
New Features:
- Added *old_passwords* support: Required in some cases, but must be enabled by adding `allowOldPasswords=true` to the DSN since it is insecure
- Added a `clientFoundRows` parameter: Return the number of matching rows instead of the number of rows changed on UPDATEs
- Added TLS/SSL support: Use a TLS/SSL encrypted connection to the server. Custom TLS configs can be registered and used
Bugfixes:
- Fixed MySQL 4.1 support: MySQL 4.1 sends packets with lengths which differ from the specification
- Convert to DB timezone when inserting `time.Time`
- Splitted packets (more than 16MB) are now merged correctly
- Fixed false positive `io.EOF` errors when the data was fully read
- Avoid panics on reuse of closed connections
- Fixed empty string producing false nil values
- Fixed sign byte for positive TIME fields
## Version 1.0 (2013-05-14)
Initial Release
| vendor/github.com/go-sql-driver/mysql/CHANGELOG.md | 0 | https://github.com/grafana/grafana/commit/34da0711abf93fa54376a21452660d2a9f4545df | [
0.00022319173149298877,
0.00017438844952266663,
0.00016314891399815679,
0.00016708322800695896,
0.000017578682673047297
] |
{
"id": 11,
"code_window": [
"FROM yourtable\n",
"group by $__timeGroup(date_time_col, '1h')\n",
"order by $__timeGroup(date_time_col, '1h') ASC\n",
"\n",
"Or build your own conditionals using these macros which just return the values:\n",
"- $__timeFrom() -> to_timestamp(1492750877)\n"
],
"labels": [
"keep",
"replace",
"replace",
"keep",
"keep",
"keep"
],
"after_edit": [
"GROUP BY time\n",
"ORDER BY time\n"
],
"file_path": "public/app/plugins/datasource/postgres/partials/query.editor.html",
"type": "replace",
"edit_start_line_idx": 61
} | package postgres
import (
"fmt"
"regexp"
"strings"
"github.com/grafana/grafana/pkg/tsdb"
)
//const rsString = `(?:"([^"]*)")`;
const rsIdentifier = `([_a-zA-Z0-9]+)`
const sExpr = `\$` + rsIdentifier + `\(([^\)]*)\)`
type PostgresMacroEngine struct {
TimeRange *tsdb.TimeRange
}
func NewPostgresMacroEngine() tsdb.SqlMacroEngine {
return &PostgresMacroEngine{}
}
func (m *PostgresMacroEngine) Interpolate(timeRange *tsdb.TimeRange, sql string) (string, error) {
m.TimeRange = timeRange
rExp, _ := regexp.Compile(sExpr)
var macroError error
sql = replaceAllStringSubmatchFunc(rExp, sql, func(groups []string) string {
res, err := m.evaluateMacro(groups[1], strings.Split(groups[2], ","))
if err != nil && macroError == nil {
macroError = err
return "macro_error()"
}
return res
})
if macroError != nil {
return "", macroError
}
return sql, nil
}
func replaceAllStringSubmatchFunc(re *regexp.Regexp, str string, repl func([]string) string) string {
result := ""
lastIndex := 0
for _, v := range re.FindAllSubmatchIndex([]byte(str), -1) {
groups := []string{}
for i := 0; i < len(v); i += 2 {
groups = append(groups, str[v[i]:v[i+1]])
}
result += str[lastIndex:v[0]] + repl(groups)
lastIndex = v[1]
}
return result + str[lastIndex:]
}
func (m *PostgresMacroEngine) evaluateMacro(name string, args []string) (string, error) {
switch name {
case "__time":
if len(args) == 0 {
return "", fmt.Errorf("missing time column argument for macro %v", name)
}
return fmt.Sprintf("%s AS \"time\"", args[0]), nil
case "__timeEpoch":
if len(args) == 0 {
return "", fmt.Errorf("missing time column argument for macro %v", name)
}
return fmt.Sprintf("extract(epoch from %s) as \"time\"", args[0]), nil
case "__timeFilter":
if len(args) == 0 {
return "", fmt.Errorf("missing time column argument for macro %v", name)
}
return fmt.Sprintf("extract(epoch from %s) BETWEEN %d AND %d", args[0], uint64(m.TimeRange.GetFromAsMsEpoch()/1000), uint64(m.TimeRange.GetToAsMsEpoch()/1000)), nil
case "__timeFrom":
return fmt.Sprintf("to_timestamp(%d)", uint64(m.TimeRange.GetFromAsMsEpoch()/1000)), nil
case "__timeTo":
return fmt.Sprintf("to_timestamp(%d)", uint64(m.TimeRange.GetToAsMsEpoch()/1000)), nil
case "__timeGroup":
if len(args) < 2 {
return "", fmt.Errorf("macro %v needs time column and interval", name)
}
return fmt.Sprintf("(extract(epoch from \"%s\")/extract(epoch from %s::interval))::int*extract(epoch from %s::interval)", args[0], args[1], args[1]), nil
case "__unixEpochFilter":
if len(args) == 0 {
return "", fmt.Errorf("missing time column argument for macro %v", name)
}
return fmt.Sprintf("%s >= %d AND %s <= %d", args[0], uint64(m.TimeRange.GetFromAsMsEpoch()/1000), args[0], uint64(m.TimeRange.GetToAsMsEpoch()/1000)), nil
case "__unixEpochFrom":
return fmt.Sprintf("%d", uint64(m.TimeRange.GetFromAsMsEpoch()/1000)), nil
case "__unixEpochTo":
return fmt.Sprintf("%d", uint64(m.TimeRange.GetToAsMsEpoch()/1000)), nil
default:
return "", fmt.Errorf("Unknown macro %v", name)
}
}
| pkg/tsdb/postgres/macros.go | 1 | https://github.com/grafana/grafana/commit/34da0711abf93fa54376a21452660d2a9f4545df | [
0.8226969242095947,
0.08366003632545471,
0.00016779682482592762,
0.0007608666783198714,
0.24635280668735504
] |
{
"id": 11,
"code_window": [
"FROM yourtable\n",
"group by $__timeGroup(date_time_col, '1h')\n",
"order by $__timeGroup(date_time_col, '1h') ASC\n",
"\n",
"Or build your own conditionals using these macros which just return the values:\n",
"- $__timeFrom() -> to_timestamp(1492750877)\n"
],
"labels": [
"keep",
"replace",
"replace",
"keep",
"keep",
"keep"
],
"after_edit": [
"GROUP BY time\n",
"ORDER BY time\n"
],
"file_path": "public/app/plugins/datasource/postgres/partials/query.editor.html",
"type": "replace",
"edit_start_line_idx": 61
} | length(strings[0]) | vendor/github.com/jmespath/go-jmespath/fuzz/corpus/expr-112 | 0 | https://github.com/grafana/grafana/commit/34da0711abf93fa54376a21452660d2a9f4545df | [
0.00016557538765482605,
0.00016557538765482605,
0.00016557538765482605,
0.00016557538765482605,
0
] |
{
"id": 11,
"code_window": [
"FROM yourtable\n",
"group by $__timeGroup(date_time_col, '1h')\n",
"order by $__timeGroup(date_time_col, '1h') ASC\n",
"\n",
"Or build your own conditionals using these macros which just return the values:\n",
"- $__timeFrom() -> to_timestamp(1492750877)\n"
],
"labels": [
"keep",
"replace",
"replace",
"keep",
"keep",
"keep"
],
"after_edit": [
"GROUP BY time\n",
"ORDER BY time\n"
],
"file_path": "public/app/plugins/datasource/postgres/partials/query.editor.html",
"type": "replace",
"edit_start_line_idx": 61
} | [default]
region = default_region
[partial_creds]
aws_access_key_id = AKID
[profile alt_profile_name]
region = alt_profile_name_region
[creds_from_credentials]
aws_access_key_id = creds_from_config_akid
aws_secret_access_key = creds_from_config_secret
[config_file_load_order]
region = shared_config_other_region
aws_access_key_id = shared_config_other_akid
aws_secret_access_key = shared_config_other_secret
| vendor/github.com/aws/aws-sdk-go/aws/session/testdata/shared_config_other | 0 | https://github.com/grafana/grafana/commit/34da0711abf93fa54376a21452660d2a9f4545df | [
0.00017140181444119662,
0.000168431390193291,
0.0001654609659453854,
0.000168431390193291,
0.000002970424247905612
] |
{
"id": 11,
"code_window": [
"FROM yourtable\n",
"group by $__timeGroup(date_time_col, '1h')\n",
"order by $__timeGroup(date_time_col, '1h') ASC\n",
"\n",
"Or build your own conditionals using these macros which just return the values:\n",
"- $__timeFrom() -> to_timestamp(1492750877)\n"
],
"labels": [
"keep",
"replace",
"replace",
"keep",
"keep",
"keep"
],
"after_edit": [
"GROUP BY time\n",
"ORDER BY time\n"
],
"file_path": "public/app/plugins/datasource/postgres/partials/query.editor.html",
"type": "replace",
"edit_start_line_idx": 61
} | foo.{"bar": bar, "baz": baz} | vendor/github.com/jmespath/go-jmespath/fuzz/corpus/expr-423 | 0 | https://github.com/grafana/grafana/commit/34da0711abf93fa54376a21452660d2a9f4545df | [
0.00017083548300433904,
0.00017083548300433904,
0.00017083548300433904,
0.00017083548300433904,
0
] |
{
"id": 0,
"code_window": [
" return <h1>About</h1>\n",
" }\n",
"})\n",
"\n",
"export default About\n"
],
"labels": [
"keep",
"keep",
"keep",
"keep",
"replace"
],
"after_edit": [
"module.exports = About"
],
"file_path": "examples/auth-with-shared-root/components/About.js",
"type": "replace",
"edit_start_line_idx": 8
} | import React from 'react'
import { Link } from 'react-router'
import auth from '../utils/auth'
const App = React.createClass({
getInitialState() {
return {
loggedIn: auth.loggedIn()
}
},
updateAuth(loggedIn) {
this.setState({
loggedIn: !!loggedIn
})
},
componentWillMount() {
auth.onChange = this.updateAuth
auth.login()
},
render() {
return (
<div>
<ul>
<li>
{this.state.loggedIn ? (
<Link to="/logout">Log out</Link>
) : (
<Link to="/login">Sign in</Link>
)}
</li>
<li><Link to="/about">About</Link></li>
<li><Link to="/">Home</Link> (changes depending on auth status)</li>
<li><Link to="/page2">Page Two</Link> (authenticated)</li>
<li><Link to="/user/foo">User: Foo</Link> (authenticated)</li>
</ul>
{this.props.children}
</div>
)
}
})
export default App
| examples/auth-with-shared-root/components/App.js | 1 | https://github.com/remix-run/react-router/commit/46bbc84dde82c8a894479c6177e841b30ea029df | [
0.002051413059234619,
0.0005584742175415158,
0.00016491112182848155,
0.00016994756879284978,
0.0007469526026397943
] |
{
"id": 0,
"code_window": [
" return <h1>About</h1>\n",
" }\n",
"})\n",
"\n",
"export default About\n"
],
"labels": [
"keep",
"keep",
"keep",
"keep",
"replace"
],
"after_edit": [
"module.exports = About"
],
"file_path": "examples/auth-with-shared-root/components/About.js",
"type": "replace",
"edit_start_line_idx": 8
} | module.exports = {
login(email, pass, cb) {
cb = arguments[arguments.length - 1]
if (localStorage.token) {
if (cb) cb(true)
this.onChange(true)
return
}
pretendRequest(email, pass, (res) => {
if (res.authenticated) {
localStorage.token = res.token
if (cb) cb(true)
this.onChange(true)
} else {
if (cb) cb(false)
this.onChange(false)
}
})
},
getToken: function () {
return localStorage.token
},
logout: function (cb) {
delete localStorage.token
if (cb) cb()
this.onChange(false)
},
loggedIn: function () {
return !!localStorage.token
},
onChange: function () {}
}
function pretendRequest(email, pass, cb) {
setTimeout(() => {
if (email === '[email protected]' && pass === 'password1') {
cb({
authenticated: true,
token: Math.random().toString(36).substring(7)
})
} else {
cb({ authenticated: false })
}
}, 0)
}
| examples/auth-with-shared-root/utils/auth.js | 0 | https://github.com/remix-run/react-router/commit/46bbc84dde82c8a894479c6177e841b30ea029df | [
0.00017036146891769022,
0.00016948209668044,
0.00016852929547894746,
0.0001691649085842073,
7.465598059752665e-7
] |
{
"id": 0,
"code_window": [
" return <h1>About</h1>\n",
" }\n",
"})\n",
"\n",
"export default About\n"
],
"labels": [
"keep",
"keep",
"keep",
"keep",
"replace"
],
"after_edit": [
"module.exports = About"
],
"file_path": "examples/auth-with-shared-root/components/About.js",
"type": "replace",
"edit_start_line_idx": 8
} | .Image {
position: absolute;
height: 400px;
width: 400px;
}
.example-enter {
opacity: 0.01;
transition: opacity .5s ease-in;
}
.example-enter.example-enter-active {
opacity: 1;
}
.example-leave {
opacity: 1;
transition: opacity .5s ease-in;
}
.example-leave.example-leave-active {
opacity: 0;
}
.link-active {
color: #bbbbbb;
text-decoration: none;
}
| examples/animations/app.css | 0 | https://github.com/remix-run/react-router/commit/46bbc84dde82c8a894479c6177e841b30ea029df | [
0.00017523186397738755,
0.0001737862330628559,
0.00017273207777179778,
0.0001733947719912976,
0.0000010574077577985008
] |
{
"id": 0,
"code_window": [
" return <h1>About</h1>\n",
" }\n",
"})\n",
"\n",
"export default About\n"
],
"labels": [
"keep",
"keep",
"keep",
"keep",
"replace"
],
"after_edit": [
"module.exports = About"
],
"file_path": "examples/auth-with-shared-root/components/About.js",
"type": "replace",
"edit_start_line_idx": 8
} | {
"env": {
"mocha": true
},
"rules": {
"react/prop-types": 0
}
}
| modules/__tests__/.eslintrc | 0 | https://github.com/remix-run/react-router/commit/46bbc84dde82c8a894479c6177e841b30ea029df | [
0.00016987838898785412,
0.00016987838898785412,
0.00016987838898785412,
0.00016987838898785412,
0
] |
{
"id": 1,
"code_window": [
" )\n",
" }\n",
"\n",
"})\n",
"\n",
"export default App\n"
],
"labels": [
"keep",
"keep",
"keep",
"keep",
"keep",
"replace"
],
"after_edit": [
"module.exports = App"
],
"file_path": "examples/auth-with-shared-root/components/App.js",
"type": "replace",
"edit_start_line_idx": 46
} | import React from 'react'
const About = React.createClass({
render() {
return <h1>About</h1>
}
})
export default About
| examples/auth-with-shared-root/components/About.js | 1 | https://github.com/remix-run/react-router/commit/46bbc84dde82c8a894479c6177e841b30ea029df | [
0.0024646101519465446,
0.0024646101519465446,
0.0024646101519465446,
0.0024646101519465446,
0
] |
{
"id": 1,
"code_window": [
" )\n",
" }\n",
"\n",
"})\n",
"\n",
"export default App\n"
],
"labels": [
"keep",
"keep",
"keep",
"keep",
"keep",
"replace"
],
"after_edit": [
"module.exports = App"
],
"file_path": "examples/auth-with-shared-root/components/App.js",
"type": "replace",
"edit_start_line_idx": 46
} | aside {
position: absolute;
left: 0;
width: 300px;
}
main {
position: absolute;
left: 310px;
}
ul.breadcrumbs-list {
margin: 0;
padding: 0;
}
ul.breadcrumbs-list li {
display: inline-block;
margin-right: 20px;
}
ul.breadcrumbs-list li a:not(.breadcrumb-active) {
font-weight: bold;
margin-right: 20px;
}
ul.breadcrumbs-list li a.breadcrumb-active {
text-decoration: none;
cursor: default;
}
| examples/breadcrumbs/app.css | 0 | https://github.com/remix-run/react-router/commit/46bbc84dde82c8a894479c6177e841b30ea029df | [
0.0036403718404471874,
0.001078253029845655,
0.00016822938050609082,
0.0002522054419387132,
0.0014799071941524744
] |
{
"id": 1,
"code_window": [
" )\n",
" }\n",
"\n",
"})\n",
"\n",
"export default App\n"
],
"labels": [
"keep",
"keep",
"keep",
"keep",
"keep",
"replace"
],
"after_edit": [
"module.exports = App"
],
"file_path": "examples/auth-with-shared-root/components/App.js",
"type": "replace",
"edit_start_line_idx": 46
} | import expect, { spyOn } from 'expect'
import React, { Component } from 'react'
import { Simulate } from 'react-addons-test-utils'
import { render } from 'react-dom'
import execSteps from './execSteps'
import createHistory from 'history/lib/createMemoryHistory'
import Router from '../Router'
import Route from '../Route'
import Link from '../Link'
import shouldWarn from './shouldWarn'
const { click } = Simulate
describe('v1 Link', function () {
class Hello extends Component {
render() {
return <div>Hello {this.props.params.name}!</div>
}
}
let node
beforeEach(function () {
node = document.createElement('div')
})
beforeEach(function () {
shouldWarn('deprecated')
})
it('knows how to make its href', function () {
class LinkWrapper extends Component {
render() {
return <Link to="/hello/michael" query={{ the: 'query' }} hash="#the-hash">Link</Link>
}
}
render((
<Router history={createHistory('/')}>
<Route path="/" component={LinkWrapper} />
</Router>
), node, function () {
const a = node.querySelector('a')
expect(a.getAttribute('href')).toEqual('/hello/michael?the=query#the-hash')
})
})
describe('with params', function () {
class App extends Component {
render() {
return (
<div>
<Link
to="/hello/michael"
activeClassName="active"
>
Michael
</Link>
<Link
to="hello/ryan" query={{ the: 'query' }}
activeClassName="active"
>
Ryan
</Link>
</div>
)
}
}
it('is active when its params match', function (done) {
render((
<Router history={createHistory('/hello/michael')}>
<Route path="/" component={App}>
<Route path="hello/:name" component={Hello} />
</Route>
</Router>
), node, function () {
const a = node.querySelectorAll('a')[0]
expect(a.className.trim()).toEqual('active')
done()
})
})
it('is not active when its params do not match', function (done) {
render((
<Router history={createHistory('/hello/michael')}>
<Route path="/" component={App}>
<Route path="hello/:name" component={Hello} />
</Route>
</Router>
), node, function () {
const a = node.querySelectorAll('a')[1]
expect(a.className.trim()).toEqual('')
done()
})
})
it('is active when its params and query match', function (done) {
render((
<Router history={createHistory('/hello/ryan?the=query')}>
<Route path="/" component={App}>
<Route path="hello/:name" component={Hello} />
</Route>
</Router>
), node, function () {
const a = node.querySelectorAll('a')[1]
expect(a.className.trim()).toEqual('active')
done()
})
})
it('is not active when its query does not match', function (done) {
render((
<Router history={createHistory('/hello/ryan?the=other+query')}>
<Route path="/" component={App}>
<Route path="hello/:name" component={Hello} />
</Route>
</Router>
), node, function () {
const a = node.querySelectorAll('a')[1]
expect(a.className.trim()).toEqual('')
done()
})
})
})
it('transitions to the correct route with deprecated props', function (done) {
class LinkWrapper extends Component {
handleClick() {
// just here to make sure click handlers don't prevent it from happening
}
render() {
return <Link to="/hello" hash="#world" query={{ how: 'are' }} state={{ you: 'doing?' }} onClick={(e) => this.handleClick(e)}>Link</Link>
}
}
const history = createHistory('/')
const spy = spyOn(history, 'push').andCallThrough()
const steps = [
function () {
click(node.querySelector('a'), { button: 0 })
},
function () {
expect(node.innerHTML).toMatch(/Hello/)
expect(spy).toHaveBeenCalled()
const { location } = this.state
expect(location.pathname).toEqual('/hello')
expect(location.search).toEqual('?how=are')
expect(location.hash).toEqual('#world')
expect(location.state).toEqual({ you: 'doing?' })
}
]
const execNextStep = execSteps(steps, done)
render((
<Router history={history} onUpdate={execNextStep}>
<Route path="/" component={LinkWrapper} />
<Route path="/hello" component={Hello} />
</Router>
), node, execNextStep)
})
})
| modules/__tests__/_bc-Link-test.js | 0 | https://github.com/remix-run/react-router/commit/46bbc84dde82c8a894479c6177e841b30ea029df | [
0.9914135932922363,
0.23367471992969513,
0.00016929383855313063,
0.00018380366964265704,
0.4189952611923218
] |
{
"id": 1,
"code_window": [
" )\n",
" }\n",
"\n",
"})\n",
"\n",
"export default App\n"
],
"labels": [
"keep",
"keep",
"keep",
"keep",
"keep",
"replace"
],
"after_edit": [
"module.exports = App"
],
"file_path": "examples/auth-with-shared-root/components/App.js",
"type": "replace",
"edit_start_line_idx": 46
} | import React, { createClass } from 'react'
import { render } from 'react-dom'
import { Router, Route, IndexRoute, browserHistory, Link, withRouter } from 'react-router'
import withExampleBasename from '../withExampleBasename'
function App(props) {
return (
<div>
{props.children}
</div>
)
}
const Form = withRouter(
createClass({
getInitialState() {
return {
value: ''
}
},
submitAction(event) {
event.preventDefault()
this.props.router.push({
pathname: '/page',
query: {
qsparam: this.state.value
}
})
},
handleChange(event) {
this.setState({ value: event.target.value })
},
render() {
return (
<form onSubmit={this.submitAction}>
<p>Token is <em>pancakes</em></p>
<input type="text" value={this.state.value} onChange={this.handleChange} />
<button type="submit">Submit the thing</button>
<p><Link to="/page?qsparam=pancakes">Or authenticate via URL</Link></p>
<p><Link to="/page?qsparam=bacon">Or try failing to authenticate via URL</Link></p>
</form>
)
}
})
)
function Page() {
return <h1>Hey, I see you are authenticated. Welcome!</h1>
}
function ErrorPage() {
return <h1>Oh no! Your auth failed!</h1>
}
function requireCredentials(nextState, replace, next) {
const query = nextState.location.query
if (query.qsparam) {
serverAuth(query.qsparam)
.then(
() => next(),
() => {
replace('/error')
next()
}
)
} else {
replace('/error')
next()
}
}
function serverAuth(authToken) {
return new Promise((resolve, reject) => {
// That server is gonna take a while
setTimeout(() => {
      if (authToken === 'pancakes') {
resolve('authenticated')
} else {
reject('nope')
}
}, 200)
})
}
render((
<Router history={withExampleBasename(browserHistory, __dirname)}>
<Route path="/" component={App}>
<IndexRoute component={Form} />
<Route path="page" component={Page} onEnter={requireCredentials}/>
<Route path="error" component={ErrorPage}/>
</Route>
</Router>
), document.getElementById('example'))
| examples/auth-flow-async-with-query-params/app.js | 0 | https://github.com/remix-run/react-router/commit/46bbc84dde82c8a894479c6177e841b30ea029df | [
0.9366427659988403,
0.09455164521932602,
0.00016897436580620706,
0.00022002620971761644,
0.2807047963142395
] |
{
"id": 2,
"code_window": [
" }\n",
"})\n",
"\n",
"export default Dashboard\n"
],
"labels": [
"keep",
"keep",
"keep",
"replace"
],
"after_edit": [
"module.exports = Dashboard"
],
"file_path": "examples/auth-with-shared-root/components/Dashboard.js",
"type": "replace",
"edit_start_line_idx": 18
} | import React from 'react'
import { Link } from 'react-router'
import auth from '../utils/auth'
const App = React.createClass({
getInitialState() {
return {
loggedIn: auth.loggedIn()
}
},
updateAuth(loggedIn) {
this.setState({
loggedIn: !!loggedIn
})
},
componentWillMount() {
auth.onChange = this.updateAuth
auth.login()
},
render() {
return (
<div>
<ul>
<li>
{this.state.loggedIn ? (
<Link to="/logout">Log out</Link>
) : (
<Link to="/login">Sign in</Link>
)}
</li>
<li><Link to="/about">About</Link></li>
<li><Link to="/">Home</Link> (changes depending on auth status)</li>
<li><Link to="/page2">Page Two</Link> (authenticated)</li>
<li><Link to="/user/foo">User: Foo</Link> (authenticated)</li>
</ul>
{this.props.children}
</div>
)
}
})
export default App
| examples/auth-with-shared-root/components/App.js | 1 | https://github.com/remix-run/react-router/commit/46bbc84dde82c8a894479c6177e841b30ea029df | [
0.0006937825819477439,
0.00027309771394357085,
0.00016639380191918463,
0.00016834892448969185,
0.00021034502424299717
] |
{
"id": 2,
"code_window": [
" }\n",
"})\n",
"\n",
"export default Dashboard\n"
],
"labels": [
"keep",
"keep",
"keep",
"replace"
],
"after_edit": [
"module.exports = Dashboard"
],
"file_path": "examples/auth-with-shared-root/components/Dashboard.js",
"type": "replace",
"edit_start_line_idx": 18
} | import expect, { spyOn } from 'expect'
import React, { Component } from 'react'
import { Simulate } from 'react-addons-test-utils'
import { render } from 'react-dom'
import createHistory from '../createMemoryHistory'
import execSteps from './execSteps'
import Router from '../Router'
import Route from '../Route'
import Link from '../Link'
const { click } = Simulate
describe('A <Link>', function () {
class Hello extends Component {
render() {
return <div>Hello {this.props.params.name}!</div>
}
}
class Goodbye extends Component {
render() {
return <div>Goodbye</div>
}
}
let node
beforeEach(function () {
node = document.createElement('div')
})
it('should not render unnecessary class=""', function () {
render((
<Link to="/something" />
), node, function () {
const a = node.querySelector('a')
expect(a.hasAttribute('class')).toBe(false)
})
})
it('knows how to make its href', function () {
class LinkWrapper extends Component {
render() {
return (
<Link to={{
pathname: '/hello/michael',
query: { the: 'query' },
hash: '#the-hash'
}}>
Link
</Link>
)
}
}
render((
<Router history={createHistory('/')}>
<Route path="/" component={LinkWrapper} />
</Router>
), node, function () {
const a = node.querySelector('a')
expect(a.getAttribute('href')).toEqual('/hello/michael?the=query#the-hash')
})
})
// This test needs to be in its own file with beforeEach(resetHash).
//
//it('knows how to make its href with HashHistory', function () {
// class LinkWrapper extends Component {
// render() {
// return <Link to="/hello/michael" query={{the: 'query'}}>Link</Link>
// }
// }
// render((
// <Router history={new HashHistory}>
// <Route path="/" component={LinkWrapper} />
// </Router>
// ), node, function () {
// const a = node.querySelector('a')
// expect(a.getAttribute('href')).toEqual('#/hello/michael?the=query')
// })
//})
describe('with params', function () {
class App extends Component {
render() {
return (
<div>
<Link
to="/hello/michael"
activeClassName="active"
>
Michael
</Link>
<Link
to={{ pathname: '/hello/ryan', query: { the: 'query' } }}
activeClassName="active"
>
Ryan
</Link>
</div>
)
}
}
it('is active when its params match', function (done) {
render((
<Router history={createHistory('/hello/michael')}>
<Route path="/" component={App}>
<Route path="hello/:name" component={Hello} />
</Route>
</Router>
), node, function () {
const a = node.querySelectorAll('a')[0]
expect(a.className.trim()).toEqual('active')
done()
})
})
it('is not active when its params do not match', function (done) {
render((
<Router history={createHistory('/hello/michael')}>
<Route path="/" component={App}>
<Route path="hello/:name" component={Hello} />
</Route>
</Router>
), node, function () {
const a = node.querySelectorAll('a')[1]
expect(a.className.trim()).toEqual('')
done()
})
})
it('is active when its params and query match', function (done) {
render((
<Router history={createHistory('/hello/ryan?the=query')}>
<Route path="/" component={App}>
<Route path="hello/:name" component={Hello} />
</Route>
</Router>
), node, function () {
const a = node.querySelectorAll('a')[1]
expect(a.className.trim()).toEqual('active')
done()
})
})
it('is not active when its query does not match', function (done) {
render((
<Router history={createHistory('/hello/ryan?the=other+query')}>
<Route path="/" component={App}>
<Route path="hello/:name" component={Hello} />
</Route>
</Router>
), node, function () {
const a = node.querySelectorAll('a')[1]
expect(a.className.trim()).toEqual('')
done()
})
})
})
describe('when its route is active and className is empty', function () {
it("it shouldn't have an active class", function (done) {
class LinkWrapper extends Component {
render() {
return (
<div>
<Link to="/hello" className="dontKillMe" activeClassName="">Link</Link>
{this.props.children}
</div>
)
}
}
const history = createHistory('/goodbye')
let a
const steps = [
function () {
a = node.querySelector('a')
expect(a.className).toEqual('dontKillMe')
history.push('/hello')
},
function () {
expect(a.className).toEqual('dontKillMe')
}
]
const execNextStep = execSteps(steps, done)
render((
<Router history={history} onUpdate={execNextStep}>
<Route path="/" component={LinkWrapper}>
<Route path="goodbye" component={Goodbye} />
<Route path="hello" component={Hello} />
</Route>
</Router>
), node, execNextStep)
})
})
describe('when its route is active', function () {
it('has its activeClassName', function (done) {
class LinkWrapper extends Component {
render() {
return (
<div>
<Link to="/hello" className="dontKillMe" activeClassName="highlight">Link</Link>
{this.props.children}
</div>
)
}
}
let a
const history = createHistory('/goodbye')
const steps = [
function () {
a = node.querySelector('a')
expect(a.className).toEqual('dontKillMe')
history.push('/hello')
},
function () {
expect(a.className).toEqual('dontKillMe highlight')
}
]
const execNextStep = execSteps(steps, done)
render((
<Router history={history} onUpdate={execNextStep}>
<Route path="/" component={LinkWrapper}>
<Route path="goodbye" component={Goodbye} />
<Route path="hello" component={Hello} />
</Route>
</Router>
), node, execNextStep)
})
it('has its activeStyle', function (done) {
class LinkWrapper extends Component {
render() {
return (
<div>
<Link to="/hello" style={{ color: 'white' }} activeStyle={{ color: 'red' }}>Link</Link>
{this.props.children}
</div>
)
}
}
let a
const history = createHistory('/goodbye')
const steps = [
function () {
a = node.querySelector('a')
expect(a.style.color).toEqual('white')
history.push('/hello')
},
function () {
expect(a.style.color).toEqual('red')
}
]
const execNextStep = execSteps(steps, done)
render((
<Router history={history} onUpdate={execNextStep}>
<Route path="/" component={LinkWrapper}>
<Route path="hello" component={Hello} />
<Route path="goodbye" component={Goodbye} />
</Route>
</Router>
), node, execNextStep)
})
})
describe('when route changes', function () {
it('changes active state', function (done) {
class LinkWrapper extends Component {
render() {
return (
<div>
<Link to="/hello" activeClassName="active">Link</Link>
{this.props.children}
</div>
)
}
}
let a
const history = createHistory('/goodbye')
const steps = [
function () {
a = node.querySelector('a')
expect(a.className).toEqual('')
history.push('/hello')
},
function () {
expect(a.className).toEqual('active')
}
]
const execNextStep = execSteps(steps, done)
render((
<Router history={history} onUpdate={execNextStep}>
<Route path="/" component={LinkWrapper}>
<Route path="goodbye" component={Goodbye} />
<Route path="hello" component={Hello} />
</Route>
</Router>
), node, execNextStep)
})
})
describe('when clicked', function () {
it('calls a user defined click handler', function (done) {
class LinkWrapper extends Component {
handleClick(event) {
event.preventDefault()
done()
}
render() {
return <Link to="/hello" onClick={e => this.handleClick(e)}>Link</Link>
}
}
render((
<Router history={createHistory('/')}>
<Route path="/" component={LinkWrapper} />
<Route path="/hello" component={Hello} />
</Router>
), node, () => {
click(node.querySelector('a'))
})
})
it('transitions to the correct route for string', function (done) {
class LinkWrapper extends Component {
render() {
return (
<Link to="/hello?the=query#hash">
Link
</Link>
)
}
}
const history = createHistory('/')
const spy = spyOn(history, 'push').andCallThrough()
const steps = [
function () {
click(node.querySelector('a'), { button: 0 })
},
function () {
expect(node.innerHTML).toMatch(/Hello/)
expect(spy).toHaveBeenCalled()
const { location } = this.state
expect(location.pathname).toEqual('/hello')
expect(location.search).toEqual('?the=query')
expect(location.hash).toEqual('#hash')
}
]
const execNextStep = execSteps(steps, done)
render((
<Router history={history} onUpdate={execNextStep}>
<Route path="/" component={LinkWrapper} />
<Route path="/hello" component={Hello} />
</Router>
), node, execNextStep)
})
it('transitions to the correct route for object', function (done) {
class LinkWrapper extends Component {
render() {
return (
<Link
to={{
pathname: '/hello',
query: { how: 'are' },
hash: '#world',
state: { you: 'doing?' }
}}
>
Link
</Link>
)
}
}
const history = createHistory('/')
const spy = spyOn(history, 'push').andCallThrough()
const steps = [
function () {
click(node.querySelector('a'), { button: 0 })
},
function () {
expect(node.innerHTML).toMatch(/Hello/)
expect(spy).toHaveBeenCalled()
const { location } = this.state
expect(location.pathname).toEqual('/hello')
expect(location.search).toEqual('?how=are')
expect(location.hash).toEqual('#world')
expect(location.state).toEqual({ you: 'doing?' })
}
]
const execNextStep = execSteps(steps, done)
render((
<Router history={history} onUpdate={execNextStep}>
<Route path="/" component={LinkWrapper} />
<Route path="/hello" component={Hello} />
</Router>
), node, execNextStep)
})
it('does not transition when onClick prevents default', function (done) {
class LinkWrapper extends Component {
render() {
return <Link to="/hello" onClick={(e) => e.preventDefault()}>Link</Link>
}
}
const history = createHistory('/')
const spy = spyOn(history, 'push').andCallThrough()
const steps = [
function () {
click(node.querySelector('a'), { button: 0 })
},
function () {
expect(node.innerHTML).toMatch(/Link/)
expect(spy).toNotHaveBeenCalled()
}
]
const execNextStep = execSteps(steps, done)
render((
<Router history={history} onUpdate={execNextStep}>
<Route path="/" component={LinkWrapper} />
<Route path="/hello" component={Hello} />
</Router>
), node, execNextStep)
})
})
})
| modules/__tests__/Link-test.js | 0 | https://github.com/remix-run/react-router/commit/46bbc84dde82c8a894479c6177e841b30ea029df | [
0.00018817759701050818,
0.00017054250929504633,
0.00016578976646997035,
0.00016829834203235805,
0.000005093188519822434
] |
{
"id": 2,
"code_window": [
" }\n",
"})\n",
"\n",
"export default Dashboard\n"
],
"labels": [
"keep",
"keep",
"keep",
"replace"
],
"after_edit": [
"module.exports = Dashboard"
],
"file_path": "examples/auth-with-shared-root/components/Dashboard.js",
"type": "replace",
"edit_start_line_idx": 18
} | <!DOCTYPE html>
<html>
<head>
<meta charset="utf-8">
<title>Authentication with query parameters</title>
<link rel="stylesheet" href="/global.css"/>
</head>
<body>
<h1 class="breadcrumbs"><a href="/">React Router Examples</a> / Async Auth with Query Parameters</h1>
<div id="example"/>
<script src="/__build__/shared.js"></script>
<script src="/__build__/auth-flow-async-with-query-params.js"></script>
</body>
</html>
| examples/auth-flow-async-with-query-params/index.html | 0 | https://github.com/remix-run/react-router/commit/46bbc84dde82c8a894479c6177e841b30ea029df | [
0.00016892507846932858,
0.0001681285648373887,
0.0001673320512054488,
0.0001681285648373887,
7.96513631939888e-7
] |
{
"id": 2,
"code_window": [
" }\n",
"})\n",
"\n",
"export default Dashboard\n"
],
"labels": [
"keep",
"keep",
"keep",
"replace"
],
"after_edit": [
"module.exports = Dashboard"
],
"file_path": "examples/auth-with-shared-root/components/Dashboard.js",
"type": "replace",
"edit_start_line_idx": 18
} | <!doctype html public "restroom">
<title>Nested Animations Example</title>
<link href="/global.css" rel="stylesheet"/>
<body>
<h1 class="breadcrumbs"><a href="/">React Router Examples</a> / Nested Animations</h1>
<div id="example"/>
<script src="/__build__/shared.js"></script>
<script src="/__build__/nested-animations.js"></script>
| examples/nested-animations/index.html | 0 | https://github.com/remix-run/react-router/commit/46bbc84dde82c8a894479c6177e841b30ea029df | [
0.00016784403123892844,
0.00016784403123892844,
0.00016784403123892844,
0.00016784403123892844,
0
] |
{
"id": 3,
"code_window": [
" }\n",
"\n",
"})\n",
"\n",
"export default Landing\n"
],
"labels": [
"keep",
"keep",
"keep",
"keep",
"replace"
],
"after_edit": [
"module.exports = Landing"
],
"file_path": "examples/auth-with-shared-root/components/Landing.js",
"type": "replace",
"edit_start_line_idx": 16
} | import React from 'react'
const Landing = React.createClass({
render() {
return (
<div>
<h1>Landing Page</h1>
<p>This page is only shown to unauthenticated users.</p>
<p>Partial / Lazy loading. Open the network tab while you navigate. Notice that only the required components are downloaded as you navigate around.</p>
</div>
)
}
})
export default Landing
| examples/auth-with-shared-root/components/Landing.js | 1 | https://github.com/remix-run/react-router/commit/46bbc84dde82c8a894479c6177e841b30ea029df | [
0.4603992998600006,
0.23391534388065338,
0.007431389763951302,
0.23391534388065338,
0.22648395597934723
] |
{
"id": 3,
"code_window": [
" }\n",
"\n",
"})\n",
"\n",
"export default Landing\n"
],
"labels": [
"keep",
"keep",
"keep",
"keep",
"replace"
],
"after_edit": [
"module.exports = Landing"
],
"file_path": "examples/auth-with-shared-root/components/Landing.js",
"type": "replace",
"edit_start_line_idx": 16
} | import expect from 'expect'
import React, { cloneElement } from 'react'
import { render } from 'react-dom'
import Router from '../Router'
import Route from '../Route'
import createMemoryHistory from '../createMemoryHistory'
import applyMiddleware from '../applyRouterMiddleware'
import shouldWarn from './shouldWarn'
const FOO_ROOT_CONTAINER_TEXT = 'FOO ROOT CONTAINER'
const BAR_ROOT_CONTAINER_TEXT = 'BAR ROOT CONTAINER'
const BAZ_CONTAINER_TEXT = 'BAZ INJECTED'
const FooRootContainer = React.createClass({
propTypes: { children: React.PropTypes.node.isRequired },
childContextTypes: { foo: React.PropTypes.string },
getChildContext() { return { foo: FOO_ROOT_CONTAINER_TEXT } },
render() {
return this.props.children
}
})
const FooContainer = React.createClass({
propTypes: { children: React.PropTypes.node.isRequired },
contextTypes: { foo: React.PropTypes.string.isRequired },
render() {
const { children, ...props } = this.props
const fooFromContext = this.context.foo
return cloneElement(children, { ...props, fooFromContext })
}
})
const useFoo = () => ({
renderRouterContext: (child) => (
<FooRootContainer>{child}</FooRootContainer>
),
renderRouteComponent: (child) => (
<FooContainer>{child}</FooContainer>
)
})
const BarRootContainer = React.createClass({
propTypes: { children: React.PropTypes.node.isRequired },
childContextTypes: { bar: React.PropTypes.string },
getChildContext() { return { bar: BAR_ROOT_CONTAINER_TEXT } },
render() {
return this.props.children
}
})
const BarContainer = React.createClass({
propTypes: { children: React.PropTypes.node.isRequired },
contextTypes: { bar: React.PropTypes.string.isRequired },
render() {
const { children, ...props } = this.props
const barFromContext = this.context.bar
return cloneElement(children, { ...props, barFromContext })
}
})
const useBar = () => ({
renderRouterContext: (child) => (
<BarRootContainer>{child}</BarRootContainer>
),
renderRouteComponent: (child) => (
<BarContainer>{child}</BarContainer>
)
})
const useBaz = (bazInjected) => ({
renderRouteComponent: (child) => (
cloneElement(child, { bazInjected })
)
})
const run = ({ renderWithMiddleware, Component }, assertion) => {
const div = document.createElement('div')
const routes = <Route path="/" component={Component}/>
render(<Router
render={renderWithMiddleware}
routes={routes}
history={createMemoryHistory('/')}
/>, div, () => assertion(div.innerHTML))
}
describe('applyMiddleware', () => {
it('applies one middleware', (done) => {
run({
renderWithMiddleware: applyMiddleware(useFoo()),
Component: (props) => <div>{props.fooFromContext}</div>
}, (html) => {
expect(html).toContain(FOO_ROOT_CONTAINER_TEXT)
done()
})
})
it('applies more than one middleware', (done) => {
run({
renderWithMiddleware: applyMiddleware(useBar(), useFoo()),
Component: (props) => <div>{props.fooFromContext} {props.barFromContext}</div>
}, (html) => {
expect(html).toContain(FOO_ROOT_CONTAINER_TEXT)
expect(html).toContain(BAR_ROOT_CONTAINER_TEXT)
done()
})
})
it('applies more middleware with only `getContainer`', (done) => {
run({
renderWithMiddleware: applyMiddleware(
useBar(),
useFoo(),
useBaz(BAZ_CONTAINER_TEXT)
),
Component: (props) => (
<div>
{props.fooFromContext}
{props.barFromContext}
{props.bazInjected}
</div>
)
}, (html) => {
expect(html).toContain(FOO_ROOT_CONTAINER_TEXT)
expect(html).toContain(BAR_ROOT_CONTAINER_TEXT)
expect(html).toContain(BAZ_CONTAINER_TEXT)
done()
})
})
it('applies middleware that only has `getContainer`', (done) => {
run({
renderWithMiddleware: applyMiddleware(
useBaz(BAZ_CONTAINER_TEXT)
),
Component: (props) => (
<div>{props.bazInjected}</div>
)
}, (html) => {
expect(html).toContain(BAZ_CONTAINER_TEXT)
done()
})
})
it('should warn on invalid middleware', () => {
shouldWarn('at index 0 does not appear to be a valid')
shouldWarn('at index 2 does not appear to be a valid')
applyMiddleware(
{},
{ renderRouterContext: () => {} },
{}
)
})
})
| modules/__tests__/applyRouterMiddleware-test.js | 0 | https://github.com/remix-run/react-router/commit/46bbc84dde82c8a894479c6177e841b30ea029df | [
0.0005436865030787885,
0.00019371396047063172,
0.00016441495972685516,
0.0001688428019406274,
0.00009048681386047974
] |
{
"id": 3,
"code_window": [
" }\n",
"\n",
"})\n",
"\n",
"export default Landing\n"
],
"labels": [
"keep",
"keep",
"keep",
"keep",
"replace"
],
"after_edit": [
"module.exports = Landing"
],
"file_path": "examples/auth-with-shared-root/components/Landing.js",
"type": "replace",
"edit_start_line_idx": 16
} | import expect, { spyOn } from 'expect'
import React, { Component } from 'react'
import { Simulate } from 'react-addons-test-utils'
import { render } from 'react-dom'
import execSteps from './execSteps'
import createHistory from 'history/lib/createMemoryHistory'
import Router from '../Router'
import Route from '../Route'
import Link from '../Link'
import shouldWarn from './shouldWarn'
const { click } = Simulate
describe('v1 Link', function () {
class Hello extends Component {
render() {
return <div>Hello {this.props.params.name}!</div>
}
}
let node
beforeEach(function () {
node = document.createElement('div')
})
beforeEach(function () {
shouldWarn('deprecated')
})
it('knows how to make its href', function () {
class LinkWrapper extends Component {
render() {
return <Link to="/hello/michael" query={{ the: 'query' }} hash="#the-hash">Link</Link>
}
}
render((
<Router history={createHistory('/')}>
<Route path="/" component={LinkWrapper} />
</Router>
), node, function () {
const a = node.querySelector('a')
expect(a.getAttribute('href')).toEqual('/hello/michael?the=query#the-hash')
})
})
describe('with params', function () {
class App extends Component {
render() {
return (
<div>
<Link
to="/hello/michael"
activeClassName="active"
>
Michael
</Link>
<Link
to="hello/ryan" query={{ the: 'query' }}
activeClassName="active"
>
Ryan
</Link>
</div>
)
}
}
it('is active when its params match', function (done) {
render((
<Router history={createHistory('/hello/michael')}>
<Route path="/" component={App}>
<Route path="hello/:name" component={Hello} />
</Route>
</Router>
), node, function () {
const a = node.querySelectorAll('a')[0]
expect(a.className.trim()).toEqual('active')
done()
})
})
it('is not active when its params do not match', function (done) {
render((
<Router history={createHistory('/hello/michael')}>
<Route path="/" component={App}>
<Route path="hello/:name" component={Hello} />
</Route>
</Router>
), node, function () {
const a = node.querySelectorAll('a')[1]
expect(a.className.trim()).toEqual('')
done()
})
})
it('is active when its params and query match', function (done) {
render((
<Router history={createHistory('/hello/ryan?the=query')}>
<Route path="/" component={App}>
<Route path="hello/:name" component={Hello} />
</Route>
</Router>
), node, function () {
const a = node.querySelectorAll('a')[1]
expect(a.className.trim()).toEqual('active')
done()
})
})
it('is not active when its query does not match', function (done) {
render((
<Router history={createHistory('/hello/ryan?the=other+query')}>
<Route path="/" component={App}>
<Route path="hello/:name" component={Hello} />
</Route>
</Router>
), node, function () {
const a = node.querySelectorAll('a')[1]
expect(a.className.trim()).toEqual('')
done()
})
})
})
it('transitions to the correct route with deprecated props', function (done) {
class LinkWrapper extends Component {
handleClick() {
// just here to make sure click handlers don't prevent it from happening
}
render() {
return <Link to="/hello" hash="#world" query={{ how: 'are' }} state={{ you: 'doing?' }} onClick={(e) => this.handleClick(e)}>Link</Link>
}
}
const history = createHistory('/')
const spy = spyOn(history, 'push').andCallThrough()
const steps = [
function () {
click(node.querySelector('a'), { button: 0 })
},
function () {
expect(node.innerHTML).toMatch(/Hello/)
expect(spy).toHaveBeenCalled()
const { location } = this.state
expect(location.pathname).toEqual('/hello')
expect(location.search).toEqual('?how=are')
expect(location.hash).toEqual('#world')
expect(location.state).toEqual({ you: 'doing?' })
}
]
const execNextStep = execSteps(steps, done)
render((
<Router history={history} onUpdate={execNextStep}>
<Route path="/" component={LinkWrapper} />
<Route path="/hello" component={Hello} />
</Router>
), node, execNextStep)
})
})
| modules/__tests__/_bc-Link-test.js | 0 | https://github.com/remix-run/react-router/commit/46bbc84dde82c8a894479c6177e841b30ea029df | [
0.00017137255053967237,
0.00016791677626315504,
0.00016420141037087888,
0.00016799214063212276,
0.0000020040004073962336
] |
{
"id": 3,
"code_window": [
" }\n",
"\n",
"})\n",
"\n",
"export default Landing\n"
],
"labels": [
"keep",
"keep",
"keep",
"keep",
"replace"
],
"after_edit": [
"module.exports = Landing"
],
"file_path": "examples/auth-with-shared-root/components/Landing.js",
"type": "replace",
"edit_start_line_idx": 16
} | const webpack = require('webpack')
module.exports = {
output: {
library: 'ReactRouter',
libraryTarget: 'umd'
},
externals: [
{
react: {
root: 'React',
commonjs2: 'react',
commonjs: 'react',
amd: 'react'
}
}
],
module: {
loaders: [
{ test: /\.js$/, exclude: /node_modules/, loader: 'babel' }
]
},
node: {
Buffer: false
},
plugins: [
new webpack.optimize.OccurenceOrderPlugin(),
new webpack.DefinePlugin({
'process.env.NODE_ENV': JSON.stringify(process.env.NODE_ENV)
})
]
}
| webpack.config.js | 0 | https://github.com/remix-run/react-router/commit/46bbc84dde82c8a894479c6177e841b30ea029df | [
0.00016998712089844048,
0.00016793725080788136,
0.00016500501078553498,
0.00016837843577377498,
0.000001822130457185267
] |
{
"id": 4,
"code_window": [
" }\n",
"\n",
"})\n",
"\n",
"export default withRouter(Login)\n"
],
"labels": [
"keep",
"keep",
"keep",
"keep",
"replace"
],
"after_edit": [
"module.exports = withRouter(Login)"
],
"file_path": "examples/auth-with-shared-root/components/Login.js",
"type": "replace",
"edit_start_line_idx": 46
} | import React from 'react'
import { withRouter } from 'react-router'
import auth from '../utils/auth.js'
const Login = React.createClass({
getInitialState() {
return {
error: false
}
},
handleSubmit(event) {
event.preventDefault()
const email = this.refs.email.value
const pass = this.refs.pass.value
auth.login(email, pass, (loggedIn) => {
if (!loggedIn)
return this.setState({ error: true })
const { location } = this.props
if (location.state && location.state.nextPathname) {
this.props.router.replace(location.state.nextPathname)
} else {
this.props.router.replace('/')
}
})
},
render() {
return (
<form onSubmit={this.handleSubmit}>
<label><input ref="email" placeholder="email" defaultValue="[email protected]" /></label>
<label><input ref="pass" placeholder="password" /></label> (hint: password1)<br />
<button type="submit">login</button>
{this.state.error && (
<p>Bad login information</p>
)}
</form>
)
}
})
export default withRouter(Login)
| examples/auth-with-shared-root/components/Login.js | 1 | https://github.com/remix-run/react-router/commit/46bbc84dde82c8a894479c6177e841b30ea029df | [
0.36962589621543884,
0.0754094123840332,
0.00038519309600815177,
0.000984284095466137,
0.14711894094944
] |
{
"id": 4,
"code_window": [
" }\n",
"\n",
"})\n",
"\n",
"export default withRouter(Login)\n"
],
"labels": [
"keep",
"keep",
"keep",
"keep",
"replace"
],
"after_edit": [
"module.exports = withRouter(Login)"
],
"file_path": "examples/auth-with-shared-root/components/Login.js",
"type": "replace",
"edit_start_line_idx": 46
} | {
"extends": ["rackt", "plugin:react/recommended"],
"globals": {
"__DEV__": false
},
"rules": {
"react/display-name": 0,
"react/wrap-multilines": 2
},
"plugins": [
"react"
]
}
| .eslintrc | 0 | https://github.com/remix-run/react-router/commit/46bbc84dde82c8a894479c6177e841b30ea029df | [
0.00017575897800270468,
0.00017121247947216034,
0.000166665980941616,
0.00017121247947216034,
0.000004546498530544341
] |
{
"id": 4,
"code_window": [
" }\n",
"\n",
"})\n",
"\n",
"export default withRouter(Login)\n"
],
"labels": [
"keep",
"keep",
"keep",
"keep",
"replace"
],
"after_edit": [
"module.exports = withRouter(Login)"
],
"file_path": "examples/auth-with-shared-root/components/Login.js",
"type": "replace",
"edit_start_line_idx": 46
} | import React from 'react'
import { render, findDOMNode } from 'react-dom'
import { browserHistory, Router, Route, IndexRoute, Link, withRouter } from 'react-router'
import withExampleBasename from '../withExampleBasename'
import ContactStore from './ContactStore'
import './app.css'
const App = React.createClass({
getInitialState() {
return {
contacts: ContactStore.getContacts(),
loading: true
}
},
componentWillMount() {
ContactStore.init()
},
componentDidMount() {
ContactStore.addChangeListener(this.updateContacts)
},
componentWillUnmount() {
ContactStore.removeChangeListener(this.updateContacts)
},
updateContacts() {
this.setState({
contacts: ContactStore.getContacts(),
loading: false
})
},
render() {
const contacts = this.state.contacts.map(function (contact) {
return <li key={contact.id}><Link to={`/contact/${contact.id}`}>{contact.first}</Link></li>
})
return (
<div className="App">
<div className="ContactList">
<Link to="/contact/new">New Contact</Link>
<ul>
{contacts}
</ul>
</div>
<div className="Content">
{this.props.children}
</div>
</div>
)
}
})
const Index = React.createClass({
render() {
return <h1>Address Book</h1>
}
})
const Contact = withRouter(
React.createClass({
getStateFromStore(props) {
const { id } = props ? props.params : this.props.params
return {
contact: ContactStore.getContact(id)
}
},
getInitialState() {
return this.getStateFromStore()
},
componentDidMount() {
ContactStore.addChangeListener(this.updateContact)
},
componentWillUnmount() {
ContactStore.removeChangeListener(this.updateContact)
},
componentWillReceiveProps(nextProps) {
this.setState(this.getStateFromStore(nextProps))
},
updateContact() {
this.setState(this.getStateFromStore())
},
destroy() {
const { id } = this.props.params
ContactStore.removeContact(id)
this.props.router.push('/')
},
render() {
const contact = this.state.contact || {}
const name = contact.first + ' ' + contact.last
const avatar = contact.avatar || 'http://placecage.com/50/50'
return (
<div className="Contact">
<img height="50" src={avatar} key={avatar} />
<h3>{name}</h3>
<button onClick={this.destroy}>Delete</button>
</div>
)
}
})
)
const NewContact = withRouter(
React.createClass({
createContact(event) {
event.preventDefault()
ContactStore.addContact({
first: findDOMNode(this.refs.first).value,
last: findDOMNode(this.refs.last).value
}, (contact) => {
this.props.router.push(`/contact/${contact.id}`)
})
},
render() {
return (
<form onSubmit={this.createContact}>
<p>
<input type="text" ref="first" placeholder="First name" />
<input type="text" ref="last" placeholder="Last name" />
</p>
<p>
<button type="submit">Save</button> <Link to="/">Cancel</Link>
</p>
</form>
)
}
})
)
const NotFound = React.createClass({
render() {
return <h2>Not found</h2>
}
})
render((
<Router history={withExampleBasename(browserHistory, __dirname)}>
<Route path="/" component={App}>
<IndexRoute component={Index} />
<Route path="contact/new" component={NewContact} />
<Route path="contact/:id" component={Contact} />
<Route path="*" component={NotFound} />
</Route>
</Router>
), document.getElementById('example'))
| examples/master-detail/app.js | 0 | https://github.com/remix-run/react-router/commit/46bbc84dde82c8a894479c6177e841b30ea029df | [
0.9985288381576538,
0.11802531778812408,
0.00016477405733894557,
0.00016788933135103434,
0.3214462399482727
] |
{
"id": 4,
"code_window": [
" }\n",
"\n",
"})\n",
"\n",
"export default withRouter(Login)\n"
],
"labels": [
"keep",
"keep",
"keep",
"keep",
"replace"
],
"after_edit": [
"module.exports = withRouter(Login)"
],
"file_path": "examples/auth-with-shared-root/components/Login.js",
"type": "replace",
"edit_start_line_idx": 46
} | # API Reference
- [Components](#components)
- [`<Router>`](#router)
- [`<Link>`](#link)
- [`<IndexLink>`](#indexlink)
- [`withRouter`](#withroutercomponent)
- [`<RouterContext>`](#routercontext)
- [`context.router`](#contextrouter)
- `<RoutingContext>` (deprecated, use `<RouterContext>`)
- [Configuration Components](#configuration-components)
- [`<Route>`](#route)
- [`PlainRoute`](#plainroute)
- [`<Redirect>`](#redirect)
- [`<IndexRoute>`](#indexroute-1)
- [`<IndexRedirect>`](#indexredirect)
- [Route Components](#route-components)
- [Injected Props](#injected-props)
- [Named Components](#named-components)
- [Histories](#histories)
- [`browserHistory`](#browserhistory)
- [`hashHistory`](#hashhistory)
- [`createMemoryHistory()`](#creatememoryhistoryoptions)
- [`useRouterHistory()`](#userouterhistorycreatehistory)
- [Utilities](#utilities)
- [`match()`](#match-routes-location-history-options--cb)
- [`createRoutes()`](#createroutesroutes)
- [`PropTypes`](#proptypes)
- [`useRoutes()`](#useroutescreatehistory-deprecated) (deprecated)
- [Mixins](#mixins-deprecated) (deprecated)
- `Lifecycle` (deprecated)
- `History` (deprecated)
- `RouteContext` (deprecated)
## Components
### `<Router>`
Primary component of React Router. It keeps your UI and the URL in sync.
#### Props
##### `children` (required)
One or many [`<Route>`](#route)s or [`PlainRoute`](#plainroute)s. When the history changes, `<Router>` will match a branch of its routes, and render their configured [components](#routecomponent), with child route components nested inside the parents.
##### `routes`
Alias for `children`.
##### `history`
The history the router should listen to. Typically `browserHistory` or `hashHistory`.
```jsx
import { browserHistory } from 'react-router'
ReactDOM.render(<Router history={browserHistory} />, el)
```
##### `createElement(Component, props)`
When the router is ready to render a branch of route components, it will use this function to create the elements. You may want to take control of creating the elements when you're using some sort of data abstraction, like setting up subscriptions to stores, or passing in some sort of application module to each component via props.
```jsx
<Router createElement={createElement} />
// default behavior
function createElement(Component, props) {
// make sure you pass all the props in!
return <Component {...props} />
}
// maybe you're using something like Relay
function createElement(Component, props) {
// make sure you pass all the props in!
return <RelayContainer Component={Component} routerProps={props} />
}
```
##### `onError(error)`
While the router is matching, errors may bubble up; this is your opportunity to catch and deal with them. Typically these will come from async features like [`route.getComponents`](#getcomponentsnextstate-callback), [`route.getIndexRoute`](#getindexroutepartialnextstate-callback), and [`route.getChildRoutes`](#getchildroutespartialnextstate-callback).
##### `onUpdate()`
Called whenever the router updates its state in response to URL changes.
##### `render(props)`
This is primarily for integrating with other libraries that need to participate in rendering before the route components are rendered. It defaults to `render={(props) => <RouterContext {...props} />}`.
Ensure that you render a `<RouterContext>` at the end of the line, passing all the props passed to `render`.
##### `stringifyQuery(queryObject)` (deprecated)
##### `parseQueryString(queryString)` (deprecated)
#### Examples
Please see the [`examples/`](/examples) directory of the repository for extensive examples of using `<Router>`.
### `<Link>`
The primary way to allow users to navigate around your application. `<Link>` will render a fully accessible anchor tag with the proper href.
A `<Link>` can know when the route it links to is active and automatically apply an `activeClassName` and/or `activeStyle` when given either prop. The `<Link>` will be active if the current route is either the linked route or any descendant of the linked route. To have the link be active only on the exact linked route, use [`<IndexLink>`](#indexlink) instead or set the `onlyActiveOnIndex` prop.
#### Props
##### `to`
A [location descriptor](https://github.com/ReactTraining/history/blob/master/docs/Glossary.md#locationdescriptor). Usually this is a string or an object, with the following semantics:
* If it's a string it represents the absolute path to link to, e.g. `/users/123` (relative paths are not supported).
* If it's an object it can have four keys:
* `pathname`: A string representing the path to link to.
* `query`: An object of key:value pairs to be stringified.
* `hash`: A hash to put in the URL, e.g. `#a-hash`.
* `state`: State to persist to the `location`.
_Note: React Router currently does not manage scroll position, and will not scroll to the element corresponding to `hash`._
##### `activeClassName`
The className a `<Link>` receives when its route is active. No active class by default.
##### `activeStyle`
The styles to apply to the link element when its route is active.
##### `onClick(e)`
A custom handler for the click event. Works just like a handler on an `<a>` tag - calling `e.preventDefault()` will prevent the transition from firing, while `e.stopPropagation()` will prevent the event from bubbling.
##### `onlyActiveOnIndex`
If `true`, the `<Link>` will only be active when the current route exactly matches the linked route.
##### *others*
You can also pass props you'd like to be on the `<a>` such as a `title`, `id`, `className`, etc.
##### `query` **([deprecated](/upgrade-guides/v2.0.0.md#link-to-onenter-and-isactive-use-location-descriptors) see `to`)**
##### `hash` **([deprecated](/upgrade-guides/v2.0.0.md#link-to-onenter-and-isactive-use-location-descriptors) see `to`)**
##### `state` **([deprecated](/upgrade-guides/v2.0.0.md#link-to-onenter-and-isactive-use-location-descriptors) see `to`)**
#### Example
Given a route like `<Route path="/users/:userId" />`:
```jsx
<Link to={`/users/${user.id}`} activeClassName="active">{user.name}</Link>
// becomes one of these depending on your History and if the route is
// active
<a href="/users/123" class="active">Michael</a>
<a href="#/users/123">Michael</a>
// change the activeClassName
<Link to={`/users/${user.id}`} activeClassName="current">{user.name}</Link>
// change style when link is active
<Link to="/users" style={{color: 'white'}} activeStyle={{color: 'red'}}>Users</Link>
```
### `<IndexLink>`
An `<IndexLink>` is like a [`<Link>`](#link), except it is only active when the current route is exactly the linked route. It is equivalent to `<Link>` with the `onlyActiveOnIndex` prop set.
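For illustration, a minimal sketch (the nav markup and paths here are made up):

```jsx
<nav>
  {/* active only when the URL is exactly "/" */}
  <IndexLink to="/" activeClassName="active">Home</IndexLink>
  {/* active at "/about" and any of its descendants */}
  <Link to="/about" activeClassName="active">About</Link>
</nav>
```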
### `withRouter(component)`
A HoC (higher-order component) that wraps another component to provide `this.props.router`. Pass in your component and it will return the wrapped component.
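For illustration, a minimal sketch of wrapping a component so it can navigate imperatively (the `NavButton` component here is made up):

```jsx
import React from 'react'
import { withRouter } from 'react-router'

const NavButton = React.createClass({
  handleClick() {
    // `router` is injected as a prop by withRouter
    this.props.router.push('/dashboard')
  },
  render() {
    return <button onClick={this.handleClick}>Dashboard</button>
  }
})

export default withRouter(NavButton)
```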
### `<RouterContext>`
A `<RouterContext>` renders the component tree for a given router state. It's used by `<Router>`, but it is also useful for server rendering and for integrating into brownfield development.
It also provides a `router` object on [context](https://facebook.github.io/react/docs/context.html).
#### `context.router`
Contains data and methods relevant to routing. Most useful for imperatively transitioning around the application.
##### `push(pathOrLoc)`
Transitions to a new URL, adding a new entry in the browser history.
```jsx
router.push('/users/12')
// or with a location descriptor object
router.push({
pathname: '/users/12',
query: { modal: true },
state: { fromDashboard: true }
})
```
##### `replace(pathOrLoc)`
Identical to `push` except replaces the current history entry with a new one.
##### `go(n)`
Go forward or backward in the history by `n` or `-n`.
##### `goBack()`
Go back one entry in the history.
##### `goForward()`
Go forward one entry in the history.
##### `setRouteLeaveHook(route, hook)`
Registers the given hook function to run before leaving the given route.
During a normal transition, the hook function receives the next location as its only argument and can return either a prompt message (string) to show the user, to make sure they want to leave the page; or `false`, to prevent the transition. Any other return value will have no effect.
During the beforeunload event (in browsers) the hook receives no arguments.
In this case it must return a prompt message to prevent the transition.
Returns a function that may be used to unbind the listener.
You don't need to manually tear down the route leave hook in most cases. We automatically remove all attached route leave hooks after leaving the associated route.
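A minimal sketch, assuming a route component that tracks unsaved changes in its state (`unsavedChanges` is an illustrative name):

```jsx
componentDidMount() {
  // `this.props.route` is injected into route components;
  // the return value can be used to unregister the hook early
  this.unregisterLeaveHook = this.context.router.setRouteLeaveHook(
    this.props.route,
    (nextLocation) => {
      if (this.state.unsavedChanges)
        return 'You have unsaved changes. Are you sure you want to leave?'
    }
  )
}
```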
##### `createPath(pathOrLoc, query)`
Stringifies the query into the pathname, using the router's config.
##### `createHref(pathOrLoc, query)`
Creates a URL, using the router's config. For example, it will add `#/` in front of the `pathname` for hash history.
##### `isActive(pathOrLoc, indexOnly)`
Returns `true` or `false` depending on whether the `pathOrLoc` is active. It will be true for every route in the matched route branch (a child route being active makes its parents active too), unless `indexOnly` is specified, in which case it will only match the exact path.
A route is only considered active if all the URL parameters match, including optional parameters and their presence or absence.
However, only explicitly specified query parameters will be checked. That means that `isActive({ pathname: '/foo', query: { a: 'b' } })` will return `true` when the location is `/foo?a=b&c=d`. To require that a query parameter be absent, specify its value as an explicit `undefined`, e.g. `isActive({ pathname: '/foo', query: { a: 'b', c: undefined } })`, which would be `false` in this example.
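For example, with the current location at `/foo?a=b&c=d` (an illustrative URL):

```jsx
router.isActive({ pathname: '/foo', query: { a: 'b' } })               // true
router.isActive({ pathname: '/foo', query: { a: 'b', c: undefined } }) // false
```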
## Configuration Components
### `<Route>`
A `<Route>` is used to declaratively map routes to your application's
component hierarchy.
#### Props
##### `path`
The path used in the URL.
It will concat with the parent route's path unless it starts with `/`,
making it an absolute path.
**Note**: Absolute paths may not be used in route config that is [dynamically loaded](/docs/guides/DynamicRouting.md).
If left undefined, the router will try to match the child routes.
##### `component`
A single component to be rendered when the route matches the URL. It can
be rendered by the parent route component with `this.props.children`.
```jsx
const routes = (
<Route path="/" component={App}>
<Route path="groups" component={Groups} />
<Route path="users" component={Users} />
</Route>
)
class App extends React.Component {
render () {
return (
<div>
{/* this will be either <Users> or <Groups> */}
{this.props.children}
</div>
)
}
}
```
##### `components`
Routes can define one or more named components as an object of `[name]: component` pairs to be rendered when the path matches the URL. They can be rendered by the parent route component with `this.props[name]`.
```jsx
// Think of it outside the context of the router - if you had pluggable
// portions of your `render`, you might do it like this:
// <App main={<Users />} sidebar={<UsersSidebar />} />
const routes = (
<Route path="/" component={App}>
<Route path="groups" components={{main: Groups, sidebar: GroupsSidebar}} />
<Route path="users" components={{main: Users, sidebar: UsersSidebar}}>
<Route path=":userId" component={Profile} />
</Route>
</Route>
)
class App extends React.Component {
render () {
const { main, sidebar } = this.props
return (
<div>
<div className="Main">
{main}
</div>
<div className="Sidebar">
{sidebar}
</div>
</div>
)
}
}
class Users extends React.Component {
render () {
return (
<div>
{/* if at "/users/123" `children` will be <Profile> */}
{/* UsersSidebar will also get <Profile> as this.props.children,
so its a little weird, but you can decide which one wants
to continue with the nesting */}
{this.props.children}
</div>
)
}
}
```
##### `getComponent(nextState, callback)`
Same as `component` but asynchronous, useful for code-splitting.
###### `callback` signature
`cb(err, component)`
```jsx
<Route path="courses/:courseId" getComponent={(nextState, cb) => {
// do asynchronous stuff to find the components
cb(null, Course)
}} />
```
##### `getComponents(nextState, callback)`
Same as `components` but asynchronous, useful for
code-splitting.
###### `callback` signature
`cb(err, components)`
```jsx
<Route path="courses/:courseId" getComponents={(nextState, cb) => {
// do asynchronous stuff to find the components
cb(null, {sidebar: CourseSidebar, content: Course})
}} />
```
##### `children`
Routes can be nested, `this.props.children` will contain the element created from the child route component. Please refer to the [Route Configuration](/docs/guides/RouteConfiguration.md) since this is a very critical part of the router's design.
##### `onEnter(nextState, replace, callback?)`
Called when a route is about to be entered. It provides the next router state and a function to redirect to another path. `this` will be the route instance that triggered the hook.
If `callback` is listed as a 3rd argument, this hook will run asynchronously, and the transition will block until `callback` is called.
###### `callback` signature
`cb(err)`
```jsx
const userIsInATeam = (nextState, replace, callback) => {
fetch(...)
    .then(response => response.json())
.then(userTeams => {
if (userTeams.length === 0) {
replace(`/users/${nextState.params.userId}/teams/new`)
}
callback();
})
.catch(error => {
// do some error handling here
callback(error);
})
}
<Route path="/users/:userId/teams" onEnter={userIsInATeam} />
```
##### `onChange(prevState, nextState, replace, callback?)`
Called on routes when the location changes, but the route itself neither enters nor leaves. For example, this will be called when a route's children change, or when the location query changes. It provides the previous router state, the next router state, and a function to redirect to another path. `this` will be the route instance that triggered the hook.
If `callback` is listed as a 4th argument, this hook will run asynchronously, and the transition will block until `callback` is called.
##### `onLeave(prevState)`
Called when a route is about to be exited.
### `PlainRoute`
A plain JavaScript object route definition. `<Router>` turns JSX `<Route>`s into these objects, but you can use them directly if you prefer. All of the props are the same as `<Route>` props, except those listed here.
#### Props
##### `childRoutes`
An array of child routes, same as `children` in JSX route configs.
##### `getChildRoutes(partialNextState, callback)`
Same as `childRoutes` but asynchronous and receives `partialNextState`. Useful for code-splitting and dynamic route matching (given some state or session data to return a different set of child routes).
###### `callback` signature
`cb(err, routesArray)`
```jsx
let myRoute = {
path: 'course/:courseId',
childRoutes: [
announcementsRoute,
gradesRoute,
assignmentsRoute
]
}
// async child routes
let myRoute = {
path: 'course/:courseId',
getChildRoutes(location, cb) {
// do asynchronous stuff to find the child routes
cb(null, [ announcementsRoute, gradesRoute, assignmentsRoute ])
}
}
// navigation dependent child routes
// can link with some state
<Link to="/picture/123" state={{ fromDashboard: true }} />
let myRoute = {
path: 'picture/:id',
getChildRoutes(partialNextState, cb) {
let { state } = partialNextState
if (state && state.fromDashboard) {
cb(null, [dashboardPictureRoute])
} else {
cb(null, [pictureRoute])
}
}
}
```
##### `indexRoute`
The [index route](/docs/guides/IndexRoutes.md). This is the same as specifying an `<IndexRoute>` child when using JSX route configs.
##### `getIndexRoute(partialNextState, callback)`
Same as `indexRoute`, but asynchronous and receives `partialNextState`. As with `getChildRoutes`, this can be useful for code-splitting and dynamic route matching.
###### `callback` signature
`cb(err, route)`
```jsx
// For example:
let myIndexRoute = {
component: MyIndex
}
let myRoute = {
path: 'courses',
indexRoute: myIndexRoute
}
// async index route
let myRoute = {
path: 'courses',
getIndexRoute(partialNextState, cb) {
// do something async here
cb(null, myIndexRoute)
}
}
```
### `<Redirect>`
A `<Redirect>` sets up a redirect to another route in your application to maintain old URLs.
#### Props
##### `from`
The path you want to redirect from, including dynamic segments.
##### `to`
The path you want to redirect to.
##### `query`
By default, the query parameters will just pass through but you can specify them if you need to.
```jsx
// Say we want to change from `/profile/123` to `/about/123`
// and redirect `/get-in-touch` to `/contact`
<Route component={App}>
<Route path="about/:userId" component={UserProfile} />
{/* /profile/123 -> /about/123 */}
<Redirect from="profile/:userId" to="about/:userId" />
</Route>
```
Note that the `<Redirect>` can be placed anywhere in the route hierarchy, though [normal precedence](/docs/guides/RouteMatching.md#precedence) rules apply. If you'd prefer the redirects to be next to their respective routes, the `from` path will match the same as a regular route `path`.
```jsx
<Route path="course/:courseId">
<Route path="dashboard" />
{/* /course/123/home -> /course/123/dashboard */}
<Redirect from="home" to="dashboard" />
</Route>
```
### `<IndexRoute>`
An `<IndexRoute>` allows you to provide a default "child" to a parent route when the visitor is at the URL of the parent.
Please see the [Index Routes guide](/docs/guides/IndexRoutes.md).
#### Props
All the same props as [Route](#route) except for `path`.
### `<IndexRedirect>`
An `<IndexRedirect>` allows you to redirect from the URL of a parent route to another route. It can be used to allow a child route to serve as the default route for its parent, while still keeping a distinct URL.
Please see the [Index Routes guide](/docs/guides/IndexRoutes.md).
#### Props
All the same props as [Redirect](#redirect) except for `from`.
## Route Components
A route's component is rendered when that route matches the URL. The router will inject the following properties into your component when it's rendered:
### Injected Props
#### `location`
The current [location](https://github.com/reactjs/history/blob/master/docs/Location.md).
#### `params`
The dynamic segments of the URL.
#### `route`
The route that rendered this component.
#### `routeParams`
A subset of `this.props.params` that were directly specified in this component's route. For example, if the route's path is `users/:userId` and the URL is `/users/123/portfolios/345` then `this.props.routeParams` will be `{userId: '123'}`, and `this.props.params` will be `{userId: '123', portfolioId: 345}`.
#### `children`
The matched child route element to be rendered. If the route has [named components](/docs/API.md#named-components) then this will be undefined, and the components will instead be available as direct properties on `this.props`.
##### Example
```jsx
render((
<Router>
<Route path="/" component={App}>
<Route path="groups" component={Groups} />
<Route path="users" component={Users} />
</Route>
</Router>
), node)
class App extends React.Component {
render() {
return (
<div>
{/* this will be either <Users> or <Groups> */}
{this.props.children}
</div>
)
}
}
```
#### `history` (deprecated)
### Named Components
When a route has one or more named components, the child elements are available by name on `this.props`. In this case `this.props.children` will be undefined. All route components can participate in the nesting.
#### Example
```jsx
render((
<Router>
<Route path="/" component={App}>
<Route path="groups" components={{main: Groups, sidebar: GroupsSidebar}} />
<Route path="users" components={{main: Users, sidebar: UsersSidebar}}>
<Route path="users/:userId" component={Profile} />
</Route>
</Route>
</Router>
), node)
class App extends React.Component {
render() {
// the matched child route components become props in the parent
return (
<div>
<div className="Main">
{/* this will either be <Groups> or <Users> */}
{this.props.main}
</div>
<div className="Sidebar">
{/* this will either be <GroupsSidebar> or <UsersSidebar> */}
{this.props.sidebar}
</div>
</div>
)
}
}
class Users extends React.Component {
render() {
return (
<div>
{/* if at "/users/123" this will be <Profile> */}
{/* UsersSidebar will also get <Profile> as this.props.children.
You can pick where it renders */}
{this.props.children}
</div>
)
}
}
```
## Histories
For more details, please see the [histories guide](/docs/guides/Histories.md).
### `browserHistory`
`browserHistory` uses the HTML5 History API when available, and falls back to full refreshes otherwise. `browserHistory` requires additional configuration on the server side to serve up URLs, but is the generally preferred solution for modern web pages.
### `hashHistory`
`hashHistory` uses URL hashes, along with a query key to keep track of state. `hashHistory` requires no additional server configuration, but is generally less preferred than `browserHistory`.
### `createMemoryHistory(options)`
`createMemoryHistory` creates an in-memory `history` object that does not interact with the browser URL. This is useful for when you need to customize the `history` object used for server-side rendering, for automated testing, or for when you do not want to manipulate the browser URL, such as when your application is embedded in an `<iframe>`.
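A minimal sketch for tests or server rendering (the starting entry is arbitrary; `routes` and `node` are assumed to exist):

```jsx
import { createMemoryHistory, Router } from 'react-router'
import { render } from 'react-dom'

const history = createMemoryHistory('/dashboard')
render(<Router history={history} routes={routes} />, node)
```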
### `useRouterHistory(createHistory)`
`useRouterHistory` is a `history` enhancer that configures a given `createHistory` factory to work with React Router. This allows using custom histories in addition to the bundled singleton histories.
It also pre-enhances the history with the
[useQueries](https://github.com/ReactTraining/history/blob/master/docs/QuerySupport.md)
and
[useBasename](https://github.com/ReactTraining/history/blob/master/docs/BasenameSupport.md)
enhancers from `history`.
#### Example
```jsx
import createHashHistory from 'history/lib/createHashHistory'
const history = useRouterHistory(createHashHistory)({ queryKey: false })
```
## Utilities
### `match({ routes, location, [history], ...options }, cb)`
This function is to be used for server-side rendering. It matches a set of routes to a location, without rendering, and calls a `callback(error, redirectLocation, renderProps)` when it's done.
The function will create a `history` for you, passing the additional `options` along to create it. These options can include `basename` to control the base name for URLs, as well as the pair of `parseQueryString` and `stringifyQuery` to control query string parsing and serializing. You can also pass in an already instantiated `history` object, which can be constructed however you like.
The three arguments to the callback function you pass to `match` are:
- `error`: A Javascript `Error` object if an error occurred, `undefined` otherwise.
- `redirectLocation`: A [Location](/docs/Glossary.md#location) object if the route is a redirect, `undefined` otherwise.
- `renderProps`: The props you should pass to the routing context if the route matched, `undefined` otherwise.
If all three parameters are `undefined`, this means that there was no route found matching the given location.
*Note: You probably don't want to use this in a browser unless you're doing server-side rendering of async routes.*
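A minimal server-side sketch (the Express-style `app` handler and the `routes` config are assumptions, not part of the API):

```jsx
import { match, RouterContext } from 'react-router'
import { renderToString } from 'react-dom/server'

app.get('*', (req, res) => {
  match({ routes, location: req.url }, (error, redirectLocation, renderProps) => {
    if (error) {
      res.status(500).send(error.message)
    } else if (redirectLocation) {
      res.redirect(302, redirectLocation.pathname + redirectLocation.search)
    } else if (renderProps) {
      res.status(200).send(renderToString(<RouterContext {...renderProps} />))
    } else {
      res.status(404).send('Not found')
    }
  })
})
```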
### `createRoutes(routes)`
Creates and returns an array of routes from the given object which may be a JSX route, a plain object route, or an array of either.
#### params
##### `routes`
One or many [`<Route>`](#route)s or [`PlainRoute`](#plainroute)s.
### `PropTypes`
The following prop types are exported at top level and from `react-router/lib/PropTypes`:
- `routerShape`: Shape for the `router` object on context
- `locationShape`: Shape for the `location` object on route component props
Previously, a number of prop types intended for internal use were also exported under `PropTypes`. These are deprecated and should not be used.
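For illustration, a sketch of declaring the context type with `routerShape` (the `BackButton` component is made up):

```jsx
import React from 'react'
import { routerShape } from 'react-router'

const BackButton = React.createClass({
  contextTypes: {
    router: routerShape.isRequired
  },
  render() {
    return <button onClick={() => this.context.router.goBack()}>Back</button>
  }
})
```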
### `useRoutes(createHistory)` (deprecated)
## Mixins (deprecated)
Deprecated, please see the [upgrade guide](/upgrade-guides/v2.0.0.md#mixins-are-deprecated).
| docs/API.md | 0 | https://github.com/remix-run/react-router/commit/46bbc84dde82c8a894479c6177e841b30ea029df | [
0.014026597142219543,
0.0006216107867658138,
0.00016124632384162396,
0.0001693950907792896,
0.0018639827612787485
] |
{
"id": 5,
"code_window": [
" return <p>You are now logged out</p>\n",
" }\n",
"})\n",
"\n",
"export default Logout\n"
],
"labels": [
"keep",
"keep",
"keep",
"keep",
"replace"
],
"after_edit": [
"module.exports = Logout"
],
"file_path": "examples/auth-with-shared-root/components/Logout.js",
"type": "replace",
"edit_start_line_idx": 13
} | import React from 'react'
import auth from '../utils/auth'
const Logout = React.createClass({
componentDidMount() {
auth.logout()
},
render() {
return <p>You are now logged out</p>
}
})
export default Logout
| examples/auth-with-shared-root/components/Logout.js | 1 | https://github.com/remix-run/react-router/commit/46bbc84dde82c8a894479c6177e841b30ea029df | [
0.20550872385501862,
0.11417955905199051,
0.0228503979742527,
0.11417955905199051,
0.0913291648030281
] |
{
"id": 5,
"code_window": [
" return <p>You are now logged out</p>\n",
" }\n",
"})\n",
"\n",
"export default Logout\n"
],
"labels": [
"keep",
"keep",
"keep",
"keep",
"replace"
],
"after_edit": [
"module.exports = Logout"
],
"file_path": "examples/auth-with-shared-root/components/Logout.js",
"type": "replace",
"edit_start_line_idx": 13
} | import { canUseMembrane } from './deprecateObjectProperties'
import warning from './routerWarning'
export default function makeStateWithLocation(state, location) {
if (__DEV__ && canUseMembrane) {
const stateWithLocation = { ...state }
// I don't use deprecateObjectProperties here because I want to keep the
// same code path between development and production, in that we just
// assign extra properties to the copy of the state object in both cases.
for (const prop in location) {
if (!Object.prototype.hasOwnProperty.call(location, prop)) {
continue
}
Object.defineProperty(stateWithLocation, prop, {
get() {
warning(false, 'Accessing location properties directly from the first argument to `getComponent`, `getComponents`, `getChildRoutes`, and `getIndexRoute` is deprecated. That argument is now the router state (`nextState` or `partialNextState`) rather than the location. To access the location, use `nextState.location` or `partialNextState.location`.')
return location[prop]
}
})
}
return stateWithLocation
}
return { ...state, ...location }
}
| modules/makeStateWithLocation.js | 0 | https://github.com/remix-run/react-router/commit/46bbc84dde82c8a894479c6177e841b30ea029df | [
0.0001696566177997738,
0.00016783915634732693,
0.00016663163842167705,
0.00016722916916478425,
0.000001308096670982195
] |
{
"id": 5,
"code_window": [
" return <p>You are now logged out</p>\n",
" }\n",
"})\n",
"\n",
"export default Logout\n"
],
"labels": [
"keep",
"keep",
"keep",
"keep",
"replace"
],
"after_edit": [
"module.exports = Logout"
],
"file_path": "examples/auth-with-shared-root/components/Logout.js",
"type": "replace",
"edit_start_line_idx": 13
} | /* components */
export Router from './Router'
export Link from './Link'
export IndexLink from './IndexLink'
export withRouter from './withRouter'
/* components (configuration) */
export IndexRedirect from './IndexRedirect'
export IndexRoute from './IndexRoute'
export Redirect from './Redirect'
export Route from './Route'
/* mixins */
export History from './History'
export Lifecycle from './Lifecycle'
export RouteContext from './RouteContext'
/* utils */
export useRoutes from './useRoutes'
export { createRoutes } from './RouteUtils'
export RouterContext from './RouterContext'
export RoutingContext from './RoutingContext'
export PropTypes, { locationShape, routerShape } from './PropTypes'
export match from './match'
export useRouterHistory from './useRouterHistory'
export { formatPattern } from './PatternUtils'
export applyRouterMiddleware from './applyRouterMiddleware'
/* histories */
export browserHistory from './browserHistory'
export hashHistory from './hashHistory'
export createMemoryHistory from './createMemoryHistory'
| modules/index.js | 0 | https://github.com/remix-run/react-router/commit/46bbc84dde82c8a894479c6177e841b30ea029df | [
0.00016895261069294065,
0.00016641583351884037,
0.00016489349945914,
0.0001659085974097252,
0.00000155975806137576
] |
{
"id": 5,
"code_window": [
" return <p>You are now logged out</p>\n",
" }\n",
"})\n",
"\n",
"export default Logout\n"
],
"labels": [
"keep",
"keep",
"keep",
"keep",
"replace"
],
"after_edit": [
"module.exports = Logout"
],
"file_path": "examples/auth-with-shared-root/components/Logout.js",
"type": "replace",
"edit_start_line_idx": 13
} | /*globals COURSES:true */
import React, { Component } from 'react'
import { Link } from 'react-router'
class Sidebar extends Component {
render() {
let { assignments } = COURSES[this.props.params.courseId]
return (
<div>
<h3>Sidebar Assignments</h3>
<ul>
{assignments.map(assignment => (
<li key={assignment.id}>
<Link to={`/course/${this.props.params.courseId}/assignments/${assignment.id}`}>
{assignment.title}
</Link>
</li>
))}
</ul>
</div>
)
}
}
module.exports = Sidebar
| examples/huge-apps/routes/Course/routes/Assignments/components/Sidebar.js | 0 | https://github.com/remix-run/react-router/commit/46bbc84dde82c8a894479c6177e841b30ea029df | [
0.00017383246449753642,
0.00017215039406437427,
0.00016965143731795251,
0.0001729672512738034,
0.00000180198048838065
] |
{
"id": 6,
"code_window": [
" render() {\n",
" return <h2>Page One!</h2>\n",
" }\n",
"})\n",
"\n",
"export default PageOne\n"
],
"labels": [
"keep",
"keep",
"keep",
"keep",
"keep",
"replace"
],
"after_edit": [
"module.exports = PageOne"
],
"file_path": "examples/auth-with-shared-root/components/PageOne.js",
"type": "replace",
"edit_start_line_idx": 8
} | import React from 'react'
import auth from '../utils/auth'
const Logout = React.createClass({
componentDidMount() {
auth.logout()
},
render() {
return <p>You are now logged out</p>
}
})
export default Logout
| examples/auth-with-shared-root/components/Logout.js | 1 | https://github.com/remix-run/react-router/commit/46bbc84dde82c8a894479c6177e841b30ea029df | [
0.00024921397562138736,
0.00020814347954001278,
0.0001670729834586382,
0.00020814347954001278,
0.000041070496081374586
] |
{
"id": 6,
"code_window": [
" render() {\n",
" return <h2>Page One!</h2>\n",
" }\n",
"})\n",
"\n",
"export default PageOne\n"
],
"labels": [
"keep",
"keep",
"keep",
"keep",
"keep",
"replace"
],
"after_edit": [
"module.exports = PageOne"
],
"file_path": "examples/auth-with-shared-root/components/PageOne.js",
"type": "replace",
"edit_start_line_idx": 8
} | {
"env": {
"mocha": true
},
"rules": {
"react/prop-types": 0
}
}
| modules/__tests__/.eslintrc | 0 | https://github.com/remix-run/react-router/commit/46bbc84dde82c8a894479c6177e841b30ea029df | [
0.00017239246517419815,
0.00017239246517419815,
0.00017239246517419815,
0.00017239246517419815,
0
] |
{
"id": 6,
"code_window": [
" render() {\n",
" return <h2>Page One!</h2>\n",
" }\n",
"})\n",
"\n",
"export default PageOne\n"
],
"labels": [
"keep",
"keep",
"keep",
"keep",
"keep",
"replace"
],
"after_edit": [
"module.exports = PageOne"
],
"file_path": "examples/auth-with-shared-root/components/PageOne.js",
"type": "replace",
"edit_start_line_idx": 8
} | import expect, { spyOn } from 'expect'
import React, { Component } from 'react'
import { render, unmountComponentAtNode } from 'react-dom'
import createHistory from '../createMemoryHistory'
import { routerShape } from '../PropTypes'
import execSteps from './execSteps'
import Router from '../Router'
describe('When a router enters a branch', function () {
let
node,
newsLeaveHookSpy, removeNewsLeaveHook, userLeaveHookSpy,
DashboardRoute, NewsFeedRoute, InboxRoute, RedirectToInboxRoute, MessageRoute, UserRoute, AssignmentRoute,
routes
beforeEach(function () {
node = document.createElement('div')
newsLeaveHookSpy = expect.createSpy()
userLeaveHookSpy = expect.createSpy()
class Dashboard extends Component {
render() {
return (
<div className="Dashboard">
<h1>The Dashboard</h1>
{this.props.children}
</div>
)
}
}
class NewsFeed extends Component {
componentWillMount() {
removeNewsLeaveHook = this.context.router.setRouteLeaveHook(
this.props.route,
() => newsLeaveHookSpy() // Break reference equality.
)
}
render() {
return <div>News</div>
}
}
NewsFeed.contextTypes = {
router: routerShape.isRequired
}
class Inbox extends Component {
render() {
return <div>Inbox</div>
}
}
class UserAssignment extends Component {
render() {
return <div>assignment {this.props.params.assignmentId}</div>
}
}
class User extends Component {
componentWillMount() {
this.context.router.setRouteLeaveHook(
this.props.route,
userLeaveHookSpy
)
}
render() {
return <div>User {this.props.params.userId} {this.props.children}</div>
}
}
User.contextTypes = {
router: routerShape.isRequired
}
NewsFeedRoute = {
path: 'news',
component: NewsFeed,
onEnter(nextState, replace) {
expect(this).toBe(NewsFeedRoute)
expect(nextState.routes).toContain(NewsFeedRoute)
expect(replace).toBeA('function')
},
onChange(prevState, nextState, replace) {
expect(this).toBe(NewsFeedRoute)
expect(prevState).toNotEqual(nextState)
expect(prevState.routes).toContain(NewsFeedRoute)
expect(nextState.routes).toContain(NewsFeedRoute)
expect(replace).toBeA('function')
},
onLeave(prevState) {
expect(this).toBe(NewsFeedRoute)
expect(prevState.routes).toContain(NewsFeedRoute)
}
}
InboxRoute = {
path: 'inbox',
component: Inbox,
onEnter(nextState, replace) {
expect(this).toBe(InboxRoute)
expect(nextState.routes).toContain(InboxRoute)
expect(replace).toBeA('function')
},
onLeave(prevState) {
expect(this).toBe(InboxRoute)
expect(prevState.routes).toContain(InboxRoute)
}
}
RedirectToInboxRoute = {
path: 'redirect-to-inbox',
onEnter(nextState, replace) {
expect(this).toBe(RedirectToInboxRoute)
expect(nextState.routes).toContain(RedirectToInboxRoute)
expect(replace).toBeA('function')
replace('/inbox')
},
onLeave(prevState) {
expect(this).toBe(RedirectToInboxRoute)
expect(prevState.routes).toContain(RedirectToInboxRoute)
}
}
MessageRoute = {
path: 'messages/:messageID',
onEnter(nextState, replace) {
expect(this).toBe(MessageRoute)
expect(nextState.routes).toContain(MessageRoute)
expect(replace).toBeA('function')
},
onChange(prevState, nextState, replace) {
expect(this).toBe(MessageRoute)
expect(prevState.routes).toContain(MessageRoute)
expect(nextState.routes).toContain(MessageRoute)
expect(replace).toBeA('function')
},
onLeave(prevState) {
expect(this).toBe(MessageRoute)
expect(prevState.routes).toContain(MessageRoute)
}
}
AssignmentRoute = {
path: 'assignments/:assignmentId',
component: UserAssignment,
onEnter() { expect(this).toBe(AssignmentRoute) },
onLeave() { expect(this).toBe(AssignmentRoute) }
}
UserRoute = {
path: 'users/:userId',
component: User,
childRoutes: [ AssignmentRoute ],
onEnter() { expect(this).toBe(UserRoute) },
onLeave() { expect(this).toBe(UserRoute) }
}
DashboardRoute = {
path: '/',
component: Dashboard,
onEnter(nextState, replace) {
expect(this).toBe(DashboardRoute)
expect(nextState.routes).toContain(DashboardRoute)
expect(replace).toBeA('function')
},
onChange(prevState, nextState, replace) {
expect(this).toBe(DashboardRoute)
expect(prevState).toNotEqual(nextState)
expect(prevState.routes).toContain(DashboardRoute)
expect(nextState.routes).toContain(DashboardRoute)
expect(replace).toBeA('function')
},
onLeave(prevState) {
expect(this).toBe(DashboardRoute)
expect(prevState.routes).toContain(DashboardRoute)
},
childRoutes: [ NewsFeedRoute, InboxRoute, RedirectToInboxRoute, MessageRoute, UserRoute ]
}
routes = [
DashboardRoute
]
})
afterEach(function () {
unmountComponentAtNode(node)
})
it('calls the onEnter hooks of all routes in that branch', function (done) {
const dashboardRouteEnterSpy = spyOn(DashboardRoute, 'onEnter').andCallThrough()
const newsFeedRouteEnterSpy = spyOn(NewsFeedRoute, 'onEnter').andCallThrough()
render(<Router history={createHistory('/news')} routes={routes}/>, node, function () {
expect(dashboardRouteEnterSpy).toHaveBeenCalled()
expect(newsFeedRouteEnterSpy).toHaveBeenCalled()
done()
})
})
it('calls the route leave hooks when leaving the route', function (done) {
const history = createHistory('/news')
render(<Router history={history} routes={routes}/>, node, function () {
expect(newsLeaveHookSpy.calls.length).toEqual(0)
history.push('/inbox')
expect(newsLeaveHookSpy.calls.length).toEqual(1)
history.push('/news')
expect(newsLeaveHookSpy.calls.length).toEqual(1)
history.push('/inbox')
expect(newsLeaveHookSpy.calls.length).toEqual(2)
done()
})
})
it('does not call removed route leave hooks', function (done) {
const history = createHistory('/news')
render(<Router history={history} routes={routes}/>, node, function () {
removeNewsLeaveHook()
history.push('/inbox')
expect(newsLeaveHookSpy).toNotHaveBeenCalled()
done()
})
})
it('does not remove route leave hooks when changing params', function (done) {
const history = createHistory('/users/foo')
// Stub this function to exercise the code path.
history.listenBeforeUnload = () => (() => {})
render(<Router history={history} routes={routes}/>, node, function () {
expect(userLeaveHookSpy.calls.length).toEqual(0)
history.push('/users/bar')
expect(userLeaveHookSpy.calls.length).toEqual(1)
history.push('/users/baz')
expect(userLeaveHookSpy.calls.length).toEqual(2)
done()
})
})
describe('and one of the transition hooks navigates to another route', function () {
it('immediately transitions to the new route', function (done) {
const redirectRouteEnterSpy = spyOn(RedirectToInboxRoute, 'onEnter').andCallThrough()
const redirectRouteLeaveSpy = spyOn(RedirectToInboxRoute, 'onLeave').andCallThrough()
const inboxEnterSpy = spyOn(InboxRoute, 'onEnter').andCallThrough()
render(<Router history={createHistory('/redirect-to-inbox')} routes={routes}/>, node, function () {
expect(this.state.location.pathname).toEqual('/inbox')
expect(redirectRouteEnterSpy).toHaveBeenCalled()
expect(redirectRouteLeaveSpy.calls.length).toEqual(0)
expect(inboxEnterSpy).toHaveBeenCalled()
done()
})
})
})
describe('and then navigates to another branch', function () {
it('calls the onLeave hooks of all routes in the previous branch that are not in the next branch', function (done) {
const dashboardRouteLeaveSpy = spyOn(DashboardRoute, 'onLeave').andCallThrough()
const inboxRouteEnterSpy = spyOn(InboxRoute, 'onEnter').andCallThrough()
const inboxRouteLeaveSpy = spyOn(InboxRoute, 'onLeave').andCallThrough()
const history = createHistory('/inbox')
const steps = [
function () {
expect(inboxRouteEnterSpy).toHaveBeenCalled('InboxRoute.onEnter was not called')
history.push('/news')
},
function () {
expect(inboxRouteLeaveSpy).toHaveBeenCalled('InboxRoute.onLeave was not called')
expect(dashboardRouteLeaveSpy.calls.length).toEqual(0, 'DashboardRoute.onLeave was called')
}
]
const execNextStep = execSteps(steps, done)
render(
<Router history={history}
routes={routes}
onUpdate={execNextStep}
/>, node, execNextStep)
})
})
describe('and then navigates to the same branch, but with different params', function () {
it('calls the onLeave and onEnter hooks of all routes whose params have changed', function (done) {
const dashboardRouteLeaveSpy = spyOn(DashboardRoute, 'onLeave').andCallThrough()
const dashboardRouteChangeSpy = spyOn(DashboardRoute, 'onChange').andCallThrough()
const dashboardRouteEnterSpy = spyOn(DashboardRoute, 'onEnter').andCallThrough()
const messageRouteLeaveSpy = spyOn(MessageRoute, 'onLeave').andCallThrough()
const messageRouteChangeSpy = spyOn(MessageRoute, 'onChange').andCallThrough()
const messageRouteEnterSpy = spyOn(MessageRoute, 'onEnter').andCallThrough()
const history = createHistory('/messages/123')
const steps = [
function () {
expect(dashboardRouteEnterSpy).toHaveBeenCalled('DashboardRoute.onEnter was not called')
expect(messageRouteEnterSpy).toHaveBeenCalled('InboxRoute.onEnter was not called')
history.push('/messages/456')
},
function () {
expect(messageRouteLeaveSpy).toHaveBeenCalled('MessageRoute.onLeave was not called')
expect(messageRouteEnterSpy).toHaveBeenCalled('MessageRoute.onEnter was not called')
expect(messageRouteChangeSpy.calls.length).toEqual(0, 'DashboardRoute.onChange was called')
expect(dashboardRouteChangeSpy).toHaveBeenCalled('DashboardRoute.onChange was not called')
expect(dashboardRouteLeaveSpy.calls.length).toEqual(0, 'DashboardRoute.onLeave was called')
}
]
const execNextStep = execSteps(steps, done)
render(
<Router history={history}
routes={routes}
onUpdate={execNextStep}
/>, node, execNextStep)
})
})
describe('and then navigates to the same branch, but with different parent params', function () {
it('calls the onLeave and onEnter hooks of the parent and children', function (done) {
const parentLeaveSpy = spyOn(UserRoute, 'onLeave').andCallThrough()
const parentEnterSpy = spyOn(UserRoute, 'onEnter').andCallThrough()
const childLeaveSpy = spyOn(AssignmentRoute, 'onLeave').andCallThrough()
const childEnterSpy = spyOn(AssignmentRoute, 'onEnter').andCallThrough()
const history = createHistory('/users/123/assignments/456')
const steps = [
function () {
expect(parentEnterSpy).toHaveBeenCalled()
expect(childEnterSpy).toHaveBeenCalled()
history.push('/users/789/assignments/456')
},
function () {
expect(parentLeaveSpy).toHaveBeenCalled()
expect(childLeaveSpy).toHaveBeenCalled()
expect(parentEnterSpy).toHaveBeenCalled()
expect(childEnterSpy).toHaveBeenCalled()
}
]
const execNextStep = execSteps(steps, done)
render(
<Router history={history}
routes={routes}
onUpdate={execNextStep}
/>, node, execNextStep)
})
})
describe('and then the query changes', function () {
it('calls the onEnter hooks of all routes in that branch', function (done) {
const newsFeedRouteEnterSpy = spyOn(NewsFeedRoute, 'onEnter').andCallThrough()
const newsFeedRouteChangeSpy = spyOn(NewsFeedRoute, 'onChange').andCallThrough()
const history = createHistory('/inbox')
render(<Router history={history} routes={routes}/>, node, function () {
history.push({ pathname: '/news', query: { q: 1 } })
expect(newsFeedRouteChangeSpy.calls.length).toEqual(0, 'NewsFeedRoute.onChange was called')
expect(newsFeedRouteEnterSpy.calls.length).toEqual(1)
history.push({ pathname: '/news', query: { q: 2 } })
expect(newsFeedRouteChangeSpy).toHaveBeenCalled('NewsFeedRoute.onChange was not called')
expect(newsFeedRouteEnterSpy.calls.length).toEqual(1)
done()
})
})
})
})
| modules/__tests__/transitionHooks-test.js | 0 | https://github.com/remix-run/react-router/commit/46bbc84dde82c8a894479c6177e841b30ea029df | [
0.8812360763549805,
0.02390502765774727,
0.00016526230319868773,
0.00017271871911361814,
0.1409483402967453
] |
{
"id": 6,
"code_window": [
" render() {\n",
" return <h2>Page One!</h2>\n",
" }\n",
"})\n",
"\n",
"export default PageOne\n"
],
"labels": [
"keep",
"keep",
"keep",
"keep",
"keep",
"replace"
],
"after_edit": [
"module.exports = PageOne"
],
"file_path": "examples/auth-with-shared-root/components/PageOne.js",
"type": "replace",
"edit_start_line_idx": 8
} | import assert from 'assert'
import expect from 'expect'
import React from 'react'
import { render, unmountComponentAtNode } from 'react-dom'
import useRouterHistory from '../useRouterHistory'
import createHistory from 'history/lib/createMemoryHistory'
import Redirect from '../Redirect'
import Router from '../Router'
import Route from '../Route'
describe('useRouterHistory', function () {
it('adds backwards compatibility flag', function () {
const history = useRouterHistory(createHistory)()
expect(history.__v2_compatible__).toBe(true)
})
it('passes along options, especially query parsing', function (done) {
const history = useRouterHistory(createHistory)({
stringifyQuery() {
assert(true)
done()
}
})
history.push({ pathname: '/', query: { test: true } })
})
describe('when using basename', function () {
let node
beforeEach(function () {
node = document.createElement('div')
})
afterEach(function () {
unmountComponentAtNode(node)
})
it('should regard basename', function (done) {
const pathnames = []
const basenames = []
const history = useRouterHistory(createHistory)({
entries: '/foo/notes/5',
basename: '/foo'
})
history.listen(function (location) {
pathnames.push(location.pathname)
basenames.push(location.basename)
})
render((
<Router history={history}>
<Route path="/messages/:id" />
<Redirect from="/notes/:id" to="/messages/:id" />
</Router>
), node, function () {
expect(pathnames).toEqual([ '/notes/5', '/messages/5' ])
expect(basenames).toEqual([ '/foo', '/foo' ])
expect(this.state.location.pathname).toEqual('/messages/5')
expect(this.state.location.basename).toEqual('/foo')
done()
})
})
})
})
| modules/__tests__/useRouterHistory-test.js | 0 | https://github.com/remix-run/react-router/commit/46bbc84dde82c8a894479c6177e841b30ea029df | [
0.0016652849735692143,
0.00041607144521549344,
0.00016563571989536285,
0.0001711344812065363,
0.0005156644620001316
] |
{
"id": 7,
"code_window": [
" render() {\n",
" return <h2>Page Two! Wooo!</h2>\n",
" }\n",
"})\n",
"\n",
"export default PageTwo\n"
],
"labels": [
"keep",
"keep",
"keep",
"keep",
"keep",
"replace"
],
"after_edit": [
"module.exports = PageTwo"
],
"file_path": "examples/auth-with-shared-root/components/PageTwo.js",
"type": "replace",
"edit_start_line_idx": 8
} | import React from 'react'
import { withRouter } from 'react-router'
import auth from '../utils/auth.js'
const Login = React.createClass({
getInitialState() {
return {
error: false
}
},
handleSubmit(event) {
event.preventDefault()
const email = this.refs.email.value
const pass = this.refs.pass.value
auth.login(email, pass, (loggedIn) => {
if (!loggedIn)
return this.setState({ error: true })
const { location } = this.props
if (location.state && location.state.nextPathname) {
this.props.router.replace(location.state.nextPathname)
} else {
this.props.router.replace('/')
}
})
},
render() {
return (
<form onSubmit={this.handleSubmit}>
<label><input ref="email" placeholder="email" defaultValue="[email protected]" /></label>
<label><input ref="pass" placeholder="password" /></label> (hint: password1)<br />
<button type="submit">login</button>
{this.state.error && (
<p>Bad login information</p>
)}
</form>
)
}
})
export default withRouter(Login)
| examples/auth-with-shared-root/components/Login.js | 1 | https://github.com/remix-run/react-router/commit/46bbc84dde82c8a894479c6177e841b30ea029df | [
0.001151399570517242,
0.00037746859015896916,
0.0001699905114946887,
0.0001743976172292605,
0.00038741642492823303
] |
{
"id": 7,
"code_window": [
" render() {\n",
" return <h2>Page Two! Wooo!</h2>\n",
" }\n",
"})\n",
"\n",
"export default PageTwo\n"
],
"labels": [
"keep",
"keep",
"keep",
"keep",
"keep",
"replace"
],
"after_edit": [
"module.exports = PageTwo"
],
"file_path": "examples/auth-with-shared-root/components/PageTwo.js",
"type": "replace",
"edit_start_line_idx": 8
} | import React from 'react'
class Grades extends React.Component {
render() {
return (
<div>
<h2>Grades</h2>
</div>
)
}
}
module.exports = Grades
| examples/huge-apps/routes/Grades/components/Grades.js | 0 | https://github.com/remix-run/react-router/commit/46bbc84dde82c8a894479c6177e841b30ea029df | [
0.0050184037536382675,
0.0025989683344960213,
0.0001795327552827075,
0.0025989683344960213,
0.00241943565197289
] |
{
"id": 7,
"code_window": [
" render() {\n",
" return <h2>Page Two! Wooo!</h2>\n",
" }\n",
"})\n",
"\n",
"export default PageTwo\n"
],
"labels": [
"keep",
"keep",
"keep",
"keep",
"keep",
"replace"
],
"after_edit": [
"module.exports = PageTwo"
],
"file_path": "examples/auth-with-shared-root/components/PageTwo.js",
"type": "replace",
"edit_start_line_idx": 8
} | # Contributor Covenant Code of Conduct
## Our Pledge
In the interest of fostering an open and welcoming environment, we as
contributors and maintainers pledge to making participation in our project and
our community a harassment-free experience for everyone, regardless of age, body
size, disability, ethnicity, gender identity and expression, level of experience,
nationality, personal appearance, race, religion, or sexual identity and
orientation.
## Our Standards
Examples of behavior that contributes to creating a positive environment
include:
* Using welcoming and inclusive language
* Being respectful of differing viewpoints and experiences
* Gracefully accepting constructive criticism
* Focusing on what is best for the community
* Showing empathy towards other community members
Examples of unacceptable behavior by participants include:
* The use of sexualized language or imagery and unwelcome sexual attention or
advances
* Trolling, insulting/derogatory comments, and personal or political attacks
* Public or private harassment
* Publishing others' private information, such as a physical or electronic
address, without explicit permission
* Other conduct which could reasonably be considered inappropriate in a
professional setting
## Our Responsibilities
Project maintainers are responsible for clarifying the standards of acceptable
behavior and are expected to take appropriate and fair corrective action in
response to any instances of unacceptable behavior.
Project maintainers have the right and responsibility to remove, edit, or
reject comments, commits, code, wiki edits, issues, and other contributions
that are not aligned to this Code of Conduct, or to ban temporarily or
permanently any contributor for other behaviors that they deem inappropriate,
threatening, offensive, or harmful.
## Scope
This Code of Conduct applies both within project spaces and in public spaces
when an individual is representing the project or its community. Examples of
representing a project or community include using an official project e-mail
address, posting via an official social media account, or acting as an appointed
representative at an online or offline event. Representation of a project may be
further defined and clarified by project maintainers.
## Enforcement
Instances of abusive, harassing, or otherwise unacceptable behavior may be
reported by contacting the project team at [email protected]. All
complaints will be reviewed and investigated and will result in a response that
is deemed necessary and appropriate to the circumstances. The project team is
obligated to maintain confidentiality with regard to the reporter of an incident.
Further details of specific enforcement policies may be posted separately.
Project maintainers who do not follow or enforce the Code of Conduct in good
faith may face temporary or permanent repercussions as determined by other
members of the project's leadership.
## Attribution
This Code of Conduct is adapted from the [Contributor Covenant][homepage], version 1.4,
available at [http://contributor-covenant.org/version/1/4][version]
[homepage]: http://contributor-covenant.org
[version]: http://contributor-covenant.org/version/1/4/
| CODE_OF_CONDUCT.md | 0 | https://github.com/remix-run/react-router/commit/46bbc84dde82c8a894479c6177e841b30ea029df | [
0.00017630851652938873,
0.00017240733723156154,
0.00016571010928601027,
0.00017318394384346902,
0.0000032137263588083442
] |
{
"id": 7,
"code_window": [
" render() {\n",
" return <h2>Page Two! Wooo!</h2>\n",
" }\n",
"})\n",
"\n",
"export default PageTwo\n"
],
"labels": [
"keep",
"keep",
"keep",
"keep",
"keep",
"replace"
],
"after_edit": [
"module.exports = PageTwo"
],
"file_path": "examples/auth-with-shared-root/components/PageTwo.js",
"type": "replace",
"edit_start_line_idx": 8
} | function resetHash(done) {
if (window.location.hash !== '') {
window.location.hash = ''
setTimeout(done, 10)
} else {
done()
}
}
export default resetHash
| modules/__tests__/resetHash.js | 0 | https://github.com/remix-run/react-router/commit/46bbc84dde82c8a894479c6177e841b30ea029df | [
0.00017928094894159585,
0.00017318922618869692,
0.000167097503435798,
0.00017318922618869692,
0.0000060917227528989315
] |
{
"id": 8,
"code_window": [
" }\n",
"})\n",
"\n",
"export default User\n"
],
"labels": [
"keep",
"keep",
"keep",
"replace"
],
"after_edit": [
"module.exports = User"
],
"file_path": "examples/auth-with-shared-root/components/User.js",
"type": "replace",
"edit_start_line_idx": 8
} | import React from 'react'
import auth from '../utils/auth'
const Logout = React.createClass({
componentDidMount() {
auth.logout()
},
render() {
return <p>You are now logged out</p>
}
})
export default Logout
| examples/auth-with-shared-root/components/Logout.js | 1 | https://github.com/remix-run/react-router/commit/46bbc84dde82c8a894479c6177e841b30ea029df | [
0.00032999066752381623,
0.0002501898561604321,
0.00017038907390087843,
0.0002501898561604321,
0.0000798007968114689
] |
{
"id": 8,
"code_window": [
" }\n",
"})\n",
"\n",
"export default User\n"
],
"labels": [
"keep",
"keep",
"keep",
"replace"
],
"after_edit": [
"module.exports = User"
],
"file_path": "examples/auth-with-shared-root/components/User.js",
"type": "replace",
"edit_start_line_idx": 8
} | <!--
Thank you for contributing!
Have a usage question?
======================
The issue tracker isn't the best place for usage questions. This format is not well-suited for Q&A, and questions here don't have as much visibility as they do elsewhere. Before you ask a question, here are some resources to get help first:
- Do the tutorial: https://github.com/reactjs/react-router-tutorial
- Read the docs: https://github.com/reactjs/react-router/tree/latest/docs
- Explore examples: https://github.com/reactjs/react-router/tree/latest/examples
- Look for/ask questions on stack overflow: https://stackoverflow.com/questions/ask?tags=react-router
- Ask in chat: https://discord.gg/0ZcbPKXt5bYaNQ46
Think you found a bug?
======================
The best bug report is a failing test in the repository as a pull request. Otherwise, please use the "BUG REPORT" template below.
Have a feature request?
=======================
Remove the template from below and provide thoughtful commentary *and code samples* on what this feature means for your product. What will it allow you to do that you can't do today? How will it make current work-arounds straightforward? What potential bugs and edge cases does it help to avoid? etc. Please keep it product-centric.
-->
<!-- BUG TEMPLATE -->
## Version
2.0.0
## Test Case
http://jsbin.com/sacerobuxi/edit?html,js,output
## Steps to reproduce
## Expected Behavior
## Actual Behavior
| ISSUE_TEMPLATE.md | 0 | https://github.com/remix-run/react-router/commit/46bbc84dde82c8a894479c6177e841b30ea029df | [
0.0001722066372167319,
0.00017094356007874012,
0.0001701095316093415,
0.00017072905029635876,
7.989660275597998e-7
] |
{
"id": 8,
"code_window": [
" }\n",
"})\n",
"\n",
"export default User\n"
],
"labels": [
"keep",
"keep",
"keep",
"replace"
],
"after_edit": [
"module.exports = User"
],
"file_path": "examples/auth-with-shared-root/components/User.js",
"type": "replace",
"edit_start_line_idx": 8
} | es6
lib
umd
examples/**/*-bundle.js
node_modules
npm-debug.log
website/index.html
website/tags/*
coverage
| .gitignore | 0 | https://github.com/remix-run/react-router/commit/46bbc84dde82c8a894479c6177e841b30ea029df | [
0.0001685117749730125,
0.0001685117749730125,
0.0001685117749730125,
0.0001685117749730125,
0
] |
{
"id": 8,
"code_window": [
" }\n",
"})\n",
"\n",
"export default User\n"
],
"labels": [
"keep",
"keep",
"keep",
"replace"
],
"after_edit": [
"module.exports = User"
],
"file_path": "examples/auth-with-shared-root/components/User.js",
"type": "replace",
"edit_start_line_idx": 8
} | import React from 'react'
import { render } from 'react-dom'
import { browserHistory, Router, Route, Link, withRouter } from 'react-router'
import withExampleBasename from '../withExampleBasename'
import './app.css'
const App = withRouter(
React.createClass({
getInitialState() {
return {
tacos: [
{ name: 'duck confit' },
{ name: 'carne asada' },
{ name: 'shrimp' }
]
}
},
addTaco() {
let name = prompt('taco name?')
this.setState({
tacos: this.state.tacos.concat({ name })
})
},
handleRemoveTaco(removedTaco) {
this.setState({
tacos: this.state.tacos.filter(function (taco) {
return taco.name != removedTaco
})
})
this.props.router.push('/')
},
render() {
let links = this.state.tacos.map(function (taco, i) {
return (
<li key={i}>
<Link to={`/taco/${taco.name}`}>{taco.name}</Link>
</li>
)
})
return (
<div className="App">
<button onClick={this.addTaco}>Add Taco</button>
<ul className="Master">
{links}
</ul>
<div className="Detail">
{this.props.children && React.cloneElement(this.props.children, {
onRemoveTaco: this.handleRemoveTaco
})}
</div>
</div>
)
}
})
)
const Taco = React.createClass({
remove() {
this.props.onRemoveTaco(this.props.params.name)
},
render() {
return (
<div className="Taco">
<h1>{this.props.params.name}</h1>
<button onClick={this.remove}>remove</button>
</div>
)
}
})
render((
<Router history={withExampleBasename(browserHistory, __dirname)}>
<Route path="/" component={App}>
<Route path="taco/:name" component={Taco} />
</Route>
</Router>
), document.getElementById('example'))
| examples/passing-props-to-children/app.js | 0 | https://github.com/remix-run/react-router/commit/46bbc84dde82c8a894479c6177e841b30ea029df | [
0.0001792059192666784,
0.00016889872495085,
0.00016556796617805958,
0.00016752106603235006,
0.0000038847601899760775
] |
{
"id": 0,
"code_window": [
" import type { StorybookConfig } from '@storybook/react-webpack5';\n",
"\n",
" const config: StorybookConfig = {\n",
" framework: { name: 'foo', options: { bar: require('baz') } },\n",
" }\n",
" export default config;\n",
" `;\n",
" const config = loadConfig(source).parse();\n",
" expect(config.getNameFromPath(['framework'])).toEqual('foo');\n"
],
"labels": [
"keep",
"keep",
"keep",
"add",
"keep",
"keep",
"keep",
"keep",
"keep"
],
"after_edit": [
" \"otherField\": { \"name\": 'foo', options: { bar: require('baz') } },\n"
],
"file_path": "code/lib/csf-tools/src/ConfigFile.test.ts",
"type": "add",
"edit_start_line_idx": 771
} | /// <reference types="@types/jest" />;
import { dedent } from 'ts-dedent';
import { formatConfig, loadConfig } from './ConfigFile';
expect.addSnapshotSerializer({
print: (val: any) => val,
test: (val) => true,
});
const getField = (path: string[], source: string) => {
const config = loadConfig(source).parse();
return config.getFieldValue(path);
};
const setField = (path: string[], value: any, source: string) => {
const config = loadConfig(source).parse();
config.setFieldValue(path, value);
return formatConfig(config);
};
const removeField = (path: string[], source: string) => {
const config = loadConfig(source).parse();
config.removeField(path);
return formatConfig(config);
};
describe('ConfigFile', () => {
describe('getField', () => {
describe('named exports', () => {
it('missing export', () => {
expect(
getField(
['core', 'builder'],
dedent`
export const foo = { builder: 'webpack5' }
`
)
).toBeUndefined();
});
it('missing field', () => {
expect(
getField(
['core', 'builder'],
dedent`
export const core = { foo: 'webpack5' }
`
)
).toBeUndefined();
});
it('found scalar', () => {
expect(
getField(
['core', 'builder'],
dedent`
export const core = { builder: 'webpack5' }
`
)
).toEqual('webpack5');
});
it('found object', () => {
expect(
getField(
['core', 'builder'],
dedent`
export const core = { builder: { name: 'webpack5' } }
`
)
).toEqual({ name: 'webpack5' });
});
it('variable ref export', () => {
expect(
getField(
['core', 'builder'],
dedent`
const coreVar = { builder: 'webpack5' };
export const core = coreVar;
`
)
).toEqual('webpack5');
});
it('variable export', () => {
expect(
getField(
['core', 'builder'],
dedent`
const coreVar = { builder: 'webpack5' };
export const core = coreVar;
`
)
).toEqual('webpack5');
});
});
describe('module exports', () => {
it('missing export', () => {
expect(
getField(
['core', 'builder'],
dedent`
module.exports = { foo: { builder: 'webpack5' } }
`
)
).toBeUndefined();
});
it('found scalar', () => {
expect(
getField(
['core', 'builder'],
dedent`
module.exports = { core: { builder: 'webpack5' } }
`
)
).toEqual('webpack5');
});
it('variable ref export', () => {
expect(
getField(
['core', 'builder'],
dedent`
const core = { builder: 'webpack5' };
module.exports = { core };
`
)
).toEqual('webpack5');
});
it('variable rename', () => {
expect(
getField(
['core', 'builder'],
dedent`
const coreVar = { builder: 'webpack5' };
module.exports = { core: coreVar };
`
)
).toEqual('webpack5');
});
it('variable exports', () => {
expect(
getField(
['stories'],
dedent`
import type { StorybookConfig } from '@storybook/react-webpack5';
const config: StorybookConfig = {
stories: [{ directory: '../src', titlePrefix: 'Demo' }],
}
module.exports = config;
`
)
).toEqual([{ directory: '../src', titlePrefix: 'Demo' }]);
});
});
describe('default export', () => {
it('missing export', () => {
expect(
getField(
['core', 'builder'],
dedent`
export default { foo: { builder: 'webpack5' } }
`
)
).toBeUndefined();
});
it('found scalar', () => {
expect(
getField(
['core', 'builder'],
dedent`
export default { core: { builder: 'webpack5' } }
`
)
).toEqual('webpack5');
});
it('variable ref export', () => {
expect(
getField(
['core', 'builder'],
dedent`
const core = { builder: 'webpack5' };
export default { core };
`
)
).toEqual('webpack5');
});
it('variable rename', () => {
expect(
getField(
['core', 'builder'],
dedent`
const coreVar = { builder: 'webpack5' };
export default { core: coreVar };
`
)
).toEqual('webpack5');
});
it('variable exports', () => {
expect(
getField(
['stories'],
dedent`
import type { StorybookConfig } from '@storybook/react-webpack5';
const config: StorybookConfig = {
stories: [{ directory: '../src', titlePrefix: 'Demo' }],
}
export default config;
`
)
).toEqual([{ directory: '../src', titlePrefix: 'Demo' }]);
});
});
});
describe('setField', () => {
describe('named exports', () => {
it('missing export', () => {
expect(
setField(
['core', 'builder'],
'webpack5',
dedent`
export const addons = [];
`
)
).toMatchInlineSnapshot(`
export const addons = [];
export const core = {
builder: "webpack5"
};
`);
});
it('missing field', () => {
expect(
setField(
['core', 'builder'],
'webpack5',
dedent`
export const core = { foo: 'bar' };
`
)
).toMatchInlineSnapshot(`
export const core = {
foo: 'bar',
builder: 'webpack5'
};
`);
});
it('found scalar', () => {
expect(
setField(
['core', 'builder'],
'webpack5',
dedent`
export const core = { builder: 'webpack4' };
`
)
).toMatchInlineSnapshot(`
export const core = {
builder: 'webpack5'
};
`);
});
it('found object', () => {
expect(
setField(
['core', 'builder'],
{ name: 'webpack5' },
dedent`
export const core = { builder: { name: 'webpack4' } };
`
)
).toMatchInlineSnapshot(`
export const core = {
builder: {
name: 'webpack5'
}
};
`);
});
it('variable export', () => {
expect(
setField(
['core', 'builder'],
'webpack5',
dedent`
const coreVar = { builder: 'webpack4' };
export const core = coreVar;
`
)
).toMatchInlineSnapshot(`
const coreVar = {
builder: 'webpack5'
};
export const core = coreVar;
`);
});
});
describe('module exports', () => {
it('missing export', () => {
expect(
setField(
['core', 'builder'],
'webpack5',
dedent`
module.exports = { addons: [] };
`
)
).toMatchInlineSnapshot(`
module.exports = {
addons: [],
core: {
builder: "webpack5"
}
};
`);
});
it('missing field', () => {
expect(
setField(
['core', 'builder'],
'webpack5',
dedent`
module.exports = { core: { foo: 'bar' }};
`
)
).toMatchInlineSnapshot(`
module.exports = {
core: {
foo: 'bar',
builder: 'webpack5'
}
};
`);
});
it('found scalar', () => {
expect(
setField(
['core', 'builder'],
'webpack5',
dedent`
module.exports = { core: { builder: 'webpack4' } };
`
)
).toMatchInlineSnapshot(`
module.exports = {
core: {
builder: 'webpack5'
}
};
`);
});
});
describe('default export', () => {
it('missing export', () => {
expect(
setField(
['core', 'builder'],
'webpack5',
dedent`
export default { addons: [] };
`
)
).toMatchInlineSnapshot(`
export default {
addons: [],
core: {
builder: "webpack5"
}
};
`);
});
it('missing field', () => {
expect(
setField(
['core', 'builder'],
'webpack5',
dedent`
export default { core: { foo: 'bar' }};
`
)
).toMatchInlineSnapshot(`
export default {
core: {
foo: 'bar',
builder: 'webpack5'
}
};
`);
});
it('found scalar', () => {
expect(
setField(
['core', 'builder'],
'webpack5',
dedent`
export default { core: { builder: 'webpack4' } };
`
)
).toMatchInlineSnapshot(`
export default {
core: {
builder: 'webpack5'
}
};
`);
});
});
describe('quotes', () => {
it('no quotes', () => {
expect(setField(['foo', 'bar'], 'baz', '')).toMatchInlineSnapshot(`
export const foo = {
bar: "baz"
};
`);
});
it('more single quotes', () => {
expect(setField(['foo', 'bar'], 'baz', `export const stories = ['a', 'b', "c"]`))
.toMatchInlineSnapshot(`
export const stories = ['a', 'b', "c"];
export const foo = {
bar: 'baz'
};
`);
});
it('more double quotes', () => {
expect(setField(['foo', 'bar'], 'baz', `export const stories = ['a', "b", "c"]`))
.toMatchInlineSnapshot(`
export const stories = ['a', "b", "c"];
export const foo = {
bar: "baz"
};
`);
});
});
});
describe('removeField', () => {
describe('named exports', () => {
it('missing export', () => {
expect(
removeField(
['core', 'builder'],
dedent`
export const addons = [];
`
)
).toMatchInlineSnapshot(`export const addons = [];`);
});
it('missing field', () => {
expect(
removeField(
['core', 'builder'],
dedent`
export const core = { foo: 'bar' };
`
)
).toMatchInlineSnapshot(`
export const core = {
foo: 'bar'
};
`);
});
it('found scalar', () => {
expect(
removeField(
['core', 'builder'],
dedent`
export const core = { builder: 'webpack4' };
`
)
).toMatchInlineSnapshot(`export const core = {};`);
});
it('found object', () => {
expect(
removeField(
['core', 'builder'],
dedent`
export const core = { builder: { name: 'webpack4' } };
`
)
).toMatchInlineSnapshot(`export const core = {};`);
});
it('nested object', () => {
expect(
removeField(
['core', 'builder', 'name'],
dedent`
export const core = { builder: { name: 'webpack4' } };
`
)
).toMatchInlineSnapshot(`
export const core = {
builder: {}
};
`);
});
it('string literal key', () => {
expect(
removeField(
['core', 'builder'],
dedent`
export const core = { 'builder': 'webpack4' };
`
)
).toMatchInlineSnapshot(`export const core = {};`);
});
it('variable export', () => {
expect(
removeField(
['core', 'builder'],
dedent`
const coreVar = { builder: 'webpack4' };
export const core = coreVar;
`
)
).toMatchInlineSnapshot(`
const coreVar = {};
export const core = coreVar;
`);
});
it('root export variable', () => {
expect(
removeField(
['core'],
dedent`
export const core = { builder: { name: 'webpack4' } };
export const addons = [];
`
)
).toMatchInlineSnapshot(`export const addons = [];`);
});
});
describe('module exports', () => {
it('missing export', () => {
expect(
removeField(
['core', 'builder'],
dedent`
module.exports = { addons: [] };
`
)
).toMatchInlineSnapshot(`
module.exports = {
addons: []
};
`);
});
it('missing field', () => {
expect(
removeField(
['core', 'builder'],
dedent`
module.exports = { core: { foo: 'bar' }};
`
)
).toMatchInlineSnapshot(`
module.exports = {
core: {
foo: 'bar'
}
};
`);
});
it('found scalar', () => {
expect(
removeField(
['core', 'builder'],
dedent`
module.exports = { core: { builder: 'webpack4' } };
`
)
).toMatchInlineSnapshot(`
module.exports = {
core: {}
};
`);
});
it('nested scalar', () => {
expect(
removeField(
['core', 'builder', 'name'],
dedent`
module.exports = { core: { builder: { name: 'webpack4' } } };
`
)
).toMatchInlineSnapshot(`
module.exports = {
core: {
builder: {}
}
};
`);
});
it('string literal key', () => {
expect(
removeField(
['core', 'builder'],
dedent`
module.exports = { 'core': { 'builder': 'webpack4' } };
`
)
).toMatchInlineSnapshot(`
module.exports = {
'core': {}
};
`);
});
it('root property', () => {
expect(
removeField(
['core'],
dedent`
module.exports = { core: { builder: { name: 'webpack4' } }, addons: [] };
`
)
).toMatchInlineSnapshot(`
module.exports = {
addons: []
};
`);
});
});
describe('default export', () => {
it('missing export', () => {
expect(
removeField(
['core', 'builder'],
dedent`
export default { addons: [] };
`
)
).toMatchInlineSnapshot(`
export default {
addons: []
};
`);
});
it('missing field', () => {
expect(
removeField(
['core', 'builder'],
dedent`
export default { core: { foo: 'bar' }};
`
)
).toMatchInlineSnapshot(`
export default {
core: {
foo: 'bar'
}
};
`);
});
it('found scalar', () => {
expect(
removeField(
['core', 'builder'],
dedent`
export default { core: { builder: 'webpack4' } };
`
)
).toMatchInlineSnapshot(`
export default {
core: {}
};
`);
});
it('nested scalar', () => {
expect(
removeField(
['core', 'builder', 'name'],
dedent`
export default { core: { builder: { name: 'webpack4' } } };
`
)
).toMatchInlineSnapshot(`
export default {
core: {
builder: {}
}
};
`);
});
it('string literal key', () => {
expect(
removeField(
['core', 'builder'],
dedent`
export default { 'core': { 'builder': 'webpack4' } };
`
)
).toMatchInlineSnapshot(`
export default {
'core': {}
};
`);
});
it('root property', () => {
expect(
removeField(
['core'],
dedent`
export default { core: { builder: { name: 'webpack4' } }, addons: [] };
`
)
).toMatchInlineSnapshot(`
export default {
addons: []
};
`);
});
});
describe('quotes', () => {
it('no quotes', () => {
expect(setField(['foo', 'bar'], 'baz', '')).toMatchInlineSnapshot(`
export const foo = {
bar: "baz"
};
`);
});
it('more single quotes', () => {
expect(setField(['foo', 'bar'], 'baz', `export const stories = ['a', 'b', "c"]`))
.toMatchInlineSnapshot(`
export const stories = ['a', 'b', "c"];
export const foo = {
bar: 'baz'
};
`);
});
it('more double quotes', () => {
expect(setField(['foo', 'bar'], 'baz', `export const stories = ['a', "b", "c"]`))
.toMatchInlineSnapshot(`
export const stories = ['a', "b", "c"];
export const foo = {
bar: "baz"
};
`);
});
});
});
describe('config helpers', () => {
describe('getNameFromPath', () => {
it(`supports string literal node`, () => {
const source = dedent`
import type { StorybookConfig } from '@storybook/react-webpack5';
const config: StorybookConfig = {
framework: 'foo',
}
export default config;
`;
const config = loadConfig(source).parse();
expect(config.getNameFromPath(['framework'])).toEqual('foo');
});
it(`supports object expression node with name property`, () => {
const source = dedent`
import type { StorybookConfig } from '@storybook/react-webpack5';
const config: StorybookConfig = {
framework: { name: 'foo', options: { bar: require('baz') } },
}
export default config;
`;
const config = loadConfig(source).parse();
expect(config.getNameFromPath(['framework'])).toEqual('foo');
});
it(`returns undefined when accessing a field that does not exist`, () => {
const source = dedent`
import type { StorybookConfig } from '@storybook/react-webpack5';
const config: StorybookConfig = { }
export default config;
`;
const config = loadConfig(source).parse();
expect(config.getNameFromPath(['framework'])).toBeUndefined();
});
it(`throws an error when node is of unexpected type`, () => {
const source = dedent`
import type { StorybookConfig } from '@storybook/react-webpack5';
const config: StorybookConfig = {
framework: makesNoSense(),
}
export default config;
`;
const config = loadConfig(source).parse();
expect(() => config.getNameFromPath(['framework'])).toThrowError(
`The given node must be a string literal or an object expression with a "name" property that is a string literal.`
);
});
});
describe('getNamesFromPath', () => {
it(`supports an array with string literal and object expression with name property`, () => {
const source = dedent`
import type { StorybookConfig } from '@storybook/react-webpack5';
const config: StorybookConfig = {
addons: [
'foo',
{ name: 'bar', options: {} },
]
}
export default config;
`;
const config = loadConfig(source).parse();
expect(config.getNamesFromPath(['addons'])).toEqual(['foo', 'bar']);
});
});
it(`returns undefined when accessing a field that does not exist`, () => {
const source = dedent`
import type { StorybookConfig } from '@storybook/react-webpack5';
const config: StorybookConfig = { }
export default config;
`;
const config = loadConfig(source).parse();
expect(config.getNamesFromPath(['addons'])).toBeUndefined();
});
});
});
| code/lib/csf-tools/src/ConfigFile.test.ts | 1 | https://github.com/storybookjs/storybook/commit/4691ac0f76a0e3fce09bf338ef699e063f5a3fa5 | [
0.998066246509552,
0.10689271986484528,
0.00016367557691410184,
0.00017496747022960335,
0.30593040585517883
] |
{
"id": 0,
"code_window": [
" import type { StorybookConfig } from '@storybook/react-webpack5';\n",
"\n",
" const config: StorybookConfig = {\n",
" framework: { name: 'foo', options: { bar: require('baz') } },\n",
" }\n",
" export default config;\n",
" `;\n",
" const config = loadConfig(source).parse();\n",
" expect(config.getNameFromPath(['framework'])).toEqual('foo');\n"
],
"labels": [
"keep",
"keep",
"keep",
"add",
"keep",
"keep",
"keep",
"keep",
"keep"
],
"after_edit": [
" \"otherField\": { \"name\": 'foo', options: { bar: require('baz') } },\n"
],
"file_path": "code/lib/csf-tools/src/ConfigFile.test.ts",
"type": "add",
"edit_start_line_idx": 771
} | name: Markdown Links Check
# runs every monday at 9 am
on:
schedule:
- cron: '0 9 * * 1'
jobs:
check-links:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v3
- uses: gaurav-nelson/github-action-markdown-link-check@v1
# checks all markdown files from important folders including all subfolders
with:
# only show errors that occur instead of successful links + errors
use-quiet-mode: 'yes'
# output full HTTP info for broken links
use-verbose-mode: 'yes'
config-file: '.github/workflows/markdown-link-check-config.json'
| .github/workflows/cron-weekly.yml | 0 | https://github.com/storybookjs/storybook/commit/4691ac0f76a0e3fce09bf338ef699e063f5a3fa5 | [
0.00017545558512210846,
0.00017131176718976349,
0.0001671679492574185,
0.00017131176718976349,
0.000004143817932344973
] |
{
"id": 0,
"code_window": [
" import type { StorybookConfig } from '@storybook/react-webpack5';\n",
"\n",
" const config: StorybookConfig = {\n",
" framework: { name: 'foo', options: { bar: require('baz') } },\n",
" }\n",
" export default config;\n",
" `;\n",
" const config = loadConfig(source).parse();\n",
" expect(config.getNameFromPath(['framework'])).toEqual('foo');\n"
],
"labels": [
"keep",
"keep",
"keep",
"add",
"keep",
"keep",
"keep",
"keep",
"keep"
],
"after_edit": [
" \"otherField\": { \"name\": 'foo', options: { bar: require('baz') } },\n"
],
"file_path": "code/lib/csf-tools/src/ConfigFile.test.ts",
"type": "add",
"edit_start_line_idx": 771
} | ```ts
// .storybook/main.ts
// Replace your-framework with the framework you are using (e.g., react-webpack5, vue3-webpack5)
import type { StorybookConfig } from '@storybook/your-framework';
const config: StorybookConfig = {
stories: ['../src/**/*.mdx', '../src/**/*.stories.@(js|jsx|ts|tsx)'],
addons: [
'@storybook/addon-links',
'@storybook/addon-actions',
'@storybook/addon-viewport',
{
name: '@storybook/addon-docs',
options: {
configureJSX: true,
babelOptions: {},
transcludeMarkdown: true,
},
},
'@storybook/addon-controls',
'@storybook/addon-backgrounds',
'@storybook/addon-toolbars',
'@storybook/addon-measure',
'@storybook/addon-outline',
],
};
export default config;
```
| docs/snippets/common/storybook-main-full-individual-essentials-config.ts.mdx | 0 | https://github.com/storybookjs/storybook/commit/4691ac0f76a0e3fce09bf338ef699e063f5a3fa5 | [
0.8773372769355774,
0.22033058106899261,
0.00017036519420798868,
0.0019073426956310868,
0.37932562828063965
] |
{
"id": 0,
"code_window": [
" import type { StorybookConfig } from '@storybook/react-webpack5';\n",
"\n",
" const config: StorybookConfig = {\n",
" framework: { name: 'foo', options: { bar: require('baz') } },\n",
" }\n",
" export default config;\n",
" `;\n",
" const config = loadConfig(source).parse();\n",
" expect(config.getNameFromPath(['framework'])).toEqual('foo');\n"
],
"labels": [
"keep",
"keep",
"keep",
"add",
"keep",
"keep",
"keep",
"keep",
"keep"
],
"after_edit": [
" \"otherField\": { \"name\": 'foo', options: { bar: require('baz') } },\n"
],
"file_path": "code/lib/csf-tools/src/ConfigFile.test.ts",
"type": "add",
"edit_start_line_idx": 771
} | ```ts
// List.stories.ts
import List from './List.vue';
import ListItem from './ListItem.vue';
//👇 Imports a specific story from ListItem stories
import { Unchecked } from './ListItem.stories';
import type { Meta, StoryObj } from '@storybook/vue';
const meta: Meta<typeof List> = {
/* 👇 The title prop is optional.
* See https://storybook.js.org/docs/7.0/vue/configure/overview#configure-story-loading
* to learn how to generate automatic titles
*/
title: 'List',
component: List,
};
export default meta;
type Story = StoryObj<typeof List>;
//👇 The ListTemplate construct will be spread to the existing stories.
const ListTemplate = {
render: (args, { argTypes }) => ({
components: { List, ListItem },
props: Object.keys(argTypes),
template: `
<List v-bind="$props">
<div v-for="item in items" :key="item.title">
<ListItem :item="item" />
</div>
</List>
`,
}),
};
export const Empty: Story = {
...ListTemplate,
args: {
items: [],
},
};
export const OneItem: Story = {
...ListTemplate,
args: {
items: [
{
...Unchecked.args,
},
],
},
};
```
| docs/snippets/vue/list-story-template.ts-2.ts.mdx | 0 | https://github.com/storybookjs/storybook/commit/4691ac0f76a0e3fce09bf338ef699e063f5a3fa5 | [
0.00017532965284772217,
0.0001686999894445762,
0.00016551413864362985,
0.00016756224795244634,
0.0000034204456369479885
] |
{
"id": 1,
"code_window": [
" `;\n",
" const config = loadConfig(source).parse();\n",
" expect(config.getNameFromPath(['framework'])).toEqual('foo');\n",
" });\n",
"\n",
" it(`returns undefined when accessing a field that does not exist`, () => {\n",
" const source = dedent`\n",
" import type { StorybookConfig } from '@storybook/react-webpack5';\n"
],
"labels": [
"keep",
"keep",
"add",
"keep",
"keep",
"keep",
"keep",
"keep"
],
"after_edit": [
" expect(config.getNameFromPath(['otherField'])).toEqual('foo');\n"
],
"file_path": "code/lib/csf-tools/src/ConfigFile.test.ts",
"type": "add",
"edit_start_line_idx": 776
} | /// <reference types="@types/jest" />;
import { dedent } from 'ts-dedent';
import { formatConfig, loadConfig } from './ConfigFile';
expect.addSnapshotSerializer({
print: (val: any) => val,
test: (val) => true,
});
const getField = (path: string[], source: string) => {
const config = loadConfig(source).parse();
return config.getFieldValue(path);
};
const setField = (path: string[], value: any, source: string) => {
const config = loadConfig(source).parse();
config.setFieldValue(path, value);
return formatConfig(config);
};
const removeField = (path: string[], source: string) => {
const config = loadConfig(source).parse();
config.removeField(path);
return formatConfig(config);
};
describe('ConfigFile', () => {
describe('getField', () => {
describe('named exports', () => {
it('missing export', () => {
expect(
getField(
['core', 'builder'],
dedent`
export const foo = { builder: 'webpack5' }
`
)
).toBeUndefined();
});
it('missing field', () => {
expect(
getField(
['core', 'builder'],
dedent`
export const core = { foo: 'webpack5' }
`
)
).toBeUndefined();
});
it('found scalar', () => {
expect(
getField(
['core', 'builder'],
dedent`
export const core = { builder: 'webpack5' }
`
)
).toEqual('webpack5');
});
it('found object', () => {
expect(
getField(
['core', 'builder'],
dedent`
export const core = { builder: { name: 'webpack5' } }
`
)
).toEqual({ name: 'webpack5' });
});
it('variable ref export', () => {
expect(
getField(
['core', 'builder'],
dedent`
const coreVar = { builder: 'webpack5' };
export const core = coreVar;
`
)
).toEqual('webpack5');
});
it('variable export', () => {
expect(
getField(
['core', 'builder'],
dedent`
const coreVar = { builder: 'webpack5' };
export const core = coreVar;
`
)
).toEqual('webpack5');
});
});
describe('module exports', () => {
it('missing export', () => {
expect(
getField(
['core', 'builder'],
dedent`
module.exports = { foo: { builder: 'webpack5' } }
`
)
).toBeUndefined();
});
it('found scalar', () => {
expect(
getField(
['core', 'builder'],
dedent`
module.exports = { core: { builder: 'webpack5' } }
`
)
).toEqual('webpack5');
});
it('variable ref export', () => {
expect(
getField(
['core', 'builder'],
dedent`
const core = { builder: 'webpack5' };
module.exports = { core };
`
)
).toEqual('webpack5');
});
it('variable rename', () => {
expect(
getField(
['core', 'builder'],
dedent`
const coreVar = { builder: 'webpack5' };
module.exports = { core: coreVar };
`
)
).toEqual('webpack5');
});
it('variable exports', () => {
expect(
getField(
['stories'],
dedent`
import type { StorybookConfig } from '@storybook/react-webpack5';
const config: StorybookConfig = {
stories: [{ directory: '../src', titlePrefix: 'Demo' }],
}
module.exports = config;
`
)
).toEqual([{ directory: '../src', titlePrefix: 'Demo' }]);
});
});
describe('default export', () => {
it('missing export', () => {
expect(
getField(
['core', 'builder'],
dedent`
export default { foo: { builder: 'webpack5' } }
`
)
).toBeUndefined();
});
it('found scalar', () => {
expect(
getField(
['core', 'builder'],
dedent`
export default { core: { builder: 'webpack5' } }
`
)
).toEqual('webpack5');
});
it('variable ref export', () => {
expect(
getField(
['core', 'builder'],
dedent`
const core = { builder: 'webpack5' };
export default { core };
`
)
).toEqual('webpack5');
});
it('variable rename', () => {
expect(
getField(
['core', 'builder'],
dedent`
const coreVar = { builder: 'webpack5' };
export default { core: coreVar };
`
)
).toEqual('webpack5');
});
it('variable exports', () => {
expect(
getField(
['stories'],
dedent`
import type { StorybookConfig } from '@storybook/react-webpack5';
const config: StorybookConfig = {
stories: [{ directory: '../src', titlePrefix: 'Demo' }],
}
export default config;
`
)
).toEqual([{ directory: '../src', titlePrefix: 'Demo' }]);
});
});
});
describe('setField', () => {
describe('named exports', () => {
it('missing export', () => {
expect(
setField(
['core', 'builder'],
'webpack5',
dedent`
export const addons = [];
`
)
).toMatchInlineSnapshot(`
export const addons = [];
export const core = {
builder: "webpack5"
};
`);
});
it('missing field', () => {
expect(
setField(
['core', 'builder'],
'webpack5',
dedent`
export const core = { foo: 'bar' };
`
)
).toMatchInlineSnapshot(`
export const core = {
foo: 'bar',
builder: 'webpack5'
};
`);
});
it('found scalar', () => {
expect(
setField(
['core', 'builder'],
'webpack5',
dedent`
export const core = { builder: 'webpack4' };
`
)
).toMatchInlineSnapshot(`
export const core = {
builder: 'webpack5'
};
`);
});
it('found object', () => {
expect(
setField(
['core', 'builder'],
{ name: 'webpack5' },
dedent`
export const core = { builder: { name: 'webpack4' } };
`
)
).toMatchInlineSnapshot(`
export const core = {
builder: {
name: 'webpack5'
}
};
`);
});
it('variable export', () => {
expect(
setField(
['core', 'builder'],
'webpack5',
dedent`
const coreVar = { builder: 'webpack4' };
export const core = coreVar;
`
)
).toMatchInlineSnapshot(`
const coreVar = {
builder: 'webpack5'
};
export const core = coreVar;
`);
});
});
describe('module exports', () => {
it('missing export', () => {
expect(
setField(
['core', 'builder'],
'webpack5',
dedent`
module.exports = { addons: [] };
`
)
).toMatchInlineSnapshot(`
module.exports = {
addons: [],
core: {
builder: "webpack5"
}
};
`);
});
it('missing field', () => {
expect(
setField(
['core', 'builder'],
'webpack5',
dedent`
module.exports = { core: { foo: 'bar' }};
`
)
).toMatchInlineSnapshot(`
module.exports = {
core: {
foo: 'bar',
builder: 'webpack5'
}
};
`);
});
it('found scalar', () => {
expect(
setField(
['core', 'builder'],
'webpack5',
dedent`
module.exports = { core: { builder: 'webpack4' } };
`
)
).toMatchInlineSnapshot(`
module.exports = {
core: {
builder: 'webpack5'
}
};
`);
});
});
describe('default export', () => {
it('missing export', () => {
expect(
setField(
['core', 'builder'],
'webpack5',
dedent`
export default { addons: [] };
`
)
).toMatchInlineSnapshot(`
export default {
addons: [],
core: {
builder: "webpack5"
}
};
`);
});
it('missing field', () => {
expect(
setField(
['core', 'builder'],
'webpack5',
dedent`
export default { core: { foo: 'bar' }};
`
)
).toMatchInlineSnapshot(`
export default {
core: {
foo: 'bar',
builder: 'webpack5'
}
};
`);
});
it('found scalar', () => {
expect(
setField(
['core', 'builder'],
'webpack5',
dedent`
export default { core: { builder: 'webpack4' } };
`
)
).toMatchInlineSnapshot(`
export default {
core: {
builder: 'webpack5'
}
};
`);
});
});
describe('quotes', () => {
it('no quotes', () => {
expect(setField(['foo', 'bar'], 'baz', '')).toMatchInlineSnapshot(`
export const foo = {
bar: "baz"
};
`);
});
it('more single quotes', () => {
expect(setField(['foo', 'bar'], 'baz', `export const stories = ['a', 'b', "c"]`))
.toMatchInlineSnapshot(`
export const stories = ['a', 'b', "c"];
export const foo = {
bar: 'baz'
};
`);
});
it('more double quotes', () => {
expect(setField(['foo', 'bar'], 'baz', `export const stories = ['a', "b", "c"]`))
.toMatchInlineSnapshot(`
export const stories = ['a', "b", "c"];
export const foo = {
bar: "baz"
};
`);
});
});
});
describe('removeField', () => {
describe('named exports', () => {
it('missing export', () => {
expect(
removeField(
['core', 'builder'],
dedent`
export const addons = [];
`
)
).toMatchInlineSnapshot(`export const addons = [];`);
});
it('missing field', () => {
expect(
removeField(
['core', 'builder'],
dedent`
export const core = { foo: 'bar' };
`
)
).toMatchInlineSnapshot(`
export const core = {
foo: 'bar'
};
`);
});
it('found scalar', () => {
expect(
removeField(
['core', 'builder'],
dedent`
export const core = { builder: 'webpack4' };
`
)
).toMatchInlineSnapshot(`export const core = {};`);
});
it('found object', () => {
expect(
removeField(
['core', 'builder'],
dedent`
export const core = { builder: { name: 'webpack4' } };
`
)
).toMatchInlineSnapshot(`export const core = {};`);
});
it('nested object', () => {
expect(
removeField(
['core', 'builder', 'name'],
dedent`
export const core = { builder: { name: 'webpack4' } };
`
)
).toMatchInlineSnapshot(`
export const core = {
builder: {}
};
`);
});
it('string literal key', () => {
expect(
removeField(
['core', 'builder'],
dedent`
export const core = { 'builder': 'webpack4' };
`
)
).toMatchInlineSnapshot(`export const core = {};`);
});
it('variable export', () => {
expect(
removeField(
['core', 'builder'],
dedent`
const coreVar = { builder: 'webpack4' };
export const core = coreVar;
`
)
).toMatchInlineSnapshot(`
const coreVar = {};
export const core = coreVar;
`);
});
it('root export variable', () => {
expect(
removeField(
['core'],
dedent`
export const core = { builder: { name: 'webpack4' } };
export const addons = [];
`
)
).toMatchInlineSnapshot(`export const addons = [];`);
});
});
describe('module exports', () => {
it('missing export', () => {
expect(
removeField(
['core', 'builder'],
dedent`
module.exports = { addons: [] };
`
)
).toMatchInlineSnapshot(`
module.exports = {
addons: []
};
`);
});
it('missing field', () => {
expect(
removeField(
['core', 'builder'],
dedent`
module.exports = { core: { foo: 'bar' }};
`
)
).toMatchInlineSnapshot(`
module.exports = {
core: {
foo: 'bar'
}
};
`);
});
it('found scalar', () => {
expect(
removeField(
['core', 'builder'],
dedent`
module.exports = { core: { builder: 'webpack4' } };
`
)
).toMatchInlineSnapshot(`
module.exports = {
core: {}
};
`);
});
it('nested scalar', () => {
expect(
removeField(
['core', 'builder', 'name'],
dedent`
module.exports = { core: { builder: { name: 'webpack4' } } };
`
)
).toMatchInlineSnapshot(`
module.exports = {
core: {
builder: {}
}
};
`);
});
it('string literal key', () => {
expect(
removeField(
['core', 'builder'],
dedent`
module.exports = { 'core': { 'builder': 'webpack4' } };
`
)
).toMatchInlineSnapshot(`
module.exports = {
'core': {}
};
`);
});
it('root property', () => {
expect(
removeField(
['core'],
dedent`
module.exports = { core: { builder: { name: 'webpack4' } }, addons: [] };
`
)
).toMatchInlineSnapshot(`
module.exports = {
addons: []
};
`);
});
});
describe('default export', () => {
it('missing export', () => {
expect(
removeField(
['core', 'builder'],
dedent`
export default { addons: [] };
`
)
).toMatchInlineSnapshot(`
export default {
addons: []
};
`);
});
it('missing field', () => {
expect(
removeField(
['core', 'builder'],
dedent`
export default { core: { foo: 'bar' }};
`
)
).toMatchInlineSnapshot(`
export default {
core: {
foo: 'bar'
}
};
`);
});
it('found scalar', () => {
expect(
removeField(
['core', 'builder'],
dedent`
export default { core: { builder: 'webpack4' } };
`
)
).toMatchInlineSnapshot(`
export default {
core: {}
};
`);
});
it('nested scalar', () => {
expect(
removeField(
['core', 'builder', 'name'],
dedent`
export default { core: { builder: { name: 'webpack4' } } };
`
)
).toMatchInlineSnapshot(`
export default {
core: {
builder: {}
}
};
`);
});
it('string literal key', () => {
expect(
removeField(
['core', 'builder'],
dedent`
export default { 'core': { 'builder': 'webpack4' } };
`
)
).toMatchInlineSnapshot(`
export default {
'core': {}
};
`);
});
it('root property', () => {
expect(
removeField(
['core'],
dedent`
export default { core: { builder: { name: 'webpack4' } }, addons: [] };
`
)
).toMatchInlineSnapshot(`
export default {
addons: []
};
`);
});
});
describe('quotes', () => {
it('no quotes', () => {
expect(setField(['foo', 'bar'], 'baz', '')).toMatchInlineSnapshot(`
export const foo = {
bar: "baz"
};
`);
});
it('more single quotes', () => {
expect(setField(['foo', 'bar'], 'baz', `export const stories = ['a', 'b', "c"]`))
.toMatchInlineSnapshot(`
export const stories = ['a', 'b', "c"];
export const foo = {
bar: 'baz'
};
`);
});
it('more double quotes', () => {
expect(setField(['foo', 'bar'], 'baz', `export const stories = ['a', "b", "c"]`))
.toMatchInlineSnapshot(`
export const stories = ['a', "b", "c"];
export const foo = {
bar: "baz"
};
`);
});
});
});
describe('config helpers', () => {
describe('getNameFromPath', () => {
it(`supports string literal node`, () => {
const source = dedent`
import type { StorybookConfig } from '@storybook/react-webpack5';
const config: StorybookConfig = {
framework: 'foo',
}
export default config;
`;
const config = loadConfig(source).parse();
expect(config.getNameFromPath(['framework'])).toEqual('foo');
});
it(`supports object expression node with name property`, () => {
const source = dedent`
import type { StorybookConfig } from '@storybook/react-webpack5';
const config: StorybookConfig = {
framework: { name: 'foo', options: { bar: require('baz') } },
}
export default config;
`;
const config = loadConfig(source).parse();
expect(config.getNameFromPath(['framework'])).toEqual('foo');
});
it(`returns undefined when accessing a field that does not exist`, () => {
const source = dedent`
import type { StorybookConfig } from '@storybook/react-webpack5';
const config: StorybookConfig = { }
export default config;
`;
const config = loadConfig(source).parse();
expect(config.getNameFromPath(['framework'])).toBeUndefined();
});
it(`throws an error when node is of unexpected type`, () => {
const source = dedent`
import type { StorybookConfig } from '@storybook/react-webpack5';
const config: StorybookConfig = {
framework: makesNoSense(),
}
export default config;
`;
const config = loadConfig(source).parse();
expect(() => config.getNameFromPath(['framework'])).toThrowError(
`The given node must be a string literal or an object expression with a "name" property that is a string literal.`
);
});
});
describe('getNamesFromPath', () => {
it(`supports an array with string literal and object expression with name property`, () => {
const source = dedent`
import type { StorybookConfig } from '@storybook/react-webpack5';
const config: StorybookConfig = {
addons: [
'foo',
{ name: 'bar', options: {} },
]
}
export default config;
`;
const config = loadConfig(source).parse();
expect(config.getNamesFromPath(['addons'])).toEqual(['foo', 'bar']);
});
});
it(`returns undefined when accessing a field that does not exist`, () => {
const source = dedent`
import type { StorybookConfig } from '@storybook/react-webpack5';
const config: StorybookConfig = { }
export default config;
`;
const config = loadConfig(source).parse();
expect(config.getNamesFromPath(['addons'])).toBeUndefined();
});
});
});
| code/lib/csf-tools/src/ConfigFile.test.ts | 1 | https://github.com/storybookjs/storybook/commit/4691ac0f76a0e3fce09bf338ef699e063f5a3fa5 | [
0.9980091452598572,
0.06135333329439163,
0.00016529540880583227,
0.0003212601877748966,
0.22503963112831116
] |
{
"id": 1,
"code_window": [
" `;\n",
" const config = loadConfig(source).parse();\n",
" expect(config.getNameFromPath(['framework'])).toEqual('foo');\n",
" });\n",
"\n",
" it(`returns undefined when accessing a field that does not exist`, () => {\n",
" const source = dedent`\n",
" import type { StorybookConfig } from '@storybook/react-webpack5';\n"
],
"labels": [
"keep",
"keep",
"add",
"keep",
"keep",
"keep",
"keep",
"keep"
],
"after_edit": [
" expect(config.getNameFromPath(['otherField'])).toEqual('foo');\n"
],
"file_path": "code/lib/csf-tools/src/ConfigFile.test.ts",
"type": "add",
"edit_start_line_idx": 776
} | import { StoryFn, Meta, moduleMetadata } from '@storybook/angular';
import { TokenComponent, ITEMS, DEFAULT_NAME } from './angular-src/token.component';
export const Story1: StoryFn = () => ({
template: `<storybook-simple-token-component [name]="name"></storybook-simple-token-component>`,
props: {
name: 'Prop Name',
},
});
Story1.storyName = 'Story 1';
export const Story2: StoryFn = () => ({
template: `<storybook-simple-token-component></storybook-simple-token-component>`,
});
Story2.storyName = 'Story 2';
export default {
// title: 'Core / ModuleMetadata / In export default with decorator',
component: Story1,
decorators: [
moduleMetadata({
imports: [],
declarations: [TokenComponent],
providers: [
{
provide: ITEMS,
useValue: ['Joe', 'Jane'],
},
{
provide: DEFAULT_NAME,
useValue: 'Provider Name',
},
],
}),
],
} as Meta;
| code/frameworks/angular/template/stories/core/moduleMetadata/in-export-default.stories.ts | 0 | https://github.com/storybookjs/storybook/commit/4691ac0f76a0e3fce09bf338ef699e063f5a3fa5 | [
0.00017628329806029797,
0.00017211853992193937,
0.00016550341388210654,
0.0001733437238726765,
0.0000040606460061098915
] |
{
"id": 1,
"code_window": [
" `;\n",
" const config = loadConfig(source).parse();\n",
" expect(config.getNameFromPath(['framework'])).toEqual('foo');\n",
" });\n",
"\n",
" it(`returns undefined when accessing a field that does not exist`, () => {\n",
" const source = dedent`\n",
" import type { StorybookConfig } from '@storybook/react-webpack5';\n"
],
"labels": [
"keep",
"keep",
"add",
"keep",
"keep",
"keep",
"keep",
"keep"
],
"after_edit": [
" expect(config.getNameFromPath(['otherField'])).toEqual('foo');\n"
],
"file_path": "code/lib/csf-tools/src/ConfigFile.test.ts",
"type": "add",
"edit_start_line_idx": 776
} | import React from 'react';
import { IconButton, Icons } from '@storybook/components';
import { Consumer, type Combo } from '@storybook/manager-api';
import type { Addon } from '@storybook/manager-api';
const menuMapper = ({ api, state }: Combo) => ({
isVisible: state.layout.showPanel,
singleStory: state.singleStory,
panelPosition: state.layout.panelPosition,
toggle: () => api.togglePanel(),
});
export const addonsTool: Addon = {
title: 'addons',
id: 'addons',
match: ({ viewMode }) => viewMode === 'story',
render: () => (
<Consumer filter={menuMapper}>
{({ isVisible, toggle, singleStory, panelPosition }) =>
!singleStory &&
!isVisible && (
<>
<IconButton aria-label="Show addons" key="addons" onClick={toggle} title="Show addons">
<Icons icon={panelPosition === 'bottom' ? 'bottombar' : 'sidebaralt'} />
</IconButton>
</>
)
}
</Consumer>
),
};
| code/ui/manager/src/components/preview/tools/addons.tsx | 0 | https://github.com/storybookjs/storybook/commit/4691ac0f76a0e3fce09bf338ef699e063f5a3fa5 | [
0.00017669542285148054,
0.00017443238175474107,
0.00017068893066607416,
0.00017517257947474718,
0.0000023008353764453204
] |
{
"id": 1,
"code_window": [
" `;\n",
" const config = loadConfig(source).parse();\n",
" expect(config.getNameFromPath(['framework'])).toEqual('foo');\n",
" });\n",
"\n",
" it(`returns undefined when accessing a field that does not exist`, () => {\n",
" const source = dedent`\n",
" import type { StorybookConfig } from '@storybook/react-webpack5';\n"
],
"labels": [
"keep",
"keep",
"add",
"keep",
"keep",
"keep",
"keep",
"keep"
],
"after_edit": [
" expect(config.getNameFromPath(['otherField'])).toEqual('foo');\n"
],
"file_path": "code/lib/csf-tools/src/ConfigFile.test.ts",
"type": "add",
"edit_start_line_idx": 776
} | include button.pug | test-storybooks/server-kitchen-sink/views/addons/actions/story6.pug | 0 | https://github.com/storybookjs/storybook/commit/4691ac0f76a0e3fce09bf338ef699e063f5a3fa5 | [
0.00017861636297311634,
0.00017861636297311634,
0.00017861636297311634,
0.00017861636297311634,
0
] |
{
"id": 2,
"code_window": [
"\n",
" const config: StorybookConfig = {\n",
" addons: [\n",
" 'foo',\n",
" { name: 'bar', options: {} },\n",
" ]\n",
" }\n",
" export default config;\n",
" `;\n"
],
"labels": [
"keep",
"keep",
"keep",
"keep",
"keep",
"replace",
"keep",
"keep",
"keep"
],
"after_edit": [
" ],\n",
" \"otherField\": [\n",
" \"foo\",\n",
" { \"name\": 'bar', options: {} },\n",
" ],\n"
],
"file_path": "code/lib/csf-tools/src/ConfigFile.test.ts",
"type": "replace",
"edit_start_line_idx": 814
} | /// <reference types="@types/jest" />;
import { dedent } from 'ts-dedent';
import { formatConfig, loadConfig } from './ConfigFile';
expect.addSnapshotSerializer({
print: (val: any) => val,
test: (val) => true,
});
const getField = (path: string[], source: string) => {
const config = loadConfig(source).parse();
return config.getFieldValue(path);
};
const setField = (path: string[], value: any, source: string) => {
const config = loadConfig(source).parse();
config.setFieldValue(path, value);
return formatConfig(config);
};
const removeField = (path: string[], source: string) => {
const config = loadConfig(source).parse();
config.removeField(path);
return formatConfig(config);
};
describe('ConfigFile', () => {
describe('getField', () => {
describe('named exports', () => {
it('missing export', () => {
expect(
getField(
['core', 'builder'],
dedent`
export const foo = { builder: 'webpack5' }
`
)
).toBeUndefined();
});
it('missing field', () => {
expect(
getField(
['core', 'builder'],
dedent`
export const core = { foo: 'webpack5' }
`
)
).toBeUndefined();
});
it('found scalar', () => {
expect(
getField(
['core', 'builder'],
dedent`
export const core = { builder: 'webpack5' }
`
)
).toEqual('webpack5');
});
it('found object', () => {
expect(
getField(
['core', 'builder'],
dedent`
export const core = { builder: { name: 'webpack5' } }
`
)
).toEqual({ name: 'webpack5' });
});
it('variable ref export', () => {
expect(
getField(
['core', 'builder'],
dedent`
const coreVar = { builder: 'webpack5' };
export const core = coreVar;
`
)
).toEqual('webpack5');
});
it('variable export', () => {
expect(
getField(
['core', 'builder'],
dedent`
const coreVar = { builder: 'webpack5' };
export const core = coreVar;
`
)
).toEqual('webpack5');
});
});
describe('module exports', () => {
it('missing export', () => {
expect(
getField(
['core', 'builder'],
dedent`
module.exports = { foo: { builder: 'webpack5' } }
`
)
).toBeUndefined();
});
it('found scalar', () => {
expect(
getField(
['core', 'builder'],
dedent`
module.exports = { core: { builder: 'webpack5' } }
`
)
).toEqual('webpack5');
});
it('variable ref export', () => {
expect(
getField(
['core', 'builder'],
dedent`
const core = { builder: 'webpack5' };
module.exports = { core };
`
)
).toEqual('webpack5');
});
it('variable rename', () => {
expect(
getField(
['core', 'builder'],
dedent`
const coreVar = { builder: 'webpack5' };
module.exports = { core: coreVar };
`
)
).toEqual('webpack5');
});
it('variable exports', () => {
expect(
getField(
['stories'],
dedent`
import type { StorybookConfig } from '@storybook/react-webpack5';
const config: StorybookConfig = {
stories: [{ directory: '../src', titlePrefix: 'Demo' }],
}
module.exports = config;
`
)
).toEqual([{ directory: '../src', titlePrefix: 'Demo' }]);
});
});
describe('default export', () => {
it('missing export', () => {
expect(
getField(
['core', 'builder'],
dedent`
export default { foo: { builder: 'webpack5' } }
`
)
).toBeUndefined();
});
it('found scalar', () => {
expect(
getField(
['core', 'builder'],
dedent`
export default { core: { builder: 'webpack5' } }
`
)
).toEqual('webpack5');
});
it('variable ref export', () => {
expect(
getField(
['core', 'builder'],
dedent`
const core = { builder: 'webpack5' };
export default { core };
`
)
).toEqual('webpack5');
});
it('variable rename', () => {
expect(
getField(
['core', 'builder'],
dedent`
const coreVar = { builder: 'webpack5' };
export default { core: coreVar };
`
)
).toEqual('webpack5');
});
it('variable exports', () => {
expect(
getField(
['stories'],
dedent`
import type { StorybookConfig } from '@storybook/react-webpack5';
const config: StorybookConfig = {
stories: [{ directory: '../src', titlePrefix: 'Demo' }],
}
export default config;
`
)
).toEqual([{ directory: '../src', titlePrefix: 'Demo' }]);
});
});
});
describe('setField', () => {
describe('named exports', () => {
it('missing export', () => {
expect(
setField(
['core', 'builder'],
'webpack5',
dedent`
export const addons = [];
`
)
).toMatchInlineSnapshot(`
export const addons = [];
export const core = {
builder: "webpack5"
};
`);
});
it('missing field', () => {
expect(
setField(
['core', 'builder'],
'webpack5',
dedent`
export const core = { foo: 'bar' };
`
)
).toMatchInlineSnapshot(`
export const core = {
foo: 'bar',
builder: 'webpack5'
};
`);
});
it('found scalar', () => {
expect(
setField(
['core', 'builder'],
'webpack5',
dedent`
export const core = { builder: 'webpack4' };
`
)
).toMatchInlineSnapshot(`
export const core = {
builder: 'webpack5'
};
`);
});
it('found object', () => {
expect(
setField(
['core', 'builder'],
{ name: 'webpack5' },
dedent`
export const core = { builder: { name: 'webpack4' } };
`
)
).toMatchInlineSnapshot(`
export const core = {
builder: {
name: 'webpack5'
}
};
`);
});
it('variable export', () => {
expect(
setField(
['core', 'builder'],
'webpack5',
dedent`
const coreVar = { builder: 'webpack4' };
export const core = coreVar;
`
)
).toMatchInlineSnapshot(`
const coreVar = {
builder: 'webpack5'
};
export const core = coreVar;
`);
});
});
describe('module exports', () => {
it('missing export', () => {
expect(
setField(
['core', 'builder'],
'webpack5',
dedent`
module.exports = { addons: [] };
`
)
).toMatchInlineSnapshot(`
module.exports = {
addons: [],
core: {
builder: "webpack5"
}
};
`);
});
it('missing field', () => {
expect(
setField(
['core', 'builder'],
'webpack5',
dedent`
module.exports = { core: { foo: 'bar' }};
`
)
).toMatchInlineSnapshot(`
module.exports = {
core: {
foo: 'bar',
builder: 'webpack5'
}
};
`);
});
it('found scalar', () => {
expect(
setField(
['core', 'builder'],
'webpack5',
dedent`
module.exports = { core: { builder: 'webpack4' } };
`
)
).toMatchInlineSnapshot(`
module.exports = {
core: {
builder: 'webpack5'
}
};
`);
});
});
describe('default export', () => {
it('missing export', () => {
expect(
setField(
['core', 'builder'],
'webpack5',
dedent`
export default { addons: [] };
`
)
).toMatchInlineSnapshot(`
export default {
addons: [],
core: {
builder: "webpack5"
}
};
`);
});
it('missing field', () => {
expect(
setField(
['core', 'builder'],
'webpack5',
dedent`
export default { core: { foo: 'bar' }};
`
)
).toMatchInlineSnapshot(`
export default {
core: {
foo: 'bar',
builder: 'webpack5'
}
};
`);
});
it('found scalar', () => {
expect(
setField(
['core', 'builder'],
'webpack5',
dedent`
export default { core: { builder: 'webpack4' } };
`
)
).toMatchInlineSnapshot(`
export default {
core: {
builder: 'webpack5'
}
};
`);
});
});
describe('quotes', () => {
it('no quotes', () => {
expect(setField(['foo', 'bar'], 'baz', '')).toMatchInlineSnapshot(`
export const foo = {
bar: "baz"
};
`);
});
it('more single quotes', () => {
expect(setField(['foo', 'bar'], 'baz', `export const stories = ['a', 'b', "c"]`))
.toMatchInlineSnapshot(`
export const stories = ['a', 'b', "c"];
export const foo = {
bar: 'baz'
};
`);
});
it('more double quotes', () => {
expect(setField(['foo', 'bar'], 'baz', `export const stories = ['a', "b", "c"]`))
.toMatchInlineSnapshot(`
export const stories = ['a', "b", "c"];
export const foo = {
bar: "baz"
};
`);
});
});
});
describe('removeField', () => {
describe('named exports', () => {
it('missing export', () => {
expect(
removeField(
['core', 'builder'],
dedent`
export const addons = [];
`
)
).toMatchInlineSnapshot(`export const addons = [];`);
});
it('missing field', () => {
expect(
removeField(
['core', 'builder'],
dedent`
export const core = { foo: 'bar' };
`
)
).toMatchInlineSnapshot(`
export const core = {
foo: 'bar'
};
`);
});
it('found scalar', () => {
expect(
removeField(
['core', 'builder'],
dedent`
export const core = { builder: 'webpack4' };
`
)
).toMatchInlineSnapshot(`export const core = {};`);
});
it('found object', () => {
expect(
removeField(
['core', 'builder'],
dedent`
export const core = { builder: { name: 'webpack4' } };
`
)
).toMatchInlineSnapshot(`export const core = {};`);
});
it('nested object', () => {
expect(
removeField(
['core', 'builder', 'name'],
dedent`
export const core = { builder: { name: 'webpack4' } };
`
)
).toMatchInlineSnapshot(`
export const core = {
builder: {}
};
`);
});
it('string literal key', () => {
expect(
removeField(
['core', 'builder'],
dedent`
export const core = { 'builder': 'webpack4' };
`
)
).toMatchInlineSnapshot(`export const core = {};`);
});
it('variable export', () => {
expect(
removeField(
['core', 'builder'],
dedent`
const coreVar = { builder: 'webpack4' };
export const core = coreVar;
`
)
).toMatchInlineSnapshot(`
const coreVar = {};
export const core = coreVar;
`);
});
it('root export variable', () => {
expect(
removeField(
['core'],
dedent`
export const core = { builder: { name: 'webpack4' } };
export const addons = [];
`
)
).toMatchInlineSnapshot(`export const addons = [];`);
});
});
describe('module exports', () => {
it('missing export', () => {
expect(
removeField(
['core', 'builder'],
dedent`
module.exports = { addons: [] };
`
)
).toMatchInlineSnapshot(`
module.exports = {
addons: []
};
`);
});
it('missing field', () => {
expect(
removeField(
['core', 'builder'],
dedent`
module.exports = { core: { foo: 'bar' }};
`
)
).toMatchInlineSnapshot(`
module.exports = {
core: {
foo: 'bar'
}
};
`);
});
it('found scalar', () => {
expect(
removeField(
['core', 'builder'],
dedent`
module.exports = { core: { builder: 'webpack4' } };
`
)
).toMatchInlineSnapshot(`
module.exports = {
core: {}
};
`);
});
it('nested scalar', () => {
expect(
removeField(
['core', 'builder', 'name'],
dedent`
module.exports = { core: { builder: { name: 'webpack4' } } };
`
)
).toMatchInlineSnapshot(`
module.exports = {
core: {
builder: {}
}
};
`);
});
it('string literal key', () => {
expect(
removeField(
['core', 'builder'],
dedent`
module.exports = { 'core': { 'builder': 'webpack4' } };
`
)
).toMatchInlineSnapshot(`
module.exports = {
'core': {}
};
`);
});
it('root property', () => {
expect(
removeField(
['core'],
dedent`
module.exports = { core: { builder: { name: 'webpack4' } }, addons: [] };
`
)
).toMatchInlineSnapshot(`
module.exports = {
addons: []
};
`);
});
});
describe('default export', () => {
it('missing export', () => {
expect(
removeField(
['core', 'builder'],
dedent`
export default { addons: [] };
`
)
).toMatchInlineSnapshot(`
export default {
addons: []
};
`);
});
it('missing field', () => {
expect(
removeField(
['core', 'builder'],
dedent`
export default { core: { foo: 'bar' }};
`
)
).toMatchInlineSnapshot(`
export default {
core: {
foo: 'bar'
}
};
`);
});
it('found scalar', () => {
expect(
removeField(
['core', 'builder'],
dedent`
export default { core: { builder: 'webpack4' } };
`
)
).toMatchInlineSnapshot(`
export default {
core: {}
};
`);
});
it('nested scalar', () => {
expect(
removeField(
['core', 'builder', 'name'],
dedent`
export default { core: { builder: { name: 'webpack4' } } };
`
)
).toMatchInlineSnapshot(`
export default {
core: {
builder: {}
}
};
`);
});
it('string literal key', () => {
expect(
removeField(
['core', 'builder'],
dedent`
export default { 'core': { 'builder': 'webpack4' } };
`
)
).toMatchInlineSnapshot(`
export default {
'core': {}
};
`);
});
it('root property', () => {
expect(
removeField(
['core'],
dedent`
export default { core: { builder: { name: 'webpack4' } }, addons: [] };
`
)
).toMatchInlineSnapshot(`
export default {
addons: []
};
`);
});
});
describe('quotes', () => {
it('no quotes', () => {
expect(setField(['foo', 'bar'], 'baz', '')).toMatchInlineSnapshot(`
export const foo = {
bar: "baz"
};
`);
});
it('more single quotes', () => {
expect(setField(['foo', 'bar'], 'baz', `export const stories = ['a', 'b', "c"]`))
.toMatchInlineSnapshot(`
export const stories = ['a', 'b', "c"];
export const foo = {
bar: 'baz'
};
`);
});
it('more double quotes', () => {
expect(setField(['foo', 'bar'], 'baz', `export const stories = ['a', "b", "c"]`))
.toMatchInlineSnapshot(`
export const stories = ['a', "b", "c"];
export const foo = {
bar: "baz"
};
`);
});
});
});
describe('config helpers', () => {
describe('getNameFromPath', () => {
it(`supports string literal node`, () => {
const source = dedent`
import type { StorybookConfig } from '@storybook/react-webpack5';
const config: StorybookConfig = {
framework: 'foo',
}
export default config;
`;
const config = loadConfig(source).parse();
expect(config.getNameFromPath(['framework'])).toEqual('foo');
});
it(`supports object expression node with name property`, () => {
const source = dedent`
import type { StorybookConfig } from '@storybook/react-webpack5';
const config: StorybookConfig = {
framework: { name: 'foo', options: { bar: require('baz') } },
}
export default config;
`;
const config = loadConfig(source).parse();
expect(config.getNameFromPath(['framework'])).toEqual('foo');
});
it(`returns undefined when accessing a field that does not exist`, () => {
const source = dedent`
import type { StorybookConfig } from '@storybook/react-webpack5';
const config: StorybookConfig = { }
export default config;
`;
const config = loadConfig(source).parse();
expect(config.getNameFromPath(['framework'])).toBeUndefined();
});
it(`throws an error when node is of unexpected type`, () => {
const source = dedent`
import type { StorybookConfig } from '@storybook/react-webpack5';
const config: StorybookConfig = {
framework: makesNoSense(),
}
export default config;
`;
const config = loadConfig(source).parse();
expect(() => config.getNameFromPath(['framework'])).toThrowError(
`The given node must be a string literal or an object expression with a "name" property that is a string literal.`
);
});
});
describe('getNamesFromPath', () => {
it(`supports an array with string literal and object expression with name property`, () => {
const source = dedent`
import type { StorybookConfig } from '@storybook/react-webpack5';
const config: StorybookConfig = {
addons: [
'foo',
{ name: 'bar', options: {} },
]
}
export default config;
`;
const config = loadConfig(source).parse();
expect(config.getNamesFromPath(['addons'])).toEqual(['foo', 'bar']);
});
});
it(`returns undefined when accessing a field that does not exist`, () => {
const source = dedent`
import type { StorybookConfig } from '@storybook/react-webpack5';
const config: StorybookConfig = { }
export default config;
`;
const config = loadConfig(source).parse();
expect(config.getNamesFromPath(['addons'])).toBeUndefined();
});
});
});
| code/lib/csf-tools/src/ConfigFile.test.ts | 1 | https://github.com/storybookjs/storybook/commit/4691ac0f76a0e3fce09bf338ef699e063f5a3fa5 | [
0.998128354549408,
0.10563486814498901,
0.00016304812743328512,
0.00021130195818841457,
0.30088159441947937
] |
{
"id": 2,
"code_window": [
"\n",
" const config: StorybookConfig = {\n",
" addons: [\n",
" 'foo',\n",
" { name: 'bar', options: {} },\n",
" ]\n",
" }\n",
" export default config;\n",
" `;\n"
],
"labels": [
"keep",
"keep",
"keep",
"keep",
"keep",
"replace",
"keep",
"keep",
"keep"
],
"after_edit": [
" ],\n",
" \"otherField\": [\n",
" \"foo\",\n",
" { \"name\": 'bar', options: {} },\n",
" ],\n"
],
"file_path": "code/lib/csf-tools/src/ConfigFile.test.ts",
"type": "replace",
"edit_start_line_idx": 814
} | // Jest Snapshot v1, https://goo.gl/fbAQLP
exports[`react component properties js-function-component-inline-defaults-no-propTypes 1`] = `
Object {
"arrayOptional": Object {
"control": Object {
"type": "object",
},
"description": undefined,
"name": "arrayOptional",
"table": Object {
"defaultValue": Object {
"detail": undefined,
"summary": "['array', 'optional']",
},
"jsDocTags": undefined,
"type": Object {
"detail": undefined,
"summary": "unknown",
},
},
"type": Object {
"required": false,
},
},
"booleanOptional": Object {
"control": Object {
"type": "object",
},
"description": undefined,
"name": "booleanOptional",
"table": Object {
"defaultValue": Object {
"detail": undefined,
"summary": "false",
},
"jsDocTags": undefined,
"type": Object {
"detail": undefined,
"summary": "unknown",
},
},
"type": Object {
"required": false,
},
},
"dateOptional": Object {
"control": Object {
"type": "object",
},
"description": undefined,
"name": "dateOptional",
"table": Object {
"defaultValue": Object {
"detail": undefined,
"summary": "new Date('20 Jan 1983')",
},
"jsDocTags": undefined,
"type": Object {
"detail": undefined,
"summary": "unknown",
},
},
"type": Object {
"required": false,
},
},
"functionOptional": Object {
"control": Object {
"type": "object",
},
"description": undefined,
"name": "functionOptional",
"table": Object {
"defaultValue": Object {
"detail": undefined,
"summary": "() => 'foo'",
},
"jsDocTags": undefined,
"type": Object {
"detail": undefined,
"summary": "unknown",
},
},
"type": Object {
"required": false,
},
},
"globalReference": Object {
"control": Object {
"type": "object",
},
"description": undefined,
"name": "globalReference",
"table": Object {
"defaultValue": Object {
"detail": undefined,
"summary": "Date",
},
"jsDocTags": undefined,
"type": Object {
"detail": undefined,
"summary": "unknown",
},
},
"type": Object {
"required": false,
},
},
"importedReference": Object {
"control": Object {
"type": "object",
},
"description": undefined,
"name": "importedReference",
"table": Object {
"defaultValue": Object {
"detail": undefined,
"summary": "imported",
},
"jsDocTags": undefined,
"type": Object {
"detail": undefined,
"summary": "unknown",
},
},
"type": Object {
"required": false,
},
},
"localReference": Object {
"control": Object {
"type": "object",
},
"description": undefined,
"name": "localReference",
"table": Object {
"defaultValue": Object {
"detail": undefined,
"summary": "'local-value'",
},
"jsDocTags": undefined,
"type": Object {
"detail": undefined,
"summary": "unknown",
},
},
"type": Object {
"required": false,
},
},
"numberOptional": Object {
"control": Object {
"type": "object",
},
"description": undefined,
"name": "numberOptional",
"table": Object {
"defaultValue": Object {
"detail": undefined,
"summary": "1",
},
"jsDocTags": undefined,
"type": Object {
"detail": undefined,
"summary": "unknown",
},
},
"type": Object {
"required": false,
},
},
"objectOptional": Object {
"control": Object {
"type": "object",
},
"description": undefined,
"name": "objectOptional",
"table": Object {
"defaultValue": Object {
"detail": undefined,
"summary": "{ object: 'optional' }",
},
"jsDocTags": undefined,
"type": Object {
"detail": undefined,
"summary": "unknown",
},
},
"type": Object {
"required": false,
},
},
"stringGlobalName": Object {
"control": Object {
"type": "object",
},
"description": undefined,
"name": "stringGlobalName",
"table": Object {
"defaultValue": Object {
"detail": undefined,
"summary": "'top'",
},
"jsDocTags": undefined,
"type": Object {
"detail": undefined,
"summary": "unknown",
},
},
"type": Object {
"required": false,
},
},
"stringOptional": Object {
"control": Object {
"type": "object",
},
"description": undefined,
"name": "stringOptional",
"table": Object {
"defaultValue": Object {
"detail": undefined,
"summary": "'stringOptional'",
},
"jsDocTags": undefined,
"type": Object {
"detail": undefined,
"summary": "unknown",
},
},
"type": Object {
"required": false,
},
},
}
`;
| code/renderers/react/template/stories/docgen-components/js-function-component-inline-defaults-no-propTypes/argTypes.snapshot | 0 | https://github.com/storybookjs/storybook/commit/4691ac0f76a0e3fce09bf338ef699e063f5a3fa5 | [
0.00017604074673727155,
0.00017299554019700736,
0.00016784484614618123,
0.0001731570955598727,
0.0000016712237993488088
] |