hunk
dict
file
stringlengths
0
11.8M
file_path
stringlengths
2
234
label
int64
0
1
commit_url
stringlengths
74
103
dependency_score
listlengths
5
5
{ "id": 0, "code_window": [ "export interface LoadExploreDataSourcesPayload {\n", " exploreId: ExploreId;\n", " exploreDatasources: DataSourceSelectItem[];\n", "}\n", "\n", "/**\n", " * Adds a query row after the row with the given index.\n", " */\n" ], "labels": [ "keep", "keep", "keep", "keep", "add", "keep", "keep", "keep" ], "after_edit": [ "export interface RunQueriesPayload {\n", " exploreId: ExploreId;\n", "}\n", "\n" ], "file_path": "public/app/features/explore/state/actionTypes.ts", "type": "add", "edit_start_line_idx": 211 }
import _ from 'lodash'; import { describe, beforeEach, it, expect } from '../../../../../test/lib/common'; import TimeSeries from 'app/core/time_series2'; import { convertToHeatMap, convertToCards, histogramToHeatmap, calculateBucketSize, isHeatmapDataEqual, } from '../heatmap_data_converter'; describe('isHeatmapDataEqual', () => { const ctx: any = {}; beforeEach(() => { ctx.heatmapA = { '1422774000000': { x: 1422774000000, buckets: { '1': { y: 1, values: [1, 1.5] }, '2': { y: 2, values: [1] }, }, }, }; ctx.heatmapB = { '1422774000000': { x: 1422774000000, buckets: { '1': { y: 1, values: [1.5, 1] }, '2': { y: 2, values: [1] }, }, }, }; }); it('should proper compare objects', () => { const heatmapC = _.cloneDeep(ctx.heatmapA); heatmapC['1422774000000'].buckets['1'].values = [1, 1.5]; const heatmapD = _.cloneDeep(ctx.heatmapA); heatmapD['1422774000000'].buckets['1'].values = [1.5, 1, 1.6]; const heatmapE = _.cloneDeep(ctx.heatmapA); heatmapE['1422774000000'].buckets['1'].values = [1, 1.6]; const empty = {}; const emptyValues = _.cloneDeep(ctx.heatmapA); emptyValues['1422774000000'].buckets['1'].values = []; expect(isHeatmapDataEqual(ctx.heatmapA, ctx.heatmapB)).toBe(true); expect(isHeatmapDataEqual(ctx.heatmapB, ctx.heatmapA)).toBe(true); expect(isHeatmapDataEqual(ctx.heatmapA, heatmapC)).toBe(true); expect(isHeatmapDataEqual(heatmapC, ctx.heatmapA)).toBe(true); expect(isHeatmapDataEqual(ctx.heatmapA, heatmapD)).toBe(false); expect(isHeatmapDataEqual(heatmapD, ctx.heatmapA)).toBe(false); expect(isHeatmapDataEqual(ctx.heatmapA, heatmapE)).toBe(false); expect(isHeatmapDataEqual(heatmapE, ctx.heatmapA)).toBe(false); expect(isHeatmapDataEqual(empty, ctx.heatmapA)).toBe(false); expect(isHeatmapDataEqual(ctx.heatmapA, empty)).toBe(false); expect(isHeatmapDataEqual(emptyValues, ctx.heatmapA)).toBe(false); expect(isHeatmapDataEqual(ctx.heatmapA, emptyValues)).toBe(false); }); }); describe('calculateBucketSize', () => { const ctx: any = {}; describe('when logBase is 1 (linear 
scale)', () => { beforeEach(() => { ctx.logBase = 1; ctx.bounds_set = [ { bounds: [], size: 0 }, { bounds: [0], size: 0 }, { bounds: [4], size: 4 }, { bounds: [0, 1, 2, 3, 4], size: 1 }, { bounds: [0, 1, 3, 5, 7], size: 1 }, { bounds: [0, 3, 7, 9, 15], size: 2 }, { bounds: [0, 7, 3, 15, 9], size: 2 }, { bounds: [0, 5, 10, 15, 50], size: 5 }, ]; }); it('should properly calculate bucket size', () => { _.each(ctx.bounds_set, b => { const bucketSize = calculateBucketSize(b.bounds, ctx.logBase); expect(bucketSize).toBe(b.size); }); }); }); describe('when logBase is 2', () => { beforeEach(() => { ctx.logBase = 2; ctx.bounds_set = [ { bounds: [], size: 0 }, { bounds: [0], size: 0 }, { bounds: [4], size: 4 }, { bounds: [1, 2, 4, 8], size: 1 }, { bounds: [1, Math.SQRT2, 2, 8, 16], size: 0.5 }, ]; }); it('should properly calculate bucket size', () => { _.each(ctx.bounds_set, b => { const bucketSize = calculateBucketSize(b.bounds, ctx.logBase); expect(isEqual(bucketSize, b.size)).toBe(true); }); }); }); }); describe('HeatmapDataConverter', () => { const ctx: any = {}; beforeEach(() => { ctx.series = []; ctx.series.push( new TimeSeries({ datapoints: [[1, 1422774000000], [1, 1422774000010], [2, 1422774060000]], alias: 'series1', }) ); ctx.series.push( new TimeSeries({ datapoints: [[2, 1422774000000], [2, 1422774000010], [3, 1422774060000]], alias: 'series2', }) ); ctx.series.push( new TimeSeries({ datapoints: [[5, 1422774000000], [3, 1422774000010], [4, 1422774060000]], alias: 'series3', }) ); ctx.xBucketSize = 60000; // 60s ctx.yBucketSize = 2; ctx.logBase = 1; }); describe('when logBase is 1 (linear scale)', () => { beforeEach(() => { ctx.logBase = 1; }); it('should build proper heatmap data', () => { const expectedHeatmap = { '1422774000000': { x: 1422774000000, buckets: { '0': { y: 0, values: [1, 1], count: 2, bounds: { bottom: 0, top: 2 }, }, '2': { y: 2, values: [2, 2, 3], count: 3, bounds: { bottom: 2, top: 4 }, }, '4': { y: 4, values: [5], count: 1, bounds: { bottom: 4, 
top: 6 } }, }, }, '1422774060000': { x: 1422774060000, buckets: { '2': { y: 2, values: [2, 3], count: 3, bounds: { bottom: 2, top: 4 }, }, '4': { y: 4, values: [4], count: 1, bounds: { bottom: 4, top: 6 } }, }, }, }; const heatmap = convertToHeatMap(ctx.series, ctx.yBucketSize, ctx.xBucketSize, ctx.logBase); expect(isHeatmapDataEqual(heatmap, expectedHeatmap)).toBe(true); }); }); describe.skip('when logBase is 2', () => { beforeEach(() => { ctx.logBase = 2; }); it('should build proper heatmap data', () => { const expectedHeatmap = { '1422774000000': { x: 1422774000000, buckets: { '1': { y: 1, values: [1] }, '2': { y: 2, values: [2] }, }, }, '1422774060000': { x: 1422774060000, buckets: { '2': { y: 2, values: [2, 3] }, }, }, }; const heatmap = convertToHeatMap(ctx.series, ctx.yBucketSize, ctx.xBucketSize, ctx.logBase); expect(isHeatmapDataEqual(heatmap, expectedHeatmap)).toBe(true); }); }); }); describe('Histogram converter', () => { const ctx: any = {}; beforeEach(() => { ctx.series = []; ctx.series.push( new TimeSeries({ datapoints: [[1, 1422774000000], [0, 1422774060000]], alias: '1', label: '1', }) ); ctx.series.push( new TimeSeries({ datapoints: [[5, 1422774000000], [3, 1422774060000]], alias: '2', label: '2', }) ); ctx.series.push( new TimeSeries({ datapoints: [[0, 1422774000000], [1, 1422774060000]], alias: '3', label: '3', }) ); }); describe('when converting histogram', () => { beforeEach(() => {}); it('should build proper heatmap data', () => { const expectedHeatmap = { '1422774000000': { x: 1422774000000, buckets: { '0': { y: 0, count: 1, bounds: { bottom: 0, top: null }, values: [], points: [], }, '1': { y: 1, count: 5, bounds: { bottom: 1, top: null }, values: [], points: [], }, '2': { y: 2, count: 0, bounds: { bottom: 2, top: null }, values: [], points: [], }, }, }, '1422774060000': { x: 1422774060000, buckets: { '0': { y: 0, count: 0, bounds: { bottom: 0, top: null }, values: [], points: [], }, '1': { y: 1, count: 3, bounds: { bottom: 1, top: null }, 
values: [], points: [], }, '2': { y: 2, count: 1, bounds: { bottom: 2, top: null }, values: [], points: [], }, }, }, }; const heatmap = histogramToHeatmap(ctx.series); expect(heatmap).toEqual(expectedHeatmap); }); it('should use bucket index as a bound', () => { const heatmap = histogramToHeatmap(ctx.series); const bucketLabels = _.map(heatmap['1422774000000'].buckets, (b, label) => label); const bucketYs = _.map(heatmap['1422774000000'].buckets, 'y'); const bucketBottoms = _.map(heatmap['1422774000000'].buckets, b => b.bounds.bottom); const expectedBounds = [0, 1, 2]; expect(bucketLabels).toEqual(_.map(expectedBounds, b => b.toString())); expect(bucketYs).toEqual(expectedBounds); expect(bucketBottoms).toEqual(expectedBounds); }); }); }); describe('convertToCards', () => { let buckets = {}; beforeEach(() => { buckets = { '1422774000000': { x: 1422774000000, buckets: { '1': { y: 1, values: [1], count: 1, bounds: {} }, '2': { y: 2, values: [2], count: 1, bounds: {} }, }, }, '1422774060000': { x: 1422774060000, buckets: { '2': { y: 2, values: [2, 3], count: 2, bounds: {} }, }, }, }; }); it('should build proper cards data', () => { const expectedCards = [ { x: 1422774000000, y: 1, count: 1, values: [1], yBounds: {} }, { x: 1422774000000, y: 2, count: 1, values: [2], yBounds: {} }, { x: 1422774060000, y: 2, count: 2, values: [2, 3], yBounds: {} }, ]; const res = convertToCards(buckets); expect(res.cards).toMatchObject(expectedCards); }); it('should build proper cards stats', () => { const expectedStats = { min: 1, max: 2 }; const res = convertToCards(buckets); expect(res.cardStats).toMatchObject(expectedStats); }); }); /** * Compare two numbers with given precision. Suitable for compare float numbers after conversions with precision loss. * @param a * @param b * @param precision */ function isEqual(a: number, b: number, precision = 0.000001): boolean { if (a === b) { return true; } else { return Math.abs(1 - a / b) <= precision; } }
public/app/plugins/panel/heatmap/specs/heatmap_data_converter.test.ts
0
https://github.com/grafana/grafana/commit/1341f4517a3a67c58c8d4bca2b184fb3dea735f7
[ 0.00017283206398133188, 0.00016932323342189193, 0.000165446603205055, 0.00016916097956709564, 0.000001787136625353014 ]
{ "id": 0, "code_window": [ "export interface LoadExploreDataSourcesPayload {\n", " exploreId: ExploreId;\n", " exploreDatasources: DataSourceSelectItem[];\n", "}\n", "\n", "/**\n", " * Adds a query row after the row with the given index.\n", " */\n" ], "labels": [ "keep", "keep", "keep", "keep", "add", "keep", "keep", "keep" ], "after_edit": [ "export interface RunQueriesPayload {\n", " exploreId: ExploreId;\n", "}\n", "\n" ], "file_path": "public/app/features/explore/state/actionTypes.ts", "type": "add", "edit_start_line_idx": 211 }
package pq import ( "bufio" "crypto/md5" "database/sql" "database/sql/driver" "encoding/binary" "errors" "fmt" "io" "net" "os" "os/user" "path" "path/filepath" "strconv" "strings" "time" "unicode" "github.com/lib/pq/oid" ) // Common error types var ( ErrNotSupported = errors.New("pq: Unsupported command") ErrInFailedTransaction = errors.New("pq: Could not complete operation in a failed transaction") ErrSSLNotSupported = errors.New("pq: SSL is not enabled on the server") ErrSSLKeyHasWorldPermissions = errors.New("pq: Private key file has group or world access. Permissions should be u=rw (0600) or less") ErrCouldNotDetectUsername = errors.New("pq: Could not detect default username. Please provide one explicitly") errUnexpectedReady = errors.New("unexpected ReadyForQuery") errNoRowsAffected = errors.New("no RowsAffected available after the empty statement") errNoLastInsertID = errors.New("no LastInsertId available after the empty statement") ) // Driver is the Postgres database driver. type Driver struct{} // Open opens a new connection to the database. name is a connection string. // Most users should only use it through database/sql package from the standard // library. 
func (d *Driver) Open(name string) (driver.Conn, error) { return Open(name) } func init() { sql.Register("postgres", &Driver{}) } type parameterStatus struct { // server version in the same format as server_version_num, or 0 if // unavailable serverVersion int // the current location based on the TimeZone value of the session, if // available currentLocation *time.Location } type transactionStatus byte const ( txnStatusIdle transactionStatus = 'I' txnStatusIdleInTransaction transactionStatus = 'T' txnStatusInFailedTransaction transactionStatus = 'E' ) func (s transactionStatus) String() string { switch s { case txnStatusIdle: return "idle" case txnStatusIdleInTransaction: return "idle in transaction" case txnStatusInFailedTransaction: return "in a failed transaction" default: errorf("unknown transactionStatus %d", s) } panic("not reached") } // Dialer is the dialer interface. It can be used to obtain more control over // how pq creates network connections. type Dialer interface { Dial(network, address string) (net.Conn, error) DialTimeout(network, address string, timeout time.Duration) (net.Conn, error) } type defaultDialer struct{} func (d defaultDialer) Dial(ntw, addr string) (net.Conn, error) { return net.Dial(ntw, addr) } func (d defaultDialer) DialTimeout(ntw, addr string, timeout time.Duration) (net.Conn, error) { return net.DialTimeout(ntw, addr, timeout) } type conn struct { c net.Conn buf *bufio.Reader namei int scratch [512]byte txnStatus transactionStatus txnFinish func() // Save connection arguments to use during CancelRequest. dialer Dialer opts values // Cancellation key data for use with CancelRequest messages. processID int secretKey int parameterStatus parameterStatus saveMessageType byte saveMessageBuffer []byte // If true, this connection is bad and all public-facing functions should // return ErrBadConn. bad bool // If set, this connection should never use the binary format when // receiving query results from prepared statements. 
Only provided for // debugging. disablePreparedBinaryResult bool // Whether to always send []byte parameters over as binary. Enables single // round-trip mode for non-prepared Query calls. binaryParameters bool // If true this connection is in the middle of a COPY inCopy bool } // Handle driver-side settings in parsed connection string. func (cn *conn) handleDriverSettings(o values) (err error) { boolSetting := func(key string, val *bool) error { if value, ok := o[key]; ok { if value == "yes" { *val = true } else if value == "no" { *val = false } else { return fmt.Errorf("unrecognized value %q for %s", value, key) } } return nil } err = boolSetting("disable_prepared_binary_result", &cn.disablePreparedBinaryResult) if err != nil { return err } return boolSetting("binary_parameters", &cn.binaryParameters) } func (cn *conn) handlePgpass(o values) { // if a password was supplied, do not process .pgpass if _, ok := o["password"]; ok { return } filename := os.Getenv("PGPASSFILE") if filename == "" { // XXX this code doesn't work on Windows where the default filename is // XXX %APPDATA%\postgresql\pgpass.conf // Prefer $HOME over user.Current due to glibc bug: golang.org/issue/13470 userHome := os.Getenv("HOME") if userHome == "" { user, err := user.Current() if err != nil { return } userHome = user.HomeDir } filename = filepath.Join(userHome, ".pgpass") } fileinfo, err := os.Stat(filename) if err != nil { return } mode := fileinfo.Mode() if mode&(0x77) != 0 { // XXX should warn about incorrect .pgpass permissions as psql does return } file, err := os.Open(filename) if err != nil { return } defer file.Close() scanner := bufio.NewScanner(io.Reader(file)) hostname := o["host"] ntw, _ := network(o) port := o["port"] db := o["dbname"] username := o["user"] // From: https://github.com/tg/pgpass/blob/master/reader.go getFields := func(s string) []string { fs := make([]string, 0, 5) f := make([]rune, 0, len(s)) var esc bool for _, c := range s { switch { case esc: f = append(f, 
c) esc = false case c == '\\': esc = true case c == ':': fs = append(fs, string(f)) f = f[:0] default: f = append(f, c) } } return append(fs, string(f)) } for scanner.Scan() { line := scanner.Text() if len(line) == 0 || line[0] == '#' { continue } split := getFields(line) if len(split) != 5 { continue } if (split[0] == "*" || split[0] == hostname || (split[0] == "localhost" && (hostname == "" || ntw == "unix"))) && (split[1] == "*" || split[1] == port) && (split[2] == "*" || split[2] == db) && (split[3] == "*" || split[3] == username) { o["password"] = split[4] return } } } func (cn *conn) writeBuf(b byte) *writeBuf { cn.scratch[0] = b return &writeBuf{ buf: cn.scratch[:5], pos: 1, } } // Open opens a new connection to the database. name is a connection string. // Most users should only use it through database/sql package from the standard // library. func Open(name string) (_ driver.Conn, err error) { return DialOpen(defaultDialer{}, name) } // DialOpen opens a new connection to the database using a dialer. func DialOpen(d Dialer, name string) (_ driver.Conn, err error) { // Handle any panics during connection initialization. Note that we // specifically do *not* want to use errRecover(), as that would turn any // connection errors into ErrBadConns, hiding the real error message from // the user. defer errRecoverNoErrBadConn(&err) o := make(values) // A number of defaults are applied here, in this order: // // * Very low precedence defaults applied in every situation // * Environment variables // * Explicitly passed connection information o["host"] = "localhost" o["port"] = "5432" // N.B.: Extra float digits should be set to 3, but that breaks // Postgres 8.4 and older, where the max is 2. 
o["extra_float_digits"] = "2" for k, v := range parseEnviron(os.Environ()) { o[k] = v } if strings.HasPrefix(name, "postgres://") || strings.HasPrefix(name, "postgresql://") { name, err = ParseURL(name) if err != nil { return nil, err } } if err := parseOpts(name, o); err != nil { return nil, err } // Use the "fallback" application name if necessary if fallback, ok := o["fallback_application_name"]; ok { if _, ok := o["application_name"]; !ok { o["application_name"] = fallback } } // We can't work with any client_encoding other than UTF-8 currently. // However, we have historically allowed the user to set it to UTF-8 // explicitly, and there's no reason to break such programs, so allow that. // Note that the "options" setting could also set client_encoding, but // parsing its value is not worth it. Instead, we always explicitly send // client_encoding as a separate run-time parameter, which should override // anything set in options. if enc, ok := o["client_encoding"]; ok && !isUTF8(enc) { return nil, errors.New("client_encoding must be absent or 'UTF8'") } o["client_encoding"] = "UTF8" // DateStyle needs a similar treatment. if datestyle, ok := o["datestyle"]; ok { if datestyle != "ISO, MDY" { panic(fmt.Sprintf("setting datestyle must be absent or %v; got %v", "ISO, MDY", datestyle)) } } else { o["datestyle"] = "ISO, MDY" } // If a user is not provided by any other means, the last // resort is to use the current operating system provided user // name. if _, ok := o["user"]; !ok { u, err := userCurrent() if err != nil { return nil, err } o["user"] = u } cn := &conn{ opts: o, dialer: d, } err = cn.handleDriverSettings(o) if err != nil { return nil, err } cn.handlePgpass(o) cn.c, err = dial(d, o) if err != nil { return nil, err } err = cn.ssl(o) if err != nil { return nil, err } // cn.startup panics on error. Make sure we don't leak cn.c. 
panicking := true defer func() { if panicking { cn.c.Close() } }() cn.buf = bufio.NewReader(cn.c) cn.startup(o) // reset the deadline, in case one was set (see dial) if timeout, ok := o["connect_timeout"]; ok && timeout != "0" { err = cn.c.SetDeadline(time.Time{}) } panicking = false return cn, err } func dial(d Dialer, o values) (net.Conn, error) { ntw, addr := network(o) // SSL is not necessary or supported over UNIX domain sockets if ntw == "unix" { o["sslmode"] = "disable" } // Zero or not specified means wait indefinitely. if timeout, ok := o["connect_timeout"]; ok && timeout != "0" { seconds, err := strconv.ParseInt(timeout, 10, 0) if err != nil { return nil, fmt.Errorf("invalid value for parameter connect_timeout: %s", err) } duration := time.Duration(seconds) * time.Second // connect_timeout should apply to the entire connection establishment // procedure, so we both use a timeout for the TCP connection // establishment and set a deadline for doing the initial handshake. // The deadline is then reset after startup() is done. deadline := time.Now().Add(duration) conn, err := d.DialTimeout(ntw, addr, duration) if err != nil { return nil, err } err = conn.SetDeadline(deadline) return conn, err } return d.Dial(ntw, addr) } func network(o values) (string, string) { host := o["host"] if strings.HasPrefix(host, "/") { sockPath := path.Join(host, ".s.PGSQL."+o["port"]) return "unix", sockPath } return "tcp", net.JoinHostPort(host, o["port"]) } type values map[string]string // scanner implements a tokenizer for libpq-style option strings. type scanner struct { s []rune i int } // newScanner returns a new scanner initialized with the option string s. func newScanner(s string) *scanner { return &scanner{[]rune(s), 0} } // Next returns the next rune. // It returns 0, false if the end of the text has been reached. 
func (s *scanner) Next() (rune, bool) { if s.i >= len(s.s) { return 0, false } r := s.s[s.i] s.i++ return r, true } // SkipSpaces returns the next non-whitespace rune. // It returns 0, false if the end of the text has been reached. func (s *scanner) SkipSpaces() (rune, bool) { r, ok := s.Next() for unicode.IsSpace(r) && ok { r, ok = s.Next() } return r, ok } // parseOpts parses the options from name and adds them to the values. // // The parsing code is based on conninfo_parse from libpq's fe-connect.c func parseOpts(name string, o values) error { s := newScanner(name) for { var ( keyRunes, valRunes []rune r rune ok bool ) if r, ok = s.SkipSpaces(); !ok { break } // Scan the key for !unicode.IsSpace(r) && r != '=' { keyRunes = append(keyRunes, r) if r, ok = s.Next(); !ok { break } } // Skip any whitespace if we're not at the = yet if r != '=' { r, ok = s.SkipSpaces() } // The current character should be = if r != '=' || !ok { return fmt.Errorf(`missing "=" after %q in connection info string"`, string(keyRunes)) } // Skip any whitespace after the = if r, ok = s.SkipSpaces(); !ok { // If we reach the end here, the last value is just an empty string as per libpq. 
o[string(keyRunes)] = "" break } if r != '\'' { for !unicode.IsSpace(r) { if r == '\\' { if r, ok = s.Next(); !ok { return fmt.Errorf(`missing character after backslash`) } } valRunes = append(valRunes, r) if r, ok = s.Next(); !ok { break } } } else { quote: for { if r, ok = s.Next(); !ok { return fmt.Errorf(`unterminated quoted string literal in connection string`) } switch r { case '\'': break quote case '\\': r, _ = s.Next() fallthrough default: valRunes = append(valRunes, r) } } } o[string(keyRunes)] = string(valRunes) } return nil } func (cn *conn) isInTransaction() bool { return cn.txnStatus == txnStatusIdleInTransaction || cn.txnStatus == txnStatusInFailedTransaction } func (cn *conn) checkIsInTransaction(intxn bool) { if cn.isInTransaction() != intxn { cn.bad = true errorf("unexpected transaction status %v", cn.txnStatus) } } func (cn *conn) Begin() (_ driver.Tx, err error) { return cn.begin("") } func (cn *conn) begin(mode string) (_ driver.Tx, err error) { if cn.bad { return nil, driver.ErrBadConn } defer cn.errRecover(&err) cn.checkIsInTransaction(false) _, commandTag, err := cn.simpleExec("BEGIN" + mode) if err != nil { return nil, err } if commandTag != "BEGIN" { cn.bad = true return nil, fmt.Errorf("unexpected command tag %s", commandTag) } if cn.txnStatus != txnStatusIdleInTransaction { cn.bad = true return nil, fmt.Errorf("unexpected transaction status %v", cn.txnStatus) } return cn, nil } func (cn *conn) closeTxn() { if finish := cn.txnFinish; finish != nil { finish() } } func (cn *conn) Commit() (err error) { defer cn.closeTxn() if cn.bad { return driver.ErrBadConn } defer cn.errRecover(&err) cn.checkIsInTransaction(true) // We don't want the client to think that everything is okay if it tries // to commit a failed transaction. However, no matter what we return, // database/sql will release this connection back into the free connection // pool so we have to abort the current transaction here. 
Note that you // would get the same behaviour if you issued a COMMIT in a failed // transaction, so it's also the least surprising thing to do here. if cn.txnStatus == txnStatusInFailedTransaction { if err := cn.Rollback(); err != nil { return err } return ErrInFailedTransaction } _, commandTag, err := cn.simpleExec("COMMIT") if err != nil { if cn.isInTransaction() { cn.bad = true } return err } if commandTag != "COMMIT" { cn.bad = true return fmt.Errorf("unexpected command tag %s", commandTag) } cn.checkIsInTransaction(false) return nil } func (cn *conn) Rollback() (err error) { defer cn.closeTxn() if cn.bad { return driver.ErrBadConn } defer cn.errRecover(&err) cn.checkIsInTransaction(true) _, commandTag, err := cn.simpleExec("ROLLBACK") if err != nil { if cn.isInTransaction() { cn.bad = true } return err } if commandTag != "ROLLBACK" { return fmt.Errorf("unexpected command tag %s", commandTag) } cn.checkIsInTransaction(false) return nil } func (cn *conn) gname() string { cn.namei++ return strconv.FormatInt(int64(cn.namei), 10) } func (cn *conn) simpleExec(q string) (res driver.Result, commandTag string, err error) { b := cn.writeBuf('Q') b.string(q) cn.send(b) for { t, r := cn.recv1() switch t { case 'C': res, commandTag = cn.parseComplete(r.string()) case 'Z': cn.processReadyForQuery(r) if res == nil && err == nil { err = errUnexpectedReady } // done return case 'E': err = parseError(r) case 'I': res = emptyRows case 'T', 'D': // ignore any results default: cn.bad = true errorf("unknown response for simple query: %q", t) } } } func (cn *conn) simpleQuery(q string) (res *rows, err error) { defer cn.errRecover(&err) b := cn.writeBuf('Q') b.string(q) cn.send(b) for { t, r := cn.recv1() switch t { case 'C', 'I': // We allow queries which don't return any results through Query as // well as Exec. We still have to give database/sql a rows object // the user can close, though, to avoid connections from being // leaked. 
A "rows" with done=true works fine for that purpose. if err != nil { cn.bad = true errorf("unexpected message %q in simple query execution", t) } if res == nil { res = &rows{ cn: cn, } } // Set the result and tag to the last command complete if there wasn't a // query already run. Although queries usually return from here and cede // control to Next, a query with zero results does not. if t == 'C' && res.colNames == nil { res.result, res.tag = cn.parseComplete(r.string()) } res.done = true case 'Z': cn.processReadyForQuery(r) // done return case 'E': res = nil err = parseError(r) case 'D': if res == nil { cn.bad = true errorf("unexpected DataRow in simple query execution") } // the query didn't fail; kick off to Next cn.saveMessage(t, r) return case 'T': // res might be non-nil here if we received a previous // CommandComplete, but that's fine; just overwrite it res = &rows{cn: cn} res.colNames, res.colFmts, res.colTyps = parsePortalRowDescribe(r) // To work around a bug in QueryRow in Go 1.2 and earlier, wait // until the first DataRow has been received. default: cn.bad = true errorf("unknown response for simple query: %q", t) } } } type noRows struct{} var emptyRows noRows var _ driver.Result = noRows{} func (noRows) LastInsertId() (int64, error) { return 0, errNoLastInsertID } func (noRows) RowsAffected() (int64, error) { return 0, errNoRowsAffected } // Decides which column formats to use for a prepared statement. The input is // an array of type oids, one element per result column. func decideColumnFormats(colTyps []fieldDesc, forceText bool) (colFmts []format, colFmtData []byte) { if len(colTyps) == 0 { return nil, colFmtDataAllText } colFmts = make([]format, len(colTyps)) if forceText { return colFmts, colFmtDataAllText } allBinary := true allText := true for i, t := range colTyps { switch t.OID { // This is the list of types to use binary mode for when receiving them // through a prepared statement. 
If a type appears in this list, it // must also be implemented in binaryDecode in encode.go. case oid.T_bytea: fallthrough case oid.T_int8: fallthrough case oid.T_int4: fallthrough case oid.T_int2: fallthrough case oid.T_uuid: colFmts[i] = formatBinary allText = false default: allBinary = false } } if allBinary { return colFmts, colFmtDataAllBinary } else if allText { return colFmts, colFmtDataAllText } else { colFmtData = make([]byte, 2+len(colFmts)*2) binary.BigEndian.PutUint16(colFmtData, uint16(len(colFmts))) for i, v := range colFmts { binary.BigEndian.PutUint16(colFmtData[2+i*2:], uint16(v)) } return colFmts, colFmtData } } func (cn *conn) prepareTo(q, stmtName string) *stmt { st := &stmt{cn: cn, name: stmtName} b := cn.writeBuf('P') b.string(st.name) b.string(q) b.int16(0) b.next('D') b.byte('S') b.string(st.name) b.next('S') cn.send(b) cn.readParseResponse() st.paramTyps, st.colNames, st.colTyps = cn.readStatementDescribeResponse() st.colFmts, st.colFmtData = decideColumnFormats(st.colTyps, cn.disablePreparedBinaryResult) cn.readReadyForQuery() return st } func (cn *conn) Prepare(q string) (_ driver.Stmt, err error) { if cn.bad { return nil, driver.ErrBadConn } defer cn.errRecover(&err) if len(q) >= 4 && strings.EqualFold(q[:4], "COPY") { s, err := cn.prepareCopyIn(q) if err == nil { cn.inCopy = true } return s, err } return cn.prepareTo(q, cn.gname()), nil } func (cn *conn) Close() (err error) { // Skip cn.bad return here because we always want to close a connection. defer cn.errRecover(&err) // Ensure that cn.c.Close is always run. Since error handling is done with // panics and cn.errRecover, the Close must be in a defer. defer func() { cerr := cn.c.Close() if err == nil { err = cerr } }() // Don't go through send(); ListenerConn relies on us not scribbling on the // scratch buffer of this connection. 
return cn.sendSimpleMessage('X') } // Implement the "Queryer" interface func (cn *conn) Query(query string, args []driver.Value) (driver.Rows, error) { return cn.query(query, args) } func (cn *conn) query(query string, args []driver.Value) (_ *rows, err error) { if cn.bad { return nil, driver.ErrBadConn } if cn.inCopy { return nil, errCopyInProgress } defer cn.errRecover(&err) // Check to see if we can use the "simpleQuery" interface, which is // *much* faster than going through prepare/exec if len(args) == 0 { return cn.simpleQuery(query) } if cn.binaryParameters { cn.sendBinaryModeQuery(query, args) cn.readParseResponse() cn.readBindResponse() rows := &rows{cn: cn} rows.colNames, rows.colFmts, rows.colTyps = cn.readPortalDescribeResponse() cn.postExecuteWorkaround() return rows, nil } st := cn.prepareTo(query, "") st.exec(args) return &rows{ cn: cn, colNames: st.colNames, colTyps: st.colTyps, colFmts: st.colFmts, }, nil } // Implement the optional "Execer" interface for one-shot queries func (cn *conn) Exec(query string, args []driver.Value) (res driver.Result, err error) { if cn.bad { return nil, driver.ErrBadConn } defer cn.errRecover(&err) // Check to see if we can use the "simpleExec" interface, which is // *much* faster than going through prepare/exec if len(args) == 0 { // ignore commandTag, our caller doesn't care r, _, err := cn.simpleExec(query) return r, err } if cn.binaryParameters { cn.sendBinaryModeQuery(query, args) cn.readParseResponse() cn.readBindResponse() cn.readPortalDescribeResponse() cn.postExecuteWorkaround() res, _, err = cn.readExecuteResponse("Execute") return res, err } // Use the unnamed statement to defer planning until bind // time, or else value-based selectivity estimates cannot be // used. 
st := cn.prepareTo(query, "") r, err := st.Exec(args) if err != nil { panic(err) } return r, err } func (cn *conn) send(m *writeBuf) { _, err := cn.c.Write(m.wrap()) if err != nil { panic(err) } } func (cn *conn) sendStartupPacket(m *writeBuf) error { _, err := cn.c.Write((m.wrap())[1:]) return err } // Send a message of type typ to the server on the other end of cn. The // message should have no payload. This method does not use the scratch // buffer. func (cn *conn) sendSimpleMessage(typ byte) (err error) { _, err = cn.c.Write([]byte{typ, '\x00', '\x00', '\x00', '\x04'}) return err } // saveMessage memorizes a message and its buffer in the conn struct. // recvMessage will then return these values on the next call to it. This // method is useful in cases where you have to see what the next message is // going to be (e.g. to see whether it's an error or not) but you can't handle // the message yourself. func (cn *conn) saveMessage(typ byte, buf *readBuf) { if cn.saveMessageType != 0 { cn.bad = true errorf("unexpected saveMessageType %d", cn.saveMessageType) } cn.saveMessageType = typ cn.saveMessageBuffer = *buf } // recvMessage receives any message from the backend, or returns an error if // a problem occurred while reading the message. 
func (cn *conn) recvMessage(r *readBuf) (byte, error) { // workaround for a QueryRow bug, see exec if cn.saveMessageType != 0 { t := cn.saveMessageType *r = cn.saveMessageBuffer cn.saveMessageType = 0 cn.saveMessageBuffer = nil return t, nil } x := cn.scratch[:5] _, err := io.ReadFull(cn.buf, x) if err != nil { return 0, err } // read the type and length of the message that follows t := x[0] n := int(binary.BigEndian.Uint32(x[1:])) - 4 var y []byte if n <= len(cn.scratch) { y = cn.scratch[:n] } else { y = make([]byte, n) } _, err = io.ReadFull(cn.buf, y) if err != nil { return 0, err } *r = y return t, nil } // recv receives a message from the backend, but if an error happened while // reading the message or the received message was an ErrorResponse, it panics. // NoticeResponses are ignored. This function should generally be used only // during the startup sequence. func (cn *conn) recv() (t byte, r *readBuf) { for { var err error r = &readBuf{} t, err = cn.recvMessage(r) if err != nil { panic(err) } switch t { case 'E': panic(parseError(r)) case 'N': // ignore default: return } } } // recv1Buf is exactly equivalent to recv1, except it uses a buffer supplied by // the caller to avoid an allocation. func (cn *conn) recv1Buf(r *readBuf) byte { for { t, err := cn.recvMessage(r) if err != nil { panic(err) } switch t { case 'A', 'N': // ignore case 'S': cn.processParameterStatus(r) default: return t } } } // recv1 receives a message from the backend, panicking if an error occurs // while attempting to read it. All asynchronous messages are ignored, with // the exception of ErrorResponse. 
func (cn *conn) recv1() (t byte, r *readBuf) { r = &readBuf{} t = cn.recv1Buf(r) return t, r } func (cn *conn) ssl(o values) error { upgrade, err := ssl(o) if err != nil { return err } if upgrade == nil { // Nothing to do return nil } w := cn.writeBuf(0) w.int32(80877103) if err = cn.sendStartupPacket(w); err != nil { return err } b := cn.scratch[:1] _, err = io.ReadFull(cn.c, b) if err != nil { return err } if b[0] != 'S' { return ErrSSLNotSupported } cn.c, err = upgrade(cn.c) return err } // isDriverSetting returns true iff a setting is purely for configuring the // driver's options and should not be sent to the server in the connection // startup packet. func isDriverSetting(key string) bool { switch key { case "host", "port": return true case "password": return true case "sslmode", "sslcert", "sslkey", "sslrootcert": return true case "fallback_application_name": return true case "connect_timeout": return true case "disable_prepared_binary_result": return true case "binary_parameters": return true default: return false } } func (cn *conn) startup(o values) { w := cn.writeBuf(0) w.int32(196608) // Send the backend the name of the database we want to connect to, and the // user we want to connect as. Additionally, we send over any run-time // parameters potentially included in the connection string. If the server // doesn't recognize any of them, it will reply with an error. for k, v := range o { if isDriverSetting(k) { // skip options which can't be run-time parameters continue } // The protocol requires us to supply the database name as "database" // instead of "dbname". 
if k == "dbname" { k = "database" } w.string(k) w.string(v) } w.string("") if err := cn.sendStartupPacket(w); err != nil { panic(err) } for { t, r := cn.recv() switch t { case 'K': cn.processBackendKeyData(r) case 'S': cn.processParameterStatus(r) case 'R': cn.auth(r, o) case 'Z': cn.processReadyForQuery(r) return default: errorf("unknown response for startup: %q", t) } } } func (cn *conn) auth(r *readBuf, o values) { switch code := r.int32(); code { case 0: // OK case 3: w := cn.writeBuf('p') w.string(o["password"]) cn.send(w) t, r := cn.recv() if t != 'R' { errorf("unexpected password response: %q", t) } if r.int32() != 0 { errorf("unexpected authentication response: %q", t) } case 5: s := string(r.next(4)) w := cn.writeBuf('p') w.string("md5" + md5s(md5s(o["password"]+o["user"])+s)) cn.send(w) t, r := cn.recv() if t != 'R' { errorf("unexpected password response: %q", t) } if r.int32() != 0 { errorf("unexpected authentication response: %q", t) } default: errorf("unknown authentication response: %d", code) } } type format int const formatText format = 0 const formatBinary format = 1 // One result-column format code with the value 1 (i.e. all binary). var colFmtDataAllBinary = []byte{0, 1, 0, 1} // No result-column format codes (i.e. all text). 
var colFmtDataAllText = []byte{0, 0} type stmt struct { cn *conn name string colNames []string colFmts []format colFmtData []byte colTyps []fieldDesc paramTyps []oid.Oid closed bool } func (st *stmt) Close() (err error) { if st.closed { return nil } if st.cn.bad { return driver.ErrBadConn } defer st.cn.errRecover(&err) w := st.cn.writeBuf('C') w.byte('S') w.string(st.name) st.cn.send(w) st.cn.send(st.cn.writeBuf('S')) t, _ := st.cn.recv1() if t != '3' { st.cn.bad = true errorf("unexpected close response: %q", t) } st.closed = true t, r := st.cn.recv1() if t != 'Z' { st.cn.bad = true errorf("expected ready for query, but got: %q", t) } st.cn.processReadyForQuery(r) return nil } func (st *stmt) Query(v []driver.Value) (r driver.Rows, err error) { if st.cn.bad { return nil, driver.ErrBadConn } defer st.cn.errRecover(&err) st.exec(v) return &rows{ cn: st.cn, colNames: st.colNames, colTyps: st.colTyps, colFmts: st.colFmts, }, nil } func (st *stmt) Exec(v []driver.Value) (res driver.Result, err error) { if st.cn.bad { return nil, driver.ErrBadConn } defer st.cn.errRecover(&err) st.exec(v) res, _, err = st.cn.readExecuteResponse("simple query") return res, err } func (st *stmt) exec(v []driver.Value) { if len(v) >= 65536 { errorf("got %d parameters but PostgreSQL only supports 65535 parameters", len(v)) } if len(v) != len(st.paramTyps) { errorf("got %d parameters but the statement requires %d", len(v), len(st.paramTyps)) } cn := st.cn w := cn.writeBuf('B') w.byte(0) // unnamed portal w.string(st.name) if cn.binaryParameters { cn.sendBinaryParameters(w, v) } else { w.int16(0) w.int16(len(v)) for i, x := range v { if x == nil { w.int32(-1) } else { b := encode(&cn.parameterStatus, x, st.paramTyps[i]) w.int32(len(b)) w.bytes(b) } } } w.bytes(st.colFmtData) w.next('E') w.byte(0) w.int32(0) w.next('S') cn.send(w) cn.readBindResponse() cn.postExecuteWorkaround() } func (st *stmt) NumInput() int { return len(st.paramTyps) } // parseComplete parses the "command tag" from a 
CommandComplete message, and // returns the number of rows affected (if applicable) and a string // identifying only the command that was executed, e.g. "ALTER TABLE". If the // command tag could not be parsed, parseComplete panics. func (cn *conn) parseComplete(commandTag string) (driver.Result, string) { commandsWithAffectedRows := []string{ "SELECT ", // INSERT is handled below "UPDATE ", "DELETE ", "FETCH ", "MOVE ", "COPY ", } var affectedRows *string for _, tag := range commandsWithAffectedRows { if strings.HasPrefix(commandTag, tag) { t := commandTag[len(tag):] affectedRows = &t commandTag = tag[:len(tag)-1] break } } // INSERT also includes the oid of the inserted row in its command tag. // Oids in user tables are deprecated, and the oid is only returned when // exactly one row is inserted, so it's unlikely to be of value to any // real-world application and we can ignore it. if affectedRows == nil && strings.HasPrefix(commandTag, "INSERT ") { parts := strings.Split(commandTag, " ") if len(parts) != 3 { cn.bad = true errorf("unexpected INSERT command tag %s", commandTag) } affectedRows = &parts[len(parts)-1] commandTag = "INSERT" } // There should be no affected rows attached to the tag, just return it if affectedRows == nil { return driver.RowsAffected(0), commandTag } n, err := strconv.ParseInt(*affectedRows, 10, 64) if err != nil { cn.bad = true errorf("could not parse commandTag: %s", err) } return driver.RowsAffected(n), commandTag } type rows struct { cn *conn finish func() colNames []string colTyps []fieldDesc colFmts []format done bool rb readBuf result driver.Result tag string } func (rs *rows) Close() error { if finish := rs.finish; finish != nil { defer finish() } // no need to look at cn.bad as Next() will for { err := rs.Next(nil) switch err { case nil: case io.EOF: // rs.Next can return io.EOF on both 'Z' (ready for query) and 'T' (row // description, used with HasNextResultSet). 
We need to fetch messages until // we hit a 'Z', which is done by waiting for done to be set. if rs.done { return nil } default: return err } } } func (rs *rows) Columns() []string { return rs.colNames } func (rs *rows) Result() driver.Result { if rs.result == nil { return emptyRows } return rs.result } func (rs *rows) Tag() string { return rs.tag } func (rs *rows) Next(dest []driver.Value) (err error) { if rs.done { return io.EOF } conn := rs.cn if conn.bad { return driver.ErrBadConn } defer conn.errRecover(&err) for { t := conn.recv1Buf(&rs.rb) switch t { case 'E': err = parseError(&rs.rb) case 'C', 'I': if t == 'C' { rs.result, rs.tag = conn.parseComplete(rs.rb.string()) } continue case 'Z': conn.processReadyForQuery(&rs.rb) rs.done = true if err != nil { return err } return io.EOF case 'D': n := rs.rb.int16() if err != nil { conn.bad = true errorf("unexpected DataRow after error %s", err) } if n < len(dest) { dest = dest[:n] } for i := range dest { l := rs.rb.int32() if l == -1 { dest[i] = nil continue } dest[i] = decode(&conn.parameterStatus, rs.rb.next(l), rs.colTyps[i].OID, rs.colFmts[i]) } return case 'T': rs.colNames, rs.colFmts, rs.colTyps = parsePortalRowDescribe(&rs.rb) return io.EOF default: errorf("unexpected message after execute: %q", t) } } } func (rs *rows) HasNextResultSet() bool { return !rs.done } func (rs *rows) NextResultSet() error { return nil } // QuoteIdentifier quotes an "identifier" (e.g. a table or a column name) to be // used as part of an SQL statement. For example: // // tblname := "my_table" // data := "my_data" // quoted := pq.QuoteIdentifier(tblname) // err := db.Exec(fmt.Sprintf("INSERT INTO %s VALUES ($1)", quoted), data) // // Any double quotes in name will be escaped. The quoted identifier will be // case sensitive when used in a query. If the input string contains a zero // byte, the result will be truncated immediately before it. 
func QuoteIdentifier(name string) string { end := strings.IndexRune(name, 0) if end > -1 { name = name[:end] } return `"` + strings.Replace(name, `"`, `""`, -1) + `"` } func md5s(s string) string { h := md5.New() h.Write([]byte(s)) return fmt.Sprintf("%x", h.Sum(nil)) } func (cn *conn) sendBinaryParameters(b *writeBuf, args []driver.Value) { // Do one pass over the parameters to see if we're going to send any of // them over in binary. If we are, create a paramFormats array at the // same time. var paramFormats []int for i, x := range args { _, ok := x.([]byte) if ok { if paramFormats == nil { paramFormats = make([]int, len(args)) } paramFormats[i] = 1 } } if paramFormats == nil { b.int16(0) } else { b.int16(len(paramFormats)) for _, x := range paramFormats { b.int16(x) } } b.int16(len(args)) for _, x := range args { if x == nil { b.int32(-1) } else { datum := binaryEncode(&cn.parameterStatus, x) b.int32(len(datum)) b.bytes(datum) } } } func (cn *conn) sendBinaryModeQuery(query string, args []driver.Value) { if len(args) >= 65536 { errorf("got %d parameters but PostgreSQL only supports 65535 parameters", len(args)) } b := cn.writeBuf('P') b.byte(0) // unnamed statement b.string(query) b.int16(0) b.next('B') b.int16(0) // unnamed portal and statement cn.sendBinaryParameters(b, args) b.bytes(colFmtDataAllText) b.next('D') b.byte('P') b.byte(0) // unnamed portal b.next('E') b.byte(0) b.int32(0) b.next('S') cn.send(b) } func (cn *conn) processParameterStatus(r *readBuf) { var err error param := r.string() switch param { case "server_version": var major1 int var major2 int var minor int _, err = fmt.Sscanf(r.string(), "%d.%d.%d", &major1, &major2, &minor) if err == nil { cn.parameterStatus.serverVersion = major1*10000 + major2*100 + minor } case "TimeZone": cn.parameterStatus.currentLocation, err = time.LoadLocation(r.string()) if err != nil { cn.parameterStatus.currentLocation = nil } default: // ignore } } func (cn *conn) processReadyForQuery(r *readBuf) { 
cn.txnStatus = transactionStatus(r.byte()) } func (cn *conn) readReadyForQuery() { t, r := cn.recv1() switch t { case 'Z': cn.processReadyForQuery(r) return default: cn.bad = true errorf("unexpected message %q; expected ReadyForQuery", t) } } func (cn *conn) processBackendKeyData(r *readBuf) { cn.processID = r.int32() cn.secretKey = r.int32() } func (cn *conn) readParseResponse() { t, r := cn.recv1() switch t { case '1': return case 'E': err := parseError(r) cn.readReadyForQuery() panic(err) default: cn.bad = true errorf("unexpected Parse response %q", t) } } func (cn *conn) readStatementDescribeResponse() (paramTyps []oid.Oid, colNames []string, colTyps []fieldDesc) { for { t, r := cn.recv1() switch t { case 't': nparams := r.int16() paramTyps = make([]oid.Oid, nparams) for i := range paramTyps { paramTyps[i] = r.oid() } case 'n': return paramTyps, nil, nil case 'T': colNames, colTyps = parseStatementRowDescribe(r) return paramTyps, colNames, colTyps case 'E': err := parseError(r) cn.readReadyForQuery() panic(err) default: cn.bad = true errorf("unexpected Describe statement response %q", t) } } } func (cn *conn) readPortalDescribeResponse() (colNames []string, colFmts []format, colTyps []fieldDesc) { t, r := cn.recv1() switch t { case 'T': return parsePortalRowDescribe(r) case 'n': return nil, nil, nil case 'E': err := parseError(r) cn.readReadyForQuery() panic(err) default: cn.bad = true errorf("unexpected Describe response %q", t) } panic("not reached") } func (cn *conn) readBindResponse() { t, r := cn.recv1() switch t { case '2': return case 'E': err := parseError(r) cn.readReadyForQuery() panic(err) default: cn.bad = true errorf("unexpected Bind response %q", t) } } func (cn *conn) postExecuteWorkaround() { // Work around a bug in sql.DB.QueryRow: in Go 1.2 and earlier it ignores // any errors from rows.Next, which masks errors that happened during the // execution of the query. 
To avoid the problem in common cases, we wait // here for one more message from the database. If it's not an error the // query will likely succeed (or perhaps has already, if it's a // CommandComplete), so we push the message into the conn struct; recv1 // will return it as the next message for rows.Next or rows.Close. // However, if it's an error, we wait until ReadyForQuery and then return // the error to our caller. for { t, r := cn.recv1() switch t { case 'E': err := parseError(r) cn.readReadyForQuery() panic(err) case 'C', 'D', 'I': // the query didn't fail, but we can't process this message cn.saveMessage(t, r) return default: cn.bad = true errorf("unexpected message during extended query execution: %q", t) } } } // Only for Exec(), since we ignore the returned data func (cn *conn) readExecuteResponse(protocolState string) (res driver.Result, commandTag string, err error) { for { t, r := cn.recv1() switch t { case 'C': if err != nil { cn.bad = true errorf("unexpected CommandComplete after error %s", err) } res, commandTag = cn.parseComplete(r.string()) case 'Z': cn.processReadyForQuery(r) if res == nil && err == nil { err = errUnexpectedReady } return res, commandTag, err case 'E': err = parseError(r) case 'T', 'D', 'I': if err != nil { cn.bad = true errorf("unexpected %q after error %s", t, err) } if t == 'I' { res = emptyRows } // ignore any results default: cn.bad = true errorf("unknown %s response: %q", protocolState, t) } } } func parseStatementRowDescribe(r *readBuf) (colNames []string, colTyps []fieldDesc) { n := r.int16() colNames = make([]string, n) colTyps = make([]fieldDesc, n) for i := range colNames { colNames[i] = r.string() r.next(6) colTyps[i].OID = r.oid() colTyps[i].Len = r.int16() colTyps[i].Mod = r.int32() // format code not known when describing a statement; always 0 r.next(2) } return } func parsePortalRowDescribe(r *readBuf) (colNames []string, colFmts []format, colTyps []fieldDesc) { n := r.int16() colNames = make([]string, n) colFmts 
= make([]format, n) colTyps = make([]fieldDesc, n) for i := range colNames { colNames[i] = r.string() r.next(6) colTyps[i].OID = r.oid() colTyps[i].Len = r.int16() colTyps[i].Mod = r.int32() colFmts[i] = format(r.int16()) } return } // parseEnviron tries to mimic some of libpq's environment handling // // To ease testing, it does not directly reference os.Environ, but is // designed to accept its output. // // Environment-set connection information is intended to have a higher // precedence than a library default but lower than any explicitly // passed information (such as in the URL or connection string). func parseEnviron(env []string) (out map[string]string) { out = make(map[string]string) for _, v := range env { parts := strings.SplitN(v, "=", 2) accrue := func(keyname string) { out[keyname] = parts[1] } unsupported := func() { panic(fmt.Sprintf("setting %v not supported", parts[0])) } // The order of these is the same as is seen in the // PostgreSQL 9.1 manual. Unsupported but well-defined // keys cause a panic; these should be unset prior to // execution. Options which pq expects to be set to a // certain value are allowed, but must be set to that // value if present (they can, of course, be absent). 
switch parts[0] { case "PGHOST": accrue("host") case "PGHOSTADDR": unsupported() case "PGPORT": accrue("port") case "PGDATABASE": accrue("dbname") case "PGUSER": accrue("user") case "PGPASSWORD": accrue("password") case "PGSERVICE", "PGSERVICEFILE", "PGREALM": unsupported() case "PGOPTIONS": accrue("options") case "PGAPPNAME": accrue("application_name") case "PGSSLMODE": accrue("sslmode") case "PGSSLCERT": accrue("sslcert") case "PGSSLKEY": accrue("sslkey") case "PGSSLROOTCERT": accrue("sslrootcert") case "PGREQUIRESSL", "PGSSLCRL": unsupported() case "PGREQUIREPEER": unsupported() case "PGKRBSRVNAME", "PGGSSLIB": unsupported() case "PGCONNECT_TIMEOUT": accrue("connect_timeout") case "PGCLIENTENCODING": accrue("client_encoding") case "PGDATESTYLE": accrue("datestyle") case "PGTZ": accrue("timezone") case "PGGEQO": accrue("geqo") case "PGSYSCONFDIR", "PGLOCALEDIR": unsupported() } } return out } // isUTF8 returns whether name is a fuzzy variation of the string "UTF-8". func isUTF8(name string) bool { // Recognize all sorts of silly things as "UTF-8", like Postgres does s := strings.Map(alnumLowerASCII, name) return s == "utf8" || s == "unicode" } func alnumLowerASCII(ch rune) rune { if 'A' <= ch && ch <= 'Z' { return ch + ('a' - 'A') } if 'a' <= ch && ch <= 'z' || '0' <= ch && ch <= '9' { return ch } return -1 // discard }
vendor/github.com/lib/pq/conn.go
0
https://github.com/grafana/grafana/commit/1341f4517a3a67c58c8d4bca2b184fb3dea735f7
[ 0.00019037901074625552, 0.00016869220416992903, 0.0001637867244426161, 0.00016866698570083827, 0.000002622301963128848 ]
{ "id": 1, "code_window": [ "\n", "/**\n", " * Remove query row of the given index, as well as associated query results.\n", " */\n", "export const removeQueryRowAction = actionCreatorFactory<RemoveQueryRowPayload>('explore/REMOVE_QUERY_ROW').create();\n", "export const runQueriesAction = noPayloadActionCreatorFactory('explore/RUN_QUERIES').create();\n", "\n", "/**\n", " * Start a scan for more results using the given scanner.\n" ], "labels": [ "keep", "keep", "keep", "keep", "keep", "replace", "keep", "keep", "keep" ], "after_edit": [ "\n", "export const runQueriesAction = actionCreatorFactory<RunQueriesPayload>('explore/RUN_QUERIES').create();\n" ], "file_path": "public/app/features/explore/state/actionTypes.ts", "type": "replace", "edit_start_line_idx": 326 }
// @ts-ignore import _ from 'lodash'; import { calculateResultsFromQueryTransactions, generateEmptyQuery, getIntervals, ensureQueries, getQueryKeys, parseUrlState, DEFAULT_UI_STATE, } from 'app/core/utils/explore'; import { ExploreItemState, ExploreState, QueryTransaction, ExploreId, ExploreUpdateState } from 'app/types/explore'; import { DataQuery } from '@grafana/ui/src/types'; import { HigherOrderAction, ActionTypes, testDataSourcePendingAction, testDataSourceSuccessAction, testDataSourceFailureAction, splitCloseAction, SplitCloseActionPayload, loadExploreDatasources, } from './actionTypes'; import { reducerFactory } from 'app/core/redux'; import { addQueryRowAction, changeQueryAction, changeSizeAction, changeTimeAction, clearQueriesAction, highlightLogsExpressionAction, initializeExploreAction, updateDatasourceInstanceAction, loadDatasourceMissingAction, loadDatasourcePendingAction, loadDatasourceReadyAction, modifyQueriesAction, queryTransactionFailureAction, queryTransactionStartAction, queryTransactionSuccessAction, removeQueryRowAction, scanRangeAction, scanStartAction, scanStopAction, setQueriesAction, toggleGraphAction, toggleLogsAction, toggleTableAction, queriesImportedAction, updateUIStateAction, toggleLogLevelAction, } from './actionTypes'; import { updateLocation } from 'app/core/actions/location'; import { LocationUpdate } from 'app/types'; export const DEFAULT_RANGE = { from: 'now-6h', to: 'now', }; // Millies step for helper bar charts const DEFAULT_GRAPH_INTERVAL = 15 * 1000; export const makeInitialUpdateState = (): ExploreUpdateState => ({ datasource: false, queries: false, range: false, ui: false, }); /** * Returns a fresh Explore area state */ export const makeExploreItemState = (): ExploreItemState => ({ StartPage: undefined, containerWidth: 0, datasourceInstance: null, requestedDatasourceName: null, datasourceError: null, datasourceLoading: null, datasourceMissing: false, exploreDatasources: [], history: [], queries: [], initialized: false, 
queryTransactions: [], queryIntervals: { interval: '15s', intervalMs: DEFAULT_GRAPH_INTERVAL }, range: DEFAULT_RANGE, scanning: false, scanRange: null, showingGraph: true, showingLogs: true, showingTable: true, supportsGraph: null, supportsLogs: null, supportsTable: null, queryKeys: [], urlState: null, update: makeInitialUpdateState(), }); /** * Global Explore state that handles multiple Explore areas and the split state */ export const initialExploreState: ExploreState = { split: null, left: makeExploreItemState(), right: makeExploreItemState(), }; /** * Reducer for an Explore area, to be used by the global Explore reducer. */ export const itemReducer = reducerFactory<ExploreItemState>({} as ExploreItemState) .addMapper({ filter: addQueryRowAction, mapper: (state, action): ExploreItemState => { const { queries, queryTransactions } = state; const { index, query } = action.payload; // Add to queries, which will cause a new row to be rendered const nextQueries = [...queries.slice(0, index + 1), { ...query }, ...queries.slice(index + 1)]; // Ongoing transactions need to update their row indices const nextQueryTransactions = queryTransactions.map(qt => { if (qt.rowIndex > index) { return { ...qt, rowIndex: qt.rowIndex + 1, }; } return qt; }); return { ...state, queries: nextQueries, logsHighlighterExpressions: undefined, queryTransactions: nextQueryTransactions, queryKeys: getQueryKeys(nextQueries, state.datasourceInstance), }; }, }) .addMapper({ filter: changeQueryAction, mapper: (state, action): ExploreItemState => { const { queries, queryTransactions } = state; const { query, index } = action.payload; // Override path: queries are completely reset const nextQuery: DataQuery = { ...query, ...generateEmptyQuery(state.queries) }; const nextQueries = [...queries]; nextQueries[index] = nextQuery; // Discard ongoing transaction related to row query const nextQueryTransactions = queryTransactions.filter(qt => qt.rowIndex !== index); return { ...state, queries: nextQueries, 
queryTransactions: nextQueryTransactions, queryKeys: getQueryKeys(nextQueries, state.datasourceInstance), }; }, }) .addMapper({ filter: changeSizeAction, mapper: (state, action): ExploreItemState => { const { range, datasourceInstance } = state; let interval = '1s'; if (datasourceInstance && datasourceInstance.interval) { interval = datasourceInstance.interval; } const containerWidth = action.payload.width; const queryIntervals = getIntervals(range, interval, containerWidth); return { ...state, containerWidth, queryIntervals }; }, }) .addMapper({ filter: changeTimeAction, mapper: (state, action): ExploreItemState => { return { ...state, range: action.payload.range }; }, }) .addMapper({ filter: clearQueriesAction, mapper: (state): ExploreItemState => { const queries = ensureQueries(); return { ...state, queries: queries.slice(), queryTransactions: [], showingStartPage: Boolean(state.StartPage), queryKeys: getQueryKeys(queries, state.datasourceInstance), }; }, }) .addMapper({ filter: highlightLogsExpressionAction, mapper: (state, action): ExploreItemState => { const { expressions } = action.payload; return { ...state, logsHighlighterExpressions: expressions }; }, }) .addMapper({ filter: initializeExploreAction, mapper: (state, action): ExploreItemState => { const { containerWidth, eventBridge, queries, range, ui } = action.payload; return { ...state, containerWidth, eventBridge, range, queries, initialized: true, queryKeys: getQueryKeys(queries, state.datasourceInstance), ...ui, update: makeInitialUpdateState(), }; }, }) .addMapper({ filter: updateDatasourceInstanceAction, mapper: (state, action): ExploreItemState => { const { datasourceInstance } = action.payload; // Capabilities const supportsGraph = datasourceInstance.meta.metrics; const supportsLogs = datasourceInstance.meta.logs; const supportsTable = datasourceInstance.meta.tables; // Custom components const StartPage = datasourceInstance.components.ExploreStartPage; return { ...state, datasourceInstance, 
supportsGraph, supportsLogs, supportsTable, StartPage, showingStartPage: Boolean(StartPage), queryKeys: getQueryKeys(state.queries, datasourceInstance), }; }, }) .addMapper({ filter: loadDatasourceMissingAction, mapper: (state): ExploreItemState => { return { ...state, datasourceMissing: true, datasourceLoading: false, update: makeInitialUpdateState(), }; }, }) .addMapper({ filter: loadDatasourcePendingAction, mapper: (state, action): ExploreItemState => { return { ...state, datasourceLoading: true, requestedDatasourceName: action.payload.requestedDatasourceName, }; }, }) .addMapper({ filter: loadDatasourceReadyAction, mapper: (state, action): ExploreItemState => { const { containerWidth, range, datasourceInstance } = state; const { history } = action.payload; const queryIntervals = getIntervals(range, datasourceInstance.interval, containerWidth); return { ...state, queryIntervals, history, datasourceLoading: false, datasourceMissing: false, logsHighlighterExpressions: undefined, queryTransactions: [], update: makeInitialUpdateState(), }; }, }) .addMapper({ filter: modifyQueriesAction, mapper: (state, action): ExploreItemState => { const { queries, queryTransactions } = state; const { modification, index, modifier } = action.payload; let nextQueries: DataQuery[]; let nextQueryTransactions: QueryTransaction[]; if (index === undefined) { // Modify all queries nextQueries = queries.map((query, i) => ({ ...modifier({ ...query }, modification), ...generateEmptyQuery(state.queries), })); // Discard all ongoing transactions nextQueryTransactions = []; } else { // Modify query only at index nextQueries = queries.map((query, i) => { // Synchronize all queries with local query cache to ensure consistency // TODO still needed? return i === index ? 
{ ...modifier({ ...query }, modification), ...generateEmptyQuery(state.queries) } : query; }); nextQueryTransactions = queryTransactions // Consume the hint corresponding to the action .map(qt => { if (qt.hints != null && qt.rowIndex === index) { qt.hints = qt.hints.filter(hint => hint.fix.action !== modification); } return qt; }) // Preserve previous row query transaction to keep results visible if next query is incomplete .filter(qt => modification.preventSubmit || qt.rowIndex !== index); } return { ...state, queries: nextQueries, queryKeys: getQueryKeys(nextQueries, state.datasourceInstance), queryTransactions: nextQueryTransactions, }; }, }) .addMapper({ filter: queryTransactionFailureAction, mapper: (state, action): ExploreItemState => { const { queryTransactions } = action.payload; return { ...state, queryTransactions, showingStartPage: false, update: makeInitialUpdateState(), }; }, }) .addMapper({ filter: queryTransactionStartAction, mapper: (state, action): ExploreItemState => { const { queryTransactions } = state; const { resultType, rowIndex, transaction } = action.payload; // Discarding existing transactions of same type const remainingTransactions = queryTransactions.filter( qt => !(qt.resultType === resultType && qt.rowIndex === rowIndex) ); // Append new transaction const nextQueryTransactions: QueryTransaction[] = [...remainingTransactions, transaction]; return { ...state, queryTransactions: nextQueryTransactions, showingStartPage: false, update: makeInitialUpdateState(), }; }, }) .addMapper({ filter: queryTransactionSuccessAction, mapper: (state, action): ExploreItemState => { const { datasourceInstance, queryIntervals } = state; const { history, queryTransactions } = action.payload; const results = calculateResultsFromQueryTransactions( queryTransactions, datasourceInstance, queryIntervals.intervalMs ); return { ...state, ...results, history, queryTransactions, showingStartPage: false, update: makeInitialUpdateState(), }; }, }) .addMapper({ filter: 
removeQueryRowAction, mapper: (state, action): ExploreItemState => { const { datasourceInstance, queries, queryIntervals, queryTransactions, queryKeys } = state; const { index } = action.payload; if (queries.length <= 1) { return state; } const nextQueries = [...queries.slice(0, index), ...queries.slice(index + 1)]; const nextQueryKeys = [...queryKeys.slice(0, index), ...queryKeys.slice(index + 1)]; // Discard transactions related to row query const nextQueryTransactions = queryTransactions.filter(qt => nextQueries.some(nq => nq.key === qt.query.key)); const results = calculateResultsFromQueryTransactions( nextQueryTransactions, datasourceInstance, queryIntervals.intervalMs ); return { ...state, ...results, queries: nextQueries, logsHighlighterExpressions: undefined, queryTransactions: nextQueryTransactions, queryKeys: nextQueryKeys, }; }, }) .addMapper({ filter: scanRangeAction, mapper: (state, action): ExploreItemState => { return { ...state, scanRange: action.payload.range }; }, }) .addMapper({ filter: scanStartAction, mapper: (state, action): ExploreItemState => { return { ...state, scanning: true, scanner: action.payload.scanner }; }, }) .addMapper({ filter: scanStopAction, mapper: (state): ExploreItemState => { const { queryTransactions } = state; const nextQueryTransactions = queryTransactions.filter(qt => qt.scanning && !qt.done); return { ...state, queryTransactions: nextQueryTransactions, scanning: false, scanRange: undefined, scanner: undefined, update: makeInitialUpdateState(), }; }, }) .addMapper({ filter: setQueriesAction, mapper: (state, action): ExploreItemState => { const { queries } = action.payload; return { ...state, queries: queries.slice(), queryKeys: getQueryKeys(queries, state.datasourceInstance), }; }, }) .addMapper({ filter: updateUIStateAction, mapper: (state, action): ExploreItemState => { return { ...state, ...action.payload }; }, }) .addMapper({ filter: toggleGraphAction, mapper: (state): ExploreItemState => { const showingGraph = 
!state.showingGraph; let nextQueryTransactions = state.queryTransactions; if (!showingGraph) { // Discard transactions related to Graph query nextQueryTransactions = state.queryTransactions.filter(qt => qt.resultType !== 'Graph'); } return { ...state, queryTransactions: nextQueryTransactions }; }, }) .addMapper({ filter: toggleLogsAction, mapper: (state): ExploreItemState => { const showingLogs = !state.showingLogs; let nextQueryTransactions = state.queryTransactions; if (!showingLogs) { // Discard transactions related to Logs query nextQueryTransactions = state.queryTransactions.filter(qt => qt.resultType !== 'Logs'); } return { ...state, queryTransactions: nextQueryTransactions }; }, }) .addMapper({ filter: toggleTableAction, mapper: (state): ExploreItemState => { const showingTable = !state.showingTable; if (showingTable) { return { ...state, queryTransactions: state.queryTransactions }; } // Toggle off needs discarding of table queries and results const nextQueryTransactions = state.queryTransactions.filter(qt => qt.resultType !== 'Table'); const results = calculateResultsFromQueryTransactions( nextQueryTransactions, state.datasourceInstance, state.queryIntervals.intervalMs ); return { ...state, ...results, queryTransactions: nextQueryTransactions }; }, }) .addMapper({ filter: queriesImportedAction, mapper: (state, action): ExploreItemState => { const { queries } = action.payload; return { ...state, queries, queryKeys: getQueryKeys(queries, state.datasourceInstance), }; }, }) .addMapper({ filter: toggleLogLevelAction, mapper: (state, action): ExploreItemState => { const { hiddenLogLevels } = action.payload; return { ...state, hiddenLogLevels: Array.from(hiddenLogLevels), }; }, }) .addMapper({ filter: testDataSourcePendingAction, mapper: (state): ExploreItemState => { return { ...state, datasourceError: null, }; }, }) .addMapper({ filter: testDataSourceSuccessAction, mapper: (state): ExploreItemState => { return { ...state, datasourceError: null, }; }, }) 
.addMapper({ filter: testDataSourceFailureAction, mapper: (state, action): ExploreItemState => { return { ...state, datasourceError: action.payload.error, queryTransactions: [], graphResult: undefined, tableResult: undefined, logsResult: undefined, update: makeInitialUpdateState(), }; }, }) .addMapper({ filter: loadExploreDatasources, mapper: (state, action): ExploreItemState => { return { ...state, exploreDatasources: action.payload.exploreDatasources, }; }, }) .create(); export const updateChildRefreshState = ( state: Readonly<ExploreItemState>, payload: LocationUpdate, exploreId: ExploreId ): ExploreItemState => { const path = payload.path || ''; const queryState = payload.query[exploreId] as string; if (!queryState) { return state; } const urlState = parseUrlState(queryState); if (!state.urlState || path !== '/explore') { // we only want to refresh when browser back/forward return { ...state, urlState, update: { datasource: false, queries: false, range: false, ui: false } }; } const datasource = _.isEqual(urlState ? urlState.datasource : '', state.urlState.datasource) === false; const queries = _.isEqual(urlState ? urlState.queries : [], state.urlState.queries) === false; const range = _.isEqual(urlState ? urlState.range : DEFAULT_RANGE, state.urlState.range) === false; const ui = _.isEqual(urlState ? urlState.ui : DEFAULT_UI_STATE, state.urlState.ui) === false; return { ...state, urlState, update: { ...state.update, datasource, queries, range, ui, }, }; }; /** * Global Explore reducer that handles multiple Explore areas (left and right). * Actions that have an `exploreId` get routed to the ExploreItemReducer. */ export const exploreReducer = (state = initialExploreState, action: HigherOrderAction): ExploreState => { switch (action.type) { case splitCloseAction.type: { const { itemId } = action.payload as SplitCloseActionPayload; const targetSplit = { left: itemId === ExploreId.left ? 
state.right : state.left, right: initialExploreState.right, }; return { ...state, ...targetSplit, split: false, }; } case ActionTypes.SplitOpen: { return { ...state, split: true, right: { ...action.payload.itemState } }; } case ActionTypes.ResetExplore: { return initialExploreState; } case updateLocation.type: { const { query } = action.payload; if (!query || !query[ExploreId.left]) { return state; } const split = query[ExploreId.right] ? true : false; const leftState = state[ExploreId.left]; const rightState = state[ExploreId.right]; return { ...state, split, [ExploreId.left]: updateChildRefreshState(leftState, action.payload, ExploreId.left), [ExploreId.right]: updateChildRefreshState(rightState, action.payload, ExploreId.right), }; } } if (action.payload) { const { exploreId } = action.payload as any; if (exploreId !== undefined) { const exploreItemState = state[exploreId]; return { ...state, [exploreId]: itemReducer(exploreItemState, action) }; } } return state; }; export default { explore: exploreReducer, };
public/app/features/explore/state/reducers.ts
1
https://github.com/grafana/grafana/commit/1341f4517a3a67c58c8d4bca2b184fb3dea735f7
[ 0.9942620992660522, 0.015315218828618526, 0.00016492432041559368, 0.000170340936165303, 0.12142404913902283 ]
{ "id": 1, "code_window": [ "\n", "/**\n", " * Remove query row of the given index, as well as associated query results.\n", " */\n", "export const removeQueryRowAction = actionCreatorFactory<RemoveQueryRowPayload>('explore/REMOVE_QUERY_ROW').create();\n", "export const runQueriesAction = noPayloadActionCreatorFactory('explore/RUN_QUERIES').create();\n", "\n", "/**\n", " * Start a scan for more results using the given scanner.\n" ], "labels": [ "keep", "keep", "keep", "keep", "keep", "replace", "keep", "keep", "keep" ], "after_edit": [ "\n", "export const runQueriesAction = actionCreatorFactory<RunQueriesPayload>('explore/RUN_QUERIES').create();\n" ], "file_path": "public/app/features/explore/state/actionTypes.ts", "type": "replace", "edit_start_line_idx": 326 }
<?xml version="1.0" encoding="iso-8859-1"?> <!-- Generator: Adobe Illustrator 19.1.0, SVG Export Plug-In . SVG Version: 6.00 Build 0) --> <svg version="1.1" id="Layer_1" xmlns="http://www.w3.org/2000/svg" xmlns:xlink="http://www.w3.org/1999/xlink" x="0px" y="0px" width="100px" height="100px" viewBox="0 0 100 100" style="enable-background:new 0 0 100 100;" xml:space="preserve"> <polyline style="fill:none;stroke:#898989;stroke-width:2;stroke-miterlimit:10;" points="4.734,34.349 36.05,19.26 64.876,36.751 96.308,6.946 "/> <circle style="fill:#898989;" cx="4.885" cy="33.929" r="4.885"/> <circle style="fill:#898989;" cx="35.95" cy="19.545" r="4.885"/> <circle style="fill:#898989;" cx="65.047" cy="36.046" r="4.885"/> <circle style="fill:#898989;" cx="94.955" cy="7.135" r="4.885"/> <g> <linearGradient id="SVGID_1_" gradientUnits="userSpaceOnUse" x1="5" y1="103.7019" x2="5" y2="32.0424"> <stop offset="0" style="stop-color:#FFF33B"/> <stop offset="0" style="stop-color:#FFD53F"/> <stop offset="0" style="stop-color:#FBBC40"/> <stop offset="0" style="stop-color:#F7A840"/> <stop offset="0" style="stop-color:#F59B40"/> <stop offset="0" style="stop-color:#F3933F"/> <stop offset="0" style="stop-color:#F3903F"/> <stop offset="0.8423" style="stop-color:#ED683C"/> <stop offset="1" style="stop-color:#E93E3A"/> </linearGradient> <path style="fill:url(#SVGID_1_);" d="M9.001,48.173H0.999C0.447,48.173,0,48.62,0,49.172V100h10V49.172 C10,48.62,9.553,48.173,9.001,48.173z"/> <linearGradient id="SVGID_2_" gradientUnits="userSpaceOnUse" x1="5" y1="98.9423" x2="5" y2="53.1961"> <stop offset="0" style="stop-color:#FEBC11"/> <stop offset="1" style="stop-color:#F99B1C"/> </linearGradient> <path style="fill:url(#SVGID_2_);" d="M0,69.173v30.563h10V69.173"/> <linearGradient id="SVGID_3_" gradientUnits="userSpaceOnUse" x1="5" y1="99.4343" x2="5" y2="74.4359"> <stop offset="0" style="stop-color:#FEBC11"/> <stop offset="1" style="stop-color:#FFDE17"/> </linearGradient> <path style="fill:url(#SVGID_3_);" 
d="M0,83.166v16.701h10V83.166"/> </g> <g> <linearGradient id="SVGID_4_" gradientUnits="userSpaceOnUse" x1="20" y1="103.7019" x2="20" y2="32.0424"> <stop offset="0" style="stop-color:#FFF33B"/> <stop offset="0" style="stop-color:#FFD53F"/> <stop offset="0" style="stop-color:#FBBC40"/> <stop offset="0" style="stop-color:#F7A840"/> <stop offset="0" style="stop-color:#F59B40"/> <stop offset="0" style="stop-color:#F3933F"/> <stop offset="0" style="stop-color:#F3903F"/> <stop offset="0.8423" style="stop-color:#ED683C"/> <stop offset="1" style="stop-color:#E93E3A"/> </linearGradient> <path style="fill:url(#SVGID_4_);" d="M24.001,40.769h-8.002c-0.552,0-0.999,0.447-0.999,0.999V100h10V41.768 C25,41.216,24.553,40.769,24.001,40.769z"/> <linearGradient id="SVGID_5_" gradientUnits="userSpaceOnUse" x1="20" y1="98.9423" x2="20" y2="53.1961"> <stop offset="0" style="stop-color:#FEBC11"/> <stop offset="1" style="stop-color:#F99B1C"/> </linearGradient> <path style="fill:url(#SVGID_5_);" d="M15,64.716v35.02h10v-35.02"/> <linearGradient id="SVGID_6_" gradientUnits="userSpaceOnUse" x1="20" y1="99.4343" x2="20" y2="74.4359"> <stop offset="0" style="stop-color:#FEBC11"/> <stop offset="1" style="stop-color:#FFDE17"/> </linearGradient> <path style="fill:url(#SVGID_6_);" d="M15,80.731v19.137h10V80.731"/> </g> <g> <linearGradient id="SVGID_7_" gradientUnits="userSpaceOnUse" x1="35" y1="103.7019" x2="35" y2="32.0424"> <stop offset="0" style="stop-color:#FFF33B"/> <stop offset="0" style="stop-color:#FFD53F"/> <stop offset="0" style="stop-color:#FBBC40"/> <stop offset="0" style="stop-color:#F7A840"/> <stop offset="0" style="stop-color:#F59B40"/> <stop offset="0" style="stop-color:#F3933F"/> <stop offset="0" style="stop-color:#F3903F"/> <stop offset="0.8423" style="stop-color:#ED683C"/> <stop offset="1" style="stop-color:#E93E3A"/> </linearGradient> <path style="fill:url(#SVGID_7_);" d="M39.001,34.423h-8.002c-0.552,0-0.999,0.447-0.999,0.999V100h10V35.422 C40,34.87,39.553,34.423,39.001,34.423z"/> 
<linearGradient id="SVGID_8_" gradientUnits="userSpaceOnUse" x1="35" y1="98.9423" x2="35" y2="53.1961"> <stop offset="0" style="stop-color:#FEBC11"/> <stop offset="1" style="stop-color:#F99B1C"/> </linearGradient> <path style="fill:url(#SVGID_8_);" d="M30,60.895v38.84h10v-38.84"/> <linearGradient id="SVGID_9_" gradientUnits="userSpaceOnUse" x1="35" y1="99.4343" x2="35" y2="74.4359"> <stop offset="0" style="stop-color:#FEBC11"/> <stop offset="1" style="stop-color:#FFDE17"/> </linearGradient> <path style="fill:url(#SVGID_9_);" d="M30,78.643v21.225h10V78.643"/> </g> <g> <linearGradient id="SVGID_10_" gradientUnits="userSpaceOnUse" x1="50" y1="103.7019" x2="50" y2="32.0424"> <stop offset="0" style="stop-color:#FFF33B"/> <stop offset="0" style="stop-color:#FFD53F"/> <stop offset="0" style="stop-color:#FBBC40"/> <stop offset="0" style="stop-color:#F7A840"/> <stop offset="0" style="stop-color:#F59B40"/> <stop offset="0" style="stop-color:#F3933F"/> <stop offset="0" style="stop-color:#F3903F"/> <stop offset="0.8423" style="stop-color:#ED683C"/> <stop offset="1" style="stop-color:#E93E3A"/> </linearGradient> <path style="fill:url(#SVGID_10_);" d="M54.001,41.827h-8.002c-0.552,0-0.999,0.447-0.999,0.999V100h10V42.826 C55,42.274,54.553,41.827,54.001,41.827z"/> <linearGradient id="SVGID_11_" gradientUnits="userSpaceOnUse" x1="50" y1="98.9423" x2="50" y2="53.1961"> <stop offset="0" style="stop-color:#FEBC11"/> <stop offset="1" style="stop-color:#F99B1C"/> </linearGradient> <path style="fill:url(#SVGID_11_);" d="M45,65.352v34.383h10V65.352"/> <linearGradient id="SVGID_12_" gradientUnits="userSpaceOnUse" x1="50" y1="99.4343" x2="50" y2="74.4359"> <stop offset="0" style="stop-color:#FEBC11"/> <stop offset="1" style="stop-color:#FFDE17"/> </linearGradient> <path style="fill:url(#SVGID_12_);" d="M45,81.079v18.789h10V81.079"/> </g> <g> <linearGradient id="SVGID_13_" gradientUnits="userSpaceOnUse" x1="65" y1="103.8575" x2="65" y2="29.1875"> <stop offset="0" style="stop-color:#FFF33B"/> 
<stop offset="0" style="stop-color:#FFD53F"/> <stop offset="0" style="stop-color:#FBBC40"/> <stop offset="0" style="stop-color:#F7A840"/> <stop offset="0" style="stop-color:#F59B40"/> <stop offset="0" style="stop-color:#F3933F"/> <stop offset="0" style="stop-color:#F3903F"/> <stop offset="0.8423" style="stop-color:#ED683C"/> <stop offset="1" style="stop-color:#E93E3A"/> </linearGradient> <path style="fill:url(#SVGID_13_);" d="M69.001,50.404h-8.002c-0.552,0-0.999,0.447-0.999,0.999V100h10V51.403 C70,50.851,69.553,50.404,69.001,50.404z"/> <linearGradient id="SVGID_14_" gradientUnits="userSpaceOnUse" x1="65" y1="98.8979" x2="65" y2="51.2298"> <stop offset="0" style="stop-color:#FEBC11"/> <stop offset="1" style="stop-color:#F99B1C"/> </linearGradient> <path style="fill:url(#SVGID_14_);" d="M60,70.531v29.193h10V70.531"/> <linearGradient id="SVGID_15_" gradientUnits="userSpaceOnUse" x1="65" y1="99.4105" x2="65" y2="73.3619"> <stop offset="0" style="stop-color:#FEBC11"/> <stop offset="1" style="stop-color:#FFDE17"/> </linearGradient> <path style="fill:url(#SVGID_15_);" d="M60,83.909v15.953h10V83.909"/> </g> <g> <linearGradient id="SVGID_16_" gradientUnits="userSpaceOnUse" x1="80" y1="104.4108" x2="80" y2="19.0293"> <stop offset="0" style="stop-color:#FFF33B"/> <stop offset="0" style="stop-color:#FFD53F"/> <stop offset="0" style="stop-color:#FBBC40"/> <stop offset="0" style="stop-color:#F7A840"/> <stop offset="0" style="stop-color:#F59B40"/> <stop offset="0" style="stop-color:#F3933F"/> <stop offset="0" style="stop-color:#F3903F"/> <stop offset="0.8423" style="stop-color:#ED683C"/> <stop offset="1" style="stop-color:#E93E3A"/> </linearGradient> <path style="fill:url(#SVGID_16_);" d="M84.001,40.769h-8.002c-0.552,0-0.999,0.447-0.999,0.999V100h10V41.768 C85,41.216,84.553,40.769,84.001,40.769z"/> <linearGradient id="SVGID_17_" gradientUnits="userSpaceOnUse" x1="80" y1="98.9423" x2="80" y2="53.1961"> <stop offset="0" style="stop-color:#FEBC11"/> <stop offset="1" 
style="stop-color:#F99B1C"/> </linearGradient> <path style="fill:url(#SVGID_17_);" d="M75,64.716v35.02h10v-35.02"/> <linearGradient id="SVGID_18_" gradientUnits="userSpaceOnUse" x1="80" y1="99.4343" x2="80" y2="74.4359"> <stop offset="0" style="stop-color:#FEBC11"/> <stop offset="1" style="stop-color:#FFDE17"/> </linearGradient> <path style="fill:url(#SVGID_18_);" d="M75,80.731v19.137h10V80.731"/> </g> <g> <linearGradient id="SVGID_19_" gradientUnits="userSpaceOnUse" x1="95" y1="103.5838" x2="95" y2="34.2115"> <stop offset="0" style="stop-color:#FFF33B"/> <stop offset="0" style="stop-color:#FFD53F"/> <stop offset="0" style="stop-color:#FBBC40"/> <stop offset="0" style="stop-color:#F7A840"/> <stop offset="0" style="stop-color:#F59B40"/> <stop offset="0" style="stop-color:#F3933F"/> <stop offset="0" style="stop-color:#F3903F"/> <stop offset="0.8423" style="stop-color:#ED683C"/> <stop offset="1" style="stop-color:#E93E3A"/> </linearGradient> <path style="fill:url(#SVGID_19_);" d="M99.001,21.157h-8.002c-0.552,0-0.999,0.447-0.999,0.999V100h10V22.156 C100,21.604,99.553,21.157,99.001,21.157z"/> <linearGradient id="SVGID_20_" gradientUnits="userSpaceOnUse" x1="95" y1="98.9761" x2="95" y2="54.69"> <stop offset="0" style="stop-color:#FEBC11"/> <stop offset="1" style="stop-color:#F99B1C"/> </linearGradient> <path style="fill:url(#SVGID_20_);" d="M90,52.898v46.846h10V52.898"/> <linearGradient id="SVGID_21_" gradientUnits="userSpaceOnUse" x1="95" y1="99.4524" x2="95" y2="75.2518"> <stop offset="0" style="stop-color:#FEBC11"/> <stop offset="1" style="stop-color:#FFDE17"/> </linearGradient> <path style="fill:url(#SVGID_21_);" d="M90,74.272v25.6h10v-25.6"/> </g> </svg>
public/app/plugins/panel/graph2/img/icn-graph-panel.svg
0
https://github.com/grafana/grafana/commit/1341f4517a3a67c58c8d4bca2b184fb3dea735f7
[ 0.00017581814609002322, 0.0001746233319863677, 0.00017118196410592645, 0.00017476164794061333, 9.826109135246952e-7 ]
{ "id": 1, "code_window": [ "\n", "/**\n", " * Remove query row of the given index, as well as associated query results.\n", " */\n", "export const removeQueryRowAction = actionCreatorFactory<RemoveQueryRowPayload>('explore/REMOVE_QUERY_ROW').create();\n", "export const runQueriesAction = noPayloadActionCreatorFactory('explore/RUN_QUERIES').create();\n", "\n", "/**\n", " * Start a scan for more results using the given scanner.\n" ], "labels": [ "keep", "keep", "keep", "keep", "keep", "replace", "keep", "keep", "keep" ], "after_edit": [ "\n", "export const runQueriesAction = actionCreatorFactory<RunQueriesPayload>('explore/RUN_QUERIES').create();\n" ], "file_path": "public/app/features/explore/state/actionTypes.ts", "type": "replace", "edit_start_line_idx": 326 }
import { useRef, useEffect } from 'react'; export const useRefMounted = () => { const refMounted = useRef(false); useEffect(() => { refMounted.current = true; return () => { refMounted.current = false; }; }); return refMounted; };
public/app/core/hooks/useRefMounted.ts
0
https://github.com/grafana/grafana/commit/1341f4517a3a67c58c8d4bca2b184fb3dea735f7
[ 0.00017397094052284956, 0.00017318260506726801, 0.00017239426961168647, 0.00017318260506726801, 7.883354555815458e-7 ]
{ "id": 1, "code_window": [ "\n", "/**\n", " * Remove query row of the given index, as well as associated query results.\n", " */\n", "export const removeQueryRowAction = actionCreatorFactory<RemoveQueryRowPayload>('explore/REMOVE_QUERY_ROW').create();\n", "export const runQueriesAction = noPayloadActionCreatorFactory('explore/RUN_QUERIES').create();\n", "\n", "/**\n", " * Start a scan for more results using the given scanner.\n" ], "labels": [ "keep", "keep", "keep", "keep", "keep", "replace", "keep", "keep", "keep" ], "after_edit": [ "\n", "export const runQueriesAction = actionCreatorFactory<RunQueriesPayload>('explore/RUN_QUERIES').create();\n" ], "file_path": "public/app/features/explore/state/actionTypes.ts", "type": "replace", "edit_start_line_idx": 326 }
+++ title = "GitLab OAuth2 Authentication" description = "Grafana OAuthentication Guide " keywords = ["grafana", "configuration", "documentation", "oauth"] type = "docs" [menu.docs] name = "GitLab" identifier = "gitlab_oauth" parent = "authentication" weight = 5 +++ # GitLab OAuth2 Authentication To enable the GitLab OAuth2 you must register an application in GitLab. GitLab will generate a client ID and secret key for you to use. ## Create GitLab OAuth keys You need to [create a GitLab OAuth application](https://docs.gitlab.com/ce/integration/oauth_provider.html). Choose a descriptive *Name*, and use the following *Redirect URI*: ``` https://grafana.example.com/login/gitlab ``` where `https://grafana.example.com` is the URL you use to connect to Grafana. Adjust it as needed if you don't use HTTPS or if you use a different port; for instance, if you access Grafana at `http://203.0.113.31:3000`, you should use ``` http://203.0.113.31:3000/login/gitlab ``` Finally, select *api* as the *Scope* and submit the form. Note that if you're not going to use GitLab groups for authorization (i.e. not setting `allowed_groups`, see below), you can select *read_user* instead of *api* as the *Scope*, thus giving a more restricted access to your GitLab API. You'll get an *Application Id* and a *Secret* in return; we'll call them `GITLAB_APPLICATION_ID` and `GITLAB_SECRET` respectively for the rest of this section. ## Enable GitLab in Grafana Add the following to your Grafana configuration file to enable GitLab authentication: ```bash [auth.gitlab] enabled = true allow_sign_up = false client_id = GITLAB_APPLICATION_ID client_secret = GITLAB_SECRET scopes = api auth_url = https://gitlab.com/oauth/authorize token_url = https://gitlab.com/oauth/token api_url = https://gitlab.com/api/v4 allowed_groups = ``` You may have to set the `root_url` option of `[server]` for the callback URL to be correct. For example in case you are serving Grafana behind a proxy. 
Restart the Grafana backend for your changes to take effect. If you use your own instance of GitLab instead of `gitlab.com`, adjust `auth_url`, `token_url` and `api_url` accordingly by replacing the `gitlab.com` hostname with your own. With `allow_sign_up` set to `false`, only existing users will be able to login using their GitLab account, but with `allow_sign_up` set to `true`, *any* user who can authenticate on GitLab will be able to login on your Grafana instance; if you use the public `gitlab.com`, it means anyone in the world would be able to login on your Grafana instance. You can can however limit access to only members of a given group or list of groups by setting the `allowed_groups` option. ### allowed_groups To limit access to authenticated users that are members of one or more [GitLab groups](https://docs.gitlab.com/ce/user/group/index.html), set `allowed_groups` to a comma- or space-separated list of groups. For instance, if you want to only give access to members of the `example` group, set ```ini allowed_groups = example ``` If you want to also give access to members of the subgroup `bar`, which is in the group `foo`, set ```ini allowed_groups = example, foo/bar ``` Note that in GitLab, the group or subgroup name doesn't always match its display name, especially if the display name contains spaces or special characters. Make sure you always use the group or subgroup name as it appears in the URL of the group or subgroup. Here's a complete example with `allow_sign_up` enabled, and access limited to the `example` and `foo/bar` groups: ```ini [auth.gitlab] enabled = true allow_sign_up = true client_id = GITLAB_APPLICATION_ID client_secret = GITLAB_SECRET scopes = api auth_url = https://gitlab.com/oauth/authorize token_url = https://gitlab.com/oauth/token api_url = https://gitlab.com/api/v4 allowed_groups = example, foo/bar ```
docs/sources/auth/gitlab.md
0
https://github.com/grafana/grafana/commit/1341f4517a3a67c58c8d4bca2b184fb3dea735f7
[ 0.00017671311798039824, 0.0001721459993859753, 0.00016727908223401755, 0.00017187756020575762, 0.0000023650591174373403 ]
{ "id": 2, "code_window": [ " // Some datasource's query builders allow per-query interval limits,\n", " // but we're using the datasource interval limit for now\n", " const interval = datasourceInstance.interval;\n", "\n", " dispatch(runQueriesAction());\n", " // Keep table queries first since they need to return quickly\n", " if ((ignoreUIState || showingTable) && supportsTable) {\n", " dispatch(\n", " runQueriesForType(\n", " exploreId,\n" ], "labels": [ "keep", "keep", "keep", "keep", "replace", "keep", "keep", "keep", "keep", "keep" ], "after_edit": [ " dispatch(runQueriesAction({ exploreId }));\n" ], "file_path": "public/app/features/explore/state/actions.ts", "type": "replace", "edit_start_line_idx": 555 }
// Libraries // @ts-ignore import _ from 'lodash'; // Services & Utils import store from 'app/core/store'; import { getDatasourceSrv } from 'app/features/plugins/datasource_srv'; import { Emitter } from 'app/core/core'; import { LAST_USED_DATASOURCE_KEY, clearQueryKeys, ensureQueries, generateEmptyQuery, hasNonEmptyQuery, makeTimeSeriesList, updateHistory, buildQueryTransaction, serializeStateToUrlParam, parseUrlState, } from 'app/core/utils/explore'; // Actions import { updateLocation } from 'app/core/actions'; // Types import { ThunkResult } from 'app/types'; import { RawTimeRange, TimeRange, DataSourceApi, DataQuery, DataSourceSelectItem, QueryHint, QueryFixAction, } from '@grafana/ui/src/types'; import { ExploreId, ExploreUrlState, RangeScanner, ResultType, QueryOptions, ExploreUIState, QueryTransaction, } from 'app/types/explore'; import { updateDatasourceInstanceAction, changeQueryAction, changeSizeAction, ChangeSizePayload, changeTimeAction, scanStopAction, clearQueriesAction, initializeExploreAction, loadDatasourceMissingAction, loadDatasourcePendingAction, queriesImportedAction, LoadDatasourceReadyPayload, loadDatasourceReadyAction, modifyQueriesAction, queryTransactionFailureAction, queryTransactionStartAction, queryTransactionSuccessAction, scanRangeAction, scanStartAction, setQueriesAction, splitCloseAction, splitOpenAction, addQueryRowAction, toggleGraphAction, toggleLogsAction, toggleTableAction, ToggleGraphPayload, ToggleLogsPayload, ToggleTablePayload, updateUIStateAction, runQueriesAction, testDataSourcePendingAction, testDataSourceSuccessAction, testDataSourceFailureAction, loadExploreDatasources, } from './actionTypes'; import { ActionOf, ActionCreator } from 'app/core/redux/actionCreatorFactory'; import { LogsDedupStrategy } from 'app/core/logs_model'; import { parseTime } from '../TimePicker'; /** * Updates UI state and save it to the URL */ const updateExploreUIState = (exploreId: ExploreId, uiStateFragment: Partial<ExploreUIState>): 
ThunkResult<void> => { return dispatch => { dispatch(updateUIStateAction({ exploreId, ...uiStateFragment })); dispatch(stateSave()); }; }; /** * Adds a query row after the row with the given index. */ export function addQueryRow(exploreId: ExploreId, index: number): ThunkResult<void> { return (dispatch, getState) => { const query = generateEmptyQuery(getState().explore[exploreId].queries, index); dispatch(addQueryRowAction({ exploreId, index, query })); }; } /** * Loads a new datasource identified by the given name. */ export function changeDatasource(exploreId: ExploreId, datasource: string): ThunkResult<void> { return async (dispatch, getState) => { let newDataSourceInstance: DataSourceApi = null; if (!datasource) { newDataSourceInstance = await getDatasourceSrv().get(); } else { newDataSourceInstance = await getDatasourceSrv().get(datasource); } const currentDataSourceInstance = getState().explore[exploreId].datasourceInstance; const queries = getState().explore[exploreId].queries; await dispatch(importQueries(exploreId, queries, currentDataSourceInstance, newDataSourceInstance)); dispatch(updateDatasourceInstanceAction({ exploreId, datasourceInstance: newDataSourceInstance })); await dispatch(loadDatasource(exploreId, newDataSourceInstance)); dispatch(runQueries(exploreId)); }; } /** * Query change handler for the query row with the given index. * If `override` is reset the query modifications and run the queries. Use this to set queries via a link. */ export function changeQuery( exploreId: ExploreId, query: DataQuery, index: number, override: boolean ): ThunkResult<void> { return (dispatch, getState) => { // Null query means reset if (query === null) { query = { ...generateEmptyQuery(getState().explore[exploreId].queries) }; } dispatch(changeQueryAction({ exploreId, query, index, override })); if (override) { dispatch(runQueries(exploreId)); } }; } /** * Keep track of the Explore container size, in particular the width. 
* The width will be used to calculate graph intervals (number of datapoints). */ export function changeSize( exploreId: ExploreId, { height, width }: { height: number; width: number } ): ActionOf<ChangeSizePayload> { return changeSizeAction({ exploreId, height, width }); } /** * Change the time range of Explore. Usually called from the Timepicker or a graph interaction. */ export function changeTime(exploreId: ExploreId, range: TimeRange): ThunkResult<void> { return dispatch => { dispatch(changeTimeAction({ exploreId, range })); dispatch(runQueries(exploreId)); }; } /** * Clear all queries and results. */ export function clearQueries(exploreId: ExploreId): ThunkResult<void> { return dispatch => { dispatch(scanStopAction({ exploreId })); dispatch(clearQueriesAction({ exploreId })); dispatch(stateSave()); }; } /** * Loads all explore data sources and sets the chosen datasource. * If there are no datasources a missing datasource action is dispatched. */ export function loadExploreDatasourcesAndSetDatasource( exploreId: ExploreId, datasourceName: string ): ThunkResult<void> { return dispatch => { const exploreDatasources: DataSourceSelectItem[] = getDatasourceSrv() .getExternal() .map((ds: any) => ({ value: ds.name, name: ds.name, meta: ds.meta, })); dispatch(loadExploreDatasources({ exploreId, exploreDatasources })); if (exploreDatasources.length >= 1) { dispatch(changeDatasource(exploreId, datasourceName)); } else { dispatch(loadDatasourceMissingAction({ exploreId })); } }; } /** * Initialize Explore state with state from the URL and the React component. * Call this only on components for with the Explore state has not been initialized. 
*/ export function initializeExplore( exploreId: ExploreId, datasourceName: string, queries: DataQuery[], range: RawTimeRange, containerWidth: number, eventBridge: Emitter, ui: ExploreUIState ): ThunkResult<void> { return async dispatch => { dispatch(loadExploreDatasourcesAndSetDatasource(exploreId, datasourceName)); dispatch( initializeExploreAction({ exploreId, containerWidth, eventBridge, queries, range, ui, }) ); }; } /** * Datasource loading was successfully completed. */ export const loadDatasourceReady = ( exploreId: ExploreId, instance: DataSourceApi ): ActionOf<LoadDatasourceReadyPayload> => { const historyKey = `grafana.explore.history.${instance.meta.id}`; const history = store.getObject(historyKey, []); // Save last-used datasource store.set(LAST_USED_DATASOURCE_KEY, instance.name); return loadDatasourceReadyAction({ exploreId, history, }); }; export function importQueries( exploreId: ExploreId, queries: DataQuery[], sourceDataSource: DataSourceApi, targetDataSource: DataSourceApi ): ThunkResult<void> { return async dispatch => { if (!sourceDataSource) { // explore not initialized dispatch(queriesImportedAction({ exploreId, queries })); return; } let importedQueries = queries; // Check if queries can be imported from previously selected datasource if (sourceDataSource.meta.id === targetDataSource.meta.id) { // Keep same queries if same type of datasource importedQueries = [...queries]; } else if (targetDataSource.importQueries) { // Datasource-specific importers importedQueries = await targetDataSource.importQueries(queries, sourceDataSource.meta); } else { // Default is blank queries importedQueries = ensureQueries(); } const nextQueries = importedQueries.map((q, i) => ({ ...q, ...generateEmptyQuery(queries), })); dispatch(queriesImportedAction({ exploreId, queries: nextQueries })); }; } /** * Tests datasource. 
*/ export const testDatasource = (exploreId: ExploreId, instance: DataSourceApi): ThunkResult<void> => { return async dispatch => { let datasourceError = null; dispatch(testDataSourcePendingAction({ exploreId })); try { const testResult = await instance.testDatasource(); datasourceError = testResult.status === 'success' ? null : testResult.message; } catch (error) { datasourceError = (error && error.statusText) || 'Network error'; } if (datasourceError) { dispatch(testDataSourceFailureAction({ exploreId, error: datasourceError })); return; } dispatch(testDataSourceSuccessAction({ exploreId })); }; }; /** * Reconnects datasource when there is a connection failure. */ export const reconnectDatasource = (exploreId: ExploreId): ThunkResult<void> => { return async (dispatch, getState) => { const instance = getState().explore[exploreId].datasourceInstance; dispatch(changeDatasource(exploreId, instance.name)); }; }; /** * Main action to asynchronously load a datasource. Dispatches lots of smaller actions for feedback. */ export function loadDatasource(exploreId: ExploreId, instance: DataSourceApi): ThunkResult<void> { return async (dispatch, getState) => { const datasourceName = instance.name; // Keep ID to track selection dispatch(loadDatasourcePendingAction({ exploreId, requestedDatasourceName: datasourceName })); await dispatch(testDatasource(exploreId, instance)); if (datasourceName !== getState().explore[exploreId].requestedDatasourceName) { // User already changed datasource again, discard results return; } if (instance.init) { instance.init(); } if (datasourceName !== getState().explore[exploreId].requestedDatasourceName) { // User already changed datasource again, discard results return; } dispatch(loadDatasourceReady(exploreId, instance)); }; } /** * Action to modify a query given a datasource-specific modifier action. * @param exploreId Explore area * @param modification Action object with a type, e.g., ADD_FILTER * @param index Optional query row index. 
If omitted, the modification is applied to all query rows. * @param modifier Function that executes the modification, typically `datasourceInstance.modifyQueries`. */ export function modifyQueries( exploreId: ExploreId, modification: QueryFixAction, index: number, modifier: any ): ThunkResult<void> { return dispatch => { dispatch(modifyQueriesAction({ exploreId, modification, index, modifier })); if (!modification.preventSubmit) { dispatch(runQueries(exploreId)); } }; } /** * Mark a query transaction as failed with an error extracted from the query response. * The transaction will be marked as `done`. */ export function queryTransactionFailure( exploreId: ExploreId, transactionId: string, response: any, datasourceId: string ): ThunkResult<void> { return (dispatch, getState) => { const { datasourceInstance, queryTransactions } = getState().explore[exploreId]; if (datasourceInstance.meta.id !== datasourceId || response.cancelled) { // Navigated away, queries did not matter return; } // Transaction might have been discarded if (!queryTransactions.find(qt => qt.id === transactionId)) { return; } console.error(response); let error: string; let errorDetails: string; if (response.data) { if (typeof response.data === 'string') { error = response.data; } else if (response.data.error) { error = response.data.error; if (response.data.response) { errorDetails = response.data.response; } } else { throw new Error('Could not handle error response'); } } else if (response.message) { error = response.message; } else if (typeof response === 'string') { error = response; } else { error = 'Unknown error during query transaction. 
Please check JS console logs.'; } // Mark transactions as complete const nextQueryTransactions = queryTransactions.map(qt => { if (qt.id === transactionId) { return { ...qt, error, errorDetails, done: true, }; } return qt; }); dispatch(queryTransactionFailureAction({ exploreId, queryTransactions: nextQueryTransactions })); }; } /** * Complete a query transaction, mark the transaction as `done` and store query state in URL. * If the transaction was started by a scanner, it keeps on scanning for more results. * Side-effect: the query is stored in localStorage. * @param exploreId Explore area * @param transactionId ID * @param result Response from `datasourceInstance.query()` * @param latency Duration between request and response * @param queries Queries from all query rows * @param datasourceId Origin datasource instance, used to discard results if current datasource is different */ export function queryTransactionSuccess( exploreId: ExploreId, transactionId: string, result: any, latency: number, queries: DataQuery[], datasourceId: string ): ThunkResult<void> { return (dispatch, getState) => { const { datasourceInstance, history, queryTransactions, scanner, scanning } = getState().explore[exploreId]; // If datasource already changed, results do not matter if (datasourceInstance.meta.id !== datasourceId) { return; } // Transaction might have been discarded const transaction = queryTransactions.find(qt => qt.id === transactionId); if (!transaction) { return; } // Get query hints let hints: QueryHint[]; if (datasourceInstance.getQueryHints) { hints = datasourceInstance.getQueryHints(transaction.query, result); } // Mark transactions as complete and attach result const nextQueryTransactions = queryTransactions.map(qt => { if (qt.id === transactionId) { return { ...qt, hints, latency, result, done: true, }; } return qt; }); // Side-effect: Saving history in localstorage const nextHistory = updateHistory(history, datasourceId, queries); dispatch( 
queryTransactionSuccessAction({ exploreId, history: nextHistory, queryTransactions: nextQueryTransactions, }) ); // Keep scanning for results if this was the last scanning transaction if (scanning) { if (_.size(result) === 0) { const other = nextQueryTransactions.find(qt => qt.scanning && !qt.done); if (!other) { const range = scanner(); dispatch(scanRangeAction({ exploreId, range })); } } else { // We can stop scanning if we have a result dispatch(scanStopAction({ exploreId })); } } }; } /** * Main action to run queries and dispatches sub-actions based on which result viewers are active */ export function runQueries(exploreId: ExploreId, ignoreUIState = false): ThunkResult<void> { return (dispatch, getState) => { const { datasourceInstance, queries, showingLogs, showingGraph, showingTable, supportsGraph, supportsLogs, supportsTable, datasourceError, } = getState().explore[exploreId]; if (datasourceError) { // let's not run any queries if data source is in a faulty state return; } if (!hasNonEmptyQuery(queries)) { dispatch(clearQueriesAction({ exploreId })); dispatch(stateSave()); // Remember to saves to state and update location return; } // Some datasource's query builders allow per-query interval limits, // but we're using the datasource interval limit for now const interval = datasourceInstance.interval; dispatch(runQueriesAction()); // Keep table queries first since they need to return quickly if ((ignoreUIState || showingTable) && supportsTable) { dispatch( runQueriesForType( exploreId, 'Table', { interval, format: 'table', instant: true, valueWithRefId: true, }, (data: any) => data[0] ) ); } if ((ignoreUIState || showingGraph) && supportsGraph) { dispatch( runQueriesForType( exploreId, 'Graph', { interval, format: 'time_series', instant: false, }, makeTimeSeriesList ) ); } if ((ignoreUIState || showingLogs) && supportsLogs) { dispatch(runQueriesForType(exploreId, 'Logs', { interval, format: 'logs' })); } dispatch(stateSave()); }; } /** * Helper action to 
build a query transaction object and handing the query to the datasource. * @param exploreId Explore area * @param resultType Result viewer that will be associated with this query result * @param queryOptions Query options as required by the datasource's `query()` function. * @param resultGetter Optional result extractor, e.g., if the result is a list and you only need the first element. */ function runQueriesForType( exploreId: ExploreId, resultType: ResultType, queryOptions: QueryOptions, resultGetter?: any ): ThunkResult<void> { return async (dispatch, getState) => { const { datasourceInstance, eventBridge, queries, queryIntervals, range, scanning } = getState().explore[exploreId]; const datasourceId = datasourceInstance.meta.id; // Run all queries concurrently queries.forEach(async (query, rowIndex) => { const transaction = buildQueryTransaction( query, rowIndex, resultType, queryOptions, range, queryIntervals, scanning ); dispatch(queryTransactionStartAction({ exploreId, resultType, rowIndex, transaction })); try { const now = Date.now(); const res = await datasourceInstance.query(transaction.options); eventBridge.emit('data-received', res.data || []); const latency = Date.now() - now; const { queryTransactions } = getState().explore[exploreId]; const results = resultGetter ? resultGetter(res.data, transaction, queryTransactions) : res.data; dispatch(queryTransactionSuccess(exploreId, transaction.id, results, latency, queries, datasourceId)); } catch (response) { eventBridge.emit('data-error', response); dispatch(queryTransactionFailure(exploreId, transaction.id, response, datasourceId)); } }); }; } /** * Start a scan for more results using the given scanner. 
* @param exploreId Explore area * @param scanner Function that a) returns a new time range and b) triggers a query run for the new range */ export function scanStart(exploreId: ExploreId, scanner: RangeScanner): ThunkResult<void> { return dispatch => { // Register the scanner dispatch(scanStartAction({ exploreId, scanner })); // Scanning must trigger query run, and return the new range const range = scanner(); // Set the new range to be displayed dispatch(scanRangeAction({ exploreId, range })); }; } /** * Reset queries to the given queries. Any modifications will be discarded. * Use this action for clicks on query examples. Triggers a query run. */ export function setQueries(exploreId: ExploreId, rawQueries: DataQuery[]): ThunkResult<void> { return (dispatch, getState) => { // Inject react keys into query objects const queries = rawQueries.map(q => ({ ...q, ...generateEmptyQuery(getState().explore[exploreId].queries) })); dispatch(setQueriesAction({ exploreId, queries })); dispatch(runQueries(exploreId)); }; } /** * Close the split view and save URL state. */ export function splitClose(itemId: ExploreId): ThunkResult<void> { return dispatch => { dispatch(splitCloseAction({ itemId })); dispatch(stateSave()); }; } /** * Open the split view and copy the left state to be the right state. * The right state is automatically initialized. * The copy keeps all query modifications but wipes the query results. 
*/ export function splitOpen(): ThunkResult<void> { return (dispatch, getState) => { // Clone left state to become the right state const leftState = getState().explore[ExploreId.left]; const queryState = getState().location.query[ExploreId.left] as string; const urlState = parseUrlState(queryState); const queryTransactions: QueryTransaction[] = []; const itemState = { ...leftState, queryTransactions, queries: leftState.queries.slice(), exploreId: ExploreId.right, urlState, }; dispatch(splitOpenAction({ itemState })); dispatch(stateSave()); }; } /** * Saves Explore state to URL using the `left` and `right` parameters. * If split view is not active, `right` will not be set. */ export function stateSave(): ThunkResult<void> { return (dispatch, getState) => { const { left, right, split } = getState().explore; const urlStates: { [index: string]: string } = {}; const leftUrlState: ExploreUrlState = { datasource: left.datasourceInstance.name, queries: left.queries.map(clearQueryKeys), range: left.range, ui: { showingGraph: left.showingGraph, showingLogs: left.showingLogs, showingTable: left.showingTable, dedupStrategy: left.dedupStrategy, }, }; urlStates.left = serializeStateToUrlParam(leftUrlState, true); if (split) { const rightUrlState: ExploreUrlState = { datasource: right.datasourceInstance.name, queries: right.queries.map(clearQueryKeys), range: right.range, ui: { showingGraph: right.showingGraph, showingLogs: right.showingLogs, showingTable: right.showingTable, dedupStrategy: right.dedupStrategy, }, }; urlStates.right = serializeStateToUrlParam(rightUrlState, true); } dispatch(updateLocation({ query: urlStates })); }; } /** * Creates action to collapse graph/logs/table panel. 
When panel is collapsed, * queries won't be run */ const togglePanelActionCreator = ( actionCreator: | ActionCreator<ToggleGraphPayload> | ActionCreator<ToggleLogsPayload> | ActionCreator<ToggleTablePayload> ) => (exploreId: ExploreId, isPanelVisible: boolean): ThunkResult<void> => { return dispatch => { let uiFragmentStateUpdate: Partial<ExploreUIState>; const shouldRunQueries = !isPanelVisible; switch (actionCreator.type) { case toggleGraphAction.type: uiFragmentStateUpdate = { showingGraph: !isPanelVisible }; break; case toggleLogsAction.type: uiFragmentStateUpdate = { showingLogs: !isPanelVisible }; break; case toggleTableAction.type: uiFragmentStateUpdate = { showingTable: !isPanelVisible }; break; } dispatch(actionCreator({ exploreId })); dispatch(updateExploreUIState(exploreId, uiFragmentStateUpdate)); if (shouldRunQueries) { dispatch(runQueries(exploreId)); } }; }; /** * Expand/collapse the graph result viewer. When collapsed, graph queries won't be run. */ export const toggleGraph = togglePanelActionCreator(toggleGraphAction); /** * Expand/collapse the logs result viewer. When collapsed, log queries won't be run. */ export const toggleLogs = togglePanelActionCreator(toggleLogsAction); /** * Expand/collapse the table result viewer. When collapsed, table queries won't be run. */ export const toggleTable = togglePanelActionCreator(toggleTableAction); /** * Change logs deduplication strategy and update URL. 
*/ export const changeDedupStrategy = (exploreId: ExploreId, dedupStrategy: LogsDedupStrategy): ThunkResult<void> => { return dispatch => { dispatch(updateExploreUIState(exploreId, { dedupStrategy })); }; }; export function refreshExplore(exploreId: ExploreId): ThunkResult<void> { return (dispatch, getState) => { const itemState = getState().explore[exploreId]; if (!itemState.initialized) { return; } const { urlState, update, containerWidth, eventBridge } = itemState; const { datasource, queries, range, ui } = urlState; const refreshQueries = queries.map(q => ({ ...q, ...generateEmptyQuery(itemState.queries) })); const refreshRange = { from: parseTime(range.from), to: parseTime(range.to) }; // need to refresh datasource if (update.datasource) { const initialQueries = ensureQueries(queries); const initialRange = { from: parseTime(range.from), to: parseTime(range.to) }; dispatch(initializeExplore(exploreId, datasource, initialQueries, initialRange, containerWidth, eventBridge, ui)); return; } if (update.range) { dispatch(changeTimeAction({ exploreId, range: refreshRange as TimeRange })); } // need to refresh ui state if (update.ui) { dispatch(updateUIStateAction({ ...ui, exploreId })); } // need to refresh queries if (update.queries) { dispatch(setQueriesAction({ exploreId, queries: refreshQueries })); } // always run queries when refresh is needed if (update.queries || update.ui || update.range) { dispatch(runQueries(exploreId)); } }; }
public/app/features/explore/state/actions.ts
1
https://github.com/grafana/grafana/commit/1341f4517a3a67c58c8d4bca2b184fb3dea735f7
[ 0.9985851049423218, 0.0497695691883564, 0.0001648519973969087, 0.0009089108789339662, 0.2105499655008316 ]
{ "id": 2, "code_window": [ " // Some datasource's query builders allow per-query interval limits,\n", " // but we're using the datasource interval limit for now\n", " const interval = datasourceInstance.interval;\n", "\n", " dispatch(runQueriesAction());\n", " // Keep table queries first since they need to return quickly\n", " if ((ignoreUIState || showingTable) && supportsTable) {\n", " dispatch(\n", " runQueriesForType(\n", " exploreId,\n" ], "labels": [ "keep", "keep", "keep", "keep", "replace", "keep", "keep", "keep", "keep", "keep" ], "after_edit": [ " dispatch(runQueriesAction({ exploreId }));\n" ], "file_path": "public/app/features/explore/state/actions.ts", "type": "replace", "edit_start_line_idx": 555 }
// Copyright 2016 The Xorm Authors. All rights reserved. // Use of this source code is governed by a BSD-style // license that can be found in the LICENSE file. package xorm import ( "database/sql" "errors" "reflect" ) // Count counts the records. bean's non-empty fields // are conditions. func (session *Session) Count(bean ...interface{}) (int64, error) { if session.isAutoClose { defer session.Close() } var sqlStr string var args []interface{} var err error if session.statement.RawSQL == "" { sqlStr, args, err = session.statement.genCountSQL(bean...) if err != nil { return 0, err } } else { sqlStr = session.statement.RawSQL args = session.statement.RawParams } var total int64 err = session.queryRow(sqlStr, args...).Scan(&total) if err == sql.ErrNoRows || err == nil { return total, nil } return 0, err } // sum call sum some column. bean's non-empty fields are conditions. func (session *Session) sum(res interface{}, bean interface{}, columnNames ...string) error { if session.isAutoClose { defer session.Close() } v := reflect.ValueOf(res) if v.Kind() != reflect.Ptr { return errors.New("need a pointer to a variable") } var isSlice = v.Elem().Kind() == reflect.Slice var sqlStr string var args []interface{} var err error if len(session.statement.RawSQL) == 0 { sqlStr, args, err = session.statement.genSumSQL(bean, columnNames...) if err != nil { return err } } else { sqlStr = session.statement.RawSQL args = session.statement.RawParams } if isSlice { err = session.queryRow(sqlStr, args...).ScanSlice(res) } else { err = session.queryRow(sqlStr, args...).Scan(res) } if err == sql.ErrNoRows || err == nil { return nil } return err } // Sum call sum some column. bean's non-empty fields are conditions. func (session *Session) Sum(bean interface{}, columnName string) (res float64, err error) { return res, session.sum(&res, bean, columnName) } // SumInt call sum some column. bean's non-empty fields are conditions. 
func (session *Session) SumInt(bean interface{}, columnName string) (res int64, err error) { return res, session.sum(&res, bean, columnName) } // Sums call sum some columns. bean's non-empty fields are conditions. func (session *Session) Sums(bean interface{}, columnNames ...string) ([]float64, error) { var res = make([]float64, len(columnNames), len(columnNames)) return res, session.sum(&res, bean, columnNames...) } // SumsInt sum specify columns and return as []int64 instead of []float64 func (session *Session) SumsInt(bean interface{}, columnNames ...string) ([]int64, error) { var res = make([]int64, len(columnNames), len(columnNames)) return res, session.sum(&res, bean, columnNames...) }
vendor/github.com/go-xorm/xorm/session_stats.go
0
https://github.com/grafana/grafana/commit/1341f4517a3a67c58c8d4bca2b184fb3dea735f7
[ 0.00017854584439191967, 0.0001690452336333692, 0.00016459738253615797, 0.0001692211371846497, 0.000003864944119413849 ]
{ "id": 2, "code_window": [ " // Some datasource's query builders allow per-query interval limits,\n", " // but we're using the datasource interval limit for now\n", " const interval = datasourceInstance.interval;\n", "\n", " dispatch(runQueriesAction());\n", " // Keep table queries first since they need to return quickly\n", " if ((ignoreUIState || showingTable) && supportsTable) {\n", " dispatch(\n", " runQueriesForType(\n", " exploreId,\n" ], "labels": [ "keep", "keep", "keep", "keep", "replace", "keep", "keep", "keep", "keep", "keep" ], "after_edit": [ " dispatch(runQueriesAction({ exploreId }));\n" ], "file_path": "public/app/features/explore/state/actions.ts", "type": "replace", "edit_start_line_idx": 555 }
+++ title = "Alerting Notification Channels HTTP API " description = "Grafana Alerting Notification Channel HTTP API" keywords = ["grafana", "http", "documentation", "api", "alerting", "alerts", "notifications"] aliases = [] type = "docs" [menu.docs] name = "Alerting Notification Channels" parent = "http_api" +++ # Alerting Notification Channels API ## Identifier (id) vs unique identifier (uid) The identifier (id) of a notification channel is an auto-incrementing numeric value and is only unique per Grafana install. The unique identifier (uid) of a notification channel can be used for uniquely identify a notification channel between multiple Grafana installs. It's automatically generated if not provided when creating a notification channel. The uid allows having consistent URL's for accessing notification channels and when syncing notification channels between multiple Grafana installs, see [alert notification channel provisioning](/administration/provisioning/#alert-notification-channels) for more information. The uid can have a maximum length of 40 characters. ## Get all notification channels Returns all notification channels that the authenticated user has permission to view. `GET /api/alert-notifications` **Example Request**: ```http GET /api/alert-notifications HTTP/1.1 Accept: application/json Content-Type: application/json Authorization: Bearer eyJrIjoiT0tTcG1pUlY2RnVKZTFVaDFsNFZXdE9ZWmNrMkZYbk ``` **Example Response**: ```http HTTP/1.1 200 Content-Type: application/json [ { "id": 1, "uid": "team-a-email-notifier", "name": "Team A", "type": "email", "isDefault": false, "sendReminder": false, "disableResolveMessage": false, "settings": { "addresses": "[email protected];[email protected]" }, "created": "2018-04-23T14:44:09+02:00", "updated": "2018-08-20T15:47:49+02:00" } ] ``` ## Get notification channel by uid `GET /api/alert-notifications/uid/:uid` Will return the notification channel given the notification channel uid. 
**Example Request**: ```http GET /api/alert-notifications/uid/team-a-email-notifier HTTP/1.1 Accept: application/json Content-Type: application/json Authorization: Bearer eyJrIjoiT0tTcG1pUlY2RnVKZTFVaDFsNFZXdE9ZWmNrMkZYbk ``` **Example Response**: ```http HTTP/1.1 200 Content-Type: application/json { "id": 1, "uid": "team-a-email-notifier", "name": "Team A", "type": "email", "isDefault": false, "sendReminder": false, "disableResolveMessage": false, "settings": { "addresses": "[email protected];[email protected]" }, "created": "2018-04-23T14:44:09+02:00", "updated": "2018-08-20T15:47:49+02:00" } ``` ## Get notification channel by id `GET /api/alert-notifications/:id` Will return the notification channel given the notification channel id. **Example Request**: ```http GET /api/alert-notifications/1 HTTP/1.1 Accept: application/json Content-Type: application/json Authorization: Bearer eyJrIjoiT0tTcG1pUlY2RnVKZTFVaDFsNFZXdE9ZWmNrMkZYbk ``` **Example Response**: ```http HTTP/1.1 200 Content-Type: application/json { "id": 1, "uid": "team-a-email-notifier", "name": "Team A", "type": "email", "isDefault": false, "sendReminder": false, "disableResolveMessage": false, "settings": { "addresses": "[email protected];[email protected]" }, "created": "2018-04-23T14:44:09+02:00", "updated": "2018-08-20T15:47:49+02:00" } ``` ## Create notification channel You can find the full list of [supported notifiers](/alerting/notifications/#all-supported-notifier) at the alert notifiers page. 
`POST /api/alert-notifications` **Example Request**: ```http POST /api/alert-notifications HTTP/1.1 Accept: application/json Content-Type: application/json Authorization: Bearer eyJrIjoiT0tTcG1pUlY2RnVKZTFVaDFsNFZXdE9ZWmNrMkZYbk { "uid": "new-alert-notification", // optional "name": "new alert notification", //Required "type": "email", //Required "isDefault": false, "sendReminder": false, "settings": { "addresses": "[email protected];[email protected]" } } ``` **Example Response**: ```http HTTP/1.1 200 Content-Type: application/json { "id": 1, "uid": "new-alert-notification", "name": "new alert notification", "type": "email", "isDefault": false, "sendReminder": false, "settings": { "addresses": "[email protected];[email protected]" }, "created": "2018-04-23T14:44:09+02:00", "updated": "2018-08-20T15:47:49+02:00" } ``` ## Update notification channel by uid `PUT /api/alert-notifications/uid/:uid` Updates an existing notification channel identified by uid. **Example Request**: ```http PUT /api/alert-notifications/uid/cIBgcSjkk HTTP/1.1 Accept: application/json Content-Type: application/json Authorization: Bearer eyJrIjoiT0tTcG1pUlY2RnVKZTFVaDFsNFZXdE9ZWmNrMkZYbk { "uid": "new-alert-notification", // optional "name": "new alert notification", //Required "type": "email", //Required "isDefault": false, "sendReminder": true, "frequency": "15m", "settings": { "addresses": "[email protected];[email protected]" } } ``` **Example Response**: ```http HTTP/1.1 200 Content-Type: application/json { "id": 1, "uid": "new-alert-notification", "name": "new alert notification", "type": "email", "isDefault": false, "sendReminder": true, "frequency": "15m", "settings": { "addresses": "[email protected];[email protected]" }, "created": "2017-01-01 12:34", "updated": "2017-01-01 12:34" } ``` ## Update notification channel by id `PUT /api/alert-notifications/:id` Updates an existing notification channel identified by id. 
**Example Request**: ```http PUT /api/alert-notifications/1 HTTP/1.1 Accept: application/json Content-Type: application/json Authorization: Bearer eyJrIjoiT0tTcG1pUlY2RnVKZTFVaDFsNFZXdE9ZWmNrMkZYbk { "id": 1, "uid": "new-alert-notification", // optional "name": "new alert notification", //Required "type": "email", //Required "isDefault": false, "sendReminder": true, "frequency": "15m", "settings": { "addresses": "[email protected];[email protected]" } } ``` **Example Response**: ```http HTTP/1.1 200 Content-Type: application/json { "id": 1, "uid": "new-alert-notification", "name": "new alert notification", "type": "email", "isDefault": false, "sendReminder": true, "frequency": "15m", "settings": { "addresses": "[email protected];[email protected]" }, "created": "2017-01-01 12:34", "updated": "2017-01-01 12:34" } ``` ## Delete alert notification by uid `DELETE /api/alert-notifications/uid/:uid` Deletes an existing notification channel identified by uid. **Example Request**: ```http DELETE /api/alert-notifications/uid/team-a-email-notifier HTTP/1.1 Accept: application/json Content-Type: application/json Authorization: Bearer eyJrIjoiT0tTcG1pUlY2RnVKZTFVaDFsNFZXdE9ZWmNrMkZYbk ``` **Example Response**: ```http HTTP/1.1 200 Content-Type: application/json { "message": "Notification deleted" } ``` ## Delete alert notification by id `DELETE /api/alert-notifications/:id` Deletes an existing notification channel identified by id. **Example Request**: ```http DELETE /api/alert-notifications/1 HTTP/1.1 Accept: application/json Content-Type: application/json Authorization: Bearer eyJrIjoiT0tTcG1pUlY2RnVKZTFVaDFsNFZXdE9ZWmNrMkZYbk ``` **Example Response**: ```http HTTP/1.1 200 Content-Type: application/json { "message": "Notification deleted" } ``` ## Test notification channel Sends a test notification message for the given notification channel type and settings. 
You can find the full list of [supported notifiers](/alerting/notifications/#all-supported-notifier) at the alert notifiers page. `POST /api/alert-notifications/test` **Example Request**: ```http POST /api/alert-notifications/test HTTP/1.1 Accept: application/json Content-Type: application/json Authorization: Bearer eyJrIjoiT0tTcG1pUlY2RnVKZTFVaDFsNFZXdE9ZWmNrMkZYbk { "type": "email", "settings": { "addresses": "[email protected];[email protected]" } } ``` **Example Response**: ```http HTTP/1.1 200 Content-Type: application/json { "message": "Test notification sent" } ```
docs/sources/http_api/alerting_notification_channels.md
0
https://github.com/grafana/grafana/commit/1341f4517a3a67c58c8d4bca2b184fb3dea735f7
[ 0.0001762201136443764, 0.00017091372865252197, 0.00016492480062879622, 0.00017080476391129196, 0.0000026705222353484714 ]
{ "id": 2, "code_window": [ " // Some datasource's query builders allow per-query interval limits,\n", " // but we're using the datasource interval limit for now\n", " const interval = datasourceInstance.interval;\n", "\n", " dispatch(runQueriesAction());\n", " // Keep table queries first since they need to return quickly\n", " if ((ignoreUIState || showingTable) && supportsTable) {\n", " dispatch(\n", " runQueriesForType(\n", " exploreId,\n" ], "labels": [ "keep", "keep", "keep", "keep", "replace", "keep", "keep", "keep", "keep", "keep" ], "after_edit": [ " dispatch(runQueriesAction({ exploreId }));\n" ], "file_path": "public/app/features/explore/state/actions.ts", "type": "replace", "edit_start_line_idx": 555 }
// Copyright 2015 The Go Authors. All rights reserved. // Use of this source code is governed by a BSD-style // license that can be found in the LICENSE file. package bidi import ( "container/list" "fmt" "sort" ) // This file contains a port of the reference implementation of the // Bidi Parentheses Algorithm: // http://www.unicode.org/Public/PROGRAMS/BidiReferenceJava/BidiPBAReference.java // // The implementation in this file covers definitions BD14-BD16 and rule N0 // of UAX#9. // // Some preprocessing is done for each rune before data is passed to this // algorithm: // - opening and closing brackets are identified // - a bracket pair type, like '(' and ')' is assigned a unique identifier that // is identical for the opening and closing bracket. It is left to do these // mappings. // - The BPA algorithm requires that bracket characters that are canonical // equivalents of each other be able to be substituted for each other. // It is the responsibility of the caller to do this canonicalization. // // In implementing BD16, this implementation departs slightly from the "logical" // algorithm defined in UAX#9. In particular, the stack referenced there // supports operations that go beyond a "basic" stack. An equivalent // implementation based on a linked list is used here. // Bidi_Paired_Bracket_Type // BD14. An opening paired bracket is a character whose // Bidi_Paired_Bracket_Type property value is Open. // // BD15. A closing paired bracket is a character whose // Bidi_Paired_Bracket_Type property value is Close. type bracketType byte const ( bpNone bracketType = iota bpOpen bpClose ) // bracketPair holds a pair of index values for opening and closing bracket // location of a bracket pair. type bracketPair struct { opener int closer int } func (b *bracketPair) String() string { return fmt.Sprintf("(%v, %v)", b.opener, b.closer) } // bracketPairs is a slice of bracketPairs with a sort.Interface implementation. 
type bracketPairs []bracketPair func (b bracketPairs) Len() int { return len(b) } func (b bracketPairs) Swap(i, j int) { b[i], b[j] = b[j], b[i] } func (b bracketPairs) Less(i, j int) bool { return b[i].opener < b[j].opener } // resolvePairedBrackets runs the paired bracket part of the UBA algorithm. // // For each rune, it takes the indexes into the original string, the class the // bracket type (in pairTypes) and the bracket identifier (pairValues). It also // takes the direction type for the start-of-sentence and the embedding level. // // The identifiers for bracket types are the rune of the canonicalized opening // bracket for brackets (open or close) or 0 for runes that are not brackets. func resolvePairedBrackets(s *isolatingRunSequence) { p := bracketPairer{ sos: s.sos, openers: list.New(), codesIsolatedRun: s.types, indexes: s.indexes, } dirEmbed := L if s.level&1 != 0 { dirEmbed = R } p.locateBrackets(s.p.pairTypes, s.p.pairValues) p.resolveBrackets(dirEmbed, s.p.initialTypes) } type bracketPairer struct { sos Class // direction corresponding to start of sequence // The following is a restatement of BD 16 using non-algorithmic language. // // A bracket pair is a pair of characters consisting of an opening // paired bracket and a closing paired bracket such that the // Bidi_Paired_Bracket property value of the former equals the latter, // subject to the following constraints. // - both characters of a pair occur in the same isolating run sequence // - the closing character of a pair follows the opening character // - any bracket character can belong at most to one pair, the earliest possible one // - any bracket character not part of a pair is treated like an ordinary character // - pairs may nest properly, but their spans may not overlap otherwise // Bracket characters with canonical decompositions are supposed to be // treated as if they had been normalized, to allow normalized and non- // normalized text to give the same result. 
In this implementation that step // is pushed out to the caller. The caller has to ensure that the pairValue // slices contain the rune of the opening bracket after normalization for // any opening or closing bracket. openers *list.List // list of positions for opening brackets // bracket pair positions sorted by location of opening bracket pairPositions bracketPairs codesIsolatedRun []Class // directional bidi codes for an isolated run indexes []int // array of index values into the original string } // matchOpener reports whether characters at given positions form a matching // bracket pair. func (p *bracketPairer) matchOpener(pairValues []rune, opener, closer int) bool { return pairValues[p.indexes[opener]] == pairValues[p.indexes[closer]] } const maxPairingDepth = 63 // locateBrackets locates matching bracket pairs according to BD16. // // This implementation uses a linked list instead of a stack, because, while // elements are added at the front (like a push) they are not generally removed // in atomic 'pop' operations, reducing the benefit of the stack archetype. 
func (p *bracketPairer) locateBrackets(pairTypes []bracketType, pairValues []rune) { // traverse the run // do that explicitly (not in a for-each) so we can record position for i, index := range p.indexes { // look at the bracket type for each character if pairTypes[index] == bpNone || p.codesIsolatedRun[i] != ON { // continue scanning continue } switch pairTypes[index] { case bpOpen: // check if maximum pairing depth reached if p.openers.Len() == maxPairingDepth { p.openers.Init() return } // remember opener location, most recent first p.openers.PushFront(i) case bpClose: // see if there is a match count := 0 for elem := p.openers.Front(); elem != nil; elem = elem.Next() { count++ opener := elem.Value.(int) if p.matchOpener(pairValues, opener, i) { // if the opener matches, add nested pair to the ordered list p.pairPositions = append(p.pairPositions, bracketPair{opener, i}) // remove up to and including matched opener for ; count > 0; count-- { p.openers.Remove(p.openers.Front()) } break } } sort.Sort(p.pairPositions) // if we get here, the closing bracket matched no openers // and gets ignored } } } // Bracket pairs within an isolating run sequence are processed as units so // that both the opening and the closing paired bracket in a pair resolve to // the same direction. // // N0. Process bracket pairs in an isolating run sequence sequentially in // the logical order of the text positions of the opening paired brackets // using the logic given below. Within this scope, bidirectional types EN // and AN are treated as R. // // Identify the bracket pairs in the current isolating run sequence // according to BD16. For each bracket-pair element in the list of pairs of // text positions: // // a Inspect the bidirectional types of the characters enclosed within the // bracket pair. // // b If any strong type (either L or R) matching the embedding direction is // found, set the type for both brackets in the pair to match the embedding // direction. 
// // o [ e ] o -> o e e e o // // o [ o e ] -> o e o e e // // o [ NI e ] -> o e NI e e // // c Otherwise, if a strong type (opposite the embedding direction) is // found, test for adjacent strong types as follows: 1 First, check // backwards before the opening paired bracket until the first strong type // (L, R, or sos) is found. If that first preceding strong type is opposite // the embedding direction, then set the type for both brackets in the pair // to that type. 2 Otherwise, set the type for both brackets in the pair to // the embedding direction. // // o [ o ] e -> o o o o e // // o [ o NI ] o -> o o o NI o o // // e [ o ] o -> e e o e o // // e [ o ] e -> e e o e e // // e ( o [ o ] NI ) e -> e e o o o o NI e e // // d Otherwise, do not set the type for the current bracket pair. Note that // if the enclosed text contains no strong types the paired brackets will // both resolve to the same level when resolved individually using rules N1 // and N2. // // e ( NI ) o -> e ( NI ) o // getStrongTypeN0 maps character's directional code to strong type as required // by rule N0. // // TODO: have separate type for "strong" directionality. func (p *bracketPairer) getStrongTypeN0(index int) Class { switch p.codesIsolatedRun[index] { // in the scope of N0, number types are treated as R case EN, AN, AL, R: return R case L: return L default: return ON } } // classifyPairContent reports the strong types contained inside a Bracket Pair, // assuming the given embedding direction. // // It returns ON if no strong type is found. If a single strong type is found, // it returns this this type. Otherwise it returns the embedding direction. // // TODO: use separate type for "strong" directionality. 
func (p *bracketPairer) classifyPairContent(loc bracketPair, dirEmbed Class) Class { dirOpposite := ON for i := loc.opener + 1; i < loc.closer; i++ { dir := p.getStrongTypeN0(i) if dir == ON { continue } if dir == dirEmbed { return dir // type matching embedding direction found } dirOpposite = dir } // return ON if no strong type found, or class opposite to dirEmbed return dirOpposite } // classBeforePair determines which strong types are present before a Bracket // Pair. Return R or L if strong type found, otherwise ON. func (p *bracketPairer) classBeforePair(loc bracketPair) Class { for i := loc.opener - 1; i >= 0; i-- { if dir := p.getStrongTypeN0(i); dir != ON { return dir } } // no strong types found, return sos return p.sos } // assignBracketType implements rule N0 for a single bracket pair. func (p *bracketPairer) assignBracketType(loc bracketPair, dirEmbed Class, initialTypes []Class) { // rule "N0, a", inspect contents of pair dirPair := p.classifyPairContent(loc, dirEmbed) // dirPair is now L, R, or N (no strong type found) // the following logical tests are performed out of order compared to // the statement of the rules but yield the same results if dirPair == ON { return // case "d" - nothing to do } if dirPair != dirEmbed { // case "c": strong type found, opposite - check before (c.1) dirPair = p.classBeforePair(loc) if dirPair == dirEmbed || dirPair == ON { // no strong opposite type found before - use embedding (c.2) dirPair = dirEmbed } } // else: case "b", strong type found matching embedding, // no explicit action needed, as dirPair is already set to embedding // direction // set the bracket types to the type found p.setBracketsToType(loc, dirPair, initialTypes) } func (p *bracketPairer) setBracketsToType(loc bracketPair, dirPair Class, initialTypes []Class) { p.codesIsolatedRun[loc.opener] = dirPair p.codesIsolatedRun[loc.closer] = dirPair for i := loc.opener + 1; i < loc.closer; i++ { index := p.indexes[i] if initialTypes[index] != NSM { break 
} p.codesIsolatedRun[i] = dirPair } for i := loc.closer + 1; i < len(p.indexes); i++ { index := p.indexes[i] if initialTypes[index] != NSM { break } p.codesIsolatedRun[i] = dirPair } } // resolveBrackets implements rule N0 for a list of pairs. func (p *bracketPairer) resolveBrackets(dirEmbed Class, initialTypes []Class) { for _, loc := range p.pairPositions { p.assignBracketType(loc, dirEmbed, initialTypes) } }
vendor/golang.org/x/text/unicode/bidi/bracket.go
0
https://github.com/grafana/grafana/commit/1341f4517a3a67c58c8d4bca2b184fb3dea735f7
[ 0.00017897028010338545, 0.00017211043450515717, 0.00016317628615070134, 0.00017258559819310904, 0.0000037819693261553766 ]
{ "id": 3, "code_window": [ " splitCloseAction,\n", " SplitCloseActionPayload,\n", " loadExploreDatasources,\n", "} from './actionTypes';\n", "import { reducerFactory } from 'app/core/redux';\n", "import {\n" ], "labels": [ "keep", "keep", "add", "keep", "keep", "keep" ], "after_edit": [ " runQueriesAction,\n" ], "file_path": "public/app/features/explore/state/reducers.ts", "type": "add", "edit_start_line_idx": 22 }
// @ts-ignore import _ from 'lodash'; import { calculateResultsFromQueryTransactions, generateEmptyQuery, getIntervals, ensureQueries, getQueryKeys, parseUrlState, DEFAULT_UI_STATE, } from 'app/core/utils/explore'; import { ExploreItemState, ExploreState, QueryTransaction, ExploreId, ExploreUpdateState } from 'app/types/explore'; import { DataQuery } from '@grafana/ui/src/types'; import { HigherOrderAction, ActionTypes, testDataSourcePendingAction, testDataSourceSuccessAction, testDataSourceFailureAction, splitCloseAction, SplitCloseActionPayload, loadExploreDatasources, } from './actionTypes'; import { reducerFactory } from 'app/core/redux'; import { addQueryRowAction, changeQueryAction, changeSizeAction, changeTimeAction, clearQueriesAction, highlightLogsExpressionAction, initializeExploreAction, updateDatasourceInstanceAction, loadDatasourceMissingAction, loadDatasourcePendingAction, loadDatasourceReadyAction, modifyQueriesAction, queryTransactionFailureAction, queryTransactionStartAction, queryTransactionSuccessAction, removeQueryRowAction, scanRangeAction, scanStartAction, scanStopAction, setQueriesAction, toggleGraphAction, toggleLogsAction, toggleTableAction, queriesImportedAction, updateUIStateAction, toggleLogLevelAction, } from './actionTypes'; import { updateLocation } from 'app/core/actions/location'; import { LocationUpdate } from 'app/types'; export const DEFAULT_RANGE = { from: 'now-6h', to: 'now', }; // Millies step for helper bar charts const DEFAULT_GRAPH_INTERVAL = 15 * 1000; export const makeInitialUpdateState = (): ExploreUpdateState => ({ datasource: false, queries: false, range: false, ui: false, }); /** * Returns a fresh Explore area state */ export const makeExploreItemState = (): ExploreItemState => ({ StartPage: undefined, containerWidth: 0, datasourceInstance: null, requestedDatasourceName: null, datasourceError: null, datasourceLoading: null, datasourceMissing: false, exploreDatasources: [], history: [], queries: [], initialized: false, 
queryTransactions: [], queryIntervals: { interval: '15s', intervalMs: DEFAULT_GRAPH_INTERVAL }, range: DEFAULT_RANGE, scanning: false, scanRange: null, showingGraph: true, showingLogs: true, showingTable: true, supportsGraph: null, supportsLogs: null, supportsTable: null, queryKeys: [], urlState: null, update: makeInitialUpdateState(), }); /** * Global Explore state that handles multiple Explore areas and the split state */ export const initialExploreState: ExploreState = { split: null, left: makeExploreItemState(), right: makeExploreItemState(), }; /** * Reducer for an Explore area, to be used by the global Explore reducer. */ export const itemReducer = reducerFactory<ExploreItemState>({} as ExploreItemState) .addMapper({ filter: addQueryRowAction, mapper: (state, action): ExploreItemState => { const { queries, queryTransactions } = state; const { index, query } = action.payload; // Add to queries, which will cause a new row to be rendered const nextQueries = [...queries.slice(0, index + 1), { ...query }, ...queries.slice(index + 1)]; // Ongoing transactions need to update their row indices const nextQueryTransactions = queryTransactions.map(qt => { if (qt.rowIndex > index) { return { ...qt, rowIndex: qt.rowIndex + 1, }; } return qt; }); return { ...state, queries: nextQueries, logsHighlighterExpressions: undefined, queryTransactions: nextQueryTransactions, queryKeys: getQueryKeys(nextQueries, state.datasourceInstance), }; }, }) .addMapper({ filter: changeQueryAction, mapper: (state, action): ExploreItemState => { const { queries, queryTransactions } = state; const { query, index } = action.payload; // Override path: queries are completely reset const nextQuery: DataQuery = { ...query, ...generateEmptyQuery(state.queries) }; const nextQueries = [...queries]; nextQueries[index] = nextQuery; // Discard ongoing transaction related to row query const nextQueryTransactions = queryTransactions.filter(qt => qt.rowIndex !== index); return { ...state, queries: nextQueries, 
queryTransactions: nextQueryTransactions, queryKeys: getQueryKeys(nextQueries, state.datasourceInstance), }; }, }) .addMapper({ filter: changeSizeAction, mapper: (state, action): ExploreItemState => { const { range, datasourceInstance } = state; let interval = '1s'; if (datasourceInstance && datasourceInstance.interval) { interval = datasourceInstance.interval; } const containerWidth = action.payload.width; const queryIntervals = getIntervals(range, interval, containerWidth); return { ...state, containerWidth, queryIntervals }; }, }) .addMapper({ filter: changeTimeAction, mapper: (state, action): ExploreItemState => { return { ...state, range: action.payload.range }; }, }) .addMapper({ filter: clearQueriesAction, mapper: (state): ExploreItemState => { const queries = ensureQueries(); return { ...state, queries: queries.slice(), queryTransactions: [], showingStartPage: Boolean(state.StartPage), queryKeys: getQueryKeys(queries, state.datasourceInstance), }; }, }) .addMapper({ filter: highlightLogsExpressionAction, mapper: (state, action): ExploreItemState => { const { expressions } = action.payload; return { ...state, logsHighlighterExpressions: expressions }; }, }) .addMapper({ filter: initializeExploreAction, mapper: (state, action): ExploreItemState => { const { containerWidth, eventBridge, queries, range, ui } = action.payload; return { ...state, containerWidth, eventBridge, range, queries, initialized: true, queryKeys: getQueryKeys(queries, state.datasourceInstance), ...ui, update: makeInitialUpdateState(), }; }, }) .addMapper({ filter: updateDatasourceInstanceAction, mapper: (state, action): ExploreItemState => { const { datasourceInstance } = action.payload; // Capabilities const supportsGraph = datasourceInstance.meta.metrics; const supportsLogs = datasourceInstance.meta.logs; const supportsTable = datasourceInstance.meta.tables; // Custom components const StartPage = datasourceInstance.components.ExploreStartPage; return { ...state, datasourceInstance, 
supportsGraph, supportsLogs, supportsTable, StartPage, showingStartPage: Boolean(StartPage), queryKeys: getQueryKeys(state.queries, datasourceInstance), }; }, }) .addMapper({ filter: loadDatasourceMissingAction, mapper: (state): ExploreItemState => { return { ...state, datasourceMissing: true, datasourceLoading: false, update: makeInitialUpdateState(), }; }, }) .addMapper({ filter: loadDatasourcePendingAction, mapper: (state, action): ExploreItemState => { return { ...state, datasourceLoading: true, requestedDatasourceName: action.payload.requestedDatasourceName, }; }, }) .addMapper({ filter: loadDatasourceReadyAction, mapper: (state, action): ExploreItemState => { const { containerWidth, range, datasourceInstance } = state; const { history } = action.payload; const queryIntervals = getIntervals(range, datasourceInstance.interval, containerWidth); return { ...state, queryIntervals, history, datasourceLoading: false, datasourceMissing: false, logsHighlighterExpressions: undefined, queryTransactions: [], update: makeInitialUpdateState(), }; }, }) .addMapper({ filter: modifyQueriesAction, mapper: (state, action): ExploreItemState => { const { queries, queryTransactions } = state; const { modification, index, modifier } = action.payload; let nextQueries: DataQuery[]; let nextQueryTransactions: QueryTransaction[]; if (index === undefined) { // Modify all queries nextQueries = queries.map((query, i) => ({ ...modifier({ ...query }, modification), ...generateEmptyQuery(state.queries), })); // Discard all ongoing transactions nextQueryTransactions = []; } else { // Modify query only at index nextQueries = queries.map((query, i) => { // Synchronize all queries with local query cache to ensure consistency // TODO still needed? return i === index ? 
{ ...modifier({ ...query }, modification), ...generateEmptyQuery(state.queries) } : query; }); nextQueryTransactions = queryTransactions // Consume the hint corresponding to the action .map(qt => { if (qt.hints != null && qt.rowIndex === index) { qt.hints = qt.hints.filter(hint => hint.fix.action !== modification); } return qt; }) // Preserve previous row query transaction to keep results visible if next query is incomplete .filter(qt => modification.preventSubmit || qt.rowIndex !== index); } return { ...state, queries: nextQueries, queryKeys: getQueryKeys(nextQueries, state.datasourceInstance), queryTransactions: nextQueryTransactions, }; }, }) .addMapper({ filter: queryTransactionFailureAction, mapper: (state, action): ExploreItemState => { const { queryTransactions } = action.payload; return { ...state, queryTransactions, showingStartPage: false, update: makeInitialUpdateState(), }; }, }) .addMapper({ filter: queryTransactionStartAction, mapper: (state, action): ExploreItemState => { const { queryTransactions } = state; const { resultType, rowIndex, transaction } = action.payload; // Discarding existing transactions of same type const remainingTransactions = queryTransactions.filter( qt => !(qt.resultType === resultType && qt.rowIndex === rowIndex) ); // Append new transaction const nextQueryTransactions: QueryTransaction[] = [...remainingTransactions, transaction]; return { ...state, queryTransactions: nextQueryTransactions, showingStartPage: false, update: makeInitialUpdateState(), }; }, }) .addMapper({ filter: queryTransactionSuccessAction, mapper: (state, action): ExploreItemState => { const { datasourceInstance, queryIntervals } = state; const { history, queryTransactions } = action.payload; const results = calculateResultsFromQueryTransactions( queryTransactions, datasourceInstance, queryIntervals.intervalMs ); return { ...state, ...results, history, queryTransactions, showingStartPage: false, update: makeInitialUpdateState(), }; }, }) .addMapper({ filter: 
removeQueryRowAction, mapper: (state, action): ExploreItemState => { const { datasourceInstance, queries, queryIntervals, queryTransactions, queryKeys } = state; const { index } = action.payload; if (queries.length <= 1) { return state; } const nextQueries = [...queries.slice(0, index), ...queries.slice(index + 1)]; const nextQueryKeys = [...queryKeys.slice(0, index), ...queryKeys.slice(index + 1)]; // Discard transactions related to row query const nextQueryTransactions = queryTransactions.filter(qt => nextQueries.some(nq => nq.key === qt.query.key)); const results = calculateResultsFromQueryTransactions( nextQueryTransactions, datasourceInstance, queryIntervals.intervalMs ); return { ...state, ...results, queries: nextQueries, logsHighlighterExpressions: undefined, queryTransactions: nextQueryTransactions, queryKeys: nextQueryKeys, }; }, }) .addMapper({ filter: scanRangeAction, mapper: (state, action): ExploreItemState => { return { ...state, scanRange: action.payload.range }; }, }) .addMapper({ filter: scanStartAction, mapper: (state, action): ExploreItemState => { return { ...state, scanning: true, scanner: action.payload.scanner }; }, }) .addMapper({ filter: scanStopAction, mapper: (state): ExploreItemState => { const { queryTransactions } = state; const nextQueryTransactions = queryTransactions.filter(qt => qt.scanning && !qt.done); return { ...state, queryTransactions: nextQueryTransactions, scanning: false, scanRange: undefined, scanner: undefined, update: makeInitialUpdateState(), }; }, }) .addMapper({ filter: setQueriesAction, mapper: (state, action): ExploreItemState => { const { queries } = action.payload; return { ...state, queries: queries.slice(), queryKeys: getQueryKeys(queries, state.datasourceInstance), }; }, }) .addMapper({ filter: updateUIStateAction, mapper: (state, action): ExploreItemState => { return { ...state, ...action.payload }; }, }) .addMapper({ filter: toggleGraphAction, mapper: (state): ExploreItemState => { const showingGraph = 
!state.showingGraph; let nextQueryTransactions = state.queryTransactions; if (!showingGraph) { // Discard transactions related to Graph query nextQueryTransactions = state.queryTransactions.filter(qt => qt.resultType !== 'Graph'); } return { ...state, queryTransactions: nextQueryTransactions }; }, }) .addMapper({ filter: toggleLogsAction, mapper: (state): ExploreItemState => { const showingLogs = !state.showingLogs; let nextQueryTransactions = state.queryTransactions; if (!showingLogs) { // Discard transactions related to Logs query nextQueryTransactions = state.queryTransactions.filter(qt => qt.resultType !== 'Logs'); } return { ...state, queryTransactions: nextQueryTransactions }; }, }) .addMapper({ filter: toggleTableAction, mapper: (state): ExploreItemState => { const showingTable = !state.showingTable; if (showingTable) { return { ...state, queryTransactions: state.queryTransactions }; } // Toggle off needs discarding of table queries and results const nextQueryTransactions = state.queryTransactions.filter(qt => qt.resultType !== 'Table'); const results = calculateResultsFromQueryTransactions( nextQueryTransactions, state.datasourceInstance, state.queryIntervals.intervalMs ); return { ...state, ...results, queryTransactions: nextQueryTransactions }; }, }) .addMapper({ filter: queriesImportedAction, mapper: (state, action): ExploreItemState => { const { queries } = action.payload; return { ...state, queries, queryKeys: getQueryKeys(queries, state.datasourceInstance), }; }, }) .addMapper({ filter: toggleLogLevelAction, mapper: (state, action): ExploreItemState => { const { hiddenLogLevels } = action.payload; return { ...state, hiddenLogLevels: Array.from(hiddenLogLevels), }; }, }) .addMapper({ filter: testDataSourcePendingAction, mapper: (state): ExploreItemState => { return { ...state, datasourceError: null, }; }, }) .addMapper({ filter: testDataSourceSuccessAction, mapper: (state): ExploreItemState => { return { ...state, datasourceError: null, }; }, }) 
.addMapper({ filter: testDataSourceFailureAction, mapper: (state, action): ExploreItemState => { return { ...state, datasourceError: action.payload.error, queryTransactions: [], graphResult: undefined, tableResult: undefined, logsResult: undefined, update: makeInitialUpdateState(), }; }, }) .addMapper({ filter: loadExploreDatasources, mapper: (state, action): ExploreItemState => { return { ...state, exploreDatasources: action.payload.exploreDatasources, }; }, }) .create(); export const updateChildRefreshState = ( state: Readonly<ExploreItemState>, payload: LocationUpdate, exploreId: ExploreId ): ExploreItemState => { const path = payload.path || ''; const queryState = payload.query[exploreId] as string; if (!queryState) { return state; } const urlState = parseUrlState(queryState); if (!state.urlState || path !== '/explore') { // we only want to refresh when browser back/forward return { ...state, urlState, update: { datasource: false, queries: false, range: false, ui: false } }; } const datasource = _.isEqual(urlState ? urlState.datasource : '', state.urlState.datasource) === false; const queries = _.isEqual(urlState ? urlState.queries : [], state.urlState.queries) === false; const range = _.isEqual(urlState ? urlState.range : DEFAULT_RANGE, state.urlState.range) === false; const ui = _.isEqual(urlState ? urlState.ui : DEFAULT_UI_STATE, state.urlState.ui) === false; return { ...state, urlState, update: { ...state.update, datasource, queries, range, ui, }, }; }; /** * Global Explore reducer that handles multiple Explore areas (left and right). * Actions that have an `exploreId` get routed to the ExploreItemReducer. */ export const exploreReducer = (state = initialExploreState, action: HigherOrderAction): ExploreState => { switch (action.type) { case splitCloseAction.type: { const { itemId } = action.payload as SplitCloseActionPayload; const targetSplit = { left: itemId === ExploreId.left ? 
state.right : state.left, right: initialExploreState.right, }; return { ...state, ...targetSplit, split: false, }; } case ActionTypes.SplitOpen: { return { ...state, split: true, right: { ...action.payload.itemState } }; } case ActionTypes.ResetExplore: { return initialExploreState; } case updateLocation.type: { const { query } = action.payload; if (!query || !query[ExploreId.left]) { return state; } const split = query[ExploreId.right] ? true : false; const leftState = state[ExploreId.left]; const rightState = state[ExploreId.right]; return { ...state, split, [ExploreId.left]: updateChildRefreshState(leftState, action.payload, ExploreId.left), [ExploreId.right]: updateChildRefreshState(rightState, action.payload, ExploreId.right), }; } } if (action.payload) { const { exploreId } = action.payload as any; if (exploreId !== undefined) { const exploreItemState = state[exploreId]; return { ...state, [exploreId]: itemReducer(exploreItemState, action) }; } } return state; }; export default { explore: exploreReducer, };
public/app/features/explore/state/reducers.ts
1
https://github.com/grafana/grafana/commit/1341f4517a3a67c58c8d4bca2b184fb3dea735f7
[ 0.9982627034187317, 0.04659401997923851, 0.00016422208864241838, 0.00020046615100000054, 0.20755033195018768 ]
{ "id": 3, "code_window": [ " splitCloseAction,\n", " SplitCloseActionPayload,\n", " loadExploreDatasources,\n", "} from './actionTypes';\n", "import { reducerFactory } from 'app/core/redux';\n", "import {\n" ], "labels": [ "keep", "keep", "add", "keep", "keep", "keep" ], "after_edit": [ " runQueriesAction,\n" ], "file_path": "public/app/features/explore/state/reducers.ts", "type": "add", "edit_start_line_idx": 22 }
Apache License Version 2.0, January 2004 http://www.apache.org/licenses/ TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION 1. Definitions. "License" shall mean the terms and conditions for use, reproduction, and distribution as defined by Sections 1 through 9 of this document. "Licensor" shall mean the copyright owner or entity authorized by the copyright owner that is granting the License. "Legal Entity" shall mean the union of the acting entity and all other entities that control, are controlled by, or are under common control with that entity. For the purposes of this definition, "control" means (i) the power, direct or indirect, to cause the direction or management of such entity, whether by contract or otherwise, or (ii) ownership of fifty percent (50%) or more of the outstanding shares, or (iii) beneficial ownership of such entity. "You" (or "Your") shall mean an individual or Legal Entity exercising permissions granted by this License. "Source" form shall mean the preferred form for making modifications, including but not limited to software source code, documentation source, and configuration files. "Object" form shall mean any form resulting from mechanical transformation or translation of a Source form, including but not limited to compiled object code, generated documentation, and conversions to other media types. "Work" shall mean the work of authorship, whether in Source or Object form, made available under the License, as indicated by a copyright notice that is included in or attached to the work (an example is provided in the Appendix below). "Derivative Works" shall mean any work, whether in Source or Object form, that is based on (or derived from) the Work and for which the editorial revisions, annotations, elaborations, or other modifications represent, as a whole, an original work of authorship. 
For the purposes of this License, Derivative Works shall not include works that remain separable from, or merely link (or bind by name) to the interfaces of, the Work and Derivative Works thereof. "Contribution" shall mean any work of authorship, including the original version of the Work and any modifications or additions to that Work or Derivative Works thereof, that is intentionally submitted to Licensor for inclusion in the Work by the copyright owner or by an individual or Legal Entity authorized to submit on behalf of the copyright owner. For the purposes of this definition, "submitted" means any form of electronic, verbal, or written communication sent to the Licensor or its representatives, including but not limited to communication on electronic mailing lists, source code control systems, and issue tracking systems that are managed by, or on behalf of, the Licensor for the purpose of discussing and improving the Work, but excluding communication that is conspicuously marked or otherwise designated in writing by the copyright owner as "Not a Contribution." "Contributor" shall mean Licensor and any individual or Legal Entity on behalf of whom a Contribution has been received by Licensor and subsequently incorporated within the Work. 2. Grant of Copyright License. Subject to the terms and conditions of this License, each Contributor hereby grants to You a perpetual, worldwide, non-exclusive, no-charge, royalty-free, irrevocable copyright license to reproduce, prepare Derivative Works of, publicly display, publicly perform, sublicense, and distribute the Work and such Derivative Works in Source or Object form. 3. Grant of Patent License. 
Subject to the terms and conditions of this License, each Contributor hereby grants to You a perpetual, worldwide, non-exclusive, no-charge, royalty-free, irrevocable (except as stated in this section) patent license to make, have made, use, offer to sell, sell, import, and otherwise transfer the Work, where such license applies only to those patent claims licensable by such Contributor that are necessarily infringed by their Contribution(s) alone or by combination of their Contribution(s) with the Work to which such Contribution(s) was submitted. If You institute patent litigation against any entity (including a cross-claim or counterclaim in a lawsuit) alleging that the Work or a Contribution incorporated within the Work constitutes direct or contributory patent infringement, then any patent licenses granted to You under this License for that Work shall terminate as of the date such litigation is filed. 4. Redistribution. You may reproduce and distribute copies of the Work or Derivative Works thereof in any medium, with or without modifications, and in Source or Object form, provided that You meet the following conditions: (a) You must give any other recipients of the Work or Derivative Works a copy of this License; and (b) You must cause any modified files to carry prominent notices stating that You changed the files; and (c) You must retain, in the Source form of any Derivative Works that You distribute, all copyright, patent, trademark, and attribution notices from the Source form of the Work, excluding those notices that do not pertain to any part of the Derivative Works; and (d) If the Work includes a "NOTICE" text file as part of its distribution, then any Derivative Works that You distribute must include a readable copy of the attribution notices contained within such NOTICE file, excluding those notices that do not pertain to any part of the Derivative Works, in at least one of the following places: within a NOTICE text file distributed as part of the 
Derivative Works; within the Source form or documentation, if provided along with the Derivative Works; or, within a display generated by the Derivative Works, if and wherever such third-party notices normally appear. The contents of the NOTICE file are for informational purposes only and do not modify the License. You may add Your own attribution notices within Derivative Works that You distribute, alongside or as an addendum to the NOTICE text from the Work, provided that such additional attribution notices cannot be construed as modifying the License. You may add Your own copyright statement to Your modifications and may provide additional or different license terms and conditions for use, reproduction, or distribution of Your modifications, or for any such Derivative Works as a whole, provided Your use, reproduction, and distribution of the Work otherwise complies with the conditions stated in this License. 5. Submission of Contributions. Unless You explicitly state otherwise, any Contribution intentionally submitted for inclusion in the Work by You to the Licensor shall be under the terms and conditions of this License, without any additional terms or conditions. Notwithstanding the above, nothing herein shall supersede or modify the terms of any separate license agreement you may have executed with Licensor regarding such Contributions. 6. Trademarks. This License does not grant permission to use the trade names, trademarks, service marks, or product names of the Licensor, except as required for reasonable and customary use in describing the origin of the Work and reproducing the content of the NOTICE file. 7. Disclaimer of Warranty. 
Unless required by applicable law or agreed to in writing, Licensor provides the Work (and each Contributor provides its Contributions) on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied, including, without limitation, any warranties or conditions of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A PARTICULAR PURPOSE. You are solely responsible for determining the appropriateness of using or redistributing the Work and assume any risks associated with Your exercise of permissions under this License. 8. Limitation of Liability. In no event and under no legal theory, whether in tort (including negligence), contract, or otherwise, unless required by applicable law (such as deliberate and grossly negligent acts) or agreed to in writing, shall any Contributor be liable to You for damages, including any direct, indirect, special, incidental, or consequential damages of any character arising as a result of this License or out of the use or inability to use the Work (including but not limited to damages for loss of goodwill, work stoppage, computer failure or malfunction, or any and all other commercial damages or losses), even if such Contributor has been advised of the possibility of such damages. 9. Accepting Warranty or Additional Liability. While redistributing the Work or Derivative Works thereof, You may choose to offer, and charge a fee for, acceptance of support, warranty, indemnity, or other liability obligations and/or rights consistent with this License. However, in accepting such obligations, You may act only on Your own behalf and on Your sole responsibility, not on behalf of any other Contributor, and only if You agree to indemnify, defend, and hold each Contributor harmless for any liability incurred by, or claims asserted against, such Contributor by reason of your accepting any such warranty or additional liability. END OF TERMS AND CONDITIONS APPENDIX: How to apply the Apache License to your work. 
To apply the Apache License to your work, attach the following boilerplate notice, with the fields enclosed by brackets "[]" replaced with your own identifying information. (Don't include the brackets!) The text should be enclosed in the appropriate comment syntax for the file format. We also recommend that a file or class name and description of purpose be included on the same "printed page" as the copyright notice for easier identification within third-party archives. Copyright [yyyy] [name of copyright owner] Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License.
vendor/google.golang.org/genproto/LICENSE
0
https://github.com/grafana/grafana/commit/1341f4517a3a67c58c8d4bca2b184fb3dea735f7
[ 0.00017898633086588234, 0.0001744261389831081, 0.0001701406727079302, 0.00017487385775893927, 0.0000025209790237568086 ]
{ "id": 3, "code_window": [ " splitCloseAction,\n", " SplitCloseActionPayload,\n", " loadExploreDatasources,\n", "} from './actionTypes';\n", "import { reducerFactory } from 'app/core/redux';\n", "import {\n" ], "labels": [ "keep", "keep", "add", "keep", "keep", "keep" ], "after_edit": [ " runQueriesAction,\n" ], "file_path": "public/app/features/explore/state/reducers.ts", "type": "add", "edit_start_line_idx": 22 }
// Colors $progress-color-dark: $panel-bg !default; $progress-color: $panel-bg !default; $progress-color-light: $panel-bg !default; $progress-color-grey-light: $body-bg !default; $progress-color-shadow: $panel-border !default; $progress-color-grey: $iconContainerBackground !default; $progress-color-grey-dark: $iconContainerBackground !default; // Sizing $marker-size: 60px !default; $marker-size-half: ($marker-size / 2); $path-height: 2px !default; $path-position: $marker-size-half - ($path-height / 2); .dashlist-cta-close-btn { color: $text-color-weak; float: right; padding: 0; margin: 0 2px 0 0; background-color: transparent; border: none; i { font-size: 80%; } &:hover { color: $white; } } // Container element .progress-tracker { display: flex; margin: 0 auto; padding: 0; list-style: none; } // Step container that creates lines between steps .progress-step { text-align: center; position: relative; flex: 1 1 0%; margin: 0; padding: 0; color: $text-color-weak; // For a flexbox bug in firefox that wont allow the text overflow on the text min-width: $marker-size; &::after { content: ''; display: block; position: absolute; z-index: 1; top: $path-position; bottom: $path-position; right: -$marker-size-half; width: 100%; height: $path-height; border-top: 2px solid $progress-color-grey-light; border-bottom: $progress-color-shadow; background: $progress-color-grey-light; } &:first-child { &::after { left: 50%; } } &:last-child { &::after { right: 50%; } } // Active state &.active { .progress-step-cta { display: inline-block; } .progress-title { font-weight: 400; } .progress-text { display: none; } .progress-marker { .icon-gf { color: $brand-primary; -webkit-text-fill-color: transparent; background: $brand-gradient; -webkit-background-clip: text; text-decoration: none; } } } &.completed { .progress-marker { color: $online; // change icon to check .icon-gf::before { content: '\e604'; } } .progress-text { text-decoration: line-through; } &::after { background: 
$progress-color-grey-light; } } } .progress-step-cta { @include button-size($btn-padding-y-sm, $btn-padding-x-sm, $font-size-sm, $border-radius-sm); @include buttonBackground($btn-primary-bg, $btn-primary-bg-hl); display: none; } // Progress marker .progress-marker { display: flex; justify-content: center; align-items: center; position: relative; width: $marker-size; height: $marker-size; padding-bottom: 2px; // To align text within the marker z-index: 20; background-color: $panel-bg; margin-left: auto; margin-right: auto; margin-bottom: $spacer; color: $text-color-weak; font-size: 35px; vertical-align: sub; } // Progress text .progress-text { display: block; overflow: hidden; text-overflow: ellipsis; color: $text-muted; } .progress-marker { color: $text-color-weak; text-decoration: none; font-size: 35px; vertical-align: sub; } a.progress-link { &:hover { .progress-marker, .progress-text { color: $link-hover-color; } &:hover .progress-marker.completed { color: $online; } } }
public/sass/components/_panel_gettingstarted.scss
0
https://github.com/grafana/grafana/commit/1341f4517a3a67c58c8d4bca2b184fb3dea735f7
[ 0.00017710514657665044, 0.00017395101895090193, 0.00017123845464084297, 0.0001741280866554007, 0.0000016800063349364791 ]
{ "id": 3, "code_window": [ " splitCloseAction,\n", " SplitCloseActionPayload,\n", " loadExploreDatasources,\n", "} from './actionTypes';\n", "import { reducerFactory } from 'app/core/redux';\n", "import {\n" ], "labels": [ "keep", "keep", "add", "keep", "keep", "keep" ], "after_edit": [ " runQueriesAction,\n" ], "file_path": "public/app/features/explore/state/reducers.ts", "type": "add", "edit_start_line_idx": 22 }
# Ignore everything in this directory * # Except this file !.gitignore
pkg/services/provisioning/notifiers/testdata/test-configs/empty_folder/.gitignore
0
https://github.com/grafana/grafana/commit/1341f4517a3a67c58c8d4bca2b184fb3dea735f7
[ 0.0001754033874021843, 0.0001754033874021843, 0.0001754033874021843, 0.0001754033874021843, 0 ]
{ "id": 4, "code_window": [ " },\n", " })\n", " .addMapper({\n", " filter: changeSizeAction,\n", " mapper: (state, action): ExploreItemState => {\n", " const { range, datasourceInstance } = state;\n", " let interval = '1s';\n", " if (datasourceInstance && datasourceInstance.interval) {\n", " interval = datasourceInstance.interval;\n", " }\n", " const containerWidth = action.payload.width;\n" ], "labels": [ "keep", "keep", "keep", "keep", "keep", "replace", "replace", "replace", "replace", "replace", "keep" ], "after_edit": [], "file_path": "public/app/features/explore/state/reducers.ts", "type": "replace", "edit_start_line_idx": 167 }
// Libraries // @ts-ignore import _ from 'lodash'; // Services & Utils import store from 'app/core/store'; import { getDatasourceSrv } from 'app/features/plugins/datasource_srv'; import { Emitter } from 'app/core/core'; import { LAST_USED_DATASOURCE_KEY, clearQueryKeys, ensureQueries, generateEmptyQuery, hasNonEmptyQuery, makeTimeSeriesList, updateHistory, buildQueryTransaction, serializeStateToUrlParam, parseUrlState, } from 'app/core/utils/explore'; // Actions import { updateLocation } from 'app/core/actions'; // Types import { ThunkResult } from 'app/types'; import { RawTimeRange, TimeRange, DataSourceApi, DataQuery, DataSourceSelectItem, QueryHint, QueryFixAction, } from '@grafana/ui/src/types'; import { ExploreId, ExploreUrlState, RangeScanner, ResultType, QueryOptions, ExploreUIState, QueryTransaction, } from 'app/types/explore'; import { updateDatasourceInstanceAction, changeQueryAction, changeSizeAction, ChangeSizePayload, changeTimeAction, scanStopAction, clearQueriesAction, initializeExploreAction, loadDatasourceMissingAction, loadDatasourcePendingAction, queriesImportedAction, LoadDatasourceReadyPayload, loadDatasourceReadyAction, modifyQueriesAction, queryTransactionFailureAction, queryTransactionStartAction, queryTransactionSuccessAction, scanRangeAction, scanStartAction, setQueriesAction, splitCloseAction, splitOpenAction, addQueryRowAction, toggleGraphAction, toggleLogsAction, toggleTableAction, ToggleGraphPayload, ToggleLogsPayload, ToggleTablePayload, updateUIStateAction, runQueriesAction, testDataSourcePendingAction, testDataSourceSuccessAction, testDataSourceFailureAction, loadExploreDatasources, } from './actionTypes'; import { ActionOf, ActionCreator } from 'app/core/redux/actionCreatorFactory'; import { LogsDedupStrategy } from 'app/core/logs_model'; import { parseTime } from '../TimePicker'; /** * Updates UI state and save it to the URL */ const updateExploreUIState = (exploreId: ExploreId, uiStateFragment: Partial<ExploreUIState>): 
ThunkResult<void> => { return dispatch => { dispatch(updateUIStateAction({ exploreId, ...uiStateFragment })); dispatch(stateSave()); }; }; /** * Adds a query row after the row with the given index. */ export function addQueryRow(exploreId: ExploreId, index: number): ThunkResult<void> { return (dispatch, getState) => { const query = generateEmptyQuery(getState().explore[exploreId].queries, index); dispatch(addQueryRowAction({ exploreId, index, query })); }; } /** * Loads a new datasource identified by the given name. */ export function changeDatasource(exploreId: ExploreId, datasource: string): ThunkResult<void> { return async (dispatch, getState) => { let newDataSourceInstance: DataSourceApi = null; if (!datasource) { newDataSourceInstance = await getDatasourceSrv().get(); } else { newDataSourceInstance = await getDatasourceSrv().get(datasource); } const currentDataSourceInstance = getState().explore[exploreId].datasourceInstance; const queries = getState().explore[exploreId].queries; await dispatch(importQueries(exploreId, queries, currentDataSourceInstance, newDataSourceInstance)); dispatch(updateDatasourceInstanceAction({ exploreId, datasourceInstance: newDataSourceInstance })); await dispatch(loadDatasource(exploreId, newDataSourceInstance)); dispatch(runQueries(exploreId)); }; } /** * Query change handler for the query row with the given index. * If `override` is reset the query modifications and run the queries. Use this to set queries via a link. */ export function changeQuery( exploreId: ExploreId, query: DataQuery, index: number, override: boolean ): ThunkResult<void> { return (dispatch, getState) => { // Null query means reset if (query === null) { query = { ...generateEmptyQuery(getState().explore[exploreId].queries) }; } dispatch(changeQueryAction({ exploreId, query, index, override })); if (override) { dispatch(runQueries(exploreId)); } }; } /** * Keep track of the Explore container size, in particular the width. 
* The width will be used to calculate graph intervals (number of datapoints). */ export function changeSize( exploreId: ExploreId, { height, width }: { height: number; width: number } ): ActionOf<ChangeSizePayload> { return changeSizeAction({ exploreId, height, width }); } /** * Change the time range of Explore. Usually called from the Timepicker or a graph interaction. */ export function changeTime(exploreId: ExploreId, range: TimeRange): ThunkResult<void> { return dispatch => { dispatch(changeTimeAction({ exploreId, range })); dispatch(runQueries(exploreId)); }; } /** * Clear all queries and results. */ export function clearQueries(exploreId: ExploreId): ThunkResult<void> { return dispatch => { dispatch(scanStopAction({ exploreId })); dispatch(clearQueriesAction({ exploreId })); dispatch(stateSave()); }; } /** * Loads all explore data sources and sets the chosen datasource. * If there are no datasources a missing datasource action is dispatched. */ export function loadExploreDatasourcesAndSetDatasource( exploreId: ExploreId, datasourceName: string ): ThunkResult<void> { return dispatch => { const exploreDatasources: DataSourceSelectItem[] = getDatasourceSrv() .getExternal() .map((ds: any) => ({ value: ds.name, name: ds.name, meta: ds.meta, })); dispatch(loadExploreDatasources({ exploreId, exploreDatasources })); if (exploreDatasources.length >= 1) { dispatch(changeDatasource(exploreId, datasourceName)); } else { dispatch(loadDatasourceMissingAction({ exploreId })); } }; } /** * Initialize Explore state with state from the URL and the React component. * Call this only on components for with the Explore state has not been initialized. 
*/ export function initializeExplore( exploreId: ExploreId, datasourceName: string, queries: DataQuery[], range: RawTimeRange, containerWidth: number, eventBridge: Emitter, ui: ExploreUIState ): ThunkResult<void> { return async dispatch => { dispatch(loadExploreDatasourcesAndSetDatasource(exploreId, datasourceName)); dispatch( initializeExploreAction({ exploreId, containerWidth, eventBridge, queries, range, ui, }) ); }; } /** * Datasource loading was successfully completed. */ export const loadDatasourceReady = ( exploreId: ExploreId, instance: DataSourceApi ): ActionOf<LoadDatasourceReadyPayload> => { const historyKey = `grafana.explore.history.${instance.meta.id}`; const history = store.getObject(historyKey, []); // Save last-used datasource store.set(LAST_USED_DATASOURCE_KEY, instance.name); return loadDatasourceReadyAction({ exploreId, history, }); }; export function importQueries( exploreId: ExploreId, queries: DataQuery[], sourceDataSource: DataSourceApi, targetDataSource: DataSourceApi ): ThunkResult<void> { return async dispatch => { if (!sourceDataSource) { // explore not initialized dispatch(queriesImportedAction({ exploreId, queries })); return; } let importedQueries = queries; // Check if queries can be imported from previously selected datasource if (sourceDataSource.meta.id === targetDataSource.meta.id) { // Keep same queries if same type of datasource importedQueries = [...queries]; } else if (targetDataSource.importQueries) { // Datasource-specific importers importedQueries = await targetDataSource.importQueries(queries, sourceDataSource.meta); } else { // Default is blank queries importedQueries = ensureQueries(); } const nextQueries = importedQueries.map((q, i) => ({ ...q, ...generateEmptyQuery(queries), })); dispatch(queriesImportedAction({ exploreId, queries: nextQueries })); }; } /** * Tests datasource. 
*/ export const testDatasource = (exploreId: ExploreId, instance: DataSourceApi): ThunkResult<void> => { return async dispatch => { let datasourceError = null; dispatch(testDataSourcePendingAction({ exploreId })); try { const testResult = await instance.testDatasource(); datasourceError = testResult.status === 'success' ? null : testResult.message; } catch (error) { datasourceError = (error && error.statusText) || 'Network error'; } if (datasourceError) { dispatch(testDataSourceFailureAction({ exploreId, error: datasourceError })); return; } dispatch(testDataSourceSuccessAction({ exploreId })); }; }; /** * Reconnects datasource when there is a connection failure. */ export const reconnectDatasource = (exploreId: ExploreId): ThunkResult<void> => { return async (dispatch, getState) => { const instance = getState().explore[exploreId].datasourceInstance; dispatch(changeDatasource(exploreId, instance.name)); }; }; /** * Main action to asynchronously load a datasource. Dispatches lots of smaller actions for feedback. */ export function loadDatasource(exploreId: ExploreId, instance: DataSourceApi): ThunkResult<void> { return async (dispatch, getState) => { const datasourceName = instance.name; // Keep ID to track selection dispatch(loadDatasourcePendingAction({ exploreId, requestedDatasourceName: datasourceName })); await dispatch(testDatasource(exploreId, instance)); if (datasourceName !== getState().explore[exploreId].requestedDatasourceName) { // User already changed datasource again, discard results return; } if (instance.init) { instance.init(); } if (datasourceName !== getState().explore[exploreId].requestedDatasourceName) { // User already changed datasource again, discard results return; } dispatch(loadDatasourceReady(exploreId, instance)); }; } /** * Action to modify a query given a datasource-specific modifier action. * @param exploreId Explore area * @param modification Action object with a type, e.g., ADD_FILTER * @param index Optional query row index. 
If omitted, the modification is applied to all query rows. * @param modifier Function that executes the modification, typically `datasourceInstance.modifyQueries`. */ export function modifyQueries( exploreId: ExploreId, modification: QueryFixAction, index: number, modifier: any ): ThunkResult<void> { return dispatch => { dispatch(modifyQueriesAction({ exploreId, modification, index, modifier })); if (!modification.preventSubmit) { dispatch(runQueries(exploreId)); } }; } /** * Mark a query transaction as failed with an error extracted from the query response. * The transaction will be marked as `done`. */ export function queryTransactionFailure( exploreId: ExploreId, transactionId: string, response: any, datasourceId: string ): ThunkResult<void> { return (dispatch, getState) => { const { datasourceInstance, queryTransactions } = getState().explore[exploreId]; if (datasourceInstance.meta.id !== datasourceId || response.cancelled) { // Navigated away, queries did not matter return; } // Transaction might have been discarded if (!queryTransactions.find(qt => qt.id === transactionId)) { return; } console.error(response); let error: string; let errorDetails: string; if (response.data) { if (typeof response.data === 'string') { error = response.data; } else if (response.data.error) { error = response.data.error; if (response.data.response) { errorDetails = response.data.response; } } else { throw new Error('Could not handle error response'); } } else if (response.message) { error = response.message; } else if (typeof response === 'string') { error = response; } else { error = 'Unknown error during query transaction. 
Please check JS console logs.'; } // Mark transactions as complete const nextQueryTransactions = queryTransactions.map(qt => { if (qt.id === transactionId) { return { ...qt, error, errorDetails, done: true, }; } return qt; }); dispatch(queryTransactionFailureAction({ exploreId, queryTransactions: nextQueryTransactions })); }; } /** * Complete a query transaction, mark the transaction as `done` and store query state in URL. * If the transaction was started by a scanner, it keeps on scanning for more results. * Side-effect: the query is stored in localStorage. * @param exploreId Explore area * @param transactionId ID * @param result Response from `datasourceInstance.query()` * @param latency Duration between request and response * @param queries Queries from all query rows * @param datasourceId Origin datasource instance, used to discard results if current datasource is different */ export function queryTransactionSuccess( exploreId: ExploreId, transactionId: string, result: any, latency: number, queries: DataQuery[], datasourceId: string ): ThunkResult<void> { return (dispatch, getState) => { const { datasourceInstance, history, queryTransactions, scanner, scanning } = getState().explore[exploreId]; // If datasource already changed, results do not matter if (datasourceInstance.meta.id !== datasourceId) { return; } // Transaction might have been discarded const transaction = queryTransactions.find(qt => qt.id === transactionId); if (!transaction) { return; } // Get query hints let hints: QueryHint[]; if (datasourceInstance.getQueryHints) { hints = datasourceInstance.getQueryHints(transaction.query, result); } // Mark transactions as complete and attach result const nextQueryTransactions = queryTransactions.map(qt => { if (qt.id === transactionId) { return { ...qt, hints, latency, result, done: true, }; } return qt; }); // Side-effect: Saving history in localstorage const nextHistory = updateHistory(history, datasourceId, queries); dispatch( 
queryTransactionSuccessAction({ exploreId, history: nextHistory, queryTransactions: nextQueryTransactions, }) ); // Keep scanning for results if this was the last scanning transaction if (scanning) { if (_.size(result) === 0) { const other = nextQueryTransactions.find(qt => qt.scanning && !qt.done); if (!other) { const range = scanner(); dispatch(scanRangeAction({ exploreId, range })); } } else { // We can stop scanning if we have a result dispatch(scanStopAction({ exploreId })); } } }; } /** * Main action to run queries and dispatches sub-actions based on which result viewers are active */ export function runQueries(exploreId: ExploreId, ignoreUIState = false): ThunkResult<void> { return (dispatch, getState) => { const { datasourceInstance, queries, showingLogs, showingGraph, showingTable, supportsGraph, supportsLogs, supportsTable, datasourceError, } = getState().explore[exploreId]; if (datasourceError) { // let's not run any queries if data source is in a faulty state return; } if (!hasNonEmptyQuery(queries)) { dispatch(clearQueriesAction({ exploreId })); dispatch(stateSave()); // Remember to saves to state and update location return; } // Some datasource's query builders allow per-query interval limits, // but we're using the datasource interval limit for now const interval = datasourceInstance.interval; dispatch(runQueriesAction()); // Keep table queries first since they need to return quickly if ((ignoreUIState || showingTable) && supportsTable) { dispatch( runQueriesForType( exploreId, 'Table', { interval, format: 'table', instant: true, valueWithRefId: true, }, (data: any) => data[0] ) ); } if ((ignoreUIState || showingGraph) && supportsGraph) { dispatch( runQueriesForType( exploreId, 'Graph', { interval, format: 'time_series', instant: false, }, makeTimeSeriesList ) ); } if ((ignoreUIState || showingLogs) && supportsLogs) { dispatch(runQueriesForType(exploreId, 'Logs', { interval, format: 'logs' })); } dispatch(stateSave()); }; } /** * Helper action to 
build a query transaction object and handing the query to the datasource. * @param exploreId Explore area * @param resultType Result viewer that will be associated with this query result * @param queryOptions Query options as required by the datasource's `query()` function. * @param resultGetter Optional result extractor, e.g., if the result is a list and you only need the first element. */ function runQueriesForType( exploreId: ExploreId, resultType: ResultType, queryOptions: QueryOptions, resultGetter?: any ): ThunkResult<void> { return async (dispatch, getState) => { const { datasourceInstance, eventBridge, queries, queryIntervals, range, scanning } = getState().explore[exploreId]; const datasourceId = datasourceInstance.meta.id; // Run all queries concurrently queries.forEach(async (query, rowIndex) => { const transaction = buildQueryTransaction( query, rowIndex, resultType, queryOptions, range, queryIntervals, scanning ); dispatch(queryTransactionStartAction({ exploreId, resultType, rowIndex, transaction })); try { const now = Date.now(); const res = await datasourceInstance.query(transaction.options); eventBridge.emit('data-received', res.data || []); const latency = Date.now() - now; const { queryTransactions } = getState().explore[exploreId]; const results = resultGetter ? resultGetter(res.data, transaction, queryTransactions) : res.data; dispatch(queryTransactionSuccess(exploreId, transaction.id, results, latency, queries, datasourceId)); } catch (response) { eventBridge.emit('data-error', response); dispatch(queryTransactionFailure(exploreId, transaction.id, response, datasourceId)); } }); }; } /** * Start a scan for more results using the given scanner. 
* @param exploreId Explore area * @param scanner Function that a) returns a new time range and b) triggers a query run for the new range */ export function scanStart(exploreId: ExploreId, scanner: RangeScanner): ThunkResult<void> { return dispatch => { // Register the scanner dispatch(scanStartAction({ exploreId, scanner })); // Scanning must trigger query run, and return the new range const range = scanner(); // Set the new range to be displayed dispatch(scanRangeAction({ exploreId, range })); }; } /** * Reset queries to the given queries. Any modifications will be discarded. * Use this action for clicks on query examples. Triggers a query run. */ export function setQueries(exploreId: ExploreId, rawQueries: DataQuery[]): ThunkResult<void> { return (dispatch, getState) => { // Inject react keys into query objects const queries = rawQueries.map(q => ({ ...q, ...generateEmptyQuery(getState().explore[exploreId].queries) })); dispatch(setQueriesAction({ exploreId, queries })); dispatch(runQueries(exploreId)); }; } /** * Close the split view and save URL state. */ export function splitClose(itemId: ExploreId): ThunkResult<void> { return dispatch => { dispatch(splitCloseAction({ itemId })); dispatch(stateSave()); }; } /** * Open the split view and copy the left state to be the right state. * The right state is automatically initialized. * The copy keeps all query modifications but wipes the query results. 
*/ export function splitOpen(): ThunkResult<void> { return (dispatch, getState) => { // Clone left state to become the right state const leftState = getState().explore[ExploreId.left]; const queryState = getState().location.query[ExploreId.left] as string; const urlState = parseUrlState(queryState); const queryTransactions: QueryTransaction[] = []; const itemState = { ...leftState, queryTransactions, queries: leftState.queries.slice(), exploreId: ExploreId.right, urlState, }; dispatch(splitOpenAction({ itemState })); dispatch(stateSave()); }; } /** * Saves Explore state to URL using the `left` and `right` parameters. * If split view is not active, `right` will not be set. */ export function stateSave(): ThunkResult<void> { return (dispatch, getState) => { const { left, right, split } = getState().explore; const urlStates: { [index: string]: string } = {}; const leftUrlState: ExploreUrlState = { datasource: left.datasourceInstance.name, queries: left.queries.map(clearQueryKeys), range: left.range, ui: { showingGraph: left.showingGraph, showingLogs: left.showingLogs, showingTable: left.showingTable, dedupStrategy: left.dedupStrategy, }, }; urlStates.left = serializeStateToUrlParam(leftUrlState, true); if (split) { const rightUrlState: ExploreUrlState = { datasource: right.datasourceInstance.name, queries: right.queries.map(clearQueryKeys), range: right.range, ui: { showingGraph: right.showingGraph, showingLogs: right.showingLogs, showingTable: right.showingTable, dedupStrategy: right.dedupStrategy, }, }; urlStates.right = serializeStateToUrlParam(rightUrlState, true); } dispatch(updateLocation({ query: urlStates })); }; } /** * Creates action to collapse graph/logs/table panel. 
When panel is collapsed, * queries won't be run */ const togglePanelActionCreator = ( actionCreator: | ActionCreator<ToggleGraphPayload> | ActionCreator<ToggleLogsPayload> | ActionCreator<ToggleTablePayload> ) => (exploreId: ExploreId, isPanelVisible: boolean): ThunkResult<void> => { return dispatch => { let uiFragmentStateUpdate: Partial<ExploreUIState>; const shouldRunQueries = !isPanelVisible; switch (actionCreator.type) { case toggleGraphAction.type: uiFragmentStateUpdate = { showingGraph: !isPanelVisible }; break; case toggleLogsAction.type: uiFragmentStateUpdate = { showingLogs: !isPanelVisible }; break; case toggleTableAction.type: uiFragmentStateUpdate = { showingTable: !isPanelVisible }; break; } dispatch(actionCreator({ exploreId })); dispatch(updateExploreUIState(exploreId, uiFragmentStateUpdate)); if (shouldRunQueries) { dispatch(runQueries(exploreId)); } }; }; /** * Expand/collapse the graph result viewer. When collapsed, graph queries won't be run. */ export const toggleGraph = togglePanelActionCreator(toggleGraphAction); /** * Expand/collapse the logs result viewer. When collapsed, log queries won't be run. */ export const toggleLogs = togglePanelActionCreator(toggleLogsAction); /** * Expand/collapse the table result viewer. When collapsed, table queries won't be run. */ export const toggleTable = togglePanelActionCreator(toggleTableAction); /** * Change logs deduplication strategy and update URL. 
*/ export const changeDedupStrategy = (exploreId: ExploreId, dedupStrategy: LogsDedupStrategy): ThunkResult<void> => { return dispatch => { dispatch(updateExploreUIState(exploreId, { dedupStrategy })); }; }; export function refreshExplore(exploreId: ExploreId): ThunkResult<void> { return (dispatch, getState) => { const itemState = getState().explore[exploreId]; if (!itemState.initialized) { return; } const { urlState, update, containerWidth, eventBridge } = itemState; const { datasource, queries, range, ui } = urlState; const refreshQueries = queries.map(q => ({ ...q, ...generateEmptyQuery(itemState.queries) })); const refreshRange = { from: parseTime(range.from), to: parseTime(range.to) }; // need to refresh datasource if (update.datasource) { const initialQueries = ensureQueries(queries); const initialRange = { from: parseTime(range.from), to: parseTime(range.to) }; dispatch(initializeExplore(exploreId, datasource, initialQueries, initialRange, containerWidth, eventBridge, ui)); return; } if (update.range) { dispatch(changeTimeAction({ exploreId, range: refreshRange as TimeRange })); } // need to refresh ui state if (update.ui) { dispatch(updateUIStateAction({ ...ui, exploreId })); } // need to refresh queries if (update.queries) { dispatch(setQueriesAction({ exploreId, queries: refreshQueries })); } // always run queries when refresh is needed if (update.queries || update.ui || update.range) { dispatch(runQueries(exploreId)); } }; }
public/app/features/explore/state/actions.ts
1
https://github.com/grafana/grafana/commit/1341f4517a3a67c58c8d4bca2b184fb3dea735f7
[ 0.9989226460456848, 0.14141620695590973, 0.0001637152163311839, 0.00027055523241870105, 0.33158743381500244 ]
{ "id": 4, "code_window": [ " },\n", " })\n", " .addMapper({\n", " filter: changeSizeAction,\n", " mapper: (state, action): ExploreItemState => {\n", " const { range, datasourceInstance } = state;\n", " let interval = '1s';\n", " if (datasourceInstance && datasourceInstance.interval) {\n", " interval = datasourceInstance.interval;\n", " }\n", " const containerWidth = action.payload.width;\n" ], "labels": [ "keep", "keep", "keep", "keep", "keep", "replace", "replace", "replace", "replace", "replace", "keep" ], "after_edit": [], "file_path": "public/app/features/explore/state/reducers.ts", "type": "replace", "edit_start_line_idx": 167 }
<?xml version="1.0" encoding="utf-8"?> <!-- Generator: Adobe Illustrator 20.1.0, SVG Export Plug-In . SVG Version: 6.00 Build 0) --> <svg version="1.1" id="Layer_1" xmlns="http://www.w3.org/2000/svg" xmlns:xlink="http://www.w3.org/1999/xlink" x="0px" y="0px" width="64px" height="64px" viewBox="0 0 64 64" style="enable-background:new 0 0 64 64;" xml:space="preserve"> <style type="text/css"> .st0{fill:#52545c;} </style> <g> <path class="st0" d="M62.5,55.1L43.9,36.6C49.8,27.4,48.7,15,40.7,7C31.4-2.3,16.3-2.3,7,7s-9.3,24.4,0,33.7c8,8,20.4,9.1,29.6,3.3 l18.5,18.5c2,2,5.3,2,7.4,0C64.5,60.5,64.5,57.2,62.5,55.1z M23.8,40.2c-4.4,0-8.5-1.7-11.6-4.8c-3.1-3.1-4.8-7.2-4.8-11.6 s1.7-8.5,4.8-11.6c3.1-3.1,7.2-4.8,11.6-4.8c4.4,0,8.5,1.7,11.6,4.8c6.4,6.4,6.4,16.8,0,23.2C32.3,38.5,28.2,40.2,23.8,40.2z"/> </g> <path class="st0" d="M33.3,27H14.4c-0.3,0-0.5-0.2-0.5-0.5v-5.1c0-0.3,0.2-0.5,0.5-0.5h18.9c0.3,0,0.5,0.2,0.5,0.5v5.1 C33.8,26.7,33.6,27,33.3,27z"/> </svg>
public/img/icons_light_theme/icon_zoom_out.svg
0
https://github.com/grafana/grafana/commit/1341f4517a3a67c58c8d4bca2b184fb3dea735f7
[ 0.00017320556798949838, 0.0001713113160803914, 0.0001694170496193692, 0.0001713113160803914, 0.000001894259185064584 ]
{ "id": 4, "code_window": [ " },\n", " })\n", " .addMapper({\n", " filter: changeSizeAction,\n", " mapper: (state, action): ExploreItemState => {\n", " const { range, datasourceInstance } = state;\n", " let interval = '1s';\n", " if (datasourceInstance && datasourceInstance.interval) {\n", " interval = datasourceInstance.interval;\n", " }\n", " const containerWidth = action.payload.width;\n" ], "labels": [ "keep", "keep", "keep", "keep", "keep", "replace", "replace", "replace", "replace", "replace", "keep" ], "after_edit": [], "file_path": "public/app/features/explore/state/reducers.ts", "type": "replace", "edit_start_line_idx": 167 }
import { react2AngularDirective } from 'app/core/utils/react2angular'; import { QueryEditor as StackdriverQueryEditor } from 'app/plugins/datasource/stackdriver/components/QueryEditor'; import { AnnotationQueryEditor as StackdriverAnnotationQueryEditor } from 'app/plugins/datasource/stackdriver/components/AnnotationQueryEditor'; import { PasswordStrength } from './components/PasswordStrength'; import PageHeader from './components/PageHeader/PageHeader'; import EmptyListCTA from './components/EmptyListCTA/EmptyListCTA'; import { SearchResult } from './components/search/SearchResult'; import { TagFilter } from './components/TagFilter/TagFilter'; import { SideMenu } from './components/sidemenu/SideMenu'; import { MetricSelect } from './components/Select/MetricSelect'; import AppNotificationList from './components/AppNotifications/AppNotificationList'; import { ColorPicker, SeriesColorPickerPopoverWithTheme, SecretFormField } from '@grafana/ui'; import { FunctionEditor } from 'app/plugins/datasource/graphite/FunctionEditor'; export function registerAngularDirectives() { react2AngularDirective('passwordStrength', PasswordStrength, ['password']); react2AngularDirective('sidemenu', SideMenu, []); react2AngularDirective('functionEditor', FunctionEditor, ['func', 'onRemove', 'onMoveLeft', 'onMoveRight']); react2AngularDirective('appNotificationsList', AppNotificationList, []); react2AngularDirective('pageHeader', PageHeader, ['model', 'noTabs']); react2AngularDirective('emptyListCta', EmptyListCTA, ['model']); react2AngularDirective('searchResult', SearchResult, []); react2AngularDirective('tagFilter', TagFilter, [ 'tags', ['onChange', { watchDepth: 'reference' }], ['tagOptions', { watchDepth: 'reference' }], ]); react2AngularDirective('colorPicker', ColorPicker, [ 'color', ['onChange', { watchDepth: 'reference', wrapApply: true }], ]); react2AngularDirective('seriesColorPickerPopover', SeriesColorPickerPopoverWithTheme, [ 'color', 'series', 'onColorChange', 'onToggleAxis', 
]); react2AngularDirective('metricSelect', MetricSelect, [ 'options', 'onChange', 'value', 'isSearchable', 'className', 'placeholder', ['variables', { watchDepth: 'reference' }], ]); react2AngularDirective('stackdriverQueryEditor', StackdriverQueryEditor, [ 'target', 'onQueryChange', 'onExecuteQuery', ['events', { watchDepth: 'reference' }], ['datasource', { watchDepth: 'reference' }], ['templateSrv', { watchDepth: 'reference' }], ]); react2AngularDirective('stackdriverAnnotationQueryEditor', StackdriverAnnotationQueryEditor, [ 'target', 'onQueryChange', 'onExecuteQuery', ['datasource', { watchDepth: 'reference' }], ['templateSrv', { watchDepth: 'reference' }], ]); react2AngularDirective('secretFormField', SecretFormField, [ 'value', 'isConfigured', 'inputWidth', ['onReset', { watchDepth: 'reference', wrapApply: true }], ['onChange', { watchDepth: 'reference', wrapApply: true }], ]); }
public/app/core/angular_wrappers.ts
0
https://github.com/grafana/grafana/commit/1341f4517a3a67c58c8d4bca2b184fb3dea735f7
[ 0.0001783185580279678, 0.00017648357606958598, 0.0001755861158017069, 0.00017609531641937792, 8.755583280617429e-7 ]
{ "id": 4, "code_window": [ " },\n", " })\n", " .addMapper({\n", " filter: changeSizeAction,\n", " mapper: (state, action): ExploreItemState => {\n", " const { range, datasourceInstance } = state;\n", " let interval = '1s';\n", " if (datasourceInstance && datasourceInstance.interval) {\n", " interval = datasourceInstance.interval;\n", " }\n", " const containerWidth = action.payload.width;\n" ], "labels": [ "keep", "keep", "keep", "keep", "keep", "replace", "replace", "replace", "replace", "replace", "keep" ], "after_edit": [], "file_path": "public/app/features/explore/state/reducers.ts", "type": "replace", "edit_start_line_idx": 167 }
<page-header model="ctrl.navModel"></page-header> <div class="page-container page-body" ng-cloak> <h3 class="page-sub-heading">New Dashboard Folder</h3> <form name="ctrl.saveForm" ng-submit="ctrl.create()" novalidate> <div class="gf-form-inline"> <div class="gf-form gf-form--grow"> <label class="gf-form-label width-10">Name</label> <input type="text" class="gf-form-input" ng-model="ctrl.title" give-focus="true" ng-change="ctrl.titleChanged()" ng-model-options="{ debounce: 400 }" ng-class="{'validation-error': ctrl.nameExists || !ctrl.dash.title}"> <label class="gf-form-label text-success" ng-if="ctrl.titleTouched && !ctrl.hasValidationError"> <i class="fa fa-check"></i> </label> </div> </div> <div class="gf-form-inline" ng-if="ctrl.hasValidationError"> <div class="gf-form offset-width-10 gf-form--grow"> <label class="gf-form-label text-warning gf-form-label--grow"> <i class="fa fa-warning"></i> {{ctrl.validationError}} </label> </div> </div> <div class="gf-form-button-row"> <button type="submit" class="btn btn-primary width-12" ng-disabled="!ctrl.titleTouched || ctrl.hasValidationError"> <i class="fa fa-save"></i> Create </button> </div> </form> </div>
public/app/features/folders/partials/create_folder.html
0
https://github.com/grafana/grafana/commit/1341f4517a3a67c58c8d4bca2b184fb3dea735f7
[ 0.00017145034507848322, 0.00017039201338775456, 0.00016907491954043508, 0.00017052137991413474, 0.0000010415717497380683 ]
{ "id": 5, "code_window": [ " const containerWidth = action.payload.width;\n", " const queryIntervals = getIntervals(range, interval, containerWidth);\n", " return { ...state, containerWidth, queryIntervals };\n", " },\n", " })\n", " .addMapper({\n", " filter: changeTimeAction,\n", " mapper: (state, action): ExploreItemState => {\n" ], "labels": [ "keep", "replace", "replace", "keep", "keep", "keep", "keep", "keep" ], "after_edit": [ " return { ...state, containerWidth };\n" ], "file_path": "public/app/features/explore/state/reducers.ts", "type": "replace", "edit_start_line_idx": 173 }
// @ts-ignore import _ from 'lodash'; import { calculateResultsFromQueryTransactions, generateEmptyQuery, getIntervals, ensureQueries, getQueryKeys, parseUrlState, DEFAULT_UI_STATE, } from 'app/core/utils/explore'; import { ExploreItemState, ExploreState, QueryTransaction, ExploreId, ExploreUpdateState } from 'app/types/explore'; import { DataQuery } from '@grafana/ui/src/types'; import { HigherOrderAction, ActionTypes, testDataSourcePendingAction, testDataSourceSuccessAction, testDataSourceFailureAction, splitCloseAction, SplitCloseActionPayload, loadExploreDatasources, } from './actionTypes'; import { reducerFactory } from 'app/core/redux'; import { addQueryRowAction, changeQueryAction, changeSizeAction, changeTimeAction, clearQueriesAction, highlightLogsExpressionAction, initializeExploreAction, updateDatasourceInstanceAction, loadDatasourceMissingAction, loadDatasourcePendingAction, loadDatasourceReadyAction, modifyQueriesAction, queryTransactionFailureAction, queryTransactionStartAction, queryTransactionSuccessAction, removeQueryRowAction, scanRangeAction, scanStartAction, scanStopAction, setQueriesAction, toggleGraphAction, toggleLogsAction, toggleTableAction, queriesImportedAction, updateUIStateAction, toggleLogLevelAction, } from './actionTypes'; import { updateLocation } from 'app/core/actions/location'; import { LocationUpdate } from 'app/types'; export const DEFAULT_RANGE = { from: 'now-6h', to: 'now', }; // Millies step for helper bar charts const DEFAULT_GRAPH_INTERVAL = 15 * 1000; export const makeInitialUpdateState = (): ExploreUpdateState => ({ datasource: false, queries: false, range: false, ui: false, }); /** * Returns a fresh Explore area state */ export const makeExploreItemState = (): ExploreItemState => ({ StartPage: undefined, containerWidth: 0, datasourceInstance: null, requestedDatasourceName: null, datasourceError: null, datasourceLoading: null, datasourceMissing: false, exploreDatasources: [], history: [], queries: [], initialized: false, 
queryTransactions: [], queryIntervals: { interval: '15s', intervalMs: DEFAULT_GRAPH_INTERVAL }, range: DEFAULT_RANGE, scanning: false, scanRange: null, showingGraph: true, showingLogs: true, showingTable: true, supportsGraph: null, supportsLogs: null, supportsTable: null, queryKeys: [], urlState: null, update: makeInitialUpdateState(), }); /** * Global Explore state that handles multiple Explore areas and the split state */ export const initialExploreState: ExploreState = { split: null, left: makeExploreItemState(), right: makeExploreItemState(), }; /** * Reducer for an Explore area, to be used by the global Explore reducer. */ export const itemReducer = reducerFactory<ExploreItemState>({} as ExploreItemState) .addMapper({ filter: addQueryRowAction, mapper: (state, action): ExploreItemState => { const { queries, queryTransactions } = state; const { index, query } = action.payload; // Add to queries, which will cause a new row to be rendered const nextQueries = [...queries.slice(0, index + 1), { ...query }, ...queries.slice(index + 1)]; // Ongoing transactions need to update their row indices const nextQueryTransactions = queryTransactions.map(qt => { if (qt.rowIndex > index) { return { ...qt, rowIndex: qt.rowIndex + 1, }; } return qt; }); return { ...state, queries: nextQueries, logsHighlighterExpressions: undefined, queryTransactions: nextQueryTransactions, queryKeys: getQueryKeys(nextQueries, state.datasourceInstance), }; }, }) .addMapper({ filter: changeQueryAction, mapper: (state, action): ExploreItemState => { const { queries, queryTransactions } = state; const { query, index } = action.payload; // Override path: queries are completely reset const nextQuery: DataQuery = { ...query, ...generateEmptyQuery(state.queries) }; const nextQueries = [...queries]; nextQueries[index] = nextQuery; // Discard ongoing transaction related to row query const nextQueryTransactions = queryTransactions.filter(qt => qt.rowIndex !== index); return { ...state, queries: nextQueries, 
queryTransactions: nextQueryTransactions, queryKeys: getQueryKeys(nextQueries, state.datasourceInstance), }; }, }) .addMapper({ filter: changeSizeAction, mapper: (state, action): ExploreItemState => { const { range, datasourceInstance } = state; let interval = '1s'; if (datasourceInstance && datasourceInstance.interval) { interval = datasourceInstance.interval; } const containerWidth = action.payload.width; const queryIntervals = getIntervals(range, interval, containerWidth); return { ...state, containerWidth, queryIntervals }; }, }) .addMapper({ filter: changeTimeAction, mapper: (state, action): ExploreItemState => { return { ...state, range: action.payload.range }; }, }) .addMapper({ filter: clearQueriesAction, mapper: (state): ExploreItemState => { const queries = ensureQueries(); return { ...state, queries: queries.slice(), queryTransactions: [], showingStartPage: Boolean(state.StartPage), queryKeys: getQueryKeys(queries, state.datasourceInstance), }; }, }) .addMapper({ filter: highlightLogsExpressionAction, mapper: (state, action): ExploreItemState => { const { expressions } = action.payload; return { ...state, logsHighlighterExpressions: expressions }; }, }) .addMapper({ filter: initializeExploreAction, mapper: (state, action): ExploreItemState => { const { containerWidth, eventBridge, queries, range, ui } = action.payload; return { ...state, containerWidth, eventBridge, range, queries, initialized: true, queryKeys: getQueryKeys(queries, state.datasourceInstance), ...ui, update: makeInitialUpdateState(), }; }, }) .addMapper({ filter: updateDatasourceInstanceAction, mapper: (state, action): ExploreItemState => { const { datasourceInstance } = action.payload; // Capabilities const supportsGraph = datasourceInstance.meta.metrics; const supportsLogs = datasourceInstance.meta.logs; const supportsTable = datasourceInstance.meta.tables; // Custom components const StartPage = datasourceInstance.components.ExploreStartPage; return { ...state, datasourceInstance, 
supportsGraph, supportsLogs, supportsTable, StartPage, showingStartPage: Boolean(StartPage), queryKeys: getQueryKeys(state.queries, datasourceInstance), }; }, }) .addMapper({ filter: loadDatasourceMissingAction, mapper: (state): ExploreItemState => { return { ...state, datasourceMissing: true, datasourceLoading: false, update: makeInitialUpdateState(), }; }, }) .addMapper({ filter: loadDatasourcePendingAction, mapper: (state, action): ExploreItemState => { return { ...state, datasourceLoading: true, requestedDatasourceName: action.payload.requestedDatasourceName, }; }, }) .addMapper({ filter: loadDatasourceReadyAction, mapper: (state, action): ExploreItemState => { const { containerWidth, range, datasourceInstance } = state; const { history } = action.payload; const queryIntervals = getIntervals(range, datasourceInstance.interval, containerWidth); return { ...state, queryIntervals, history, datasourceLoading: false, datasourceMissing: false, logsHighlighterExpressions: undefined, queryTransactions: [], update: makeInitialUpdateState(), }; }, }) .addMapper({ filter: modifyQueriesAction, mapper: (state, action): ExploreItemState => { const { queries, queryTransactions } = state; const { modification, index, modifier } = action.payload; let nextQueries: DataQuery[]; let nextQueryTransactions: QueryTransaction[]; if (index === undefined) { // Modify all queries nextQueries = queries.map((query, i) => ({ ...modifier({ ...query }, modification), ...generateEmptyQuery(state.queries), })); // Discard all ongoing transactions nextQueryTransactions = []; } else { // Modify query only at index nextQueries = queries.map((query, i) => { // Synchronize all queries with local query cache to ensure consistency // TODO still needed? return i === index ? 
{ ...modifier({ ...query }, modification), ...generateEmptyQuery(state.queries) } : query; }); nextQueryTransactions = queryTransactions // Consume the hint corresponding to the action .map(qt => { if (qt.hints != null && qt.rowIndex === index) { qt.hints = qt.hints.filter(hint => hint.fix.action !== modification); } return qt; }) // Preserve previous row query transaction to keep results visible if next query is incomplete .filter(qt => modification.preventSubmit || qt.rowIndex !== index); } return { ...state, queries: nextQueries, queryKeys: getQueryKeys(nextQueries, state.datasourceInstance), queryTransactions: nextQueryTransactions, }; }, }) .addMapper({ filter: queryTransactionFailureAction, mapper: (state, action): ExploreItemState => { const { queryTransactions } = action.payload; return { ...state, queryTransactions, showingStartPage: false, update: makeInitialUpdateState(), }; }, }) .addMapper({ filter: queryTransactionStartAction, mapper: (state, action): ExploreItemState => { const { queryTransactions } = state; const { resultType, rowIndex, transaction } = action.payload; // Discarding existing transactions of same type const remainingTransactions = queryTransactions.filter( qt => !(qt.resultType === resultType && qt.rowIndex === rowIndex) ); // Append new transaction const nextQueryTransactions: QueryTransaction[] = [...remainingTransactions, transaction]; return { ...state, queryTransactions: nextQueryTransactions, showingStartPage: false, update: makeInitialUpdateState(), }; }, }) .addMapper({ filter: queryTransactionSuccessAction, mapper: (state, action): ExploreItemState => { const { datasourceInstance, queryIntervals } = state; const { history, queryTransactions } = action.payload; const results = calculateResultsFromQueryTransactions( queryTransactions, datasourceInstance, queryIntervals.intervalMs ); return { ...state, ...results, history, queryTransactions, showingStartPage: false, update: makeInitialUpdateState(), }; }, }) .addMapper({ filter: 
removeQueryRowAction, mapper: (state, action): ExploreItemState => { const { datasourceInstance, queries, queryIntervals, queryTransactions, queryKeys } = state; const { index } = action.payload; if (queries.length <= 1) { return state; } const nextQueries = [...queries.slice(0, index), ...queries.slice(index + 1)]; const nextQueryKeys = [...queryKeys.slice(0, index), ...queryKeys.slice(index + 1)]; // Discard transactions related to row query const nextQueryTransactions = queryTransactions.filter(qt => nextQueries.some(nq => nq.key === qt.query.key)); const results = calculateResultsFromQueryTransactions( nextQueryTransactions, datasourceInstance, queryIntervals.intervalMs ); return { ...state, ...results, queries: nextQueries, logsHighlighterExpressions: undefined, queryTransactions: nextQueryTransactions, queryKeys: nextQueryKeys, }; }, }) .addMapper({ filter: scanRangeAction, mapper: (state, action): ExploreItemState => { return { ...state, scanRange: action.payload.range }; }, }) .addMapper({ filter: scanStartAction, mapper: (state, action): ExploreItemState => { return { ...state, scanning: true, scanner: action.payload.scanner }; }, }) .addMapper({ filter: scanStopAction, mapper: (state): ExploreItemState => { const { queryTransactions } = state; const nextQueryTransactions = queryTransactions.filter(qt => qt.scanning && !qt.done); return { ...state, queryTransactions: nextQueryTransactions, scanning: false, scanRange: undefined, scanner: undefined, update: makeInitialUpdateState(), }; }, }) .addMapper({ filter: setQueriesAction, mapper: (state, action): ExploreItemState => { const { queries } = action.payload; return { ...state, queries: queries.slice(), queryKeys: getQueryKeys(queries, state.datasourceInstance), }; }, }) .addMapper({ filter: updateUIStateAction, mapper: (state, action): ExploreItemState => { return { ...state, ...action.payload }; }, }) .addMapper({ filter: toggleGraphAction, mapper: (state): ExploreItemState => { const showingGraph = 
!state.showingGraph; let nextQueryTransactions = state.queryTransactions; if (!showingGraph) { // Discard transactions related to Graph query nextQueryTransactions = state.queryTransactions.filter(qt => qt.resultType !== 'Graph'); } return { ...state, queryTransactions: nextQueryTransactions }; }, }) .addMapper({ filter: toggleLogsAction, mapper: (state): ExploreItemState => { const showingLogs = !state.showingLogs; let nextQueryTransactions = state.queryTransactions; if (!showingLogs) { // Discard transactions related to Logs query nextQueryTransactions = state.queryTransactions.filter(qt => qt.resultType !== 'Logs'); } return { ...state, queryTransactions: nextQueryTransactions }; }, }) .addMapper({ filter: toggleTableAction, mapper: (state): ExploreItemState => { const showingTable = !state.showingTable; if (showingTable) { return { ...state, queryTransactions: state.queryTransactions }; } // Toggle off needs discarding of table queries and results const nextQueryTransactions = state.queryTransactions.filter(qt => qt.resultType !== 'Table'); const results = calculateResultsFromQueryTransactions( nextQueryTransactions, state.datasourceInstance, state.queryIntervals.intervalMs ); return { ...state, ...results, queryTransactions: nextQueryTransactions }; }, }) .addMapper({ filter: queriesImportedAction, mapper: (state, action): ExploreItemState => { const { queries } = action.payload; return { ...state, queries, queryKeys: getQueryKeys(queries, state.datasourceInstance), }; }, }) .addMapper({ filter: toggleLogLevelAction, mapper: (state, action): ExploreItemState => { const { hiddenLogLevels } = action.payload; return { ...state, hiddenLogLevels: Array.from(hiddenLogLevels), }; }, }) .addMapper({ filter: testDataSourcePendingAction, mapper: (state): ExploreItemState => { return { ...state, datasourceError: null, }; }, }) .addMapper({ filter: testDataSourceSuccessAction, mapper: (state): ExploreItemState => { return { ...state, datasourceError: null, }; }, }) 
.addMapper({ filter: testDataSourceFailureAction, mapper: (state, action): ExploreItemState => { return { ...state, datasourceError: action.payload.error, queryTransactions: [], graphResult: undefined, tableResult: undefined, logsResult: undefined, update: makeInitialUpdateState(), }; }, }) .addMapper({ filter: loadExploreDatasources, mapper: (state, action): ExploreItemState => { return { ...state, exploreDatasources: action.payload.exploreDatasources, }; }, }) .create(); export const updateChildRefreshState = ( state: Readonly<ExploreItemState>, payload: LocationUpdate, exploreId: ExploreId ): ExploreItemState => { const path = payload.path || ''; const queryState = payload.query[exploreId] as string; if (!queryState) { return state; } const urlState = parseUrlState(queryState); if (!state.urlState || path !== '/explore') { // we only want to refresh when browser back/forward return { ...state, urlState, update: { datasource: false, queries: false, range: false, ui: false } }; } const datasource = _.isEqual(urlState ? urlState.datasource : '', state.urlState.datasource) === false; const queries = _.isEqual(urlState ? urlState.queries : [], state.urlState.queries) === false; const range = _.isEqual(urlState ? urlState.range : DEFAULT_RANGE, state.urlState.range) === false; const ui = _.isEqual(urlState ? urlState.ui : DEFAULT_UI_STATE, state.urlState.ui) === false; return { ...state, urlState, update: { ...state.update, datasource, queries, range, ui, }, }; }; /** * Global Explore reducer that handles multiple Explore areas (left and right). * Actions that have an `exploreId` get routed to the ExploreItemReducer. */ export const exploreReducer = (state = initialExploreState, action: HigherOrderAction): ExploreState => { switch (action.type) { case splitCloseAction.type: { const { itemId } = action.payload as SplitCloseActionPayload; const targetSplit = { left: itemId === ExploreId.left ? 
state.right : state.left, right: initialExploreState.right, }; return { ...state, ...targetSplit, split: false, }; } case ActionTypes.SplitOpen: { return { ...state, split: true, right: { ...action.payload.itemState } }; } case ActionTypes.ResetExplore: { return initialExploreState; } case updateLocation.type: { const { query } = action.payload; if (!query || !query[ExploreId.left]) { return state; } const split = query[ExploreId.right] ? true : false; const leftState = state[ExploreId.left]; const rightState = state[ExploreId.right]; return { ...state, split, [ExploreId.left]: updateChildRefreshState(leftState, action.payload, ExploreId.left), [ExploreId.right]: updateChildRefreshState(rightState, action.payload, ExploreId.right), }; } } if (action.payload) { const { exploreId } = action.payload as any; if (exploreId !== undefined) { const exploreItemState = state[exploreId]; return { ...state, [exploreId]: itemReducer(exploreItemState, action) }; } } return state; }; export default { explore: exploreReducer, };
public/app/features/explore/state/reducers.ts
1
https://github.com/grafana/grafana/commit/1341f4517a3a67c58c8d4bca2b184fb3dea735f7
[ 0.9983780384063721, 0.017498912289738655, 0.00016535389295313507, 0.0011148471385240555, 0.12182487547397614 ]
{ "id": 5, "code_window": [ " const containerWidth = action.payload.width;\n", " const queryIntervals = getIntervals(range, interval, containerWidth);\n", " return { ...state, containerWidth, queryIntervals };\n", " },\n", " })\n", " .addMapper({\n", " filter: changeTimeAction,\n", " mapper: (state, action): ExploreItemState => {\n" ], "labels": [ "keep", "replace", "replace", "keep", "keep", "keep", "keep", "keep" ], "after_edit": [ " return { ...state, containerWidth };\n" ], "file_path": "public/app/features/explore/state/reducers.ts", "type": "replace", "edit_start_line_idx": 173 }
package conditions import ( "testing" . "github.com/smartystreets/goconvey/convey" "github.com/grafana/grafana/pkg/components/null" "github.com/grafana/grafana/pkg/tsdb" ) func TestSimpleReducer(t *testing.T) { Convey("Test simple reducer by calculating", t, func() { Convey("sum", func() { result := testReducer("sum", 1, 2, 3) So(result, ShouldEqual, float64(6)) }) Convey("min", func() { result := testReducer("min", 3, 2, 1) So(result, ShouldEqual, float64(1)) }) Convey("max", func() { result := testReducer("max", 1, 2, 3) So(result, ShouldEqual, float64(3)) }) Convey("count", func() { result := testReducer("count", 1, 2, 3000) So(result, ShouldEqual, float64(3)) }) Convey("last", func() { result := testReducer("last", 1, 2, 3000) So(result, ShouldEqual, float64(3000)) }) Convey("median odd amount of numbers", func() { result := testReducer("median", 1, 2, 3000) So(result, ShouldEqual, float64(2)) }) Convey("median even amount of numbers", func() { result := testReducer("median", 1, 2, 4, 3000) So(result, ShouldEqual, float64(3)) }) Convey("median with one values", func() { result := testReducer("median", 1) So(result, ShouldEqual, float64(1)) }) Convey("median should ignore null values", func() { reducer := NewSimpleReducer("median") series := &tsdb.TimeSeries{ Name: "test time serie", } series.Points = append(series.Points, tsdb.NewTimePoint(null.FloatFromPtr(nil), 1)) series.Points = append(series.Points, tsdb.NewTimePoint(null.FloatFromPtr(nil), 2)) series.Points = append(series.Points, tsdb.NewTimePoint(null.FloatFromPtr(nil), 3)) series.Points = append(series.Points, tsdb.NewTimePoint(null.FloatFrom(float64(1)), 4)) series.Points = append(series.Points, tsdb.NewTimePoint(null.FloatFrom(float64(2)), 5)) series.Points = append(series.Points, tsdb.NewTimePoint(null.FloatFrom(float64(3)), 6)) result := reducer.Reduce(series) So(result.Valid, ShouldEqual, true) So(result.Float64, ShouldEqual, float64(2)) }) Convey("avg", func() { result := testReducer("avg", 1, 2, 
3) So(result, ShouldEqual, float64(2)) }) Convey("avg with only nulls", func() { reducer := NewSimpleReducer("avg") series := &tsdb.TimeSeries{ Name: "test time serie", } series.Points = append(series.Points, tsdb.NewTimePoint(null.FloatFromPtr(nil), 1)) So(reducer.Reduce(series).Valid, ShouldEqual, false) }) Convey("count_non_null", func() { Convey("with null values and real values", func() { reducer := NewSimpleReducer("count_non_null") series := &tsdb.TimeSeries{ Name: "test time serie", } series.Points = append(series.Points, tsdb.NewTimePoint(null.FloatFromPtr(nil), 1)) series.Points = append(series.Points, tsdb.NewTimePoint(null.FloatFromPtr(nil), 2)) series.Points = append(series.Points, tsdb.NewTimePoint(null.FloatFrom(3), 3)) series.Points = append(series.Points, tsdb.NewTimePoint(null.FloatFrom(3), 4)) So(reducer.Reduce(series).Valid, ShouldEqual, true) So(reducer.Reduce(series).Float64, ShouldEqual, 2) }) Convey("with null values", func() { reducer := NewSimpleReducer("count_non_null") series := &tsdb.TimeSeries{ Name: "test time serie", } series.Points = append(series.Points, tsdb.NewTimePoint(null.FloatFromPtr(nil), 1)) series.Points = append(series.Points, tsdb.NewTimePoint(null.FloatFromPtr(nil), 2)) So(reducer.Reduce(series).Valid, ShouldEqual, false) }) }) Convey("avg of number values and null values should ignore nulls", func() { reducer := NewSimpleReducer("avg") series := &tsdb.TimeSeries{ Name: "test time serie", } series.Points = append(series.Points, tsdb.NewTimePoint(null.FloatFrom(3), 1)) series.Points = append(series.Points, tsdb.NewTimePoint(null.FloatFromPtr(nil), 2)) series.Points = append(series.Points, tsdb.NewTimePoint(null.FloatFromPtr(nil), 3)) series.Points = append(series.Points, tsdb.NewTimePoint(null.FloatFrom(3), 4)) So(reducer.Reduce(series).Float64, ShouldEqual, float64(3)) }) Convey("diff one point", func() { result := testReducer("diff", 30) So(result, ShouldEqual, float64(0)) }) Convey("diff two points", func() { result 
:= testReducer("diff", 30, 40) So(result, ShouldEqual, float64(10)) }) Convey("diff three points", func() { result := testReducer("diff", 30, 40, 40) So(result, ShouldEqual, float64(10)) }) Convey("diff with only nulls", func() { reducer := NewSimpleReducer("diff") series := &tsdb.TimeSeries{ Name: "test time serie", } series.Points = append(series.Points, tsdb.NewTimePoint(null.FloatFromPtr(nil), 1)) series.Points = append(series.Points, tsdb.NewTimePoint(null.FloatFromPtr(nil), 2)) So(reducer.Reduce(series).Valid, ShouldEqual, false) }) Convey("percent_diff one point", func() { result := testReducer("percent_diff", 40) So(result, ShouldEqual, float64(0)) }) Convey("percent_diff two points", func() { result := testReducer("percent_diff", 30, 40) So(result, ShouldEqual, float64(33.33333333333333)) }) Convey("percent_diff three points", func() { result := testReducer("percent_diff", 30, 40, 40) So(result, ShouldEqual, float64(33.33333333333333)) }) Convey("percent_diff with only nulls", func() { reducer := NewSimpleReducer("percent_diff") series := &tsdb.TimeSeries{ Name: "test time serie", } series.Points = append(series.Points, tsdb.NewTimePoint(null.FloatFromPtr(nil), 1)) series.Points = append(series.Points, tsdb.NewTimePoint(null.FloatFromPtr(nil), 2)) So(reducer.Reduce(series).Valid, ShouldEqual, false) }) }) } func testReducer(typ string, datapoints ...float64) float64 { reducer := NewSimpleReducer(typ) series := &tsdb.TimeSeries{ Name: "test time serie", } for idx := range datapoints { series.Points = append(series.Points, tsdb.NewTimePoint(null.FloatFrom(datapoints[idx]), 1234134)) } return reducer.Reduce(series).Float64 }
pkg/services/alerting/conditions/reducer_test.go
0
https://github.com/grafana/grafana/commit/1341f4517a3a67c58c8d4bca2b184fb3dea735f7
[ 0.0001797237346181646, 0.0001742342283250764, 0.00016709321062080562, 0.00017416526679880917, 0.0000026960819923260715 ]
{ "id": 5, "code_window": [ " const containerWidth = action.payload.width;\n", " const queryIntervals = getIntervals(range, interval, containerWidth);\n", " return { ...state, containerWidth, queryIntervals };\n", " },\n", " })\n", " .addMapper({\n", " filter: changeTimeAction,\n", " mapper: (state, action): ExploreItemState => {\n" ], "labels": [ "keep", "replace", "replace", "keep", "keep", "keep", "keep", "keep" ], "after_edit": [ " return { ...state, containerWidth };\n" ], "file_path": "public/app/features/explore/state/reducers.ts", "type": "replace", "edit_start_line_idx": 173 }
The MIT License (MIT) Copyright 2012 Keith Rarick Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
vendor/github.com/kr/pretty/License
0
https://github.com/grafana/grafana/commit/1341f4517a3a67c58c8d4bca2b184fb3dea735f7
[ 0.0001766255300026387, 0.00017124773876275867, 0.000165825811563991, 0.00017129186016973108, 0.0000044090766095905565 ]
{ "id": 5, "code_window": [ " const containerWidth = action.payload.width;\n", " const queryIntervals = getIntervals(range, interval, containerWidth);\n", " return { ...state, containerWidth, queryIntervals };\n", " },\n", " })\n", " .addMapper({\n", " filter: changeTimeAction,\n", " mapper: (state, action): ExploreItemState => {\n" ], "labels": [ "keep", "replace", "replace", "keep", "keep", "keep", "keep", "keep" ], "after_edit": [ " return { ...state, containerWidth };\n" ], "file_path": "public/app/features/explore/state/reducers.ts", "type": "replace", "edit_start_line_idx": 173 }
// Libraries import React, { FunctionComponent } from 'react'; // Components import { Tooltip } from '@grafana/ui'; interface Props { icon: string; tooltip: string; classSuffix: string; onClick?: () => void; href?: string; } export const DashNavButton: FunctionComponent<Props> = ({ icon, tooltip, classSuffix, onClick, href }) => { if (onClick) { return ( <Tooltip content={tooltip}> <button className={`btn navbar-button navbar-button--${classSuffix}`} onClick={onClick}> <i className={icon} /> </button> </Tooltip> ); } return ( <Tooltip content={tooltip}> <a className={`btn navbar-button navbar-button--${classSuffix}`} href={href}> <i className={icon} /> </a> </Tooltip> ); };
public/app/features/dashboard/components/DashNav/DashNavButton.tsx
0
https://github.com/grafana/grafana/commit/1341f4517a3a67c58c8d4bca2b184fb3dea735f7
[ 0.0001774623233359307, 0.0001720535510685295, 0.00016780900477897376, 0.00017147144535556436, 0.00000356011059921002 ]
{ "id": 6, "code_window": [ " },\n", " })\n", " .addMapper({\n", " filter: loadDatasourceReadyAction,\n", " mapper: (state, action): ExploreItemState => {\n", " const { containerWidth, range, datasourceInstance } = state;\n", " const { history } = action.payload;\n" ], "labels": [ "keep", "keep", "keep", "keep", "keep", "replace", "keep" ], "after_edit": [], "file_path": "public/app/features/explore/state/reducers.ts", "type": "replace", "edit_start_line_idx": 268 }
// @ts-ignore import _ from 'lodash'; import { calculateResultsFromQueryTransactions, generateEmptyQuery, getIntervals, ensureQueries, getQueryKeys, parseUrlState, DEFAULT_UI_STATE, } from 'app/core/utils/explore'; import { ExploreItemState, ExploreState, QueryTransaction, ExploreId, ExploreUpdateState } from 'app/types/explore'; import { DataQuery } from '@grafana/ui/src/types'; import { HigherOrderAction, ActionTypes, testDataSourcePendingAction, testDataSourceSuccessAction, testDataSourceFailureAction, splitCloseAction, SplitCloseActionPayload, loadExploreDatasources, } from './actionTypes'; import { reducerFactory } from 'app/core/redux'; import { addQueryRowAction, changeQueryAction, changeSizeAction, changeTimeAction, clearQueriesAction, highlightLogsExpressionAction, initializeExploreAction, updateDatasourceInstanceAction, loadDatasourceMissingAction, loadDatasourcePendingAction, loadDatasourceReadyAction, modifyQueriesAction, queryTransactionFailureAction, queryTransactionStartAction, queryTransactionSuccessAction, removeQueryRowAction, scanRangeAction, scanStartAction, scanStopAction, setQueriesAction, toggleGraphAction, toggleLogsAction, toggleTableAction, queriesImportedAction, updateUIStateAction, toggleLogLevelAction, } from './actionTypes'; import { updateLocation } from 'app/core/actions/location'; import { LocationUpdate } from 'app/types'; export const DEFAULT_RANGE = { from: 'now-6h', to: 'now', }; // Millies step for helper bar charts const DEFAULT_GRAPH_INTERVAL = 15 * 1000; export const makeInitialUpdateState = (): ExploreUpdateState => ({ datasource: false, queries: false, range: false, ui: false, }); /** * Returns a fresh Explore area state */ export const makeExploreItemState = (): ExploreItemState => ({ StartPage: undefined, containerWidth: 0, datasourceInstance: null, requestedDatasourceName: null, datasourceError: null, datasourceLoading: null, datasourceMissing: false, exploreDatasources: [], history: [], queries: [], initialized: false, 
queryTransactions: [], queryIntervals: { interval: '15s', intervalMs: DEFAULT_GRAPH_INTERVAL }, range: DEFAULT_RANGE, scanning: false, scanRange: null, showingGraph: true, showingLogs: true, showingTable: true, supportsGraph: null, supportsLogs: null, supportsTable: null, queryKeys: [], urlState: null, update: makeInitialUpdateState(), }); /** * Global Explore state that handles multiple Explore areas and the split state */ export const initialExploreState: ExploreState = { split: null, left: makeExploreItemState(), right: makeExploreItemState(), }; /** * Reducer for an Explore area, to be used by the global Explore reducer. */ export const itemReducer = reducerFactory<ExploreItemState>({} as ExploreItemState) .addMapper({ filter: addQueryRowAction, mapper: (state, action): ExploreItemState => { const { queries, queryTransactions } = state; const { index, query } = action.payload; // Add to queries, which will cause a new row to be rendered const nextQueries = [...queries.slice(0, index + 1), { ...query }, ...queries.slice(index + 1)]; // Ongoing transactions need to update their row indices const nextQueryTransactions = queryTransactions.map(qt => { if (qt.rowIndex > index) { return { ...qt, rowIndex: qt.rowIndex + 1, }; } return qt; }); return { ...state, queries: nextQueries, logsHighlighterExpressions: undefined, queryTransactions: nextQueryTransactions, queryKeys: getQueryKeys(nextQueries, state.datasourceInstance), }; }, }) .addMapper({ filter: changeQueryAction, mapper: (state, action): ExploreItemState => { const { queries, queryTransactions } = state; const { query, index } = action.payload; // Override path: queries are completely reset const nextQuery: DataQuery = { ...query, ...generateEmptyQuery(state.queries) }; const nextQueries = [...queries]; nextQueries[index] = nextQuery; // Discard ongoing transaction related to row query const nextQueryTransactions = queryTransactions.filter(qt => qt.rowIndex !== index); return { ...state, queries: nextQueries, 
queryTransactions: nextQueryTransactions, queryKeys: getQueryKeys(nextQueries, state.datasourceInstance), }; }, }) .addMapper({ filter: changeSizeAction, mapper: (state, action): ExploreItemState => { const { range, datasourceInstance } = state; let interval = '1s'; if (datasourceInstance && datasourceInstance.interval) { interval = datasourceInstance.interval; } const containerWidth = action.payload.width; const queryIntervals = getIntervals(range, interval, containerWidth); return { ...state, containerWidth, queryIntervals }; }, }) .addMapper({ filter: changeTimeAction, mapper: (state, action): ExploreItemState => { return { ...state, range: action.payload.range }; }, }) .addMapper({ filter: clearQueriesAction, mapper: (state): ExploreItemState => { const queries = ensureQueries(); return { ...state, queries: queries.slice(), queryTransactions: [], showingStartPage: Boolean(state.StartPage), queryKeys: getQueryKeys(queries, state.datasourceInstance), }; }, }) .addMapper({ filter: highlightLogsExpressionAction, mapper: (state, action): ExploreItemState => { const { expressions } = action.payload; return { ...state, logsHighlighterExpressions: expressions }; }, }) .addMapper({ filter: initializeExploreAction, mapper: (state, action): ExploreItemState => { const { containerWidth, eventBridge, queries, range, ui } = action.payload; return { ...state, containerWidth, eventBridge, range, queries, initialized: true, queryKeys: getQueryKeys(queries, state.datasourceInstance), ...ui, update: makeInitialUpdateState(), }; }, }) .addMapper({ filter: updateDatasourceInstanceAction, mapper: (state, action): ExploreItemState => { const { datasourceInstance } = action.payload; // Capabilities const supportsGraph = datasourceInstance.meta.metrics; const supportsLogs = datasourceInstance.meta.logs; const supportsTable = datasourceInstance.meta.tables; // Custom components const StartPage = datasourceInstance.components.ExploreStartPage; return { ...state, datasourceInstance, 
supportsGraph, supportsLogs, supportsTable, StartPage, showingStartPage: Boolean(StartPage), queryKeys: getQueryKeys(state.queries, datasourceInstance), }; }, }) .addMapper({ filter: loadDatasourceMissingAction, mapper: (state): ExploreItemState => { return { ...state, datasourceMissing: true, datasourceLoading: false, update: makeInitialUpdateState(), }; }, }) .addMapper({ filter: loadDatasourcePendingAction, mapper: (state, action): ExploreItemState => { return { ...state, datasourceLoading: true, requestedDatasourceName: action.payload.requestedDatasourceName, }; }, }) .addMapper({ filter: loadDatasourceReadyAction, mapper: (state, action): ExploreItemState => { const { containerWidth, range, datasourceInstance } = state; const { history } = action.payload; const queryIntervals = getIntervals(range, datasourceInstance.interval, containerWidth); return { ...state, queryIntervals, history, datasourceLoading: false, datasourceMissing: false, logsHighlighterExpressions: undefined, queryTransactions: [], update: makeInitialUpdateState(), }; }, }) .addMapper({ filter: modifyQueriesAction, mapper: (state, action): ExploreItemState => { const { queries, queryTransactions } = state; const { modification, index, modifier } = action.payload; let nextQueries: DataQuery[]; let nextQueryTransactions: QueryTransaction[]; if (index === undefined) { // Modify all queries nextQueries = queries.map((query, i) => ({ ...modifier({ ...query }, modification), ...generateEmptyQuery(state.queries), })); // Discard all ongoing transactions nextQueryTransactions = []; } else { // Modify query only at index nextQueries = queries.map((query, i) => { // Synchronize all queries with local query cache to ensure consistency // TODO still needed? return i === index ? 
{ ...modifier({ ...query }, modification), ...generateEmptyQuery(state.queries) } : query; }); nextQueryTransactions = queryTransactions // Consume the hint corresponding to the action .map(qt => { if (qt.hints != null && qt.rowIndex === index) { qt.hints = qt.hints.filter(hint => hint.fix.action !== modification); } return qt; }) // Preserve previous row query transaction to keep results visible if next query is incomplete .filter(qt => modification.preventSubmit || qt.rowIndex !== index); } return { ...state, queries: nextQueries, queryKeys: getQueryKeys(nextQueries, state.datasourceInstance), queryTransactions: nextQueryTransactions, }; }, }) .addMapper({ filter: queryTransactionFailureAction, mapper: (state, action): ExploreItemState => { const { queryTransactions } = action.payload; return { ...state, queryTransactions, showingStartPage: false, update: makeInitialUpdateState(), }; }, }) .addMapper({ filter: queryTransactionStartAction, mapper: (state, action): ExploreItemState => { const { queryTransactions } = state; const { resultType, rowIndex, transaction } = action.payload; // Discarding existing transactions of same type const remainingTransactions = queryTransactions.filter( qt => !(qt.resultType === resultType && qt.rowIndex === rowIndex) ); // Append new transaction const nextQueryTransactions: QueryTransaction[] = [...remainingTransactions, transaction]; return { ...state, queryTransactions: nextQueryTransactions, showingStartPage: false, update: makeInitialUpdateState(), }; }, }) .addMapper({ filter: queryTransactionSuccessAction, mapper: (state, action): ExploreItemState => { const { datasourceInstance, queryIntervals } = state; const { history, queryTransactions } = action.payload; const results = calculateResultsFromQueryTransactions( queryTransactions, datasourceInstance, queryIntervals.intervalMs ); return { ...state, ...results, history, queryTransactions, showingStartPage: false, update: makeInitialUpdateState(), }; }, }) .addMapper({ filter: 
removeQueryRowAction, mapper: (state, action): ExploreItemState => { const { datasourceInstance, queries, queryIntervals, queryTransactions, queryKeys } = state; const { index } = action.payload; if (queries.length <= 1) { return state; } const nextQueries = [...queries.slice(0, index), ...queries.slice(index + 1)]; const nextQueryKeys = [...queryKeys.slice(0, index), ...queryKeys.slice(index + 1)]; // Discard transactions related to row query const nextQueryTransactions = queryTransactions.filter(qt => nextQueries.some(nq => nq.key === qt.query.key)); const results = calculateResultsFromQueryTransactions( nextQueryTransactions, datasourceInstance, queryIntervals.intervalMs ); return { ...state, ...results, queries: nextQueries, logsHighlighterExpressions: undefined, queryTransactions: nextQueryTransactions, queryKeys: nextQueryKeys, }; }, }) .addMapper({ filter: scanRangeAction, mapper: (state, action): ExploreItemState => { return { ...state, scanRange: action.payload.range }; }, }) .addMapper({ filter: scanStartAction, mapper: (state, action): ExploreItemState => { return { ...state, scanning: true, scanner: action.payload.scanner }; }, }) .addMapper({ filter: scanStopAction, mapper: (state): ExploreItemState => { const { queryTransactions } = state; const nextQueryTransactions = queryTransactions.filter(qt => qt.scanning && !qt.done); return { ...state, queryTransactions: nextQueryTransactions, scanning: false, scanRange: undefined, scanner: undefined, update: makeInitialUpdateState(), }; }, }) .addMapper({ filter: setQueriesAction, mapper: (state, action): ExploreItemState => { const { queries } = action.payload; return { ...state, queries: queries.slice(), queryKeys: getQueryKeys(queries, state.datasourceInstance), }; }, }) .addMapper({ filter: updateUIStateAction, mapper: (state, action): ExploreItemState => { return { ...state, ...action.payload }; }, }) .addMapper({ filter: toggleGraphAction, mapper: (state): ExploreItemState => { const showingGraph = 
!state.showingGraph; let nextQueryTransactions = state.queryTransactions; if (!showingGraph) { // Discard transactions related to Graph query nextQueryTransactions = state.queryTransactions.filter(qt => qt.resultType !== 'Graph'); } return { ...state, queryTransactions: nextQueryTransactions }; }, }) .addMapper({ filter: toggleLogsAction, mapper: (state): ExploreItemState => { const showingLogs = !state.showingLogs; let nextQueryTransactions = state.queryTransactions; if (!showingLogs) { // Discard transactions related to Logs query nextQueryTransactions = state.queryTransactions.filter(qt => qt.resultType !== 'Logs'); } return { ...state, queryTransactions: nextQueryTransactions }; }, }) .addMapper({ filter: toggleTableAction, mapper: (state): ExploreItemState => { const showingTable = !state.showingTable; if (showingTable) { return { ...state, queryTransactions: state.queryTransactions }; } // Toggle off needs discarding of table queries and results const nextQueryTransactions = state.queryTransactions.filter(qt => qt.resultType !== 'Table'); const results = calculateResultsFromQueryTransactions( nextQueryTransactions, state.datasourceInstance, state.queryIntervals.intervalMs ); return { ...state, ...results, queryTransactions: nextQueryTransactions }; }, }) .addMapper({ filter: queriesImportedAction, mapper: (state, action): ExploreItemState => { const { queries } = action.payload; return { ...state, queries, queryKeys: getQueryKeys(queries, state.datasourceInstance), }; }, }) .addMapper({ filter: toggleLogLevelAction, mapper: (state, action): ExploreItemState => { const { hiddenLogLevels } = action.payload; return { ...state, hiddenLogLevels: Array.from(hiddenLogLevels), }; }, }) .addMapper({ filter: testDataSourcePendingAction, mapper: (state): ExploreItemState => { return { ...state, datasourceError: null, }; }, }) .addMapper({ filter: testDataSourceSuccessAction, mapper: (state): ExploreItemState => { return { ...state, datasourceError: null, }; }, }) 
.addMapper({ filter: testDataSourceFailureAction, mapper: (state, action): ExploreItemState => { return { ...state, datasourceError: action.payload.error, queryTransactions: [], graphResult: undefined, tableResult: undefined, logsResult: undefined, update: makeInitialUpdateState(), }; }, }) .addMapper({ filter: loadExploreDatasources, mapper: (state, action): ExploreItemState => { return { ...state, exploreDatasources: action.payload.exploreDatasources, }; }, }) .create(); export const updateChildRefreshState = ( state: Readonly<ExploreItemState>, payload: LocationUpdate, exploreId: ExploreId ): ExploreItemState => { const path = payload.path || ''; const queryState = payload.query[exploreId] as string; if (!queryState) { return state; } const urlState = parseUrlState(queryState); if (!state.urlState || path !== '/explore') { // we only want to refresh when browser back/forward return { ...state, urlState, update: { datasource: false, queries: false, range: false, ui: false } }; } const datasource = _.isEqual(urlState ? urlState.datasource : '', state.urlState.datasource) === false; const queries = _.isEqual(urlState ? urlState.queries : [], state.urlState.queries) === false; const range = _.isEqual(urlState ? urlState.range : DEFAULT_RANGE, state.urlState.range) === false; const ui = _.isEqual(urlState ? urlState.ui : DEFAULT_UI_STATE, state.urlState.ui) === false; return { ...state, urlState, update: { ...state.update, datasource, queries, range, ui, }, }; }; /** * Global Explore reducer that handles multiple Explore areas (left and right). * Actions that have an `exploreId` get routed to the ExploreItemReducer. */ export const exploreReducer = (state = initialExploreState, action: HigherOrderAction): ExploreState => { switch (action.type) { case splitCloseAction.type: { const { itemId } = action.payload as SplitCloseActionPayload; const targetSplit = { left: itemId === ExploreId.left ? 
state.right : state.left, right: initialExploreState.right, }; return { ...state, ...targetSplit, split: false, }; } case ActionTypes.SplitOpen: { return { ...state, split: true, right: { ...action.payload.itemState } }; } case ActionTypes.ResetExplore: { return initialExploreState; } case updateLocation.type: { const { query } = action.payload; if (!query || !query[ExploreId.left]) { return state; } const split = query[ExploreId.right] ? true : false; const leftState = state[ExploreId.left]; const rightState = state[ExploreId.right]; return { ...state, split, [ExploreId.left]: updateChildRefreshState(leftState, action.payload, ExploreId.left), [ExploreId.right]: updateChildRefreshState(rightState, action.payload, ExploreId.right), }; } } if (action.payload) { const { exploreId } = action.payload as any; if (exploreId !== undefined) { const exploreItemState = state[exploreId]; return { ...state, [exploreId]: itemReducer(exploreItemState, action) }; } } return state; }; export default { explore: exploreReducer, };
public/app/features/explore/state/reducers.ts
1
https://github.com/grafana/grafana/commit/1341f4517a3a67c58c8d4bca2b184fb3dea735f7
[ 0.9990756511688232, 0.182402566075325, 0.0001647343160584569, 0.002842811169102788, 0.3618911802768707 ]
{ "id": 6, "code_window": [ " },\n", " })\n", " .addMapper({\n", " filter: loadDatasourceReadyAction,\n", " mapper: (state, action): ExploreItemState => {\n", " const { containerWidth, range, datasourceInstance } = state;\n", " const { history } = action.payload;\n" ], "labels": [ "keep", "keep", "keep", "keep", "keep", "replace", "keep" ], "after_edit": [], "file_path": "public/app/features/explore/state/reducers.ts", "type": "replace", "edit_start_line_idx": 268 }
// Libraries import Papa, { ParseResult, ParseConfig, Parser } from 'papaparse'; import defaults from 'lodash/defaults'; import isNumber from 'lodash/isNumber'; // Types import { SeriesData, Field, FieldType } from '../types/index'; import { guessFieldTypeFromValue } from './processSeriesData'; export enum CSVHeaderStyle { full, name, none, } // Subset of all parse options export interface CSVConfig { delimiter?: string; // default: "," newline?: string; // default: "\r\n" quoteChar?: string; // default: '"' encoding?: string; // default: "", headerStyle?: CSVHeaderStyle; } export interface CSVParseCallbacks { /** * Get a callback before any rows are processed * This can return a modified table to force any * Column configurations */ onHeader: (table: SeriesData) => void; // Called after each row is read and onRow: (row: any[]) => void; } export interface CSVOptions { config?: CSVConfig; callback?: CSVParseCallbacks; } export function readCSV(csv: string, options?: CSVOptions): SeriesData[] { return new CSVReader(options).readCSV(csv); } enum ParseState { Starting, InHeader, ReadingRows, } type FieldParser = (value: string) => any; export class CSVReader { config: CSVConfig; callback?: CSVParseCallbacks; field: FieldParser[]; series: SeriesData; state: ParseState; data: SeriesData[]; constructor(options?: CSVOptions) { if (!options) { options = {}; } this.config = options.config || {}; this.callback = options.callback; this.field = []; this.state = ParseState.Starting; this.series = { fields: [], rows: [], }; this.data = []; } // PapaParse callback on each line private step = (results: ParseResult, parser: Parser): void => { for (let i = 0; i < results.data.length; i++) { const line: string[] = results.data[i]; if (line.length < 1) { continue; } const first = line[0]; // null or value, papaparse does not return '' if (first) { // Comment or header queue if (first.startsWith('#')) { // Look for special header column // #{columkey}#a,b,c const idx = 
first.indexOf('#', 2); if (idx > 0) { const k = first.substr(1, idx - 1); // Simple object used to check if headers match const headerKeys: Field = { name: '#', type: FieldType.number, unit: '#', dateFormat: '#', }; // Check if it is a known/supported column if (headerKeys.hasOwnProperty(k)) { // Starting a new table after reading rows if (this.state === ParseState.ReadingRows) { this.series = { fields: [], rows: [], }; this.data.push(this.series); } padColumnWidth(this.series.fields, line.length); const fields: any[] = this.series.fields; // cast to any so we can lookup by key const v = first.substr(idx + 1); fields[0][k] = v; for (let j = 1; j < fields.length; j++) { fields[j][k] = line[j]; } this.state = ParseState.InHeader; continue; } } else if (this.state === ParseState.Starting) { this.series.fields = makeFieldsFor(line); this.state = ParseState.InHeader; continue; } // Ignore comment lines continue; } if (this.state === ParseState.Starting) { const type = guessFieldTypeFromValue(first); if (type === FieldType.string) { this.series.fields = makeFieldsFor(line); this.state = ParseState.InHeader; continue; } this.series.fields = makeFieldsFor(new Array(line.length)); this.series.fields[0].type = type; this.state = ParseState.InHeader; // fall through to read rows } } if (this.state === ParseState.InHeader) { padColumnWidth(this.series.fields, line.length); this.state = ParseState.ReadingRows; } if (this.state === ParseState.ReadingRows) { // Make sure colum structure is valid if (line.length > this.series.fields.length) { padColumnWidth(this.series.fields, line.length); if (this.callback) { this.callback.onHeader(this.series); } else { // Expand all rows with nulls for (let x = 0; x < this.series.rows.length; x++) { const row = this.series.rows[x]; while (row.length < line.length) { row.push(null); } } } } const row: any[] = []; for (let j = 0; j < line.length; j++) { const v = line[j]; if (v) { if (!this.field[j]) { this.field[j] = makeFieldParser(v, 
this.series.fields[j]); } row.push(this.field[j](v)); } else { row.push(null); } } if (this.callback) { // Send the header after we guess the type if (this.series.rows.length === 0) { this.callback.onHeader(this.series); this.series.rows.push(row); // Only add the first row } this.callback.onRow(row); } else { this.series.rows.push(row); } } } }; readCSV(text: string): SeriesData[] { this.data = [this.series]; const papacfg = { ...this.config, dynamicTyping: false, skipEmptyLines: true, comments: false, // Keep comment lines step: this.step, } as ParseConfig; Papa.parse(text, papacfg); return this.data; } } function makeFieldParser(value: string, field: Field): FieldParser { if (!field.type) { if (field.name === 'time' || field.name === 'Time') { field.type = FieldType.time; } else { field.type = guessFieldTypeFromValue(value); } } if (field.type === FieldType.number) { return (value: string) => { return parseFloat(value); }; } // Will convert anything that starts with "T" to true if (field.type === FieldType.boolean) { return (value: string) => { return !(value[0] === 'F' || value[0] === 'f' || value[0] === '0'); }; } // Just pass the string back return (value: string) => value; } /** * Creates a field object for each string in the list */ function makeFieldsFor(line: string[]): Field[] { const fields: Field[] = []; for (let i = 0; i < line.length; i++) { const v = line[i] ? 
line[i] : 'Column ' + (i + 1); fields.push({ name: v }); } return fields; } /** * Makes sure the colum has valid entries up the the width */ function padColumnWidth(fields: Field[], width: number) { if (fields.length < width) { for (let i = fields.length; i < width; i++) { fields.push({ name: 'Field ' + (i + 1), }); } } } type FieldWriter = (value: any) => string; function writeValue(value: any, config: CSVConfig): string { const str = value.toString(); if (str.includes('"')) { // Escape the double quote characters return config.quoteChar + str.replace('"', '""') + config.quoteChar; } if (str.includes('\n') || str.includes(config.delimiter)) { return config.quoteChar + str + config.quoteChar; } return str; } function makeFieldWriter(field: Field, config: CSVConfig): FieldWriter { if (field.type) { if (field.type === FieldType.boolean) { return (value: any) => { return value ? 'true' : 'false'; }; } if (field.type === FieldType.number) { return (value: any) => { if (isNumber(value)) { return value.toString(); } return writeValue(value, config); }; } } return (value: any) => writeValue(value, config); } function getHeaderLine(key: string, fields: Field[], config: CSVConfig): string { for (const f of fields) { if (f.hasOwnProperty(key)) { let line = '#' + key + '#'; for (let i = 0; i < fields.length; i++) { if (i > 0) { line = line + config.delimiter; } const v = (fields[i] as any)[key]; if (v) { line = line + writeValue(v, config); } } return line + config.newline; } } return ''; } export function toCSV(data: SeriesData[], config?: CSVConfig): string { let csv = ''; config = defaults(config, { delimiter: ',', newline: '\r\n', quoteChar: '"', encoding: '', headerStyle: CSVHeaderStyle.name, }); for (const series of data) { const { rows, fields } = series; if (config.headerStyle === CSVHeaderStyle.full) { csv = csv + getHeaderLine('name', fields, config) + getHeaderLine('type', fields, config) + getHeaderLine('unit', fields, config) + getHeaderLine('dateFormat', fields, 
config); } else if (config.headerStyle === CSVHeaderStyle.name) { for (let i = 0; i < fields.length; i++) { if (i > 0) { csv += config.delimiter; } csv += fields[i].name; } csv += config.newline; } const writers = fields.map(field => makeFieldWriter(field, config!)); for (let i = 0; i < rows.length; i++) { const row = rows[i]; for (let j = 0; j < row.length; j++) { if (j > 0) { csv = csv + config.delimiter; } const v = row[j]; if (v !== null) { csv = csv + writers[j](v); } } csv = csv + config.newline; } csv = csv + config.newline; } return csv; }
packages/grafana-ui/src/utils/csv.ts
0
https://github.com/grafana/grafana/commit/1341f4517a3a67c58c8d4bca2b184fb3dea735f7
[ 0.00026312563568353653, 0.00017621772713027894, 0.00016589761071372777, 0.00017276089056394994, 0.00001686916220933199 ]
{ "id": 6, "code_window": [ " },\n", " })\n", " .addMapper({\n", " filter: loadDatasourceReadyAction,\n", " mapper: (state, action): ExploreItemState => {\n", " const { containerWidth, range, datasourceInstance } = state;\n", " const { history } = action.payload;\n" ], "labels": [ "keep", "keep", "keep", "keep", "keep", "replace", "keep" ], "after_edit": [], "file_path": "public/app/features/explore/state/reducers.ts", "type": "replace", "edit_start_line_idx": 268 }
import { Moment } from 'moment'; export interface RawTimeRange { from: Moment | string; to: Moment | string; } export interface TimeRange { from: Moment; to: Moment; raw: RawTimeRange; } export interface IntervalValues { interval: string; // 10s,5m intervalMs: number; }
packages/grafana-ui/src/types/time.ts
0
https://github.com/grafana/grafana/commit/1341f4517a3a67c58c8d4bca2b184fb3dea735f7
[ 0.00017255857528652996, 0.0001719628635328263, 0.00017136716633103788, 0.0001719628635328263, 5.957044777460396e-7 ]
{ "id": 6, "code_window": [ " },\n", " })\n", " .addMapper({\n", " filter: loadDatasourceReadyAction,\n", " mapper: (state, action): ExploreItemState => {\n", " const { containerWidth, range, datasourceInstance } = state;\n", " const { history } = action.payload;\n" ], "labels": [ "keep", "keep", "keep", "keep", "keep", "replace", "keep" ], "after_edit": [], "file_path": "public/app/features/explore/state/reducers.ts", "type": "replace", "edit_start_line_idx": 268 }
// Copyright 2017 The Go Authors. All rights reserved. // Use of this source code is governed by a BSD-style // license that can be found in the LICENSE file. // +build go1.9 package context import "context" // standard library's context, as of Go 1.7 // A Context carries a deadline, a cancelation signal, and other values across // API boundaries. // // Context's methods may be called by multiple goroutines simultaneously. type Context = context.Context // A CancelFunc tells an operation to abandon its work. // A CancelFunc does not wait for the work to stop. // After the first call, subsequent calls to a CancelFunc do nothing. type CancelFunc = context.CancelFunc
vendor/golang.org/x/net/context/go19.go
0
https://github.com/grafana/grafana/commit/1341f4517a3a67c58c8d4bca2b184fb3dea735f7
[ 0.00017758201283868402, 0.00017229006334673613, 0.0001675958337727934, 0.0001716923143249005, 0.000004098691988474457 ]
{ "id": 7, "code_window": [ " const { history } = action.payload;\n", " const queryIntervals = getIntervals(range, datasourceInstance.interval, containerWidth);\n", "\n", " return {\n", " ...state,\n" ], "labels": [ "keep", "replace", "replace", "keep", "keep" ], "after_edit": [], "file_path": "public/app/features/explore/state/reducers.ts", "type": "replace", "edit_start_line_idx": 270 }
// Types import { Emitter } from 'app/core/core'; import { RawTimeRange, TimeRange, DataQuery, DataSourceSelectItem, DataSourceApi, QueryFixAction, LogLevel, } from '@grafana/ui/src/types'; import { ExploreId, ExploreItemState, HistoryItem, RangeScanner, ResultType, QueryTransaction, ExploreUIState, } from 'app/types/explore'; import { actionCreatorFactory, noPayloadActionCreatorFactory, ActionOf } from 'app/core/redux/actionCreatorFactory'; /** Higher order actions * */ export enum ActionTypes { SplitOpen = 'explore/SPLIT_OPEN', ResetExplore = 'explore/RESET_EXPLORE', } export interface SplitOpenAction { type: ActionTypes.SplitOpen; payload: { itemState: ExploreItemState; }; } export interface ResetExploreAction { type: ActionTypes.ResetExplore; payload: {}; } /** Lower order actions * */ export interface AddQueryRowPayload { exploreId: ExploreId; index: number; query: DataQuery; } export interface ChangeQueryPayload { exploreId: ExploreId; query: DataQuery; index: number; override: boolean; } export interface ChangeSizePayload { exploreId: ExploreId; width: number; height: number; } export interface ChangeTimePayload { exploreId: ExploreId; range: TimeRange; } export interface ClearQueriesPayload { exploreId: ExploreId; } export interface HighlightLogsExpressionPayload { exploreId: ExploreId; expressions: string[]; } export interface InitializeExplorePayload { exploreId: ExploreId; containerWidth: number; eventBridge: Emitter; queries: DataQuery[]; range: RawTimeRange; ui: ExploreUIState; } export interface LoadDatasourceFailurePayload { exploreId: ExploreId; error: string; } export interface LoadDatasourceMissingPayload { exploreId: ExploreId; } export interface LoadDatasourcePendingPayload { exploreId: ExploreId; requestedDatasourceName: string; } export interface LoadDatasourceReadyPayload { exploreId: ExploreId; history: HistoryItem[]; } export interface TestDatasourcePendingPayload { exploreId: ExploreId; } export interface TestDatasourceFailurePayload { 
exploreId: ExploreId; error: string; } export interface TestDatasourceSuccessPayload { exploreId: ExploreId; } export interface ModifyQueriesPayload { exploreId: ExploreId; modification: QueryFixAction; index: number; modifier: (query: DataQuery, modification: QueryFixAction) => DataQuery; } export interface QueryTransactionFailurePayload { exploreId: ExploreId; queryTransactions: QueryTransaction[]; } export interface QueryTransactionStartPayload { exploreId: ExploreId; resultType: ResultType; rowIndex: number; transaction: QueryTransaction; } export interface QueryTransactionSuccessPayload { exploreId: ExploreId; history: HistoryItem[]; queryTransactions: QueryTransaction[]; } export interface RemoveQueryRowPayload { exploreId: ExploreId; index: number; } export interface ScanStartPayload { exploreId: ExploreId; scanner: RangeScanner; } export interface ScanRangePayload { exploreId: ExploreId; range: RawTimeRange; } export interface ScanStopPayload { exploreId: ExploreId; } export interface SetQueriesPayload { exploreId: ExploreId; queries: DataQuery[]; } export interface SplitCloseActionPayload { itemId: ExploreId; } export interface SplitOpenPayload { itemState: ExploreItemState; } export interface ToggleTablePayload { exploreId: ExploreId; } export interface ToggleGraphPayload { exploreId: ExploreId; } export interface ToggleLogsPayload { exploreId: ExploreId; } export interface UpdateUIStatePayload extends Partial<ExploreUIState> { exploreId: ExploreId; } export interface UpdateDatasourceInstancePayload { exploreId: ExploreId; datasourceInstance: DataSourceApi; } export interface ToggleLogLevelPayload { exploreId: ExploreId; hiddenLogLevels: Set<LogLevel>; } export interface QueriesImportedPayload { exploreId: ExploreId; queries: DataQuery[]; } export interface LoadExploreDataSourcesPayload { exploreId: ExploreId; exploreDatasources: DataSourceSelectItem[]; } /** * Adds a query row after the row with the given index. 
*/ export const addQueryRowAction = actionCreatorFactory<AddQueryRowPayload>('explore/ADD_QUERY_ROW').create(); /** * Loads a new datasource identified by the given name. */ export const changeDatasourceAction = noPayloadActionCreatorFactory('explore/CHANGE_DATASOURCE').create(); /** * Query change handler for the query row with the given index. * If `override` is reset the query modifications and run the queries. Use this to set queries via a link. */ export const changeQueryAction = actionCreatorFactory<ChangeQueryPayload>('explore/CHANGE_QUERY').create(); /** * Keep track of the Explore container size, in particular the width. * The width will be used to calculate graph intervals (number of datapoints). */ export const changeSizeAction = actionCreatorFactory<ChangeSizePayload>('explore/CHANGE_SIZE').create(); /** * Change the time range of Explore. Usually called from the Timepicker or a graph interaction. */ export const changeTimeAction = actionCreatorFactory<ChangeTimePayload>('explore/CHANGE_TIME').create(); /** * Clear all queries and results. */ export const clearQueriesAction = actionCreatorFactory<ClearQueriesPayload>('explore/CLEAR_QUERIES').create(); /** * Highlight expressions in the log results */ export const highlightLogsExpressionAction = actionCreatorFactory<HighlightLogsExpressionPayload>( 'explore/HIGHLIGHT_LOGS_EXPRESSION' ).create(); /** * Initialize Explore state with state from the URL and the React component. * Call this only on components for with the Explore state has not been initialized. 
*/ export const initializeExploreAction = actionCreatorFactory<InitializeExplorePayload>( 'explore/INITIALIZE_EXPLORE' ).create(); /** * Display an error when no datasources have been configured */ export const loadDatasourceMissingAction = actionCreatorFactory<LoadDatasourceMissingPayload>( 'explore/LOAD_DATASOURCE_MISSING' ).create(); /** * Start the async process of loading a datasource to display a loading indicator */ export const loadDatasourcePendingAction = actionCreatorFactory<LoadDatasourcePendingPayload>( 'explore/LOAD_DATASOURCE_PENDING' ).create(); /** * Datasource loading was completed. */ export const loadDatasourceReadyAction = actionCreatorFactory<LoadDatasourceReadyPayload>( 'explore/LOAD_DATASOURCE_READY' ).create(); /** * Action to modify a query given a datasource-specific modifier action. * @param exploreId Explore area * @param modification Action object with a type, e.g., ADD_FILTER * @param index Optional query row index. If omitted, the modification is applied to all query rows. * @param modifier Function that executes the modification, typically `datasourceInstance.modifyQueries`. */ export const modifyQueriesAction = actionCreatorFactory<ModifyQueriesPayload>('explore/MODIFY_QUERIES').create(); /** * Mark a query transaction as failed with an error extracted from the query response. * The transaction will be marked as `done`. */ export const queryTransactionFailureAction = actionCreatorFactory<QueryTransactionFailurePayload>( 'explore/QUERY_TRANSACTION_FAILURE' ).create(); /** * Start a query transaction for the given result type. * @param exploreId Explore area * @param transaction Query options and `done` status. 
* @param resultType Associate the transaction with a result viewer, e.g., Graph * @param rowIndex Index is used to associate latency for this transaction with a query row */ export const queryTransactionStartAction = actionCreatorFactory<QueryTransactionStartPayload>( 'explore/QUERY_TRANSACTION_START' ).create(); /** * Complete a query transaction, mark the transaction as `done` and store query state in URL. * If the transaction was started by a scanner, it keeps on scanning for more results. * Side-effect: the query is stored in localStorage. * @param exploreId Explore area * @param transactionId ID * @param result Response from `datasourceInstance.query()` * @param latency Duration between request and response * @param queries Queries from all query rows * @param datasourceId Origin datasource instance, used to discard results if current datasource is different */ export const queryTransactionSuccessAction = actionCreatorFactory<QueryTransactionSuccessPayload>( 'explore/QUERY_TRANSACTION_SUCCESS' ).create(); /** * Remove query row of the given index, as well as associated query results. */ export const removeQueryRowAction = actionCreatorFactory<RemoveQueryRowPayload>('explore/REMOVE_QUERY_ROW').create(); export const runQueriesAction = noPayloadActionCreatorFactory('explore/RUN_QUERIES').create(); /** * Start a scan for more results using the given scanner. * @param exploreId Explore area * @param scanner Function that a) returns a new time range and b) triggers a query run for the new range */ export const scanStartAction = actionCreatorFactory<ScanStartPayload>('explore/SCAN_START').create(); export const scanRangeAction = actionCreatorFactory<ScanRangePayload>('explore/SCAN_RANGE').create(); /** * Stop any scanning for more results. */ export const scanStopAction = actionCreatorFactory<ScanStopPayload>('explore/SCAN_STOP').create(); /** * Reset queries to the given queries. Any modifications will be discarded. * Use this action for clicks on query examples. 
Triggers a query run. */ export const setQueriesAction = actionCreatorFactory<SetQueriesPayload>('explore/SET_QUERIES').create(); /** * Close the split view and save URL state. */ export const splitCloseAction = actionCreatorFactory<SplitCloseActionPayload>('explore/SPLIT_CLOSE').create(); /** * Open the split view and copy the left state to be the right state. * The right state is automatically initialized. * The copy keeps all query modifications but wipes the query results. */ export const splitOpenAction = actionCreatorFactory<SplitOpenPayload>('explore/SPLIT_OPEN').create(); export const stateSaveAction = noPayloadActionCreatorFactory('explore/STATE_SAVE').create(); /** * Update state of Explores UI elements (panels visiblity and deduplication strategy) */ export const updateUIStateAction = actionCreatorFactory<UpdateUIStatePayload>('explore/UPDATE_UI_STATE').create(); /** * Expand/collapse the table result viewer. When collapsed, table queries won't be run. */ export const toggleTableAction = actionCreatorFactory<ToggleTablePayload>('explore/TOGGLE_TABLE').create(); /** * Expand/collapse the graph result viewer. When collapsed, graph queries won't be run. */ export const toggleGraphAction = actionCreatorFactory<ToggleGraphPayload>('explore/TOGGLE_GRAPH').create(); /** * Expand/collapse the logs result viewer. When collapsed, log queries won't be run. */ export const toggleLogsAction = actionCreatorFactory<ToggleLogsPayload>('explore/TOGGLE_LOGS').create(); /** * Updates datasource instance before datasouce loading has started */ export const updateDatasourceInstanceAction = actionCreatorFactory<UpdateDatasourceInstancePayload>( 'explore/UPDATE_DATASOURCE_INSTANCE' ).create(); export const toggleLogLevelAction = actionCreatorFactory<ToggleLogLevelPayload>('explore/TOGGLE_LOG_LEVEL').create(); /** * Resets state for explore. 
*/ export const resetExploreAction = noPayloadActionCreatorFactory('explore/RESET_EXPLORE').create(); export const queriesImportedAction = actionCreatorFactory<QueriesImportedPayload>('explore/QueriesImported').create(); export const testDataSourcePendingAction = actionCreatorFactory<TestDatasourcePendingPayload>( 'explore/TEST_DATASOURCE_PENDING' ).create(); export const testDataSourceSuccessAction = actionCreatorFactory<TestDatasourceSuccessPayload>( 'explore/TEST_DATASOURCE_SUCCESS' ).create(); export const testDataSourceFailureAction = actionCreatorFactory<TestDatasourceFailurePayload>( 'explore/TEST_DATASOURCE_FAILURE' ).create(); export const loadExploreDatasources = actionCreatorFactory<LoadExploreDataSourcesPayload>( 'explore/LOAD_EXPLORE_DATASOURCES' ).create(); export type HigherOrderAction = | ActionOf<SplitCloseActionPayload> | SplitOpenAction | ResetExploreAction | ActionOf<any>;
public/app/features/explore/state/actionTypes.ts
1
https://github.com/grafana/grafana/commit/1341f4517a3a67c58c8d4bca2b184fb3dea735f7
[ 0.00602720258757472, 0.000543316244147718, 0.00016526042600162327, 0.0001886282698251307, 0.00101999391335994 ]
{ "id": 7, "code_window": [ " const { history } = action.payload;\n", " const queryIntervals = getIntervals(range, datasourceInstance.interval, containerWidth);\n", "\n", " return {\n", " ...state,\n" ], "labels": [ "keep", "replace", "replace", "keep", "keep" ], "after_edit": [], "file_path": "public/app/features/explore/state/reducers.ts", "type": "replace", "edit_start_line_idx": 270 }
/* * * Copyright 2017 gRPC authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. * */ package grpc import ( "math/rand" "time" ) // DefaultBackoffConfig uses values specified for backoff in // https://github.com/grpc/grpc/blob/master/doc/connection-backoff.md. var DefaultBackoffConfig = BackoffConfig{ MaxDelay: 120 * time.Second, baseDelay: 1.0 * time.Second, factor: 1.6, jitter: 0.2, } // backoffStrategy defines the methodology for backing off after a grpc // connection failure. // // This is unexported until the gRPC project decides whether or not to allow // alternative backoff strategies. Once a decision is made, this type and its // method may be exported. type backoffStrategy interface { // backoff returns the amount of time to wait before the next retry given // the number of consecutive failures. backoff(retries int) time.Duration } // BackoffConfig defines the parameters for the default gRPC backoff strategy. type BackoffConfig struct { // MaxDelay is the upper bound of backoff delay. MaxDelay time.Duration // TODO(stevvooe): The following fields are not exported, as allowing // changes would violate the current gRPC specification for backoff. If // gRPC decides to allow more interesting backoff strategies, these fields // may be opened up in the future. // baseDelay is the amount of time to wait before retrying after the first // failure. baseDelay time.Duration // factor is applied to the backoff after each retry. 
factor float64 // jitter provides a range to randomize backoff delays. jitter float64 } func setDefaults(bc *BackoffConfig) { md := bc.MaxDelay *bc = DefaultBackoffConfig if md > 0 { bc.MaxDelay = md } } func (bc BackoffConfig) backoff(retries int) time.Duration { if retries == 0 { return bc.baseDelay } backoff, max := float64(bc.baseDelay), float64(bc.MaxDelay) for backoff < max && retries > 0 { backoff *= bc.factor retries-- } if backoff > max { backoff = max } // Randomize backoff delays so that if a cluster of requests start at // the same time, they won't operate in lockstep. backoff *= 1 + bc.jitter*(rand.Float64()*2-1) if backoff < 0 { return 0 } return time.Duration(backoff) }
vendor/google.golang.org/grpc/backoff.go
0
https://github.com/grafana/grafana/commit/1341f4517a3a67c58c8d4bca2b184fb3dea735f7
[ 0.0002486801822669804, 0.0001775250129867345, 0.0001646289456402883, 0.00016999356739688665, 0.000023965951186255552 ]
{ "id": 7, "code_window": [ " const { history } = action.payload;\n", " const queryIntervals = getIntervals(range, datasourceInstance.interval, containerWidth);\n", "\n", " return {\n", " ...state,\n" ], "labels": [ "keep", "replace", "replace", "keep", "keep" ], "after_edit": [], "file_path": "public/app/features/explore/state/reducers.ts", "type": "replace", "edit_start_line_idx": 270 }
// Copyright 2014 The Go Authors. All rights reserved. // Use of this source code is governed by a BSD-style // license that can be found in the LICENSE file. package google import ( "encoding/json" "errors" "fmt" "strings" "time" "cloud.google.com/go/compute/metadata" "golang.org/x/net/context" "golang.org/x/oauth2" "golang.org/x/oauth2/jwt" ) // Endpoint is Google's OAuth 2.0 endpoint. var Endpoint = oauth2.Endpoint{ AuthURL: "https://accounts.google.com/o/oauth2/auth", TokenURL: "https://accounts.google.com/o/oauth2/token", } // JWTTokenURL is Google's OAuth 2.0 token URL to use with the JWT flow. const JWTTokenURL = "https://accounts.google.com/o/oauth2/token" // ConfigFromJSON uses a Google Developers Console client_credentials.json // file to construct a config. // client_credentials.json can be downloaded from // https://console.developers.google.com, under "Credentials". Download the Web // application credentials in the JSON format and provide the contents of the // file as jsonKey. 
func ConfigFromJSON(jsonKey []byte, scope ...string) (*oauth2.Config, error) { type cred struct { ClientID string `json:"client_id"` ClientSecret string `json:"client_secret"` RedirectURIs []string `json:"redirect_uris"` AuthURI string `json:"auth_uri"` TokenURI string `json:"token_uri"` } var j struct { Web *cred `json:"web"` Installed *cred `json:"installed"` } if err := json.Unmarshal(jsonKey, &j); err != nil { return nil, err } var c *cred switch { case j.Web != nil: c = j.Web case j.Installed != nil: c = j.Installed default: return nil, fmt.Errorf("oauth2/google: no credentials found") } if len(c.RedirectURIs) < 1 { return nil, errors.New("oauth2/google: missing redirect URL in the client_credentials.json") } return &oauth2.Config{ ClientID: c.ClientID, ClientSecret: c.ClientSecret, RedirectURL: c.RedirectURIs[0], Scopes: scope, Endpoint: oauth2.Endpoint{ AuthURL: c.AuthURI, TokenURL: c.TokenURI, }, }, nil } // JWTConfigFromJSON uses a Google Developers service account JSON key file to read // the credentials that authorize and authenticate the requests. // Create a service account on "Credentials" for your project at // https://console.developers.google.com to download a JSON key file. func JWTConfigFromJSON(jsonKey []byte, scope ...string) (*jwt.Config, error) { var f credentialsFile if err := json.Unmarshal(jsonKey, &f); err != nil { return nil, err } if f.Type != serviceAccountKey { return nil, fmt.Errorf("google: read JWT from JSON credentials: 'type' field is %q (expected %q)", f.Type, serviceAccountKey) } scope = append([]string(nil), scope...) // copy return f.jwtConfig(scope), nil } // JSON key file types. const ( serviceAccountKey = "service_account" userCredentialsKey = "authorized_user" ) // credentialsFile is the unmarshalled representation of a credentials file. 
type credentialsFile struct { Type string `json:"type"` // serviceAccountKey or userCredentialsKey // Service Account fields ClientEmail string `json:"client_email"` PrivateKeyID string `json:"private_key_id"` PrivateKey string `json:"private_key"` TokenURL string `json:"token_uri"` ProjectID string `json:"project_id"` // User Credential fields // (These typically come from gcloud auth.) ClientSecret string `json:"client_secret"` ClientID string `json:"client_id"` RefreshToken string `json:"refresh_token"` } func (f *credentialsFile) jwtConfig(scopes []string) *jwt.Config { cfg := &jwt.Config{ Email: f.ClientEmail, PrivateKey: []byte(f.PrivateKey), PrivateKeyID: f.PrivateKeyID, Scopes: scopes, TokenURL: f.TokenURL, } if cfg.TokenURL == "" { cfg.TokenURL = JWTTokenURL } return cfg } func (f *credentialsFile) tokenSource(ctx context.Context, scopes []string) (oauth2.TokenSource, error) { switch f.Type { case serviceAccountKey: cfg := f.jwtConfig(scopes) return cfg.TokenSource(ctx), nil case userCredentialsKey: cfg := &oauth2.Config{ ClientID: f.ClientID, ClientSecret: f.ClientSecret, Scopes: scopes, Endpoint: Endpoint, } tok := &oauth2.Token{RefreshToken: f.RefreshToken} return cfg.TokenSource(ctx, tok), nil case "": return nil, errors.New("missing 'type' field in credentials") default: return nil, fmt.Errorf("unknown credential type: %q", f.Type) } } // ComputeTokenSource returns a token source that fetches access tokens // from Google Compute Engine (GCE)'s metadata server. It's only valid to use // this token source if your program is running on a GCE instance. // If no account is specified, "default" is used. // Further information about retrieving access tokens from the GCE metadata // server can be found at https://cloud.google.com/compute/docs/authentication. 
func ComputeTokenSource(account string) oauth2.TokenSource { return oauth2.ReuseTokenSource(nil, computeSource{account: account}) } type computeSource struct { account string } func (cs computeSource) Token() (*oauth2.Token, error) { if !metadata.OnGCE() { return nil, errors.New("oauth2/google: can't get a token from the metadata service; not running on GCE") } acct := cs.account if acct == "" { acct = "default" } tokenJSON, err := metadata.Get("instance/service-accounts/" + acct + "/token") if err != nil { return nil, err } var res struct { AccessToken string `json:"access_token"` ExpiresInSec int `json:"expires_in"` TokenType string `json:"token_type"` } err = json.NewDecoder(strings.NewReader(tokenJSON)).Decode(&res) if err != nil { return nil, fmt.Errorf("oauth2/google: invalid token JSON from metadata: %v", err) } if res.ExpiresInSec == 0 || res.AccessToken == "" { return nil, fmt.Errorf("oauth2/google: incomplete token received from metadata") } return &oauth2.Token{ AccessToken: res.AccessToken, TokenType: res.TokenType, Expiry: time.Now().Add(time.Duration(res.ExpiresInSec) * time.Second), }, nil }
vendor/golang.org/x/oauth2/google/google.go
0
https://github.com/grafana/grafana/commit/1341f4517a3a67c58c8d4bca2b184fb3dea735f7
[ 0.00017789960838854313, 0.00017110921908169985, 0.00016655141371302307, 0.00017189327627420425, 0.0000028142358132754453 ]
{ "id": 7, "code_window": [ " const { history } = action.payload;\n", " const queryIntervals = getIntervals(range, datasourceInstance.interval, containerWidth);\n", "\n", " return {\n", " ...state,\n" ], "labels": [ "keep", "replace", "replace", "keep", "keep" ], "after_edit": [], "file_path": "public/app/features/explore/state/reducers.ts", "type": "replace", "edit_start_line_idx": 270 }
import _ from 'lodash'; import React, { PureComponent } from 'react'; import ReactTable from 'react-table'; import TableModel from 'app/core/table_model'; const EMPTY_TABLE = new TableModel(); // Identify columns that contain values const VALUE_REGEX = /^[Vv]alue #\d+/; interface TableProps { data: TableModel; loading: boolean; onClickCell?: (columnKey: string, rowValue: string) => void; } function prepareRows(rows, columnNames) { return rows.map(cells => _.zipObject(columnNames, cells)); } export default class Table extends PureComponent<TableProps> { getCellProps = (state, rowInfo, column) => { return { onClick: (e: React.SyntheticEvent) => { // Only handle click on link, not the cell if (e.target) { const link = e.target as HTMLElement; if (link.className === 'link') { const columnKey = column.Header; const rowValue = rowInfo.row[columnKey]; this.props.onClickCell(columnKey, rowValue); } } }, }; }; render() { const { data, loading } = this.props; const tableModel = data || EMPTY_TABLE; const columnNames = tableModel.columns.map(({ text }) => text); const columns = tableModel.columns.map(({ filterable, text }) => ({ Header: () => <span title={text}>{text}</span>, accessor: text, className: VALUE_REGEX.test(text) ? 'text-right' : '', show: text !== 'Time', Cell: row => ( <span className={filterable ? 'link' : ''} title={text + ': ' + row.value}> {row.value} </span> ), })); const noDataText = data ? 'The queries returned no data for a table.' : ''; return ( <ReactTable columns={columns} data={tableModel.rows} getTdProps={this.getCellProps} loading={loading} minRows={0} noDataText={noDataText} resolveData={data => prepareRows(data, columnNames)} showPagination={Boolean(data)} /> ); } }
public/app/features/explore/Table.tsx
0
https://github.com/grafana/grafana/commit/1341f4517a3a67c58c8d4bca2b184fb3dea735f7
[ 0.0004915135796181858, 0.00021522435417864472, 0.00016498190234415233, 0.00017059114179573953, 0.00011283503408776596 ]
{ "id": 8, "code_window": [ " return {\n", " ...state,\n", " queryIntervals,\n", " history,\n", " datasourceLoading: false,\n", " datasourceMissing: false,\n", " logsHighlighterExpressions: undefined,\n", " queryTransactions: [],\n" ], "labels": [ "keep", "keep", "replace", "keep", "keep", "keep", "keep", "keep" ], "after_edit": [], "file_path": "public/app/features/explore/state/reducers.ts", "type": "replace", "edit_start_line_idx": 274 }
// @ts-ignore import _ from 'lodash'; import { calculateResultsFromQueryTransactions, generateEmptyQuery, getIntervals, ensureQueries, getQueryKeys, parseUrlState, DEFAULT_UI_STATE, } from 'app/core/utils/explore'; import { ExploreItemState, ExploreState, QueryTransaction, ExploreId, ExploreUpdateState } from 'app/types/explore'; import { DataQuery } from '@grafana/ui/src/types'; import { HigherOrderAction, ActionTypes, testDataSourcePendingAction, testDataSourceSuccessAction, testDataSourceFailureAction, splitCloseAction, SplitCloseActionPayload, loadExploreDatasources, } from './actionTypes'; import { reducerFactory } from 'app/core/redux'; import { addQueryRowAction, changeQueryAction, changeSizeAction, changeTimeAction, clearQueriesAction, highlightLogsExpressionAction, initializeExploreAction, updateDatasourceInstanceAction, loadDatasourceMissingAction, loadDatasourcePendingAction, loadDatasourceReadyAction, modifyQueriesAction, queryTransactionFailureAction, queryTransactionStartAction, queryTransactionSuccessAction, removeQueryRowAction, scanRangeAction, scanStartAction, scanStopAction, setQueriesAction, toggleGraphAction, toggleLogsAction, toggleTableAction, queriesImportedAction, updateUIStateAction, toggleLogLevelAction, } from './actionTypes'; import { updateLocation } from 'app/core/actions/location'; import { LocationUpdate } from 'app/types'; export const DEFAULT_RANGE = { from: 'now-6h', to: 'now', }; // Millies step for helper bar charts const DEFAULT_GRAPH_INTERVAL = 15 * 1000; export const makeInitialUpdateState = (): ExploreUpdateState => ({ datasource: false, queries: false, range: false, ui: false, }); /** * Returns a fresh Explore area state */ export const makeExploreItemState = (): ExploreItemState => ({ StartPage: undefined, containerWidth: 0, datasourceInstance: null, requestedDatasourceName: null, datasourceError: null, datasourceLoading: null, datasourceMissing: false, exploreDatasources: [], history: [], queries: [], initialized: false, 
queryTransactions: [], queryIntervals: { interval: '15s', intervalMs: DEFAULT_GRAPH_INTERVAL }, range: DEFAULT_RANGE, scanning: false, scanRange: null, showingGraph: true, showingLogs: true, showingTable: true, supportsGraph: null, supportsLogs: null, supportsTable: null, queryKeys: [], urlState: null, update: makeInitialUpdateState(), }); /** * Global Explore state that handles multiple Explore areas and the split state */ export const initialExploreState: ExploreState = { split: null, left: makeExploreItemState(), right: makeExploreItemState(), }; /** * Reducer for an Explore area, to be used by the global Explore reducer. */ export const itemReducer = reducerFactory<ExploreItemState>({} as ExploreItemState) .addMapper({ filter: addQueryRowAction, mapper: (state, action): ExploreItemState => { const { queries, queryTransactions } = state; const { index, query } = action.payload; // Add to queries, which will cause a new row to be rendered const nextQueries = [...queries.slice(0, index + 1), { ...query }, ...queries.slice(index + 1)]; // Ongoing transactions need to update their row indices const nextQueryTransactions = queryTransactions.map(qt => { if (qt.rowIndex > index) { return { ...qt, rowIndex: qt.rowIndex + 1, }; } return qt; }); return { ...state, queries: nextQueries, logsHighlighterExpressions: undefined, queryTransactions: nextQueryTransactions, queryKeys: getQueryKeys(nextQueries, state.datasourceInstance), }; }, }) .addMapper({ filter: changeQueryAction, mapper: (state, action): ExploreItemState => { const { queries, queryTransactions } = state; const { query, index } = action.payload; // Override path: queries are completely reset const nextQuery: DataQuery = { ...query, ...generateEmptyQuery(state.queries) }; const nextQueries = [...queries]; nextQueries[index] = nextQuery; // Discard ongoing transaction related to row query const nextQueryTransactions = queryTransactions.filter(qt => qt.rowIndex !== index); return { ...state, queries: nextQueries, 
queryTransactions: nextQueryTransactions, queryKeys: getQueryKeys(nextQueries, state.datasourceInstance), }; }, }) .addMapper({ filter: changeSizeAction, mapper: (state, action): ExploreItemState => { const { range, datasourceInstance } = state; let interval = '1s'; if (datasourceInstance && datasourceInstance.interval) { interval = datasourceInstance.interval; } const containerWidth = action.payload.width; const queryIntervals = getIntervals(range, interval, containerWidth); return { ...state, containerWidth, queryIntervals }; }, }) .addMapper({ filter: changeTimeAction, mapper: (state, action): ExploreItemState => { return { ...state, range: action.payload.range }; }, }) .addMapper({ filter: clearQueriesAction, mapper: (state): ExploreItemState => { const queries = ensureQueries(); return { ...state, queries: queries.slice(), queryTransactions: [], showingStartPage: Boolean(state.StartPage), queryKeys: getQueryKeys(queries, state.datasourceInstance), }; }, }) .addMapper({ filter: highlightLogsExpressionAction, mapper: (state, action): ExploreItemState => { const { expressions } = action.payload; return { ...state, logsHighlighterExpressions: expressions }; }, }) .addMapper({ filter: initializeExploreAction, mapper: (state, action): ExploreItemState => { const { containerWidth, eventBridge, queries, range, ui } = action.payload; return { ...state, containerWidth, eventBridge, range, queries, initialized: true, queryKeys: getQueryKeys(queries, state.datasourceInstance), ...ui, update: makeInitialUpdateState(), }; }, }) .addMapper({ filter: updateDatasourceInstanceAction, mapper: (state, action): ExploreItemState => { const { datasourceInstance } = action.payload; // Capabilities const supportsGraph = datasourceInstance.meta.metrics; const supportsLogs = datasourceInstance.meta.logs; const supportsTable = datasourceInstance.meta.tables; // Custom components const StartPage = datasourceInstance.components.ExploreStartPage; return { ...state, datasourceInstance, 
supportsGraph, supportsLogs, supportsTable, StartPage, showingStartPage: Boolean(StartPage), queryKeys: getQueryKeys(state.queries, datasourceInstance), }; }, }) .addMapper({ filter: loadDatasourceMissingAction, mapper: (state): ExploreItemState => { return { ...state, datasourceMissing: true, datasourceLoading: false, update: makeInitialUpdateState(), }; }, }) .addMapper({ filter: loadDatasourcePendingAction, mapper: (state, action): ExploreItemState => { return { ...state, datasourceLoading: true, requestedDatasourceName: action.payload.requestedDatasourceName, }; }, }) .addMapper({ filter: loadDatasourceReadyAction, mapper: (state, action): ExploreItemState => { const { containerWidth, range, datasourceInstance } = state; const { history } = action.payload; const queryIntervals = getIntervals(range, datasourceInstance.interval, containerWidth); return { ...state, queryIntervals, history, datasourceLoading: false, datasourceMissing: false, logsHighlighterExpressions: undefined, queryTransactions: [], update: makeInitialUpdateState(), }; }, }) .addMapper({ filter: modifyQueriesAction, mapper: (state, action): ExploreItemState => { const { queries, queryTransactions } = state; const { modification, index, modifier } = action.payload; let nextQueries: DataQuery[]; let nextQueryTransactions: QueryTransaction[]; if (index === undefined) { // Modify all queries nextQueries = queries.map((query, i) => ({ ...modifier({ ...query }, modification), ...generateEmptyQuery(state.queries), })); // Discard all ongoing transactions nextQueryTransactions = []; } else { // Modify query only at index nextQueries = queries.map((query, i) => { // Synchronize all queries with local query cache to ensure consistency // TODO still needed? return i === index ? 
{ ...modifier({ ...query }, modification), ...generateEmptyQuery(state.queries) } : query; }); nextQueryTransactions = queryTransactions // Consume the hint corresponding to the action .map(qt => { if (qt.hints != null && qt.rowIndex === index) { qt.hints = qt.hints.filter(hint => hint.fix.action !== modification); } return qt; }) // Preserve previous row query transaction to keep results visible if next query is incomplete .filter(qt => modification.preventSubmit || qt.rowIndex !== index); } return { ...state, queries: nextQueries, queryKeys: getQueryKeys(nextQueries, state.datasourceInstance), queryTransactions: nextQueryTransactions, }; }, }) .addMapper({ filter: queryTransactionFailureAction, mapper: (state, action): ExploreItemState => { const { queryTransactions } = action.payload; return { ...state, queryTransactions, showingStartPage: false, update: makeInitialUpdateState(), }; }, }) .addMapper({ filter: queryTransactionStartAction, mapper: (state, action): ExploreItemState => { const { queryTransactions } = state; const { resultType, rowIndex, transaction } = action.payload; // Discarding existing transactions of same type const remainingTransactions = queryTransactions.filter( qt => !(qt.resultType === resultType && qt.rowIndex === rowIndex) ); // Append new transaction const nextQueryTransactions: QueryTransaction[] = [...remainingTransactions, transaction]; return { ...state, queryTransactions: nextQueryTransactions, showingStartPage: false, update: makeInitialUpdateState(), }; }, }) .addMapper({ filter: queryTransactionSuccessAction, mapper: (state, action): ExploreItemState => { const { datasourceInstance, queryIntervals } = state; const { history, queryTransactions } = action.payload; const results = calculateResultsFromQueryTransactions( queryTransactions, datasourceInstance, queryIntervals.intervalMs ); return { ...state, ...results, history, queryTransactions, showingStartPage: false, update: makeInitialUpdateState(), }; }, }) .addMapper({ filter: 
removeQueryRowAction, mapper: (state, action): ExploreItemState => { const { datasourceInstance, queries, queryIntervals, queryTransactions, queryKeys } = state; const { index } = action.payload; if (queries.length <= 1) { return state; } const nextQueries = [...queries.slice(0, index), ...queries.slice(index + 1)]; const nextQueryKeys = [...queryKeys.slice(0, index), ...queryKeys.slice(index + 1)]; // Discard transactions related to row query const nextQueryTransactions = queryTransactions.filter(qt => nextQueries.some(nq => nq.key === qt.query.key)); const results = calculateResultsFromQueryTransactions( nextQueryTransactions, datasourceInstance, queryIntervals.intervalMs ); return { ...state, ...results, queries: nextQueries, logsHighlighterExpressions: undefined, queryTransactions: nextQueryTransactions, queryKeys: nextQueryKeys, }; }, }) .addMapper({ filter: scanRangeAction, mapper: (state, action): ExploreItemState => { return { ...state, scanRange: action.payload.range }; }, }) .addMapper({ filter: scanStartAction, mapper: (state, action): ExploreItemState => { return { ...state, scanning: true, scanner: action.payload.scanner }; }, }) .addMapper({ filter: scanStopAction, mapper: (state): ExploreItemState => { const { queryTransactions } = state; const nextQueryTransactions = queryTransactions.filter(qt => qt.scanning && !qt.done); return { ...state, queryTransactions: nextQueryTransactions, scanning: false, scanRange: undefined, scanner: undefined, update: makeInitialUpdateState(), }; }, }) .addMapper({ filter: setQueriesAction, mapper: (state, action): ExploreItemState => { const { queries } = action.payload; return { ...state, queries: queries.slice(), queryKeys: getQueryKeys(queries, state.datasourceInstance), }; }, }) .addMapper({ filter: updateUIStateAction, mapper: (state, action): ExploreItemState => { return { ...state, ...action.payload }; }, }) .addMapper({ filter: toggleGraphAction, mapper: (state): ExploreItemState => { const showingGraph = 
!state.showingGraph; let nextQueryTransactions = state.queryTransactions; if (!showingGraph) { // Discard transactions related to Graph query nextQueryTransactions = state.queryTransactions.filter(qt => qt.resultType !== 'Graph'); } return { ...state, queryTransactions: nextQueryTransactions }; }, }) .addMapper({ filter: toggleLogsAction, mapper: (state): ExploreItemState => { const showingLogs = !state.showingLogs; let nextQueryTransactions = state.queryTransactions; if (!showingLogs) { // Discard transactions related to Logs query nextQueryTransactions = state.queryTransactions.filter(qt => qt.resultType !== 'Logs'); } return { ...state, queryTransactions: nextQueryTransactions }; }, }) .addMapper({ filter: toggleTableAction, mapper: (state): ExploreItemState => { const showingTable = !state.showingTable; if (showingTable) { return { ...state, queryTransactions: state.queryTransactions }; } // Toggle off needs discarding of table queries and results const nextQueryTransactions = state.queryTransactions.filter(qt => qt.resultType !== 'Table'); const results = calculateResultsFromQueryTransactions( nextQueryTransactions, state.datasourceInstance, state.queryIntervals.intervalMs ); return { ...state, ...results, queryTransactions: nextQueryTransactions }; }, }) .addMapper({ filter: queriesImportedAction, mapper: (state, action): ExploreItemState => { const { queries } = action.payload; return { ...state, queries, queryKeys: getQueryKeys(queries, state.datasourceInstance), }; }, }) .addMapper({ filter: toggleLogLevelAction, mapper: (state, action): ExploreItemState => { const { hiddenLogLevels } = action.payload; return { ...state, hiddenLogLevels: Array.from(hiddenLogLevels), }; }, }) .addMapper({ filter: testDataSourcePendingAction, mapper: (state): ExploreItemState => { return { ...state, datasourceError: null, }; }, }) .addMapper({ filter: testDataSourceSuccessAction, mapper: (state): ExploreItemState => { return { ...state, datasourceError: null, }; }, }) 
.addMapper({ filter: testDataSourceFailureAction, mapper: (state, action): ExploreItemState => { return { ...state, datasourceError: action.payload.error, queryTransactions: [], graphResult: undefined, tableResult: undefined, logsResult: undefined, update: makeInitialUpdateState(), }; }, }) .addMapper({ filter: loadExploreDatasources, mapper: (state, action): ExploreItemState => { return { ...state, exploreDatasources: action.payload.exploreDatasources, }; }, }) .create(); export const updateChildRefreshState = ( state: Readonly<ExploreItemState>, payload: LocationUpdate, exploreId: ExploreId ): ExploreItemState => { const path = payload.path || ''; const queryState = payload.query[exploreId] as string; if (!queryState) { return state; } const urlState = parseUrlState(queryState); if (!state.urlState || path !== '/explore') { // we only want to refresh when browser back/forward return { ...state, urlState, update: { datasource: false, queries: false, range: false, ui: false } }; } const datasource = _.isEqual(urlState ? urlState.datasource : '', state.urlState.datasource) === false; const queries = _.isEqual(urlState ? urlState.queries : [], state.urlState.queries) === false; const range = _.isEqual(urlState ? urlState.range : DEFAULT_RANGE, state.urlState.range) === false; const ui = _.isEqual(urlState ? urlState.ui : DEFAULT_UI_STATE, state.urlState.ui) === false; return { ...state, urlState, update: { ...state.update, datasource, queries, range, ui, }, }; }; /** * Global Explore reducer that handles multiple Explore areas (left and right). * Actions that have an `exploreId` get routed to the ExploreItemReducer. */ export const exploreReducer = (state = initialExploreState, action: HigherOrderAction): ExploreState => { switch (action.type) { case splitCloseAction.type: { const { itemId } = action.payload as SplitCloseActionPayload; const targetSplit = { left: itemId === ExploreId.left ? 
state.right : state.left, right: initialExploreState.right, }; return { ...state, ...targetSplit, split: false, }; } case ActionTypes.SplitOpen: { return { ...state, split: true, right: { ...action.payload.itemState } }; } case ActionTypes.ResetExplore: { return initialExploreState; } case updateLocation.type: { const { query } = action.payload; if (!query || !query[ExploreId.left]) { return state; } const split = query[ExploreId.right] ? true : false; const leftState = state[ExploreId.left]; const rightState = state[ExploreId.right]; return { ...state, split, [ExploreId.left]: updateChildRefreshState(leftState, action.payload, ExploreId.left), [ExploreId.right]: updateChildRefreshState(rightState, action.payload, ExploreId.right), }; } } if (action.payload) { const { exploreId } = action.payload as any; if (exploreId !== undefined) { const exploreItemState = state[exploreId]; return { ...state, [exploreId]: itemReducer(exploreItemState, action) }; } } return state; }; export default { explore: exploreReducer, };
public/app/features/explore/state/reducers.ts
1
https://github.com/grafana/grafana/commit/1341f4517a3a67c58c8d4bca2b184fb3dea735f7
[ 0.9960384368896484, 0.01983383484184742, 0.00016508385306224227, 0.0006267032586038113, 0.1216370239853859 ]
{ "id": 8, "code_window": [ " return {\n", " ...state,\n", " queryIntervals,\n", " history,\n", " datasourceLoading: false,\n", " datasourceMissing: false,\n", " logsHighlighterExpressions: undefined,\n", " queryTransactions: [],\n" ], "labels": [ "keep", "keep", "replace", "keep", "keep", "keep", "keep", "keep" ], "after_edit": [], "file_path": "public/app/features/explore/state/reducers.ts", "type": "replace", "edit_start_line_idx": 274 }
<h3 class="page-heading">CloudWatch details</h3> <div class="gf-form-group max-width-30"> <div class="gf-form gf-form-select-wrapper"> <label class="gf-form-label width-13">Auth Provider</label> <select class="gf-form-input gf-max-width-13" ng-model="ctrl.current.jsonData.authType" ng-options="f.value as f.name for f in ctrl.authTypes"></select> </div> <div class="gf-form" ng-show='ctrl.current.jsonData.authType == "credentials"'> <label class="gf-form-label width-13">Credentials profile name</label> <input type="text" class="gf-form-input max-width-18 gf-form-input--has-help-icon" ng-model='ctrl.current.database' placeholder="default"></input> <info-popover mode="right-absolute"> Credentials profile name, as specified in ~/.aws/credentials, leave blank for default </info-popover> </div> <div class="gf-form" ng-show='ctrl.current.jsonData.authType == "keys"'> <label class="gf-form-label width-13">Access key ID </label> <label class="gf-form-label width-13" ng-show="ctrl.accessKeyExist">Configured</label> <a class="btn btn-secondary gf-form-btn" type="submit" ng-click="ctrl.resetAccessKey()" ng-show="ctrl.accessKeyExist">Reset</a> <input type="text" class="gf-form-input max-width-18" ng-hide="ctrl.accessKeyExist" ng-model='ctrl.current.secureJsonData.accessKey'></input> </div> <div class="gf-form" ng-show='ctrl.current.jsonData.authType == "keys"'> <label class="gf-form-label width-13">Secret access key</label> <label class="gf-form-label width-13" ng-show="ctrl.secretKeyExist">Configured</label> <a class="btn btn-secondary gf-form-btn" type="submit" ng-click="ctrl.resetSecretKey()" ng-show="ctrl.secretKeyExist">Reset</a> <input type="text" class="gf-form-input max-width-18" ng-hide="ctrl.secretKeyExist" ng-model='ctrl.current.secureJsonData.secretKey'></input> </div> <div class="gf-form" ng-show='ctrl.current.jsonData.authType == "arn"'> <label class="gf-form-label width-13">Assume Role ARN</label> <input type="text" class="gf-form-input max-width-18 
gf-form-input--has-help-icon" ng-model='ctrl.current.jsonData.assumeRoleArn' placeholder="arn:aws:iam:*"></input> <info-popover mode="right-absolute"> ARN of Assume Role </info-popover> </div> <div class="gf-form"> <label class="gf-form-label width-13">Default Region</label> <div class="gf-form-select-wrapper max-width-18 gf-form-select-wrapper--has-help-icon"> <select class="gf-form-input" ng-model="ctrl.current.jsonData.defaultRegion" ng-options="region for region in ctrl.regions"></select> <info-popover mode="right-absolute"> Specify the region, such as for US West (Oregon) use ` us-west-2 ` as the region. </info-popover> </div> </div> <div class="gf-form"> <label class="gf-form-label width-13">Custom Metrics</label> <input type="text" class="gf-form-input max-width-18 gf-form-input--has-help-icon" ng-model='ctrl.current.jsonData.customMetricsNamespaces' placeholder="Namespace1,Namespace2"></input> <info-popover mode="right-absolute"> Namespaces of Custom Metrics </info-popover> </div> </div>
public/app/plugins/datasource/cloudwatch/partials/config.html
0
https://github.com/grafana/grafana/commit/1341f4517a3a67c58c8d4bca2b184fb3dea735f7
[ 0.00017509459576103836, 0.00017093645874410868, 0.00016638920351397246, 0.00017094548093155026, 0.000002882273065551999 ]
{ "id": 8, "code_window": [ " return {\n", " ...state,\n", " queryIntervals,\n", " history,\n", " datasourceLoading: false,\n", " datasourceMissing: false,\n", " logsHighlighterExpressions: undefined,\n", " queryTransactions: [],\n" ], "labels": [ "keep", "keep", "replace", "keep", "keep", "keep", "keep", "keep" ], "after_edit": [], "file_path": "public/app/features/explore/state/reducers.ts", "type": "replace", "edit_start_line_idx": 274 }
import _ from 'lodash'; import coreModule from 'app/core/core_module'; import { ThresholdMapper } from './state/ThresholdMapper'; import { QueryPart } from 'app/core/components/query_part/query_part'; import alertDef from './state/alertDef'; import config from 'app/core/config'; import appEvents from 'app/core/app_events'; export class AlertTabCtrl { panel: any; panelCtrl: any; subTabIndex: number; conditionTypes: any; alert: any; conditionModels: any; evalFunctions: any; evalOperators: any; noDataModes: any; executionErrorModes: any; addNotificationSegment; notifications; alertNotifications; error: string; appSubUrl: string; alertHistory: any; /** @ngInject */ constructor( private $scope, private backendSrv, private dashboardSrv, private uiSegmentSrv, private $q, private datasourceSrv ) { this.panelCtrl = $scope.ctrl; this.panel = this.panelCtrl.panel; this.$scope.ctrl = this; this.subTabIndex = 0; this.evalFunctions = alertDef.evalFunctions; this.evalOperators = alertDef.evalOperators; this.conditionTypes = alertDef.conditionTypes; this.noDataModes = alertDef.noDataModes; this.executionErrorModes = alertDef.executionErrorModes; this.appSubUrl = config.appSubUrl; this.panelCtrl._enableAlert = this.enable; } $onInit() { this.addNotificationSegment = this.uiSegmentSrv.newPlusButton(); // subscribe to graph threshold handle changes const thresholdChangedEventHandler = this.graphThresholdChanged.bind(this); this.panelCtrl.events.on('threshold-changed', thresholdChangedEventHandler); // set panel alert edit mode this.$scope.$on('$destroy', () => { this.panelCtrl.events.off('threshold-changed', thresholdChangedEventHandler); this.panelCtrl.editingThresholds = false; this.panelCtrl.render(); }); // build notification model this.notifications = []; this.alertNotifications = []; this.alertHistory = []; return this.backendSrv.get('/api/alert-notifications').then(res => { this.notifications = res; this.initModel(); this.validateModel(); }); } getAlertHistory() { 
this.backendSrv .get(`/api/annotations?dashboardId=${this.panelCtrl.dashboard.id}&panelId=${this.panel.id}&limit=50&type=alert`) .then(res => { this.alertHistory = _.map(res, ah => { ah.time = this.dashboardSrv.getCurrent().formatDate(ah.time, 'MMM D, YYYY HH:mm:ss'); ah.stateModel = alertDef.getStateDisplayModel(ah.newState); ah.info = alertDef.getAlertAnnotationInfo(ah); return ah; }); }); } getNotificationIcon(type): string { switch (type) { case 'email': return 'fa fa-envelope'; case 'slack': return 'fa fa-slack'; case 'victorops': return 'fa fa-pagelines'; case 'webhook': return 'fa fa-cubes'; case 'pagerduty': return 'fa fa-bullhorn'; case 'opsgenie': return 'fa fa-bell'; case 'hipchat': return 'fa fa-mail-forward'; case 'pushover': return 'fa fa-mobile'; case 'kafka': return 'fa fa-random'; case 'teams': return 'fa fa-windows'; } return 'fa fa-bell'; } getNotifications() { return this.$q.when( this.notifications.map(item => { return this.uiSegmentSrv.newSegment(item.name); }) ); } changeTabIndex(newTabIndex) { this.subTabIndex = newTabIndex; if (this.subTabIndex === 2) { this.getAlertHistory(); } } notificationAdded() { const model = _.find(this.notifications, { name: this.addNotificationSegment.value, }); if (!model) { return; } this.alertNotifications.push({ name: model.name, iconClass: this.getNotificationIcon(model.type), isDefault: false, uid: model.uid, }); // avoid duplicates using both id and uid to be backwards compatible. 
if (!_.find(this.alert.notifications, n => n.id === model.id || n.uid === model.uid)) { this.alert.notifications.push({ uid: model.uid }); } // reset plus button this.addNotificationSegment.value = this.uiSegmentSrv.newPlusButton().value; this.addNotificationSegment.html = this.uiSegmentSrv.newPlusButton().html; this.addNotificationSegment.fake = true; } removeNotification(an) { // remove notifiers refeered to by id and uid to support notifiers added // before and after we added support for uid _.remove(this.alert.notifications, n => n.uid === an.uid || n.id === an.id); _.remove(this.alertNotifications, n => n.uid === an.uid || n.id === an.id); } initModel() { const alert = (this.alert = this.panel.alert); if (!alert) { return; } alert.conditions = alert.conditions || []; if (alert.conditions.length === 0) { alert.conditions.push(this.buildDefaultCondition()); } alert.noDataState = alert.noDataState || config.alertingNoDataOrNullValues; alert.executionErrorState = alert.executionErrorState || config.alertingErrorOrTimeout; alert.frequency = alert.frequency || '1m'; alert.handler = alert.handler || 1; alert.notifications = alert.notifications || []; alert.for = alert.for || '0m'; const defaultName = this.panel.title + ' alert'; alert.name = alert.name || defaultName; this.conditionModels = _.reduce( alert.conditions, (memo, value) => { memo.push(this.buildConditionModel(value)); return memo; }, [] ); ThresholdMapper.alertToGraphThresholds(this.panel); for (const addedNotification of alert.notifications) { // lookup notifier type by uid let model = _.find(this.notifications, { uid: addedNotification.uid }); // fallback to using id if uid is missing if (!model) { model = _.find(this.notifications, { id: addedNotification.id }); } if (model && model.isDefault === false) { model.iconClass = this.getNotificationIcon(model.type); this.alertNotifications.push(model); } } for (const notification of this.notifications) { if (notification.isDefault) { notification.iconClass = 
this.getNotificationIcon(notification.type); notification.bgColor = '#00678b'; this.alertNotifications.push(notification); } } this.panelCtrl.editingThresholds = true; this.panelCtrl.render(); } graphThresholdChanged(evt) { for (const condition of this.alert.conditions) { if (condition.type === 'query') { condition.evaluator.params[evt.handleIndex] = evt.threshold.value; this.evaluatorParamsChanged(); break; } } } buildDefaultCondition() { return { type: 'query', query: { params: ['A', '5m', 'now'] }, reducer: { type: 'avg', params: [] }, evaluator: { type: 'gt', params: [null] }, operator: { type: 'and' }, }; } validateModel() { if (!this.alert) { return; } let firstTarget; let foundTarget = null; for (const condition of this.alert.conditions) { if (condition.type !== 'query') { continue; } for (const target of this.panel.targets) { if (!firstTarget) { firstTarget = target; } if (condition.query.params[0] === target.refId) { foundTarget = target; break; } } if (!foundTarget) { if (firstTarget) { condition.query.params[0] = firstTarget.refId; foundTarget = firstTarget; } else { this.error = 'Could not find any metric queries'; } } const datasourceName = foundTarget.datasource || this.panel.datasource; this.datasourceSrv.get(datasourceName).then(ds => { if (!ds.meta.alerting) { this.error = 'The datasource does not support alerting queries'; } else if (ds.targetContainsTemplate && ds.targetContainsTemplate(foundTarget)) { this.error = 'Template variables are not supported in alert queries'; } else { this.error = ''; } }); } } buildConditionModel(source) { const cm: any = { source: source, type: source.type }; cm.queryPart = new QueryPart(source.query, alertDef.alertQueryDef); cm.reducerPart = alertDef.createReducerPart(source.reducer); cm.evaluator = source.evaluator; cm.operator = source.operator; return cm; } handleQueryPartEvent(conditionModel, evt) { switch (evt.name) { case 'action-remove-part': { break; } case 'get-part-actions': { return this.$q.when([]); } 
case 'part-param-changed': { this.validateModel(); } case 'get-param-options': { const result = this.panel.targets.map(target => { return this.uiSegmentSrv.newSegment({ value: target.refId }); }); return this.$q.when(result); } } } handleReducerPartEvent(conditionModel, evt) { switch (evt.name) { case 'action': { conditionModel.source.reducer.type = evt.action.value; conditionModel.reducerPart = alertDef.createReducerPart(conditionModel.source.reducer); break; } case 'get-part-actions': { const result = []; for (const type of alertDef.reducerTypes) { if (type.value !== conditionModel.source.reducer.type) { result.push(type); } } return this.$q.when(result); } } } addCondition(type) { const condition = this.buildDefaultCondition(); // add to persited model this.alert.conditions.push(condition); // add to view model this.conditionModels.push(this.buildConditionModel(condition)); } removeCondition(index) { this.alert.conditions.splice(index, 1); this.conditionModels.splice(index, 1); } delete() { appEvents.emit('confirm-modal', { title: 'Delete Alert', text: 'Are you sure you want to delete this alert rule?', text2: 'You need to save dashboard for the delete to take effect', icon: 'fa-trash', yesText: 'Delete', onConfirm: () => { delete this.panel.alert; this.alert = null; this.panel.thresholds = []; this.conditionModels = []; this.panelCtrl.alertState = null; this.panelCtrl.render(); }, }); } enable = () => { this.panel.alert = {}; this.initModel(); this.panel.alert.for = '5m'; //default value for new alerts. 
for existing alerts we use 0m to avoid breaking changes }; evaluatorParamsChanged() { ThresholdMapper.alertToGraphThresholds(this.panel); this.panelCtrl.render(); } evaluatorTypeChanged(evaluator) { // ensure params array is correct length switch (evaluator.type) { case 'lt': case 'gt': { evaluator.params = [evaluator.params[0]]; break; } case 'within_range': case 'outside_range': { evaluator.params = [evaluator.params[0], evaluator.params[1]]; break; } case 'no_value': { evaluator.params = []; } } this.evaluatorParamsChanged(); } clearHistory() { appEvents.emit('confirm-modal', { title: 'Delete Alert History', text: 'Are you sure you want to remove all history & annotations for this alert?', icon: 'fa-trash', yesText: 'Yes', onConfirm: () => { this.backendSrv .post('/api/annotations/mass-delete', { dashboardId: this.panelCtrl.dashboard.id, panelId: this.panel.id, }) .then(res => { this.alertHistory = []; this.panelCtrl.refresh(); }); }, }); } } /** @ngInject */ export function alertTab() { 'use strict'; return { restrict: 'E', scope: true, templateUrl: 'public/app/features/alerting/partials/alert_tab.html', controller: AlertTabCtrl, }; } coreModule.directive('alertTab', alertTab);
public/app/features/alerting/AlertTabCtrl.ts
0
https://github.com/grafana/grafana/commit/1341f4517a3a67c58c8d4bca2b184fb3dea735f7
[ 0.000417072995333001, 0.00017810091958381236, 0.00016579324437770993, 0.00017251630197279155, 0.00003652618397609331 ]
{ "id": 8, "code_window": [ " return {\n", " ...state,\n", " queryIntervals,\n", " history,\n", " datasourceLoading: false,\n", " datasourceMissing: false,\n", " logsHighlighterExpressions: undefined,\n", " queryTransactions: [],\n" ], "labels": [ "keep", "keep", "replace", "keep", "keep", "keep", "keep", "keep" ], "after_edit": [], "file_path": "public/app/features/explore/state/reducers.ts", "type": "replace", "edit_start_line_idx": 274 }
{ "stable": "6.1.3", "testing": "6.1.3" }
latest.json
0
https://github.com/grafana/grafana/commit/1341f4517a3a67c58c8d4bca2b184fb3dea735f7
[ 0.00017164304153993726, 0.00017164304153993726, 0.00017164304153993726, 0.00017164304153993726, 0 ]
{ "id": 9, "code_window": [ " ...state,\n", " exploreDatasources: action.payload.exploreDatasources,\n", " };\n", " },\n", " })\n", " .create();\n", "\n", "export const updateChildRefreshState = (\n", " state: Readonly<ExploreItemState>,\n", " payload: LocationUpdate,\n" ], "labels": [ "keep", "keep", "keep", "keep", "add", "keep", "keep", "keep", "keep", "keep" ], "after_edit": [ " .addMapper({\n", " filter: runQueriesAction,\n", " mapper: (state): ExploreItemState => {\n", " const { range, datasourceInstance, containerWidth } = state;\n", " let interval = '1s';\n", " if (datasourceInstance && datasourceInstance.interval) {\n", " interval = datasourceInstance.interval;\n", " }\n", " const queryIntervals = getIntervals(range, interval, containerWidth);\n", " return {\n", " ...state,\n", " queryIntervals,\n", " };\n", " },\n", " })\n" ], "file_path": "public/app/features/explore/state/reducers.ts", "type": "add", "edit_start_line_idx": 561 }
// @ts-ignore import _ from 'lodash'; import { calculateResultsFromQueryTransactions, generateEmptyQuery, getIntervals, ensureQueries, getQueryKeys, parseUrlState, DEFAULT_UI_STATE, } from 'app/core/utils/explore'; import { ExploreItemState, ExploreState, QueryTransaction, ExploreId, ExploreUpdateState } from 'app/types/explore'; import { DataQuery } from '@grafana/ui/src/types'; import { HigherOrderAction, ActionTypes, testDataSourcePendingAction, testDataSourceSuccessAction, testDataSourceFailureAction, splitCloseAction, SplitCloseActionPayload, loadExploreDatasources, } from './actionTypes'; import { reducerFactory } from 'app/core/redux'; import { addQueryRowAction, changeQueryAction, changeSizeAction, changeTimeAction, clearQueriesAction, highlightLogsExpressionAction, initializeExploreAction, updateDatasourceInstanceAction, loadDatasourceMissingAction, loadDatasourcePendingAction, loadDatasourceReadyAction, modifyQueriesAction, queryTransactionFailureAction, queryTransactionStartAction, queryTransactionSuccessAction, removeQueryRowAction, scanRangeAction, scanStartAction, scanStopAction, setQueriesAction, toggleGraphAction, toggleLogsAction, toggleTableAction, queriesImportedAction, updateUIStateAction, toggleLogLevelAction, } from './actionTypes'; import { updateLocation } from 'app/core/actions/location'; import { LocationUpdate } from 'app/types'; export const DEFAULT_RANGE = { from: 'now-6h', to: 'now', }; // Millies step for helper bar charts const DEFAULT_GRAPH_INTERVAL = 15 * 1000; export const makeInitialUpdateState = (): ExploreUpdateState => ({ datasource: false, queries: false, range: false, ui: false, }); /** * Returns a fresh Explore area state */ export const makeExploreItemState = (): ExploreItemState => ({ StartPage: undefined, containerWidth: 0, datasourceInstance: null, requestedDatasourceName: null, datasourceError: null, datasourceLoading: null, datasourceMissing: false, exploreDatasources: [], history: [], queries: [], initialized: false, 
queryTransactions: [], queryIntervals: { interval: '15s', intervalMs: DEFAULT_GRAPH_INTERVAL }, range: DEFAULT_RANGE, scanning: false, scanRange: null, showingGraph: true, showingLogs: true, showingTable: true, supportsGraph: null, supportsLogs: null, supportsTable: null, queryKeys: [], urlState: null, update: makeInitialUpdateState(), }); /** * Global Explore state that handles multiple Explore areas and the split state */ export const initialExploreState: ExploreState = { split: null, left: makeExploreItemState(), right: makeExploreItemState(), }; /** * Reducer for an Explore area, to be used by the global Explore reducer. */ export const itemReducer = reducerFactory<ExploreItemState>({} as ExploreItemState) .addMapper({ filter: addQueryRowAction, mapper: (state, action): ExploreItemState => { const { queries, queryTransactions } = state; const { index, query } = action.payload; // Add to queries, which will cause a new row to be rendered const nextQueries = [...queries.slice(0, index + 1), { ...query }, ...queries.slice(index + 1)]; // Ongoing transactions need to update their row indices const nextQueryTransactions = queryTransactions.map(qt => { if (qt.rowIndex > index) { return { ...qt, rowIndex: qt.rowIndex + 1, }; } return qt; }); return { ...state, queries: nextQueries, logsHighlighterExpressions: undefined, queryTransactions: nextQueryTransactions, queryKeys: getQueryKeys(nextQueries, state.datasourceInstance), }; }, }) .addMapper({ filter: changeQueryAction, mapper: (state, action): ExploreItemState => { const { queries, queryTransactions } = state; const { query, index } = action.payload; // Override path: queries are completely reset const nextQuery: DataQuery = { ...query, ...generateEmptyQuery(state.queries) }; const nextQueries = [...queries]; nextQueries[index] = nextQuery; // Discard ongoing transaction related to row query const nextQueryTransactions = queryTransactions.filter(qt => qt.rowIndex !== index); return { ...state, queries: nextQueries, 
queryTransactions: nextQueryTransactions, queryKeys: getQueryKeys(nextQueries, state.datasourceInstance), }; }, }) .addMapper({ filter: changeSizeAction, mapper: (state, action): ExploreItemState => { const { range, datasourceInstance } = state; let interval = '1s'; if (datasourceInstance && datasourceInstance.interval) { interval = datasourceInstance.interval; } const containerWidth = action.payload.width; const queryIntervals = getIntervals(range, interval, containerWidth); return { ...state, containerWidth, queryIntervals }; }, }) .addMapper({ filter: changeTimeAction, mapper: (state, action): ExploreItemState => { return { ...state, range: action.payload.range }; }, }) .addMapper({ filter: clearQueriesAction, mapper: (state): ExploreItemState => { const queries = ensureQueries(); return { ...state, queries: queries.slice(), queryTransactions: [], showingStartPage: Boolean(state.StartPage), queryKeys: getQueryKeys(queries, state.datasourceInstance), }; }, }) .addMapper({ filter: highlightLogsExpressionAction, mapper: (state, action): ExploreItemState => { const { expressions } = action.payload; return { ...state, logsHighlighterExpressions: expressions }; }, }) .addMapper({ filter: initializeExploreAction, mapper: (state, action): ExploreItemState => { const { containerWidth, eventBridge, queries, range, ui } = action.payload; return { ...state, containerWidth, eventBridge, range, queries, initialized: true, queryKeys: getQueryKeys(queries, state.datasourceInstance), ...ui, update: makeInitialUpdateState(), }; }, }) .addMapper({ filter: updateDatasourceInstanceAction, mapper: (state, action): ExploreItemState => { const { datasourceInstance } = action.payload; // Capabilities const supportsGraph = datasourceInstance.meta.metrics; const supportsLogs = datasourceInstance.meta.logs; const supportsTable = datasourceInstance.meta.tables; // Custom components const StartPage = datasourceInstance.components.ExploreStartPage; return { ...state, datasourceInstance, 
supportsGraph, supportsLogs, supportsTable, StartPage, showingStartPage: Boolean(StartPage), queryKeys: getQueryKeys(state.queries, datasourceInstance), }; }, }) .addMapper({ filter: loadDatasourceMissingAction, mapper: (state): ExploreItemState => { return { ...state, datasourceMissing: true, datasourceLoading: false, update: makeInitialUpdateState(), }; }, }) .addMapper({ filter: loadDatasourcePendingAction, mapper: (state, action): ExploreItemState => { return { ...state, datasourceLoading: true, requestedDatasourceName: action.payload.requestedDatasourceName, }; }, }) .addMapper({ filter: loadDatasourceReadyAction, mapper: (state, action): ExploreItemState => { const { containerWidth, range, datasourceInstance } = state; const { history } = action.payload; const queryIntervals = getIntervals(range, datasourceInstance.interval, containerWidth); return { ...state, queryIntervals, history, datasourceLoading: false, datasourceMissing: false, logsHighlighterExpressions: undefined, queryTransactions: [], update: makeInitialUpdateState(), }; }, }) .addMapper({ filter: modifyQueriesAction, mapper: (state, action): ExploreItemState => { const { queries, queryTransactions } = state; const { modification, index, modifier } = action.payload; let nextQueries: DataQuery[]; let nextQueryTransactions: QueryTransaction[]; if (index === undefined) { // Modify all queries nextQueries = queries.map((query, i) => ({ ...modifier({ ...query }, modification), ...generateEmptyQuery(state.queries), })); // Discard all ongoing transactions nextQueryTransactions = []; } else { // Modify query only at index nextQueries = queries.map((query, i) => { // Synchronize all queries with local query cache to ensure consistency // TODO still needed? return i === index ? 
{ ...modifier({ ...query }, modification), ...generateEmptyQuery(state.queries) } : query; }); nextQueryTransactions = queryTransactions // Consume the hint corresponding to the action .map(qt => { if (qt.hints != null && qt.rowIndex === index) { qt.hints = qt.hints.filter(hint => hint.fix.action !== modification); } return qt; }) // Preserve previous row query transaction to keep results visible if next query is incomplete .filter(qt => modification.preventSubmit || qt.rowIndex !== index); } return { ...state, queries: nextQueries, queryKeys: getQueryKeys(nextQueries, state.datasourceInstance), queryTransactions: nextQueryTransactions, }; }, }) .addMapper({ filter: queryTransactionFailureAction, mapper: (state, action): ExploreItemState => { const { queryTransactions } = action.payload; return { ...state, queryTransactions, showingStartPage: false, update: makeInitialUpdateState(), }; }, }) .addMapper({ filter: queryTransactionStartAction, mapper: (state, action): ExploreItemState => { const { queryTransactions } = state; const { resultType, rowIndex, transaction } = action.payload; // Discarding existing transactions of same type const remainingTransactions = queryTransactions.filter( qt => !(qt.resultType === resultType && qt.rowIndex === rowIndex) ); // Append new transaction const nextQueryTransactions: QueryTransaction[] = [...remainingTransactions, transaction]; return { ...state, queryTransactions: nextQueryTransactions, showingStartPage: false, update: makeInitialUpdateState(), }; }, }) .addMapper({ filter: queryTransactionSuccessAction, mapper: (state, action): ExploreItemState => { const { datasourceInstance, queryIntervals } = state; const { history, queryTransactions } = action.payload; const results = calculateResultsFromQueryTransactions( queryTransactions, datasourceInstance, queryIntervals.intervalMs ); return { ...state, ...results, history, queryTransactions, showingStartPage: false, update: makeInitialUpdateState(), }; }, }) .addMapper({ filter: 
removeQueryRowAction, mapper: (state, action): ExploreItemState => { const { datasourceInstance, queries, queryIntervals, queryTransactions, queryKeys } = state; const { index } = action.payload; if (queries.length <= 1) { return state; } const nextQueries = [...queries.slice(0, index), ...queries.slice(index + 1)]; const nextQueryKeys = [...queryKeys.slice(0, index), ...queryKeys.slice(index + 1)]; // Discard transactions related to row query const nextQueryTransactions = queryTransactions.filter(qt => nextQueries.some(nq => nq.key === qt.query.key)); const results = calculateResultsFromQueryTransactions( nextQueryTransactions, datasourceInstance, queryIntervals.intervalMs ); return { ...state, ...results, queries: nextQueries, logsHighlighterExpressions: undefined, queryTransactions: nextQueryTransactions, queryKeys: nextQueryKeys, }; }, }) .addMapper({ filter: scanRangeAction, mapper: (state, action): ExploreItemState => { return { ...state, scanRange: action.payload.range }; }, }) .addMapper({ filter: scanStartAction, mapper: (state, action): ExploreItemState => { return { ...state, scanning: true, scanner: action.payload.scanner }; }, }) .addMapper({ filter: scanStopAction, mapper: (state): ExploreItemState => { const { queryTransactions } = state; const nextQueryTransactions = queryTransactions.filter(qt => qt.scanning && !qt.done); return { ...state, queryTransactions: nextQueryTransactions, scanning: false, scanRange: undefined, scanner: undefined, update: makeInitialUpdateState(), }; }, }) .addMapper({ filter: setQueriesAction, mapper: (state, action): ExploreItemState => { const { queries } = action.payload; return { ...state, queries: queries.slice(), queryKeys: getQueryKeys(queries, state.datasourceInstance), }; }, }) .addMapper({ filter: updateUIStateAction, mapper: (state, action): ExploreItemState => { return { ...state, ...action.payload }; }, }) .addMapper({ filter: toggleGraphAction, mapper: (state): ExploreItemState => { const showingGraph = 
!state.showingGraph; let nextQueryTransactions = state.queryTransactions; if (!showingGraph) { // Discard transactions related to Graph query nextQueryTransactions = state.queryTransactions.filter(qt => qt.resultType !== 'Graph'); } return { ...state, queryTransactions: nextQueryTransactions }; }, }) .addMapper({ filter: toggleLogsAction, mapper: (state): ExploreItemState => { const showingLogs = !state.showingLogs; let nextQueryTransactions = state.queryTransactions; if (!showingLogs) { // Discard transactions related to Logs query nextQueryTransactions = state.queryTransactions.filter(qt => qt.resultType !== 'Logs'); } return { ...state, queryTransactions: nextQueryTransactions }; }, }) .addMapper({ filter: toggleTableAction, mapper: (state): ExploreItemState => { const showingTable = !state.showingTable; if (showingTable) { return { ...state, queryTransactions: state.queryTransactions }; } // Toggle off needs discarding of table queries and results const nextQueryTransactions = state.queryTransactions.filter(qt => qt.resultType !== 'Table'); const results = calculateResultsFromQueryTransactions( nextQueryTransactions, state.datasourceInstance, state.queryIntervals.intervalMs ); return { ...state, ...results, queryTransactions: nextQueryTransactions }; }, }) .addMapper({ filter: queriesImportedAction, mapper: (state, action): ExploreItemState => { const { queries } = action.payload; return { ...state, queries, queryKeys: getQueryKeys(queries, state.datasourceInstance), }; }, }) .addMapper({ filter: toggleLogLevelAction, mapper: (state, action): ExploreItemState => { const { hiddenLogLevels } = action.payload; return { ...state, hiddenLogLevels: Array.from(hiddenLogLevels), }; }, }) .addMapper({ filter: testDataSourcePendingAction, mapper: (state): ExploreItemState => { return { ...state, datasourceError: null, }; }, }) .addMapper({ filter: testDataSourceSuccessAction, mapper: (state): ExploreItemState => { return { ...state, datasourceError: null, }; }, }) 
.addMapper({ filter: testDataSourceFailureAction, mapper: (state, action): ExploreItemState => { return { ...state, datasourceError: action.payload.error, queryTransactions: [], graphResult: undefined, tableResult: undefined, logsResult: undefined, update: makeInitialUpdateState(), }; }, }) .addMapper({ filter: loadExploreDatasources, mapper: (state, action): ExploreItemState => { return { ...state, exploreDatasources: action.payload.exploreDatasources, }; }, }) .create(); export const updateChildRefreshState = ( state: Readonly<ExploreItemState>, payload: LocationUpdate, exploreId: ExploreId ): ExploreItemState => { const path = payload.path || ''; const queryState = payload.query[exploreId] as string; if (!queryState) { return state; } const urlState = parseUrlState(queryState); if (!state.urlState || path !== '/explore') { // we only want to refresh when browser back/forward return { ...state, urlState, update: { datasource: false, queries: false, range: false, ui: false } }; } const datasource = _.isEqual(urlState ? urlState.datasource : '', state.urlState.datasource) === false; const queries = _.isEqual(urlState ? urlState.queries : [], state.urlState.queries) === false; const range = _.isEqual(urlState ? urlState.range : DEFAULT_RANGE, state.urlState.range) === false; const ui = _.isEqual(urlState ? urlState.ui : DEFAULT_UI_STATE, state.urlState.ui) === false; return { ...state, urlState, update: { ...state.update, datasource, queries, range, ui, }, }; }; /** * Global Explore reducer that handles multiple Explore areas (left and right). * Actions that have an `exploreId` get routed to the ExploreItemReducer. */ export const exploreReducer = (state = initialExploreState, action: HigherOrderAction): ExploreState => { switch (action.type) { case splitCloseAction.type: { const { itemId } = action.payload as SplitCloseActionPayload; const targetSplit = { left: itemId === ExploreId.left ? 
state.right : state.left, right: initialExploreState.right, }; return { ...state, ...targetSplit, split: false, }; } case ActionTypes.SplitOpen: { return { ...state, split: true, right: { ...action.payload.itemState } }; } case ActionTypes.ResetExplore: { return initialExploreState; } case updateLocation.type: { const { query } = action.payload; if (!query || !query[ExploreId.left]) { return state; } const split = query[ExploreId.right] ? true : false; const leftState = state[ExploreId.left]; const rightState = state[ExploreId.right]; return { ...state, split, [ExploreId.left]: updateChildRefreshState(leftState, action.payload, ExploreId.left), [ExploreId.right]: updateChildRefreshState(rightState, action.payload, ExploreId.right), }; } } if (action.payload) { const { exploreId } = action.payload as any; if (exploreId !== undefined) { const exploreItemState = state[exploreId]; return { ...state, [exploreId]: itemReducer(exploreItemState, action) }; } } return state; }; export default { explore: exploreReducer, };
public/app/features/explore/state/reducers.ts
1
https://github.com/grafana/grafana/commit/1341f4517a3a67c58c8d4bca2b184fb3dea735f7
[ 0.9991273283958435, 0.03193986043334007, 0.0001657145330682397, 0.0009582872735336423, 0.17088519036769867 ]
{ "id": 9, "code_window": [ " ...state,\n", " exploreDatasources: action.payload.exploreDatasources,\n", " };\n", " },\n", " })\n", " .create();\n", "\n", "export const updateChildRefreshState = (\n", " state: Readonly<ExploreItemState>,\n", " payload: LocationUpdate,\n" ], "labels": [ "keep", "keep", "keep", "keep", "add", "keep", "keep", "keep", "keep", "keep" ], "after_edit": [ " .addMapper({\n", " filter: runQueriesAction,\n", " mapper: (state): ExploreItemState => {\n", " const { range, datasourceInstance, containerWidth } = state;\n", " let interval = '1s';\n", " if (datasourceInstance && datasourceInstance.interval) {\n", " interval = datasourceInstance.interval;\n", " }\n", " const queryIntervals = getIntervals(range, interval, containerWidth);\n", " return {\n", " ...state,\n", " queryIntervals,\n", " };\n", " },\n", " })\n" ], "file_path": "public/app/features/explore/state/reducers.ts", "type": "add", "edit_start_line_idx": 561 }
package models import ( "encoding/json" "errors" "fmt" "time" ) // Typed errors var ( ErrInvalidRoleType = errors.New("Invalid role type") ErrLastOrgAdmin = errors.New("Cannot remove last organization admin") ErrOrgUserNotFound = errors.New("Cannot find the organization user") ErrOrgUserAlreadyAdded = errors.New("User is already added to organization") ) type RoleType string const ( ROLE_VIEWER RoleType = "Viewer" ROLE_EDITOR RoleType = "Editor" ROLE_ADMIN RoleType = "Admin" ) func (r RoleType) IsValid() bool { return r == ROLE_VIEWER || r == ROLE_ADMIN || r == ROLE_EDITOR } func (r RoleType) Includes(other RoleType) bool { if r == ROLE_ADMIN { return true } if r == ROLE_EDITOR { return other != ROLE_ADMIN } if r == ROLE_VIEWER { return other == ROLE_VIEWER } return false } func (r *RoleType) UnmarshalJSON(data []byte) error { var str string err := json.Unmarshal(data, &str) if err != nil { return err } *r = RoleType(str) if !(*r).IsValid() { if (*r) != "" { return fmt.Errorf("JSON validation error: invalid role value: %s", *r) } *r = ROLE_VIEWER } return nil } type OrgUser struct { Id int64 OrgId int64 UserId int64 Role RoleType Created time.Time Updated time.Time } // --------------------- // COMMANDS type RemoveOrgUserCommand struct { UserId int64 OrgId int64 ShouldDeleteOrphanedUser bool UserWasDeleted bool } type AddOrgUserCommand struct { LoginOrEmail string `json:"loginOrEmail" binding:"Required"` Role RoleType `json:"role" binding:"Required"` OrgId int64 `json:"-"` UserId int64 `json:"-"` } type UpdateOrgUserCommand struct { Role RoleType `json:"role" binding:"Required"` OrgId int64 `json:"-"` UserId int64 `json:"-"` } // ---------------------- // QUERIES type GetOrgUsersQuery struct { OrgId int64 Query string Limit int Result []*OrgUserDTO } // ---------------------- // Projections and DTOs type OrgUserDTO struct { OrgId int64 `json:"orgId"` UserId int64 `json:"userId"` Email string `json:"email"` AvatarUrl string `json:"avatarUrl"` Login string 
`json:"login"` Role string `json:"role"` LastSeenAt time.Time `json:"lastSeenAt"` LastSeenAtAge string `json:"lastSeenAtAge"` }
pkg/models/org_user.go
0
https://github.com/grafana/grafana/commit/1341f4517a3a67c58c8d4bca2b184fb3dea735f7
[ 0.00022756609541829675, 0.00017516383377369493, 0.00016481585043948144, 0.0001680200221017003, 0.000017414329704479314 ]
{ "id": 9, "code_window": [ " ...state,\n", " exploreDatasources: action.payload.exploreDatasources,\n", " };\n", " },\n", " })\n", " .create();\n", "\n", "export const updateChildRefreshState = (\n", " state: Readonly<ExploreItemState>,\n", " payload: LocationUpdate,\n" ], "labels": [ "keep", "keep", "keep", "keep", "add", "keep", "keep", "keep", "keep", "keep" ], "after_edit": [ " .addMapper({\n", " filter: runQueriesAction,\n", " mapper: (state): ExploreItemState => {\n", " const { range, datasourceInstance, containerWidth } = state;\n", " let interval = '1s';\n", " if (datasourceInstance && datasourceInstance.interval) {\n", " interval = datasourceInstance.interval;\n", " }\n", " const queryIntervals = getIntervals(range, interval, containerWidth);\n", " return {\n", " ...state,\n", " queryIntervals,\n", " };\n", " },\n", " })\n" ], "file_path": "public/app/features/explore/state/reducers.ts", "type": "add", "edit_start_line_idx": 561 }
// Copyright 2016 The Go Authors. All rights reserved. // Use of this source code is governed by a BSD-style // license that can be found in the LICENSE file. // +build !go1.10 package bidirule func (t *Transformer) isFinal() bool { if !t.isRTL() { return true } return t.state == ruleLTRFinal || t.state == ruleRTLFinal || t.state == ruleInitial }
vendor/golang.org/x/text/secure/bidirule/bidirule9.0.0.go
0
https://github.com/grafana/grafana/commit/1341f4517a3a67c58c8d4bca2b184fb3dea735f7
[ 0.00017411414592061192, 0.00017122953431680799, 0.00016834492271300405, 0.00017122953431680799, 0.0000028846116038039327 ]
{ "id": 9, "code_window": [ " ...state,\n", " exploreDatasources: action.payload.exploreDatasources,\n", " };\n", " },\n", " })\n", " .create();\n", "\n", "export const updateChildRefreshState = (\n", " state: Readonly<ExploreItemState>,\n", " payload: LocationUpdate,\n" ], "labels": [ "keep", "keep", "keep", "keep", "add", "keep", "keep", "keep", "keep", "keep" ], "after_edit": [ " .addMapper({\n", " filter: runQueriesAction,\n", " mapper: (state): ExploreItemState => {\n", " const { range, datasourceInstance, containerWidth } = state;\n", " let interval = '1s';\n", " if (datasourceInstance && datasourceInstance.interval) {\n", " interval = datasourceInstance.interval;\n", " }\n", " const queryIntervals = getIntervals(range, interval, containerWidth);\n", " return {\n", " ...state,\n", " queryIntervals,\n", " };\n", " },\n", " })\n" ], "file_path": "public/app/features/explore/state/reducers.ts", "type": "add", "edit_start_line_idx": 561 }
export enum OrgRole { Viewer = 'Viewer', Editor = 'Editor', Admin = 'Admin', } export interface DashboardAclDTO { id?: number; dashboardId?: number; userId?: number; userLogin?: string; userEmail?: string; teamId?: number; team?: string; permission?: PermissionLevel; role?: OrgRole; icon?: string; inherited?: boolean; } export interface DashboardAclUpdateDTO { userId: number; teamId: number; role: OrgRole; permission: PermissionLevel; } export interface DashboardAcl { id?: number; dashboardId?: number; userId?: number; userLogin?: string; userEmail?: string; teamId?: number; team?: string; permission?: PermissionLevel; role?: OrgRole; icon?: string; name?: string; inherited?: boolean; sortRank?: number; } export interface DashboardPermissionInfo { value: PermissionLevel; label: string; description: string; } export interface NewDashboardAclItem { teamId: number; userId: number; role?: OrgRole; permission: PermissionLevel; type: AclTarget; } export enum PermissionLevel { View = 1, Edit = 2, Admin = 4, } export enum DataSourcePermissionLevel { Query = 1, Admin = 2, } export enum AclTarget { Team = 'Team', User = 'User', Viewer = 'Viewer', Editor = 'Editor', } export interface AclTargetInfo { value: AclTarget; text: string; } export const dataSourceAclLevels = [ { value: DataSourcePermissionLevel.Query, label: 'Query', description: 'Can query data source.' }, ]; export const dashboardAclTargets: AclTargetInfo[] = [ { value: AclTarget.Team, text: 'Team' }, { value: AclTarget.User, text: 'User' }, { value: AclTarget.Viewer, text: 'Everyone With Viewer Role' }, { value: AclTarget.Editor, text: 'Everyone With Editor Role' }, ]; export const dashboardPermissionLevels: DashboardPermissionInfo[] = [ { value: PermissionLevel.View, label: 'View', description: 'Can view dashboards.' }, { value: PermissionLevel.Edit, label: 'Edit', description: 'Can add, edit and delete dashboards.' 
}, { value: PermissionLevel.Admin, label: 'Admin', description: 'Can add/remove permissions and can add, edit and delete dashboards.', }, ]; export enum TeamPermissionLevel { Member = 0, Admin = 4, } export interface TeamPermissionInfo { value: TeamPermissionLevel; label: string; description: string; } export const teamsPermissionLevels: TeamPermissionInfo[] = [ { value: TeamPermissionLevel.Member, label: 'Member', description: 'Is team member' }, { value: TeamPermissionLevel.Admin, label: 'Admin', description: 'Can add/remove permissions, members and delete team.', }, ];
public/app/types/acl.ts
0
https://github.com/grafana/grafana/commit/1341f4517a3a67c58c8d4bca2b184fb3dea735f7
[ 0.00020038129878230393, 0.0001712796656647697, 0.00016629704623483121, 0.00016870292893145233, 0.000008571274520363659 ]
{ "id": 0, "code_window": [ "\t)\n", "\n", "\tM_Http_Request_Summary = prometheus.NewSummaryVec(\n", "\t\tprometheus.SummaryOpts{\n", "\t\t\tName: \"http_request_duration\",\n", "\t\t\tHelp: \"http request summary\",\n", "\t\t},\n", "\t\t[]string{\"handler\", \"statuscode\", \"method\"},\n", "\t)\n" ], "labels": [ "keep", "keep", "keep", "keep", "replace", "keep", "keep", "keep", "keep" ], "after_edit": [ "\t\t\tName: \"http_request_duration_milleseconds\",\n" ], "file_path": "pkg/metrics/metrics.go", "type": "replace", "edit_start_line_idx": 104 }
package metrics import ( "bytes" "encoding/json" "net/http" "runtime" "strings" "time" "github.com/grafana/grafana/pkg/bus" "github.com/grafana/grafana/pkg/models" "github.com/grafana/grafana/pkg/plugins" "github.com/grafana/grafana/pkg/setting" "github.com/prometheus/client_golang/prometheus" ) const exporterName = "grafana" var ( M_Instance_Start prometheus.Counter M_Page_Status *prometheus.CounterVec M_Api_Status *prometheus.CounterVec M_Proxy_Status *prometheus.CounterVec M_Http_Request_Total *prometheus.CounterVec M_Http_Request_Summary *prometheus.SummaryVec M_Api_User_SignUpStarted prometheus.Counter M_Api_User_SignUpCompleted prometheus.Counter M_Api_User_SignUpInvite prometheus.Counter M_Api_Dashboard_Save prometheus.Summary M_Api_Dashboard_Get prometheus.Summary M_Api_Dashboard_Search prometheus.Summary M_Api_Admin_User_Create prometheus.Counter M_Api_Login_Post prometheus.Counter M_Api_Login_OAuth prometheus.Counter M_Api_Org_Create prometheus.Counter M_Api_Dashboard_Snapshot_Create prometheus.Counter M_Api_Dashboard_Snapshot_External prometheus.Counter M_Api_Dashboard_Snapshot_Get prometheus.Counter M_Api_Dashboard_Insert prometheus.Counter M_Alerting_Result_State *prometheus.CounterVec M_Alerting_Notification_Sent *prometheus.CounterVec M_Aws_CloudWatch_GetMetricStatistics prometheus.Counter M_Aws_CloudWatch_ListMetrics prometheus.Counter M_DB_DataSource_QueryById prometheus.Counter // Timers M_DataSource_ProxyReq_Timer prometheus.Summary M_Alerting_Execution_Time prometheus.Summary // StatTotals M_Alerting_Active_Alerts prometheus.Gauge M_StatTotal_Dashboards prometheus.Gauge M_StatTotal_Users prometheus.Gauge M_StatTotal_Orgs prometheus.Gauge M_StatTotal_Playlists prometheus.Gauge ) func init() { M_Instance_Start = prometheus.NewCounter(prometheus.CounterOpts{ Name: "instance_start_total", Help: "counter for started instances", Namespace: exporterName, }) M_Page_Status = prometheus.NewCounterVec( prometheus.CounterOpts{ Name: 
"page_response_status_total", Help: "page http response status", Namespace: exporterName, }, []string{"code"}, ) M_Api_Status = prometheus.NewCounterVec( prometheus.CounterOpts{ Name: "api_response_status_total", Help: "api http response status", Namespace: exporterName, }, []string{"code"}, ) M_Proxy_Status = prometheus.NewCounterVec( prometheus.CounterOpts{ Name: "proxy_response_status_total", Help: "proxy http response status", Namespace: exporterName, }, []string{"code"}, ) M_Http_Request_Total = prometheus.NewCounterVec( prometheus.CounterOpts{ Name: "http_request_total", Help: "http request counter", }, []string{"handler", "statuscode", "method"}, ) M_Http_Request_Summary = prometheus.NewSummaryVec( prometheus.SummaryOpts{ Name: "http_request_duration", Help: "http request summary", }, []string{"handler", "statuscode", "method"}, ) M_Api_User_SignUpStarted = prometheus.NewCounter(prometheus.CounterOpts{ Name: "api_user_signup_started_total", Help: "amount of users who started the signup flow", Namespace: exporterName, }) M_Api_User_SignUpCompleted = prometheus.NewCounter(prometheus.CounterOpts{ Name: "api_user_signup_completed_total", Help: "amount of users who completed the signup flow", Namespace: exporterName, }) M_Api_User_SignUpInvite = prometheus.NewCounter(prometheus.CounterOpts{ Name: "api_user_signup_invite_total", Help: "amount of users who have been invited", Namespace: exporterName, }) M_Api_Dashboard_Save = prometheus.NewSummary(prometheus.SummaryOpts{ Name: "api_dashboard_save", Help: "summary for dashboard save duration", Namespace: exporterName, }) M_Api_Dashboard_Get = prometheus.NewSummary(prometheus.SummaryOpts{ Name: "api_dashboard_get", Help: "summary for dashboard get duration", Namespace: exporterName, }) M_Api_Dashboard_Search = prometheus.NewSummary(prometheus.SummaryOpts{ Name: "api_dashboard_search", Help: "summary for dashboard search duration", Namespace: exporterName, }) M_Api_Admin_User_Create = 
prometheus.NewCounter(prometheus.CounterOpts{ Name: "api_admin_user_created_total", Help: "api admin user created counter", Namespace: exporterName, }) M_Api_Login_Post = prometheus.NewCounter(prometheus.CounterOpts{ Name: "api_login_post_total", Help: "api login post counter", Namespace: exporterName, }) M_Api_Login_OAuth = prometheus.NewCounter(prometheus.CounterOpts{ Name: "api_login_oauth_total", Help: "api login oauth counter", Namespace: exporterName, }) M_Api_Org_Create = prometheus.NewCounter(prometheus.CounterOpts{ Name: "api_org_create_total", Help: "api org created counter", Namespace: exporterName, }) M_Api_Dashboard_Snapshot_Create = prometheus.NewCounter(prometheus.CounterOpts{ Name: "api_dashboard_snapshot_create_total", Help: "dashboard snapshots created", Namespace: exporterName, }) M_Api_Dashboard_Snapshot_External = prometheus.NewCounter(prometheus.CounterOpts{ Name: "api_dashboard_snapshot_external_total", Help: "external dashboard snapshots created", Namespace: exporterName, }) M_Api_Dashboard_Snapshot_Get = prometheus.NewCounter(prometheus.CounterOpts{ Name: "api_dashboard_snapshot_get_total", Help: "loaded dashboards", Namespace: exporterName, }) M_Api_Dashboard_Insert = prometheus.NewCounter(prometheus.CounterOpts{ Name: "api_models_dashboard_insert_total", Help: "dashboards inserted ", Namespace: exporterName, }) M_Alerting_Result_State = prometheus.NewCounterVec(prometheus.CounterOpts{ Name: "alerting_result_total", Help: "alert execution result counter", Namespace: exporterName, }, []string{"state"}) M_Alerting_Notification_Sent = prometheus.NewCounterVec(prometheus.CounterOpts{ Name: "alerting_notification_sent_total", Help: "counter for how many alert notifications been sent", Namespace: exporterName, }, []string{"type"}) M_Aws_CloudWatch_GetMetricStatistics = prometheus.NewCounter(prometheus.CounterOpts{ Name: "aws_cloudwatch_get_metric_statistics_total", Help: "counter for getting metric statistics from aws", Namespace: exporterName, 
}) M_Aws_CloudWatch_ListMetrics = prometheus.NewCounter(prometheus.CounterOpts{ Name: "aws_cloudwatch_list_metrics_total", Help: "counter for getting list of metrics from aws", Namespace: exporterName, }) M_DB_DataSource_QueryById = prometheus.NewCounter(prometheus.CounterOpts{ Name: "db_datasource_query_by_id_total", Help: "counter for getting datasource by id", Namespace: exporterName, }) M_DataSource_ProxyReq_Timer = prometheus.NewSummary(prometheus.SummaryOpts{ Name: "api_dataproxy_request_all", Help: "summary for dashboard search duration", Namespace: exporterName, }) M_Alerting_Execution_Time = prometheus.NewSummary(prometheus.SummaryOpts{ Name: "alerting_execution_time_seconds", Help: "summary of alert exeuction duration", Namespace: exporterName, }) M_Alerting_Active_Alerts = prometheus.NewGauge(prometheus.GaugeOpts{ Name: "alerting_active_alerts", Help: "amount of active alerts", Namespace: exporterName, }) M_StatTotal_Dashboards = prometheus.NewGauge(prometheus.GaugeOpts{ Name: "stat_totals_dashboard", Help: "total amount of dashboards", Namespace: exporterName, }) M_StatTotal_Users = prometheus.NewGauge(prometheus.GaugeOpts{ Name: "stat_total_users", Help: "total amount of users", Namespace: exporterName, }) M_StatTotal_Orgs = prometheus.NewGauge(prometheus.GaugeOpts{ Name: "stat_total_orgs", Help: "total amount of orgs", Namespace: exporterName, }) M_StatTotal_Playlists = prometheus.NewGauge(prometheus.GaugeOpts{ Name: "stat_total_playlists", Help: "total amount of playlists", Namespace: exporterName, }) } func initMetricVars(settings *MetricSettings) { prometheus.MustRegister( M_Instance_Start, M_Page_Status, M_Api_Status, M_Proxy_Status, M_Http_Request_Total, M_Http_Request_Summary, M_Api_User_SignUpStarted, M_Api_User_SignUpCompleted, M_Api_User_SignUpInvite, M_Api_Dashboard_Save, M_Api_Dashboard_Get, M_Api_Dashboard_Search, M_DataSource_ProxyReq_Timer, M_Alerting_Execution_Time, M_Api_Admin_User_Create, M_Api_Login_Post, M_Api_Login_OAuth, 
M_Api_Org_Create, M_Api_Dashboard_Snapshot_Create, M_Api_Dashboard_Snapshot_External, M_Api_Dashboard_Snapshot_Get, M_Api_Dashboard_Insert, M_Alerting_Result_State, M_Alerting_Notification_Sent, M_Aws_CloudWatch_GetMetricStatistics, M_Aws_CloudWatch_ListMetrics, M_DB_DataSource_QueryById, M_Alerting_Active_Alerts, M_StatTotal_Dashboards, M_StatTotal_Users, M_StatTotal_Orgs, M_StatTotal_Playlists) go instrumentationLoop(settings) } func instrumentationLoop(settings *MetricSettings) chan struct{} { M_Instance_Start.Inc() onceEveryDayTick := time.NewTicker(time.Hour * 24) secondTicker := time.NewTicker(time.Second * time.Duration(settings.IntervalSeconds)) for { select { case <-onceEveryDayTick.C: sendUsageStats() case <-secondTicker.C: updateTotalStats() } } } var metricPublishCounter int64 = 0 func updateTotalStats() { metricPublishCounter++ if metricPublishCounter == 1 || metricPublishCounter%10 == 0 { statsQuery := models.GetSystemStatsQuery{} if err := bus.Dispatch(&statsQuery); err != nil { metricsLogger.Error("Failed to get system stats", "error", err) return } M_StatTotal_Dashboards.Set(float64(statsQuery.Result.Dashboards)) M_StatTotal_Users.Set(float64(statsQuery.Result.Users)) M_StatTotal_Playlists.Set(float64(statsQuery.Result.Playlists)) M_StatTotal_Orgs.Set(float64(statsQuery.Result.Orgs)) } } func sendUsageStats() { if !setting.ReportingEnabled { return } metricsLogger.Debug("Sending anonymous usage stats to stats.grafana.org") version := strings.Replace(setting.BuildVersion, ".", "_", -1) metrics := map[string]interface{}{} report := map[string]interface{}{ "version": version, "metrics": metrics, "os": runtime.GOOS, "arch": runtime.GOARCH, } statsQuery := models.GetSystemStatsQuery{} if err := bus.Dispatch(&statsQuery); err != nil { metricsLogger.Error("Failed to get system stats", "error", err) return } metrics["stats.dashboards.count"] = statsQuery.Result.Dashboards metrics["stats.users.count"] = statsQuery.Result.Users metrics["stats.orgs.count"] = 
statsQuery.Result.Orgs metrics["stats.playlist.count"] = statsQuery.Result.Playlists metrics["stats.plugins.apps.count"] = len(plugins.Apps) metrics["stats.plugins.panels.count"] = len(plugins.Panels) metrics["stats.plugins.datasources.count"] = len(plugins.DataSources) metrics["stats.alerts.count"] = statsQuery.Result.Alerts metrics["stats.active_users.count"] = statsQuery.Result.ActiveUsers metrics["stats.datasources.count"] = statsQuery.Result.Datasources dsStats := models.GetDataSourceStatsQuery{} if err := bus.Dispatch(&dsStats); err != nil { metricsLogger.Error("Failed to get datasource stats", "error", err) return } // send counters for each data source // but ignore any custom data sources // as sending that name could be sensitive information dsOtherCount := 0 for _, dsStat := range dsStats.Result { if models.IsKnownDataSourcePlugin(dsStat.Type) { metrics["stats.ds."+dsStat.Type+".count"] = dsStat.Count } else { dsOtherCount += dsStat.Count } } metrics["stats.ds.other.count"] = dsOtherCount out, _ := json.MarshalIndent(report, "", " ") data := bytes.NewBuffer(out) client := http.Client{Timeout: time.Duration(5 * time.Second)} go client.Post("https://stats.grafana.org/grafana-usage-report", "application/json", data) }
pkg/metrics/metrics.go
1
https://github.com/grafana/grafana/commit/491e6897e756e06f39cd912ec01a79fcb0cf31c4
[ 0.9984192848205566, 0.07718518376350403, 0.00016356761625502259, 0.0022699106484651566, 0.249654158949852 ]
{ "id": 0, "code_window": [ "\t)\n", "\n", "\tM_Http_Request_Summary = prometheus.NewSummaryVec(\n", "\t\tprometheus.SummaryOpts{\n", "\t\t\tName: \"http_request_duration\",\n", "\t\t\tHelp: \"http request summary\",\n", "\t\t},\n", "\t\t[]string{\"handler\", \"statuscode\", \"method\"},\n", "\t)\n" ], "labels": [ "keep", "keep", "keep", "keep", "replace", "keep", "keep", "keep", "keep" ], "after_edit": [ "\t\t\tName: \"http_request_duration_milleseconds\",\n" ], "file_path": "pkg/metrics/metrics.go", "type": "replace", "edit_start_line_idx": 104 }
#### Alias patterns - {{term fieldname}} = replaced with value of term group by - {{metric}} = replaced with metric name (ex. Average, Min, Max) - {{field}} = replaced with the metric field name #### Documentation links [Grafana's Elasticsearch Documentation](http://docs.grafana.org/features/datasources/elasticsearch) [Official Elasticsearch Documentation](https://www.elastic.co/guide/en/elasticsearch/reference/current/index.html)
public/app/plugins/datasource/elasticsearch/query_help.md
0
https://github.com/grafana/grafana/commit/491e6897e756e06f39cd912ec01a79fcb0cf31c4
[ 0.000174795335624367, 0.0001710728683974594, 0.00016735040117055178, 0.0001710728683974594, 0.000003722467226907611 ]
{ "id": 0, "code_window": [ "\t)\n", "\n", "\tM_Http_Request_Summary = prometheus.NewSummaryVec(\n", "\t\tprometheus.SummaryOpts{\n", "\t\t\tName: \"http_request_duration\",\n", "\t\t\tHelp: \"http request summary\",\n", "\t\t},\n", "\t\t[]string{\"handler\", \"statuscode\", \"method\"},\n", "\t)\n" ], "labels": [ "keep", "keep", "keep", "keep", "replace", "keep", "keep", "keep", "keep" ], "after_edit": [ "\t\t\tName: \"http_request_duration_milleseconds\",\n" ], "file_path": "pkg/metrics/metrics.go", "type": "replace", "edit_start_line_idx": 104 }
define( [ "../core", "./support", "../core/init" ], function( jQuery, support ) { "use strict"; var rreturn = /\r/g, rspaces = /[\x20\t\r\n\f]+/g; jQuery.fn.extend( { val: function( value ) { var hooks, ret, isFunction, elem = this[ 0 ]; if ( !arguments.length ) { if ( elem ) { hooks = jQuery.valHooks[ elem.type ] || jQuery.valHooks[ elem.nodeName.toLowerCase() ]; if ( hooks && "get" in hooks && ( ret = hooks.get( elem, "value" ) ) !== undefined ) { return ret; } ret = elem.value; return typeof ret === "string" ? // Handle most common string cases ret.replace( rreturn, "" ) : // Handle cases where value is null/undef or number ret == null ? "" : ret; } return; } isFunction = jQuery.isFunction( value ); return this.each( function( i ) { var val; if ( this.nodeType !== 1 ) { return; } if ( isFunction ) { val = value.call( this, i, jQuery( this ).val() ); } else { val = value; } // Treat null/undefined as ""; convert numbers to string if ( val == null ) { val = ""; } else if ( typeof val === "number" ) { val += ""; } else if ( jQuery.isArray( val ) ) { val = jQuery.map( val, function( value ) { return value == null ? "" : value + ""; } ); } hooks = jQuery.valHooks[ this.type ] || jQuery.valHooks[ this.nodeName.toLowerCase() ]; // If set returns undefined, fall back to normal setting if ( !hooks || !( "set" in hooks ) || hooks.set( this, val, "value" ) === undefined ) { this.value = val; } } ); } } ); jQuery.extend( { valHooks: { option: { get: function( elem ) { var val = jQuery.find.attr( elem, "value" ); return val != null ? val : // Support: IE <=10 - 11 only // option.text throws exceptions (#14686, #14858) // Strip and collapse whitespace // https://html.spec.whatwg.org/#strip-and-collapse-whitespace jQuery.trim( jQuery.text( elem ) ).replace( rspaces, " " ); } }, select: { get: function( elem ) { var value, option, options = elem.options, index = elem.selectedIndex, one = elem.type === "select-one", values = one ? null : [], max = one ? 
index + 1 : options.length, i = index < 0 ? max : one ? index : 0; // Loop through all the selected options for ( ; i < max; i++ ) { option = options[ i ]; // Support: IE <=9 only // IE8-9 doesn't update selected after form reset (#2551) if ( ( option.selected || i === index ) && // Don't return options that are disabled or in a disabled optgroup !option.disabled && ( !option.parentNode.disabled || !jQuery.nodeName( option.parentNode, "optgroup" ) ) ) { // Get the specific value for the option value = jQuery( option ).val(); // We don't need an array for one selects if ( one ) { return value; } // Multi-Selects return an array values.push( value ); } } return values; }, set: function( elem, value ) { var optionSet, option, options = elem.options, values = jQuery.makeArray( value ), i = options.length; while ( i-- ) { option = options[ i ]; /* eslint-disable no-cond-assign */ if ( option.selected = jQuery.inArray( jQuery.valHooks.option.get( option ), values ) > -1 ) { optionSet = true; } /* eslint-enable no-cond-assign */ } // Force browsers to behave consistently when non-matching value is set if ( !optionSet ) { elem.selectedIndex = -1; } return values; } } } } ); // Radios and checkboxes getter/setter jQuery.each( [ "radio", "checkbox" ], function() { jQuery.valHooks[ this ] = { set: function( elem, value ) { if ( jQuery.isArray( value ) ) { return ( elem.checked = jQuery.inArray( jQuery( elem ).val(), value ) > -1 ); } } }; if ( !support.checkOn ) { jQuery.valHooks[ this ].get = function( elem ) { return elem.getAttribute( "value" ) === null ? "on" : elem.value; }; } } ); } );
public/vendor/jquery/src/attributes/val.js
0
https://github.com/grafana/grafana/commit/491e6897e756e06f39cd912ec01a79fcb0cf31c4
[ 0.00017713604029268026, 0.0001707076735328883, 0.0001654549705563113, 0.00017032083997037262, 0.000002729950438151718 ]
{ "id": 0, "code_window": [ "\t)\n", "\n", "\tM_Http_Request_Summary = prometheus.NewSummaryVec(\n", "\t\tprometheus.SummaryOpts{\n", "\t\t\tName: \"http_request_duration\",\n", "\t\t\tHelp: \"http request summary\",\n", "\t\t},\n", "\t\t[]string{\"handler\", \"statuscode\", \"method\"},\n", "\t)\n" ], "labels": [ "keep", "keep", "keep", "keep", "replace", "keep", "keep", "keep", "keep" ], "after_edit": [ "\t\t\tName: \"http_request_duration_milleseconds\",\n" ], "file_path": "pkg/metrics/metrics.go", "type": "replace", "edit_start_line_idx": 104 }
declare let helpers: any; export default helpers;
public/test/specs/helpers.d.ts
0
https://github.com/grafana/grafana/commit/491e6897e756e06f39cd912ec01a79fcb0cf31c4
[ 0.0001683667505858466, 0.0001683667505858466, 0.0001683667505858466, 0.0001683667505858466, 0 ]
{ "id": 1, "code_window": [ "\t\tHelp: \"amount of users who have been invited\",\n", "\t\tNamespace: exporterName,\n", "\t})\n", "\n", "\tM_Api_Dashboard_Save = prometheus.NewSummary(prometheus.SummaryOpts{\n", "\t\tName: \"api_dashboard_save\",\n", "\t\tHelp: \"summary for dashboard save duration\",\n", "\t\tNamespace: exporterName,\n", "\t})\n", "\n", "\tM_Api_Dashboard_Get = prometheus.NewSummary(prometheus.SummaryOpts{\n" ], "labels": [ "keep", "keep", "keep", "keep", "keep", "replace", "keep", "keep", "keep", "keep", "keep" ], "after_edit": [ "\t\tName: \"api_dashboard_save_milleseconds\",\n" ], "file_path": "pkg/metrics/metrics.go", "type": "replace", "edit_start_line_idx": 129 }
package metrics import ( "bytes" "encoding/json" "net/http" "runtime" "strings" "time" "github.com/grafana/grafana/pkg/bus" "github.com/grafana/grafana/pkg/models" "github.com/grafana/grafana/pkg/plugins" "github.com/grafana/grafana/pkg/setting" "github.com/prometheus/client_golang/prometheus" ) const exporterName = "grafana" var ( M_Instance_Start prometheus.Counter M_Page_Status *prometheus.CounterVec M_Api_Status *prometheus.CounterVec M_Proxy_Status *prometheus.CounterVec M_Http_Request_Total *prometheus.CounterVec M_Http_Request_Summary *prometheus.SummaryVec M_Api_User_SignUpStarted prometheus.Counter M_Api_User_SignUpCompleted prometheus.Counter M_Api_User_SignUpInvite prometheus.Counter M_Api_Dashboard_Save prometheus.Summary M_Api_Dashboard_Get prometheus.Summary M_Api_Dashboard_Search prometheus.Summary M_Api_Admin_User_Create prometheus.Counter M_Api_Login_Post prometheus.Counter M_Api_Login_OAuth prometheus.Counter M_Api_Org_Create prometheus.Counter M_Api_Dashboard_Snapshot_Create prometheus.Counter M_Api_Dashboard_Snapshot_External prometheus.Counter M_Api_Dashboard_Snapshot_Get prometheus.Counter M_Api_Dashboard_Insert prometheus.Counter M_Alerting_Result_State *prometheus.CounterVec M_Alerting_Notification_Sent *prometheus.CounterVec M_Aws_CloudWatch_GetMetricStatistics prometheus.Counter M_Aws_CloudWatch_ListMetrics prometheus.Counter M_DB_DataSource_QueryById prometheus.Counter // Timers M_DataSource_ProxyReq_Timer prometheus.Summary M_Alerting_Execution_Time prometheus.Summary // StatTotals M_Alerting_Active_Alerts prometheus.Gauge M_StatTotal_Dashboards prometheus.Gauge M_StatTotal_Users prometheus.Gauge M_StatTotal_Orgs prometheus.Gauge M_StatTotal_Playlists prometheus.Gauge ) func init() { M_Instance_Start = prometheus.NewCounter(prometheus.CounterOpts{ Name: "instance_start_total", Help: "counter for started instances", Namespace: exporterName, }) M_Page_Status = prometheus.NewCounterVec( prometheus.CounterOpts{ Name: 
"page_response_status_total", Help: "page http response status", Namespace: exporterName, }, []string{"code"}, ) M_Api_Status = prometheus.NewCounterVec( prometheus.CounterOpts{ Name: "api_response_status_total", Help: "api http response status", Namespace: exporterName, }, []string{"code"}, ) M_Proxy_Status = prometheus.NewCounterVec( prometheus.CounterOpts{ Name: "proxy_response_status_total", Help: "proxy http response status", Namespace: exporterName, }, []string{"code"}, ) M_Http_Request_Total = prometheus.NewCounterVec( prometheus.CounterOpts{ Name: "http_request_total", Help: "http request counter", }, []string{"handler", "statuscode", "method"}, ) M_Http_Request_Summary = prometheus.NewSummaryVec( prometheus.SummaryOpts{ Name: "http_request_duration", Help: "http request summary", }, []string{"handler", "statuscode", "method"}, ) M_Api_User_SignUpStarted = prometheus.NewCounter(prometheus.CounterOpts{ Name: "api_user_signup_started_total", Help: "amount of users who started the signup flow", Namespace: exporterName, }) M_Api_User_SignUpCompleted = prometheus.NewCounter(prometheus.CounterOpts{ Name: "api_user_signup_completed_total", Help: "amount of users who completed the signup flow", Namespace: exporterName, }) M_Api_User_SignUpInvite = prometheus.NewCounter(prometheus.CounterOpts{ Name: "api_user_signup_invite_total", Help: "amount of users who have been invited", Namespace: exporterName, }) M_Api_Dashboard_Save = prometheus.NewSummary(prometheus.SummaryOpts{ Name: "api_dashboard_save", Help: "summary for dashboard save duration", Namespace: exporterName, }) M_Api_Dashboard_Get = prometheus.NewSummary(prometheus.SummaryOpts{ Name: "api_dashboard_get", Help: "summary for dashboard get duration", Namespace: exporterName, }) M_Api_Dashboard_Search = prometheus.NewSummary(prometheus.SummaryOpts{ Name: "api_dashboard_search", Help: "summary for dashboard search duration", Namespace: exporterName, }) M_Api_Admin_User_Create = 
prometheus.NewCounter(prometheus.CounterOpts{ Name: "api_admin_user_created_total", Help: "api admin user created counter", Namespace: exporterName, }) M_Api_Login_Post = prometheus.NewCounter(prometheus.CounterOpts{ Name: "api_login_post_total", Help: "api login post counter", Namespace: exporterName, }) M_Api_Login_OAuth = prometheus.NewCounter(prometheus.CounterOpts{ Name: "api_login_oauth_total", Help: "api login oauth counter", Namespace: exporterName, }) M_Api_Org_Create = prometheus.NewCounter(prometheus.CounterOpts{ Name: "api_org_create_total", Help: "api org created counter", Namespace: exporterName, }) M_Api_Dashboard_Snapshot_Create = prometheus.NewCounter(prometheus.CounterOpts{ Name: "api_dashboard_snapshot_create_total", Help: "dashboard snapshots created", Namespace: exporterName, }) M_Api_Dashboard_Snapshot_External = prometheus.NewCounter(prometheus.CounterOpts{ Name: "api_dashboard_snapshot_external_total", Help: "external dashboard snapshots created", Namespace: exporterName, }) M_Api_Dashboard_Snapshot_Get = prometheus.NewCounter(prometheus.CounterOpts{ Name: "api_dashboard_snapshot_get_total", Help: "loaded dashboards", Namespace: exporterName, }) M_Api_Dashboard_Insert = prometheus.NewCounter(prometheus.CounterOpts{ Name: "api_models_dashboard_insert_total", Help: "dashboards inserted ", Namespace: exporterName, }) M_Alerting_Result_State = prometheus.NewCounterVec(prometheus.CounterOpts{ Name: "alerting_result_total", Help: "alert execution result counter", Namespace: exporterName, }, []string{"state"}) M_Alerting_Notification_Sent = prometheus.NewCounterVec(prometheus.CounterOpts{ Name: "alerting_notification_sent_total", Help: "counter for how many alert notifications been sent", Namespace: exporterName, }, []string{"type"}) M_Aws_CloudWatch_GetMetricStatistics = prometheus.NewCounter(prometheus.CounterOpts{ Name: "aws_cloudwatch_get_metric_statistics_total", Help: "counter for getting metric statistics from aws", Namespace: exporterName, 
}) M_Aws_CloudWatch_ListMetrics = prometheus.NewCounter(prometheus.CounterOpts{ Name: "aws_cloudwatch_list_metrics_total", Help: "counter for getting list of metrics from aws", Namespace: exporterName, }) M_DB_DataSource_QueryById = prometheus.NewCounter(prometheus.CounterOpts{ Name: "db_datasource_query_by_id_total", Help: "counter for getting datasource by id", Namespace: exporterName, }) M_DataSource_ProxyReq_Timer = prometheus.NewSummary(prometheus.SummaryOpts{ Name: "api_dataproxy_request_all", Help: "summary for dashboard search duration", Namespace: exporterName, }) M_Alerting_Execution_Time = prometheus.NewSummary(prometheus.SummaryOpts{ Name: "alerting_execution_time_seconds", Help: "summary of alert exeuction duration", Namespace: exporterName, }) M_Alerting_Active_Alerts = prometheus.NewGauge(prometheus.GaugeOpts{ Name: "alerting_active_alerts", Help: "amount of active alerts", Namespace: exporterName, }) M_StatTotal_Dashboards = prometheus.NewGauge(prometheus.GaugeOpts{ Name: "stat_totals_dashboard", Help: "total amount of dashboards", Namespace: exporterName, }) M_StatTotal_Users = prometheus.NewGauge(prometheus.GaugeOpts{ Name: "stat_total_users", Help: "total amount of users", Namespace: exporterName, }) M_StatTotal_Orgs = prometheus.NewGauge(prometheus.GaugeOpts{ Name: "stat_total_orgs", Help: "total amount of orgs", Namespace: exporterName, }) M_StatTotal_Playlists = prometheus.NewGauge(prometheus.GaugeOpts{ Name: "stat_total_playlists", Help: "total amount of playlists", Namespace: exporterName, }) } func initMetricVars(settings *MetricSettings) { prometheus.MustRegister( M_Instance_Start, M_Page_Status, M_Api_Status, M_Proxy_Status, M_Http_Request_Total, M_Http_Request_Summary, M_Api_User_SignUpStarted, M_Api_User_SignUpCompleted, M_Api_User_SignUpInvite, M_Api_Dashboard_Save, M_Api_Dashboard_Get, M_Api_Dashboard_Search, M_DataSource_ProxyReq_Timer, M_Alerting_Execution_Time, M_Api_Admin_User_Create, M_Api_Login_Post, M_Api_Login_OAuth, 
M_Api_Org_Create, M_Api_Dashboard_Snapshot_Create, M_Api_Dashboard_Snapshot_External, M_Api_Dashboard_Snapshot_Get, M_Api_Dashboard_Insert, M_Alerting_Result_State, M_Alerting_Notification_Sent, M_Aws_CloudWatch_GetMetricStatistics, M_Aws_CloudWatch_ListMetrics, M_DB_DataSource_QueryById, M_Alerting_Active_Alerts, M_StatTotal_Dashboards, M_StatTotal_Users, M_StatTotal_Orgs, M_StatTotal_Playlists) go instrumentationLoop(settings) } func instrumentationLoop(settings *MetricSettings) chan struct{} { M_Instance_Start.Inc() onceEveryDayTick := time.NewTicker(time.Hour * 24) secondTicker := time.NewTicker(time.Second * time.Duration(settings.IntervalSeconds)) for { select { case <-onceEveryDayTick.C: sendUsageStats() case <-secondTicker.C: updateTotalStats() } } } var metricPublishCounter int64 = 0 func updateTotalStats() { metricPublishCounter++ if metricPublishCounter == 1 || metricPublishCounter%10 == 0 { statsQuery := models.GetSystemStatsQuery{} if err := bus.Dispatch(&statsQuery); err != nil { metricsLogger.Error("Failed to get system stats", "error", err) return } M_StatTotal_Dashboards.Set(float64(statsQuery.Result.Dashboards)) M_StatTotal_Users.Set(float64(statsQuery.Result.Users)) M_StatTotal_Playlists.Set(float64(statsQuery.Result.Playlists)) M_StatTotal_Orgs.Set(float64(statsQuery.Result.Orgs)) } } func sendUsageStats() { if !setting.ReportingEnabled { return } metricsLogger.Debug("Sending anonymous usage stats to stats.grafana.org") version := strings.Replace(setting.BuildVersion, ".", "_", -1) metrics := map[string]interface{}{} report := map[string]interface{}{ "version": version, "metrics": metrics, "os": runtime.GOOS, "arch": runtime.GOARCH, } statsQuery := models.GetSystemStatsQuery{} if err := bus.Dispatch(&statsQuery); err != nil { metricsLogger.Error("Failed to get system stats", "error", err) return } metrics["stats.dashboards.count"] = statsQuery.Result.Dashboards metrics["stats.users.count"] = statsQuery.Result.Users metrics["stats.orgs.count"] = 
statsQuery.Result.Orgs metrics["stats.playlist.count"] = statsQuery.Result.Playlists metrics["stats.plugins.apps.count"] = len(plugins.Apps) metrics["stats.plugins.panels.count"] = len(plugins.Panels) metrics["stats.plugins.datasources.count"] = len(plugins.DataSources) metrics["stats.alerts.count"] = statsQuery.Result.Alerts metrics["stats.active_users.count"] = statsQuery.Result.ActiveUsers metrics["stats.datasources.count"] = statsQuery.Result.Datasources dsStats := models.GetDataSourceStatsQuery{} if err := bus.Dispatch(&dsStats); err != nil { metricsLogger.Error("Failed to get datasource stats", "error", err) return } // send counters for each data source // but ignore any custom data sources // as sending that name could be sensitive information dsOtherCount := 0 for _, dsStat := range dsStats.Result { if models.IsKnownDataSourcePlugin(dsStat.Type) { metrics["stats.ds."+dsStat.Type+".count"] = dsStat.Count } else { dsOtherCount += dsStat.Count } } metrics["stats.ds.other.count"] = dsOtherCount out, _ := json.MarshalIndent(report, "", " ") data := bytes.NewBuffer(out) client := http.Client{Timeout: time.Duration(5 * time.Second)} go client.Post("https://stats.grafana.org/grafana-usage-report", "application/json", data) }
pkg/metrics/metrics.go
1
https://github.com/grafana/grafana/commit/491e6897e756e06f39cd912ec01a79fcb0cf31c4
[ 0.9990849494934082, 0.19615527987480164, 0.00016906106611713767, 0.0046598296612501144, 0.3733038306236267 ]
{ "id": 1, "code_window": [ "\t\tHelp: \"amount of users who have been invited\",\n", "\t\tNamespace: exporterName,\n", "\t})\n", "\n", "\tM_Api_Dashboard_Save = prometheus.NewSummary(prometheus.SummaryOpts{\n", "\t\tName: \"api_dashboard_save\",\n", "\t\tHelp: \"summary for dashboard save duration\",\n", "\t\tNamespace: exporterName,\n", "\t})\n", "\n", "\tM_Api_Dashboard_Get = prometheus.NewSummary(prometheus.SummaryOpts{\n" ], "labels": [ "keep", "keep", "keep", "keep", "keep", "replace", "keep", "keep", "keep", "keep", "keep" ], "after_edit": [ "\t\tName: \"api_dashboard_save_milleseconds\",\n" ], "file_path": "pkg/metrics/metrics.go", "type": "replace", "edit_start_line_idx": 129 }
import {describe, beforeEach, it, sinon, expect} from 'test/lib/common'; import {PromCompleter} from '../completer'; import {PrometheusDatasource} from '../datasource'; describe('Prometheus editor completer', function() { let editor = {}; let session = { getTokenAt: sinon.stub().returns({}), getLine: sinon.stub().returns(""), }; let datasourceStub = <PrometheusDatasource>{}; let completer = new PromCompleter(datasourceStub); describe("When inside brackets", () => { it("Should return range vectors", () => { completer.getCompletions(editor, session, 10, "[", (s, res) => { expect(res[0]).to.eql({caption: '1s', value: '[1s', meta: 'range vector'}); }); }); }); });
public/app/plugins/datasource/prometheus/specs/completer_specs.ts
0
https://github.com/grafana/grafana/commit/491e6897e756e06f39cd912ec01a79fcb0cf31c4
[ 0.00017628011119086295, 0.0001760205050231889, 0.00017556335660628974, 0.00017621809092815965, 3.2425310791950324e-7 ]
{ "id": 1, "code_window": [ "\t\tHelp: \"amount of users who have been invited\",\n", "\t\tNamespace: exporterName,\n", "\t})\n", "\n", "\tM_Api_Dashboard_Save = prometheus.NewSummary(prometheus.SummaryOpts{\n", "\t\tName: \"api_dashboard_save\",\n", "\t\tHelp: \"summary for dashboard save duration\",\n", "\t\tNamespace: exporterName,\n", "\t})\n", "\n", "\tM_Api_Dashboard_Get = prometheus.NewSummary(prometheus.SummaryOpts{\n" ], "labels": [ "keep", "keep", "keep", "keep", "keep", "replace", "keep", "keep", "keep", "keep", "keep" ], "after_edit": [ "\t\tName: \"api_dashboard_save_milleseconds\",\n" ], "file_path": "pkg/metrics/metrics.go", "type": "replace", "edit_start_line_idx": 129 }
///<reference path="../../../headers/common.d.ts" /> import _ from 'lodash'; import $ from 'jquery'; import moment from 'moment'; import angular from 'angular'; import {transformers} from './transformers'; import kbn from 'app/core/utils/kbn'; export class TablePanelEditorCtrl { panel: any; panelCtrl: any; transformers: any; fontSizes: any; addColumnSegment: any; getColumnNames: any; canSetColumns: boolean; columnsHelpMessage: string; /** @ngInject */ constructor($scope, private $q, private uiSegmentSrv) { $scope.editor = this; this.panelCtrl = $scope.ctrl; this.panel = this.panelCtrl.panel; this.transformers = transformers; this.fontSizes = ['80%', '90%', '100%', '110%', '120%', '130%', '150%', '160%', '180%', '200%', '220%', '250%']; this.addColumnSegment = uiSegmentSrv.newPlusButton(); this.updateTransformHints(); } updateTransformHints() { this.canSetColumns = false; this.columnsHelpMessage = ''; switch (this.panel.transform) { case "timeseries_aggregations": { this.canSetColumns = true; break; } case "json": { this.canSetColumns = true; break; } case "table": { this.columnsHelpMessage = "Columns and their order are determined by the data query"; } } } getColumnOptions() { if (!this.panelCtrl.dataRaw) { return this.$q.when([]); } var columns = this.transformers[this.panel.transform].getColumns(this.panelCtrl.dataRaw); var segments = _.map(columns, (c: any) => this.uiSegmentSrv.newSegment({value: c.text})); return this.$q.when(segments); } addColumn() { var columns = transformers[this.panel.transform].getColumns(this.panelCtrl.dataRaw); var column = _.find(columns, {text: this.addColumnSegment.value}); if (column) { this.panel.columns.push(column); this.render(); } var plusButton = this.uiSegmentSrv.newPlusButton(); this.addColumnSegment.html = plusButton.html; this.addColumnSegment.value = plusButton.value; } transformChanged() { this.panel.columns = []; if (this.panel.transform === 'timeseries_aggregations') { this.panel.columns.push({text: 'Avg', value: 
'avg'}); } this.updateTransformHints(); this.render(); } render() { this.panelCtrl.render(); } removeColumn(column) { this.panel.columns = _.without(this.panel.columns, column); this.panelCtrl.render(); } } /** @ngInject */ export function tablePanelEditor($q, uiSegmentSrv) { 'use strict'; return { restrict: 'E', scope: true, templateUrl: 'public/app/plugins/panel/table/editor.html', controller: TablePanelEditorCtrl, }; }
public/app/plugins/panel/table/editor.ts
0
https://github.com/grafana/grafana/commit/491e6897e756e06f39cd912ec01a79fcb0cf31c4
[ 0.00017549388576298952, 0.00017092777125071734, 0.00015951764362398535, 0.00017274325364269316, 0.000004523406005318975 ]
{ "id": 1, "code_window": [ "\t\tHelp: \"amount of users who have been invited\",\n", "\t\tNamespace: exporterName,\n", "\t})\n", "\n", "\tM_Api_Dashboard_Save = prometheus.NewSummary(prometheus.SummaryOpts{\n", "\t\tName: \"api_dashboard_save\",\n", "\t\tHelp: \"summary for dashboard save duration\",\n", "\t\tNamespace: exporterName,\n", "\t})\n", "\n", "\tM_Api_Dashboard_Get = prometheus.NewSummary(prometheus.SummaryOpts{\n" ], "labels": [ "keep", "keep", "keep", "keep", "keep", "replace", "keep", "keep", "keep", "keep", "keep" ], "after_edit": [ "\t\tName: \"api_dashboard_save_milleseconds\",\n" ], "file_path": "pkg/metrics/metrics.go", "type": "replace", "edit_start_line_idx": 129 }
// Global values // -------------------------------------------------- // Grays // ------------------------- $black: #000; // ------------------------- $black: #000; $dark-1: #141414; $dark-2: #1f1d1d; $dark-3: #292929; $dark-4: #333333; $dark-5: #444444; $gray-1: #555555; $gray-2: #7B7B7B; $gray-3: #b3b3b3; $gray-4: #D8D9DA; $gray-5: #ECECEC; $gray-6: #f4f5f8; $gray-7: #fbfbfb; $white: #fff; // Accent colors // ------------------------- $blue: #33B5E5; $blue-dark: #005f81; $green: #609000; $red: #CC3900; $yellow: #ECBB13; $pink: #FF4444; $purple: #9933CC; $variable: #32D1DF; $orange: #eb7b18; $brand-primary: $orange; $brand-success: $green; $brand-warning: $brand-primary; $brand-danger: $red; // Status colors // ------------------------- $online: #10a345; $warn: #F79520; $critical: #ed2e18; // Scaffolding // ------------------------- $body-bg: rgb(20,20,20); $page-bg: $dark-2; $body-color: $gray-4; $text-color: $gray-4; $text-color-strong: $white; $text-color-weak: $gray-2; $text-color-faint: $dark-5; $text-color-emphasis: $gray-5; $text-shadow-strong: 1px 1px 4px $black; $text-shadow-faint: 1px 1px 4px rgb(45, 45, 45); // gradients $brand-gradient: linear-gradient(to right, rgba(255,213,0,0.7) 0%, rgba(255,68,0,0.7) 99%, rgba(255,68,0,0.7) 100%); $page-gradient: linear-gradient(60deg, transparent 70%, darken($page-bg, 4%) 98%); // Links // ------------------------- $link-color: darken($white, 11%); $link-color-disabled: darken($link-color, 30%); $link-hover-color: $white; $external-link-color: $blue; // Typography // ------------------------- $headings-color: darken($white,11%); $abbr-border-color: $gray-3 !default; $text-muted: $text-color-weak; $blockquote-small-color: $gray-3 !default; $blockquote-border-color: $gray-4 !default; $hr-border-color: rgba(0,0,0,.1) !default; // Components $component-active-color: #fff !default; $component-active-bg: $brand-primary !default; // Panel // ------------------------- $panel-bg: $dark-2; $panel-border: solid 1px $dark-3; 
$panel-drop-zone-bg: repeating-linear-gradient(-128deg, #111, #111 10px, #191919 10px, #222 20px); $panel-menu-border: solid 1px black; $divider-border-color: #555; // Graphite Target Editor $tight-form-border: #050505; $tight-form-bg: $dark-3; $tight-form-func-bg: #333; $tight-form-func-highlight-bg: #444; $modal-background: $black; $code-tag-bg: $gray-1; $code-tag-border: lighten($code-tag-bg, 2%); // Lists $grafanaListBackground: $dark-3; $grafanaListAccent: lighten($dark-2, 2%); $grafanaListBorderTop: $dark-3; $grafanaListBorderBottom: $black; $grafanaListHighlight: #333; $grafanaListMainLinkColor: $text-color; // Scrollbars $scrollbarBackground: #3a3a3a; $scrollbarBackground2: #3a3a3a; $scrollbarBorder: black; // Tables // ------------------------- $table-bg: transparent; // overall background-color $table-bg-accent: $dark-3; // for striping $table-bg-hover: $dark-4; // for hover $table-border: $dark-3; // table and cell border // Buttons // ------------------------- $btn-primary-bg: $brand-primary; $btn-primary-bg-hl: lighten($brand-primary, 8%); $btn-secondary-bg: $blue-dark; $btn-secondary-bg-hl: lighten($blue-dark, 5%); $btn-success-bg: lighten($green, 3%); $btn-success-bg-hl: darken($green, 3%); $btn-warning-bg: $brand-warning; $btn-warning-bg-hl: lighten($brand-warning, 8%); $btn-danger-bg: $red; $btn-danger-bg-hl: lighten($red, 5%); $btn-inverse-bg: $dark-3; $btn-inverse-bg-hl: lighten($dark-3, 4%); $btn-inverse-text-color: $link-color; $btn-link-color: $gray-3; $iconContainerBackground: $black; $btn-divider-left: $dark-4; $btn-divider-right: $dark-2; $btn-drag-image: '../img/grab_dark.svg'; // Forms // ------------------------- $input-bg: $dark-4; $input-bg-disabled: $dark-3; $input-color: $gray-4; $input-border-color: $dark-4; $input-box-shadow: inset 1px 0px 0.3rem 0px rgba(150, 150, 150, 0.10); $input-border-focus: $input-border-color !default; $input-box-shadow-focus: rgba(102,175,233,.6) !default; $input-color-placeholder: $gray-1 !default; 
$input-label-bg: $dark-3; $input-invalid-border-color: lighten($red, 5%); // Search $search-shadow: 0 0 35px 0 $body-bg; // Dropdowns // ------------------------- $dropdownBackground: $dark-3; $dropdownBorder: rgba(0,0,0,.2); $dropdownDividerTop: transparent; $dropdownDividerBottom: #444; $dropdownDivider: $dropdownDividerBottom; $dropdownTitle: $link-color-disabled; $dropdownLinkColor: $text-color; $dropdownLinkColorHover: $white; $dropdownLinkColorActive: $white; $dropdownLinkBackgroundActive: $dark-4; $dropdownLinkBackgroundHover: $dark-4; // COMPONENT VARIABLES // -------------------------------------------------- // ------------------------- $placeholderText: darken($text-color, 25%); // Horizontal forms & lists // ------------------------- $horizontalComponentOffset: 180px; // Wells // ------------------------- $wellBackground: #131517; $navbarHeight: 52px; $navbarBackgroundHighlight: $dark-3; $navbarBackground: $dark-3; $navbarBorder: 1px solid $body-bg; $navbarText: $gray-4; $navbarLinkColor: $gray-4; $navbarLinkColorHover: $white; $navbarLinkColorActive: $navbarLinkColorHover; $navbarLinkBackgroundHover: transparent; $navbarLinkBackgroundActive: $navbarBackground; $navbarBrandColor: $link-color; $navbarDropdownShadow: inset 0px 4px 10px -4px $body-bg; $navbarButtonBackground: lighten($navbarBackground, 3%); $navbarButtonBackgroundHighlight: lighten($navbarBackground, 5%); // Sidemenu // ------------------------- $side-menu-bg: $body-bg; $side-menu-item-hover-bg: $dark-3; $side-menu-opacity: 0.97; // Pagination // ------------------------- $paginationBackground: $body-bg; $paginationBorder: transparent; $paginationActiveBackground: $blue; // Form states and alerts // ------------------------- $warning-text-color: $warn; $error-text-color: #E84D4D; $success-text-color: #12D95A; $info-text-color: $blue-dark; $alert-error-bg: linear-gradient(90deg, #d44939, #e0603d); $alert-success-bg: linear-gradient(90deg, #3aa655, #47b274); $alert-warning-bg: 
linear-gradient(90deg, #d44939, #e0603d); $alert-info-bg: linear-gradient(100deg, #1a4552, #00374a); // popover $popover-bg: $panel-bg; $popover-color: $text-color; $popover-border-color: $gray-1; $popover-help-bg: $btn-secondary-bg; $popover-help-color: $text-color; $popover-error-bg: $btn-danger-bg; // Tooltips and popovers // ------------------------- $tooltipColor: $popover-help-color; $tooltipBackground: $popover-help-bg; $tooltipArrowWidth: 5px; $tooltipArrowColor: $tooltipBackground; $tooltipLinkColor: $link-color; $graph-tooltip-bg: $dark-1; // images $checkboxImageUrl: '../img/checkbox.png'; // cards $card-background: linear-gradient(135deg, #2f2f2f, #262626); $card-background-hover: linear-gradient(135deg, #343434, #262626); $card-shadow: -1px -1px 0 0 hsla(0, 0%, 100%, .1), 1px 1px 0 0 rgba(0, 0, 0, .3); // info box $info-box-background: linear-gradient(100deg, #1a4552, #00374a); // footer $footer-link-color: $gray-1; $footer-link-hover: $gray-4; // collapse box $collapse-box-body-border: $dark-5; $collapse-box-body-error-border: $red; // json-explorer $json-explorer-default-color: $text-color; $json-explorer-string-color: #23d662; $json-explorer-number-color: $variable; $json-explorer-boolean-color: $variable; $json-explorer-null-color: #EEC97D; $json-explorer-undefined-color: rgb(239, 143, 190); $json-explorer-function-color: #FD48CB; $json-explorer-rotate-time: 100ms; $json-explorer-toggler-opacity: 0.6; $json-explorer-toggler-color: #45376F; $json-explorer-bracket-color: #9494FF; $json-explorer-key-color: #23A0DB; $json-explorer-url-color: #027BFF; // Changelog and diff // ------------------------- $diff-label-bg: $dark-2; $diff-label-fg: $white; $diff-group-bg: $dark-4; $diff-arrow-color: $white; $diff-json-bg: $dark-4; $diff-json-fg: $gray-5; $diff-json-added: #457740; $diff-json-deleted: #a04338; $diff-json-old: #a04338; $diff-json-new: #457740; $diff-json-changed-fg: $gray-5; $diff-json-changed-num: $text-color; $diff-json-icon: $gray-7;
public/sass/_variables.dark.scss
0
https://github.com/grafana/grafana/commit/491e6897e756e06f39cd912ec01a79fcb0cf31c4
[ 0.00017727813974488527, 0.0001740185107337311, 0.00016552978195250034, 0.00017446995479986072, 0.000002506528744561365 ]
{ "id": 2, "code_window": [ "\t})\n", "\n", "\tM_Api_Dashboard_Get = prometheus.NewSummary(prometheus.SummaryOpts{\n", "\t\tName: \"api_dashboard_get\",\n", "\t\tHelp: \"summary for dashboard get duration\",\n", "\t\tNamespace: exporterName,\n", "\t})\n", "\n" ], "labels": [ "keep", "keep", "keep", "replace", "keep", "keep", "keep", "keep" ], "after_edit": [ "\t\tName: \"api_dashboard_get_milleseconds\",\n" ], "file_path": "pkg/metrics/metrics.go", "type": "replace", "edit_start_line_idx": 135 }
package metrics import ( "bytes" "encoding/json" "net/http" "runtime" "strings" "time" "github.com/grafana/grafana/pkg/bus" "github.com/grafana/grafana/pkg/models" "github.com/grafana/grafana/pkg/plugins" "github.com/grafana/grafana/pkg/setting" "github.com/prometheus/client_golang/prometheus" ) const exporterName = "grafana" var ( M_Instance_Start prometheus.Counter M_Page_Status *prometheus.CounterVec M_Api_Status *prometheus.CounterVec M_Proxy_Status *prometheus.CounterVec M_Http_Request_Total *prometheus.CounterVec M_Http_Request_Summary *prometheus.SummaryVec M_Api_User_SignUpStarted prometheus.Counter M_Api_User_SignUpCompleted prometheus.Counter M_Api_User_SignUpInvite prometheus.Counter M_Api_Dashboard_Save prometheus.Summary M_Api_Dashboard_Get prometheus.Summary M_Api_Dashboard_Search prometheus.Summary M_Api_Admin_User_Create prometheus.Counter M_Api_Login_Post prometheus.Counter M_Api_Login_OAuth prometheus.Counter M_Api_Org_Create prometheus.Counter M_Api_Dashboard_Snapshot_Create prometheus.Counter M_Api_Dashboard_Snapshot_External prometheus.Counter M_Api_Dashboard_Snapshot_Get prometheus.Counter M_Api_Dashboard_Insert prometheus.Counter M_Alerting_Result_State *prometheus.CounterVec M_Alerting_Notification_Sent *prometheus.CounterVec M_Aws_CloudWatch_GetMetricStatistics prometheus.Counter M_Aws_CloudWatch_ListMetrics prometheus.Counter M_DB_DataSource_QueryById prometheus.Counter // Timers M_DataSource_ProxyReq_Timer prometheus.Summary M_Alerting_Execution_Time prometheus.Summary // StatTotals M_Alerting_Active_Alerts prometheus.Gauge M_StatTotal_Dashboards prometheus.Gauge M_StatTotal_Users prometheus.Gauge M_StatTotal_Orgs prometheus.Gauge M_StatTotal_Playlists prometheus.Gauge ) func init() { M_Instance_Start = prometheus.NewCounter(prometheus.CounterOpts{ Name: "instance_start_total", Help: "counter for started instances", Namespace: exporterName, }) M_Page_Status = prometheus.NewCounterVec( prometheus.CounterOpts{ Name: 
"page_response_status_total", Help: "page http response status", Namespace: exporterName, }, []string{"code"}, ) M_Api_Status = prometheus.NewCounterVec( prometheus.CounterOpts{ Name: "api_response_status_total", Help: "api http response status", Namespace: exporterName, }, []string{"code"}, ) M_Proxy_Status = prometheus.NewCounterVec( prometheus.CounterOpts{ Name: "proxy_response_status_total", Help: "proxy http response status", Namespace: exporterName, }, []string{"code"}, ) M_Http_Request_Total = prometheus.NewCounterVec( prometheus.CounterOpts{ Name: "http_request_total", Help: "http request counter", }, []string{"handler", "statuscode", "method"}, ) M_Http_Request_Summary = prometheus.NewSummaryVec( prometheus.SummaryOpts{ Name: "http_request_duration", Help: "http request summary", }, []string{"handler", "statuscode", "method"}, ) M_Api_User_SignUpStarted = prometheus.NewCounter(prometheus.CounterOpts{ Name: "api_user_signup_started_total", Help: "amount of users who started the signup flow", Namespace: exporterName, }) M_Api_User_SignUpCompleted = prometheus.NewCounter(prometheus.CounterOpts{ Name: "api_user_signup_completed_total", Help: "amount of users who completed the signup flow", Namespace: exporterName, }) M_Api_User_SignUpInvite = prometheus.NewCounter(prometheus.CounterOpts{ Name: "api_user_signup_invite_total", Help: "amount of users who have been invited", Namespace: exporterName, }) M_Api_Dashboard_Save = prometheus.NewSummary(prometheus.SummaryOpts{ Name: "api_dashboard_save", Help: "summary for dashboard save duration", Namespace: exporterName, }) M_Api_Dashboard_Get = prometheus.NewSummary(prometheus.SummaryOpts{ Name: "api_dashboard_get", Help: "summary for dashboard get duration", Namespace: exporterName, }) M_Api_Dashboard_Search = prometheus.NewSummary(prometheus.SummaryOpts{ Name: "api_dashboard_search", Help: "summary for dashboard search duration", Namespace: exporterName, }) M_Api_Admin_User_Create = 
prometheus.NewCounter(prometheus.CounterOpts{ Name: "api_admin_user_created_total", Help: "api admin user created counter", Namespace: exporterName, }) M_Api_Login_Post = prometheus.NewCounter(prometheus.CounterOpts{ Name: "api_login_post_total", Help: "api login post counter", Namespace: exporterName, }) M_Api_Login_OAuth = prometheus.NewCounter(prometheus.CounterOpts{ Name: "api_login_oauth_total", Help: "api login oauth counter", Namespace: exporterName, }) M_Api_Org_Create = prometheus.NewCounter(prometheus.CounterOpts{ Name: "api_org_create_total", Help: "api org created counter", Namespace: exporterName, }) M_Api_Dashboard_Snapshot_Create = prometheus.NewCounter(prometheus.CounterOpts{ Name: "api_dashboard_snapshot_create_total", Help: "dashboard snapshots created", Namespace: exporterName, }) M_Api_Dashboard_Snapshot_External = prometheus.NewCounter(prometheus.CounterOpts{ Name: "api_dashboard_snapshot_external_total", Help: "external dashboard snapshots created", Namespace: exporterName, }) M_Api_Dashboard_Snapshot_Get = prometheus.NewCounter(prometheus.CounterOpts{ Name: "api_dashboard_snapshot_get_total", Help: "loaded dashboards", Namespace: exporterName, }) M_Api_Dashboard_Insert = prometheus.NewCounter(prometheus.CounterOpts{ Name: "api_models_dashboard_insert_total", Help: "dashboards inserted ", Namespace: exporterName, }) M_Alerting_Result_State = prometheus.NewCounterVec(prometheus.CounterOpts{ Name: "alerting_result_total", Help: "alert execution result counter", Namespace: exporterName, }, []string{"state"}) M_Alerting_Notification_Sent = prometheus.NewCounterVec(prometheus.CounterOpts{ Name: "alerting_notification_sent_total", Help: "counter for how many alert notifications been sent", Namespace: exporterName, }, []string{"type"}) M_Aws_CloudWatch_GetMetricStatistics = prometheus.NewCounter(prometheus.CounterOpts{ Name: "aws_cloudwatch_get_metric_statistics_total", Help: "counter for getting metric statistics from aws", Namespace: exporterName, 
}) M_Aws_CloudWatch_ListMetrics = prometheus.NewCounter(prometheus.CounterOpts{ Name: "aws_cloudwatch_list_metrics_total", Help: "counter for getting list of metrics from aws", Namespace: exporterName, }) M_DB_DataSource_QueryById = prometheus.NewCounter(prometheus.CounterOpts{ Name: "db_datasource_query_by_id_total", Help: "counter for getting datasource by id", Namespace: exporterName, }) M_DataSource_ProxyReq_Timer = prometheus.NewSummary(prometheus.SummaryOpts{ Name: "api_dataproxy_request_all", Help: "summary for dashboard search duration", Namespace: exporterName, }) M_Alerting_Execution_Time = prometheus.NewSummary(prometheus.SummaryOpts{ Name: "alerting_execution_time_seconds", Help: "summary of alert exeuction duration", Namespace: exporterName, }) M_Alerting_Active_Alerts = prometheus.NewGauge(prometheus.GaugeOpts{ Name: "alerting_active_alerts", Help: "amount of active alerts", Namespace: exporterName, }) M_StatTotal_Dashboards = prometheus.NewGauge(prometheus.GaugeOpts{ Name: "stat_totals_dashboard", Help: "total amount of dashboards", Namespace: exporterName, }) M_StatTotal_Users = prometheus.NewGauge(prometheus.GaugeOpts{ Name: "stat_total_users", Help: "total amount of users", Namespace: exporterName, }) M_StatTotal_Orgs = prometheus.NewGauge(prometheus.GaugeOpts{ Name: "stat_total_orgs", Help: "total amount of orgs", Namespace: exporterName, }) M_StatTotal_Playlists = prometheus.NewGauge(prometheus.GaugeOpts{ Name: "stat_total_playlists", Help: "total amount of playlists", Namespace: exporterName, }) } func initMetricVars(settings *MetricSettings) { prometheus.MustRegister( M_Instance_Start, M_Page_Status, M_Api_Status, M_Proxy_Status, M_Http_Request_Total, M_Http_Request_Summary, M_Api_User_SignUpStarted, M_Api_User_SignUpCompleted, M_Api_User_SignUpInvite, M_Api_Dashboard_Save, M_Api_Dashboard_Get, M_Api_Dashboard_Search, M_DataSource_ProxyReq_Timer, M_Alerting_Execution_Time, M_Api_Admin_User_Create, M_Api_Login_Post, M_Api_Login_OAuth, 
M_Api_Org_Create, M_Api_Dashboard_Snapshot_Create, M_Api_Dashboard_Snapshot_External, M_Api_Dashboard_Snapshot_Get, M_Api_Dashboard_Insert, M_Alerting_Result_State, M_Alerting_Notification_Sent, M_Aws_CloudWatch_GetMetricStatistics, M_Aws_CloudWatch_ListMetrics, M_DB_DataSource_QueryById, M_Alerting_Active_Alerts, M_StatTotal_Dashboards, M_StatTotal_Users, M_StatTotal_Orgs, M_StatTotal_Playlists) go instrumentationLoop(settings) } func instrumentationLoop(settings *MetricSettings) chan struct{} { M_Instance_Start.Inc() onceEveryDayTick := time.NewTicker(time.Hour * 24) secondTicker := time.NewTicker(time.Second * time.Duration(settings.IntervalSeconds)) for { select { case <-onceEveryDayTick.C: sendUsageStats() case <-secondTicker.C: updateTotalStats() } } } var metricPublishCounter int64 = 0 func updateTotalStats() { metricPublishCounter++ if metricPublishCounter == 1 || metricPublishCounter%10 == 0 { statsQuery := models.GetSystemStatsQuery{} if err := bus.Dispatch(&statsQuery); err != nil { metricsLogger.Error("Failed to get system stats", "error", err) return } M_StatTotal_Dashboards.Set(float64(statsQuery.Result.Dashboards)) M_StatTotal_Users.Set(float64(statsQuery.Result.Users)) M_StatTotal_Playlists.Set(float64(statsQuery.Result.Playlists)) M_StatTotal_Orgs.Set(float64(statsQuery.Result.Orgs)) } } func sendUsageStats() { if !setting.ReportingEnabled { return } metricsLogger.Debug("Sending anonymous usage stats to stats.grafana.org") version := strings.Replace(setting.BuildVersion, ".", "_", -1) metrics := map[string]interface{}{} report := map[string]interface{}{ "version": version, "metrics": metrics, "os": runtime.GOOS, "arch": runtime.GOARCH, } statsQuery := models.GetSystemStatsQuery{} if err := bus.Dispatch(&statsQuery); err != nil { metricsLogger.Error("Failed to get system stats", "error", err) return } metrics["stats.dashboards.count"] = statsQuery.Result.Dashboards metrics["stats.users.count"] = statsQuery.Result.Users metrics["stats.orgs.count"] = 
statsQuery.Result.Orgs metrics["stats.playlist.count"] = statsQuery.Result.Playlists metrics["stats.plugins.apps.count"] = len(plugins.Apps) metrics["stats.plugins.panels.count"] = len(plugins.Panels) metrics["stats.plugins.datasources.count"] = len(plugins.DataSources) metrics["stats.alerts.count"] = statsQuery.Result.Alerts metrics["stats.active_users.count"] = statsQuery.Result.ActiveUsers metrics["stats.datasources.count"] = statsQuery.Result.Datasources dsStats := models.GetDataSourceStatsQuery{} if err := bus.Dispatch(&dsStats); err != nil { metricsLogger.Error("Failed to get datasource stats", "error", err) return } // send counters for each data source // but ignore any custom data sources // as sending that name could be sensitive information dsOtherCount := 0 for _, dsStat := range dsStats.Result { if models.IsKnownDataSourcePlugin(dsStat.Type) { metrics["stats.ds."+dsStat.Type+".count"] = dsStat.Count } else { dsOtherCount += dsStat.Count } } metrics["stats.ds.other.count"] = dsOtherCount out, _ := json.MarshalIndent(report, "", " ") data := bytes.NewBuffer(out) client := http.Client{Timeout: time.Duration(5 * time.Second)} go client.Post("https://stats.grafana.org/grafana-usage-report", "application/json", data) }
pkg/metrics/metrics.go
1
https://github.com/grafana/grafana/commit/491e6897e756e06f39cd912ec01a79fcb0cf31c4
[ 0.9991438388824463, 0.1868177354335785, 0.00016653441707603633, 0.013081725686788559, 0.3510536849498749 ]
{ "id": 2, "code_window": [ "\t})\n", "\n", "\tM_Api_Dashboard_Get = prometheus.NewSummary(prometheus.SummaryOpts{\n", "\t\tName: \"api_dashboard_get\",\n", "\t\tHelp: \"summary for dashboard get duration\",\n", "\t\tNamespace: exporterName,\n", "\t})\n", "\n" ], "labels": [ "keep", "keep", "keep", "replace", "keep", "keep", "keep", "keep" ], "after_edit": [ "\t\tName: \"api_dashboard_get_milleseconds\",\n" ], "file_path": "pkg/metrics/metrics.go", "type": "replace", "edit_start_line_idx": 135 }
///<reference path="../../headers/common.d.ts" /> import _ from 'lodash'; import kbn from 'app/core/utils/kbn'; import {Variable, assignModelProperties, variableTypes} from './variable'; import {VariableSrv} from './variable_srv'; export class IntervalVariable implements Variable { auto_count: number; auto_min: number; options: any; auto: boolean; query: string; refresh: number; current: any; defaults = { type: 'interval', name: '', hide: 0, label: '', refresh: 2, options: [], current: {}, query: '1m,10m,30m,1h,6h,12h,1d,7d,14d,30d', auto: false, auto_min: '10s', auto_count: 30, }; /** @ngInject **/ constructor(private model, private timeSrv, private templateSrv, private variableSrv) { assignModelProperties(this, model, this.defaults); this.refresh = 2; } getSaveModel() { assignModelProperties(this.model, this, this.defaults); return this.model; } setValue(option) { this.updateAutoValue(); return this.variableSrv.setOptionAsCurrent(this, option); } updateAutoValue() { if (!this.auto) { return; } // add auto option if missing if (this.options.length && this.options[0].text !== 'auto') { this.options.unshift({ text: 'auto', value: '$__auto_interval' }); } var res = kbn.calculateInterval(this.timeSrv.timeRange(), this.auto_count, this.auto_min); this.templateSrv.setGrafanaVariable('$__auto_interval', res.interval); } updateOptions() { // extract options between quotes and/or comma this.options = _.map(this.query.match(/(["'])(.*?)\1|\w+/g), function(text) { text = text.replace(/["']+/g, ''); return {text: text.trim(), value: text.trim()}; }); this.updateAutoValue(); return this.variableSrv.validateVariableSelectionState(this); } dependsOn(variable) { return false; } setValueFromUrl(urlValue) { this.updateAutoValue(); return this.variableSrv.setOptionFromUrl(this, urlValue); } getValueForUrl() { return this.current.value; } } variableTypes['interval'] = { name: 'Interval', ctor: IntervalVariable, description: 'Define a timespan interval (ex 1m, 1h, 1d)', };
public/app/features/templating/interval_variable.ts
0
https://github.com/grafana/grafana/commit/491e6897e756e06f39cd912ec01a79fcb0cf31c4
[ 0.00017928284069057554, 0.00017665095219854265, 0.00017328254762105644, 0.00017709027451928705, 0.0000017569120700500207 ]
{ "id": 2, "code_window": [ "\t})\n", "\n", "\tM_Api_Dashboard_Get = prometheus.NewSummary(prometheus.SummaryOpts{\n", "\t\tName: \"api_dashboard_get\",\n", "\t\tHelp: \"summary for dashboard get duration\",\n", "\t\tNamespace: exporterName,\n", "\t})\n", "\n" ], "labels": [ "keep", "keep", "keep", "replace", "keep", "keep", "keep", "keep" ], "after_edit": [ "\t\tName: \"api_dashboard_get_milleseconds\",\n" ], "file_path": "pkg/metrics/metrics.go", "type": "replace", "edit_start_line_idx": 135 }
package sqlstore import ( "testing" . "github.com/smartystreets/goconvey/convey" m "github.com/grafana/grafana/pkg/models" "github.com/grafana/grafana/pkg/setting" ) func TestAccountDataAccess(t *testing.T) { Convey("Testing Account DB Access", t, func() { InitTestDB(t) Convey("Given single org mode", func() { setting.AutoAssignOrg = true setting.AutoAssignOrgRole = "Viewer" Convey("Users should be added to default organization", func() { ac1cmd := m.CreateUserCommand{Login: "ac1", Email: "[email protected]", Name: "ac1 name"} ac2cmd := m.CreateUserCommand{Login: "ac2", Email: "[email protected]", Name: "ac2 name"} err := CreateUser(&ac1cmd) So(err, ShouldBeNil) err = CreateUser(&ac2cmd) So(err, ShouldBeNil) q1 := m.GetUserOrgListQuery{UserId: ac1cmd.Result.Id} q2 := m.GetUserOrgListQuery{UserId: ac2cmd.Result.Id} GetUserOrgList(&q1) GetUserOrgList(&q2) So(q1.Result[0].OrgId, ShouldEqual, q2.Result[0].OrgId) So(q1.Result[0].Role, ShouldEqual, "Viewer") }) }) Convey("Given two saved users", func() { setting.AutoAssignOrg = false ac1cmd := m.CreateUserCommand{Login: "ac1", Email: "[email protected]", Name: "ac1 name"} ac2cmd := m.CreateUserCommand{Login: "ac2", Email: "[email protected]", Name: "ac2 name", IsAdmin: true} err := CreateUser(&ac1cmd) err = CreateUser(&ac2cmd) So(err, ShouldBeNil) ac1 := ac1cmd.Result ac2 := ac2cmd.Result Convey("Should be able to read user info projection", func() { query := m.GetUserProfileQuery{UserId: ac1.Id} err = GetUserProfile(&query) So(err, ShouldBeNil) So(query.Result.Email, ShouldEqual, "[email protected]") So(query.Result.Login, ShouldEqual, "ac1") }) Convey("Can search users", func() { query := m.SearchUsersQuery{Query: ""} err := SearchUsers(&query) So(err, ShouldBeNil) So(query.Result.Users[0].Email, ShouldEqual, "[email protected]") So(query.Result.Users[1].Email, ShouldEqual, "[email protected]") }) Convey("Given an added org user", func() { cmd := m.AddOrgUserCommand{ OrgId: ac1.OrgId, UserId: ac2.Id, Role: 
m.ROLE_VIEWER, } err := AddOrgUser(&cmd) Convey("Should have been saved without error", func() { So(err, ShouldBeNil) }) Convey("Can update org user role", func() { updateCmd := m.UpdateOrgUserCommand{OrgId: ac1.OrgId, UserId: ac2.Id, Role: m.ROLE_ADMIN} err = UpdateOrgUser(&updateCmd) So(err, ShouldBeNil) orgUsersQuery := m.GetOrgUsersQuery{OrgId: ac1.OrgId} err = GetOrgUsers(&orgUsersQuery) So(err, ShouldBeNil) So(orgUsersQuery.Result[1].Role, ShouldEqual, m.ROLE_ADMIN) }) Convey("Can get logged in user projection", func() { query := m.GetSignedInUserQuery{UserId: ac2.Id} err := GetSignedInUser(&query) So(err, ShouldBeNil) So(query.Result.Email, ShouldEqual, "[email protected]") So(query.Result.OrgId, ShouldEqual, ac2.OrgId) So(query.Result.Name, ShouldEqual, "ac2 name") So(query.Result.Login, ShouldEqual, "ac2") So(query.Result.OrgRole, ShouldEqual, "Admin") So(query.Result.OrgName, ShouldEqual, "[email protected]") So(query.Result.IsGrafanaAdmin, ShouldBeTrue) }) Convey("Can get user organizations", func() { query := m.GetUserOrgListQuery{UserId: ac2.Id} err := GetUserOrgList(&query) So(err, ShouldBeNil) So(len(query.Result), ShouldEqual, 2) }) Convey("Can get organization users", func() { query := m.GetOrgUsersQuery{OrgId: ac1.OrgId} err := GetOrgUsers(&query) So(err, ShouldBeNil) So(len(query.Result), ShouldEqual, 2) So(query.Result[0].Role, ShouldEqual, "Admin") }) Convey("Can set using org", func() { cmd := m.SetUsingOrgCommand{UserId: ac2.Id, OrgId: ac1.Id} err := SetUsingOrg(&cmd) So(err, ShouldBeNil) Convey("SignedInUserQuery with a different org", func() { query := m.GetSignedInUserQuery{UserId: ac2.Id} err := GetSignedInUser(&query) So(err, ShouldBeNil) So(query.Result.OrgId, ShouldEqual, ac1.Id) So(query.Result.Email, ShouldEqual, "[email protected]") So(query.Result.Name, ShouldEqual, "ac2 name") So(query.Result.Login, ShouldEqual, "ac2") So(query.Result.OrgName, ShouldEqual, "[email protected]") So(query.Result.OrgRole, ShouldEqual, "Viewer") }) }) 
Convey("Cannot delete last admin org user", func() { cmd := m.RemoveOrgUserCommand{OrgId: ac1.OrgId, UserId: ac1.Id} err := RemoveOrgUser(&cmd) So(err, ShouldEqual, m.ErrLastOrgAdmin) }) Convey("Cannot update role so no one is admin user", func() { cmd := m.UpdateOrgUserCommand{OrgId: ac1.OrgId, UserId: ac1.Id, Role: m.ROLE_VIEWER} err := UpdateOrgUser(&cmd) So(err, ShouldEqual, m.ErrLastOrgAdmin) }) }) }) }) }
pkg/services/sqlstore/org_test.go
0
https://github.com/grafana/grafana/commit/491e6897e756e06f39cd912ec01a79fcb0cf31c4
[ 0.00017928298620972782, 0.00017345424566883594, 0.0001670645287958905, 0.0001736646518111229, 0.00000305058051708329 ]
{ "id": 2, "code_window": [ "\t})\n", "\n", "\tM_Api_Dashboard_Get = prometheus.NewSummary(prometheus.SummaryOpts{\n", "\t\tName: \"api_dashboard_get\",\n", "\t\tHelp: \"summary for dashboard get duration\",\n", "\t\tNamespace: exporterName,\n", "\t})\n", "\n" ], "labels": [ "keep", "keep", "keep", "replace", "keep", "keep", "keep", "keep" ], "after_edit": [ "\t\tName: \"api_dashboard_get_milleseconds\",\n" ], "file_path": "pkg/metrics/metrics.go", "type": "replace", "edit_start_line_idx": 135 }
Apache License Version 2.0, January 2004 http://www.apache.org/licenses/ TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION 1. Definitions. "License" shall mean the terms and conditions for use, reproduction, and distribution as defined by Sections 1 through 9 of this document. "Licensor" shall mean the copyright owner or entity authorized by the copyright owner that is granting the License. "Legal Entity" shall mean the union of the acting entity and all other entities that control, are controlled by, or are under common control with that entity. For the purposes of this definition, "control" means (i) the power, direct or indirect, to cause the direction or management of such entity, whether by contract or otherwise, or (ii) ownership of fifty percent (50%) or more of the outstanding shares, or (iii) beneficial ownership of such entity. "You" (or "Your") shall mean an individual or Legal Entity exercising permissions granted by this License. "Source" form shall mean the preferred form for making modifications, including but not limited to software source code, documentation source, and configuration files. "Object" form shall mean any form resulting from mechanical transformation or translation of a Source form, including but not limited to compiled object code, generated documentation, and conversions to other media types. "Work" shall mean the work of authorship, whether in Source or Object form, made available under the License, as indicated by a copyright notice that is included in or attached to the work (an example is provided in the Appendix below). "Derivative Works" shall mean any work, whether in Source or Object form, that is based on (or derived from) the Work and for which the editorial revisions, annotations, elaborations, or other modifications represent, as a whole, an original work of authorship. 
For the purposes of this License, Derivative Works shall not include works that remain separable from, or merely link (or bind by name) to the interfaces of, the Work and Derivative Works thereof. "Contribution" shall mean any work of authorship, including the original version of the Work and any modifications or additions to that Work or Derivative Works thereof, that is intentionally submitted to Licensor for inclusion in the Work by the copyright owner or by an individual or Legal Entity authorized to submit on behalf of the copyright owner. For the purposes of this definition, "submitted" means any form of electronic, verbal, or written communication sent to the Licensor or its representatives, including but not limited to communication on electronic mailing lists, source code control systems, and issue tracking systems that are managed by, or on behalf of, the Licensor for the purpose of discussing and improving the Work, but excluding communication that is conspicuously marked or otherwise designated in writing by the copyright owner as "Not a Contribution." "Contributor" shall mean Licensor and any individual or Legal Entity on behalf of whom a Contribution has been received by Licensor and subsequently incorporated within the Work. 2. Grant of Copyright License. Subject to the terms and conditions of this License, each Contributor hereby grants to You a perpetual, worldwide, non-exclusive, no-charge, royalty-free, irrevocable copyright license to reproduce, prepare Derivative Works of, publicly display, publicly perform, sublicense, and distribute the Work and such Derivative Works in Source or Object form. 3. Grant of Patent License. 
Subject to the terms and conditions of this License, each Contributor hereby grants to You a perpetual, worldwide, non-exclusive, no-charge, royalty-free, irrevocable (except as stated in this section) patent license to make, have made, use, offer to sell, sell, import, and otherwise transfer the Work, where such license applies only to those patent claims licensable by such Contributor that are necessarily infringed by their Contribution(s) alone or by combination of their Contribution(s) with the Work to which such Contribution(s) was submitted. If You institute patent litigation against any entity (including a cross-claim or counterclaim in a lawsuit) alleging that the Work or a Contribution incorporated within the Work constitutes direct or contributory patent infringement, then any patent licenses granted to You under this License for that Work shall terminate as of the date such litigation is filed. 4. Redistribution. You may reproduce and distribute copies of the Work or Derivative Works thereof in any medium, with or without modifications, and in Source or Object form, provided that You meet the following conditions: (a) You must give any other recipients of the Work or Derivative Works a copy of this License; and (b) You must cause any modified files to carry prominent notices stating that You changed the files; and (c) You must retain, in the Source form of any Derivative Works that You distribute, all copyright, patent, trademark, and attribution notices from the Source form of the Work, excluding those notices that do not pertain to any part of the Derivative Works; and (d) If the Work includes a "NOTICE" text file as part of its distribution, then any Derivative Works that You distribute must include a readable copy of the attribution notices contained within such NOTICE file, excluding those notices that do not pertain to any part of the Derivative Works, in at least one of the following places: within a NOTICE text file distributed as part of the 
Derivative Works; within the Source form or documentation, if provided along with the Derivative Works; or, within a display generated by the Derivative Works, if and wherever such third-party notices normally appear. The contents of the NOTICE file are for informational purposes only and do not modify the License. You may add Your own attribution notices within Derivative Works that You distribute, alongside or as an addendum to the NOTICE text from the Work, provided that such additional attribution notices cannot be construed as modifying the License. You may add Your own copyright statement to Your modifications and may provide additional or different license terms and conditions for use, reproduction, or distribution of Your modifications, or for any such Derivative Works as a whole, provided Your use, reproduction, and distribution of the Work otherwise complies with the conditions stated in this License. 5. Submission of Contributions. Unless You explicitly state otherwise, any Contribution intentionally submitted for inclusion in the Work by You to the Licensor shall be under the terms and conditions of this License, without any additional terms or conditions. Notwithstanding the above, nothing herein shall supersede or modify the terms of any separate license agreement you may have executed with Licensor regarding such Contributions. 6. Trademarks. This License does not grant permission to use the trade names, trademarks, service marks, or product names of the Licensor, except as required for reasonable and customary use in describing the origin of the Work and reproducing the content of the NOTICE file. 7. Disclaimer of Warranty. 
Unless required by applicable law or agreed to in writing, Licensor provides the Work (and each Contributor provides its Contributions) on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied, including, without limitation, any warranties or conditions of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A PARTICULAR PURPOSE. You are solely responsible for determining the appropriateness of using or redistributing the Work and assume any risks associated with Your exercise of permissions under this License. 8. Limitation of Liability. In no event and under no legal theory, whether in tort (including negligence), contract, or otherwise, unless required by applicable law (such as deliberate and grossly negligent acts) or agreed to in writing, shall any Contributor be liable to You for damages, including any direct, indirect, special, incidental, or consequential damages of any character arising as a result of this License or out of the use or inability to use the Work (including but not limited to damages for loss of goodwill, work stoppage, computer failure or malfunction, or any and all other commercial damages or losses), even if such Contributor has been advised of the possibility of such damages. 9. Accepting Warranty or Additional Liability. While redistributing the Work or Derivative Works thereof, You may choose to offer, and charge a fee for, acceptance of support, warranty, indemnity, or other liability obligations and/or rights consistent with this License. However, in accepting such obligations, You may act only on Your own behalf and on Your sole responsibility, not on behalf of any other Contributor, and only if You agree to indemnify, defend, and hold each Contributor harmless for any liability incurred by, or claims asserted against, such Contributor by reason of your accepting any such warranty or additional liability. END OF TERMS AND CONDITIONS APPENDIX: How to apply the Apache License to your work. 
To apply the Apache License to your work, attach the following boilerplate notice, with the fields enclosed by brackets "[]" replaced with your own identifying information. (Don't include the brackets!) The text should be enclosed in the appropriate comment syntax for the file format. We also recommend that a file or class name and description of purpose be included on the same "printed page" as the copyright notice for easier identification within third-party archives. Copyright [yyyy] [name of copyright owner] Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License.
vendor/github.com/prometheus/procfs/LICENSE
0
https://github.com/grafana/grafana/commit/491e6897e756e06f39cd912ec01a79fcb0cf31c4
[ 0.00018125816131941974, 0.00017535037477500737, 0.00017065818246919662, 0.00017452120664529502, 0.0000024024868707783753 ]
{ "id": 3, "code_window": [ "\t})\n", "\n", "\tM_Api_Dashboard_Search = prometheus.NewSummary(prometheus.SummaryOpts{\n", "\t\tName: \"api_dashboard_search\",\n", "\t\tHelp: \"summary for dashboard search duration\",\n", "\t\tNamespace: exporterName,\n", "\t})\n" ], "labels": [ "keep", "keep", "keep", "replace", "keep", "keep", "keep" ], "after_edit": [ "\t\tName: \"api_dashboard_search_milleseconds\",\n" ], "file_path": "pkg/metrics/metrics.go", "type": "replace", "edit_start_line_idx": 141 }
package metrics import ( "bytes" "encoding/json" "net/http" "runtime" "strings" "time" "github.com/grafana/grafana/pkg/bus" "github.com/grafana/grafana/pkg/models" "github.com/grafana/grafana/pkg/plugins" "github.com/grafana/grafana/pkg/setting" "github.com/prometheus/client_golang/prometheus" ) const exporterName = "grafana" var ( M_Instance_Start prometheus.Counter M_Page_Status *prometheus.CounterVec M_Api_Status *prometheus.CounterVec M_Proxy_Status *prometheus.CounterVec M_Http_Request_Total *prometheus.CounterVec M_Http_Request_Summary *prometheus.SummaryVec M_Api_User_SignUpStarted prometheus.Counter M_Api_User_SignUpCompleted prometheus.Counter M_Api_User_SignUpInvite prometheus.Counter M_Api_Dashboard_Save prometheus.Summary M_Api_Dashboard_Get prometheus.Summary M_Api_Dashboard_Search prometheus.Summary M_Api_Admin_User_Create prometheus.Counter M_Api_Login_Post prometheus.Counter M_Api_Login_OAuth prometheus.Counter M_Api_Org_Create prometheus.Counter M_Api_Dashboard_Snapshot_Create prometheus.Counter M_Api_Dashboard_Snapshot_External prometheus.Counter M_Api_Dashboard_Snapshot_Get prometheus.Counter M_Api_Dashboard_Insert prometheus.Counter M_Alerting_Result_State *prometheus.CounterVec M_Alerting_Notification_Sent *prometheus.CounterVec M_Aws_CloudWatch_GetMetricStatistics prometheus.Counter M_Aws_CloudWatch_ListMetrics prometheus.Counter M_DB_DataSource_QueryById prometheus.Counter // Timers M_DataSource_ProxyReq_Timer prometheus.Summary M_Alerting_Execution_Time prometheus.Summary // StatTotals M_Alerting_Active_Alerts prometheus.Gauge M_StatTotal_Dashboards prometheus.Gauge M_StatTotal_Users prometheus.Gauge M_StatTotal_Orgs prometheus.Gauge M_StatTotal_Playlists prometheus.Gauge ) func init() { M_Instance_Start = prometheus.NewCounter(prometheus.CounterOpts{ Name: "instance_start_total", Help: "counter for started instances", Namespace: exporterName, }) M_Page_Status = prometheus.NewCounterVec( prometheus.CounterOpts{ Name: 
"page_response_status_total", Help: "page http response status", Namespace: exporterName, }, []string{"code"}, ) M_Api_Status = prometheus.NewCounterVec( prometheus.CounterOpts{ Name: "api_response_status_total", Help: "api http response status", Namespace: exporterName, }, []string{"code"}, ) M_Proxy_Status = prometheus.NewCounterVec( prometheus.CounterOpts{ Name: "proxy_response_status_total", Help: "proxy http response status", Namespace: exporterName, }, []string{"code"}, ) M_Http_Request_Total = prometheus.NewCounterVec( prometheus.CounterOpts{ Name: "http_request_total", Help: "http request counter", }, []string{"handler", "statuscode", "method"}, ) M_Http_Request_Summary = prometheus.NewSummaryVec( prometheus.SummaryOpts{ Name: "http_request_duration", Help: "http request summary", }, []string{"handler", "statuscode", "method"}, ) M_Api_User_SignUpStarted = prometheus.NewCounter(prometheus.CounterOpts{ Name: "api_user_signup_started_total", Help: "amount of users who started the signup flow", Namespace: exporterName, }) M_Api_User_SignUpCompleted = prometheus.NewCounter(prometheus.CounterOpts{ Name: "api_user_signup_completed_total", Help: "amount of users who completed the signup flow", Namespace: exporterName, }) M_Api_User_SignUpInvite = prometheus.NewCounter(prometheus.CounterOpts{ Name: "api_user_signup_invite_total", Help: "amount of users who have been invited", Namespace: exporterName, }) M_Api_Dashboard_Save = prometheus.NewSummary(prometheus.SummaryOpts{ Name: "api_dashboard_save", Help: "summary for dashboard save duration", Namespace: exporterName, }) M_Api_Dashboard_Get = prometheus.NewSummary(prometheus.SummaryOpts{ Name: "api_dashboard_get", Help: "summary for dashboard get duration", Namespace: exporterName, }) M_Api_Dashboard_Search = prometheus.NewSummary(prometheus.SummaryOpts{ Name: "api_dashboard_search", Help: "summary for dashboard search duration", Namespace: exporterName, }) M_Api_Admin_User_Create = 
prometheus.NewCounter(prometheus.CounterOpts{ Name: "api_admin_user_created_total", Help: "api admin user created counter", Namespace: exporterName, }) M_Api_Login_Post = prometheus.NewCounter(prometheus.CounterOpts{ Name: "api_login_post_total", Help: "api login post counter", Namespace: exporterName, }) M_Api_Login_OAuth = prometheus.NewCounter(prometheus.CounterOpts{ Name: "api_login_oauth_total", Help: "api login oauth counter", Namespace: exporterName, }) M_Api_Org_Create = prometheus.NewCounter(prometheus.CounterOpts{ Name: "api_org_create_total", Help: "api org created counter", Namespace: exporterName, }) M_Api_Dashboard_Snapshot_Create = prometheus.NewCounter(prometheus.CounterOpts{ Name: "api_dashboard_snapshot_create_total", Help: "dashboard snapshots created", Namespace: exporterName, }) M_Api_Dashboard_Snapshot_External = prometheus.NewCounter(prometheus.CounterOpts{ Name: "api_dashboard_snapshot_external_total", Help: "external dashboard snapshots created", Namespace: exporterName, }) M_Api_Dashboard_Snapshot_Get = prometheus.NewCounter(prometheus.CounterOpts{ Name: "api_dashboard_snapshot_get_total", Help: "loaded dashboards", Namespace: exporterName, }) M_Api_Dashboard_Insert = prometheus.NewCounter(prometheus.CounterOpts{ Name: "api_models_dashboard_insert_total", Help: "dashboards inserted ", Namespace: exporterName, }) M_Alerting_Result_State = prometheus.NewCounterVec(prometheus.CounterOpts{ Name: "alerting_result_total", Help: "alert execution result counter", Namespace: exporterName, }, []string{"state"}) M_Alerting_Notification_Sent = prometheus.NewCounterVec(prometheus.CounterOpts{ Name: "alerting_notification_sent_total", Help: "counter for how many alert notifications been sent", Namespace: exporterName, }, []string{"type"}) M_Aws_CloudWatch_GetMetricStatistics = prometheus.NewCounter(prometheus.CounterOpts{ Name: "aws_cloudwatch_get_metric_statistics_total", Help: "counter for getting metric statistics from aws", Namespace: exporterName, 
}) M_Aws_CloudWatch_ListMetrics = prometheus.NewCounter(prometheus.CounterOpts{ Name: "aws_cloudwatch_list_metrics_total", Help: "counter for getting list of metrics from aws", Namespace: exporterName, }) M_DB_DataSource_QueryById = prometheus.NewCounter(prometheus.CounterOpts{ Name: "db_datasource_query_by_id_total", Help: "counter for getting datasource by id", Namespace: exporterName, }) M_DataSource_ProxyReq_Timer = prometheus.NewSummary(prometheus.SummaryOpts{ Name: "api_dataproxy_request_all", Help: "summary for dashboard search duration", Namespace: exporterName, }) M_Alerting_Execution_Time = prometheus.NewSummary(prometheus.SummaryOpts{ Name: "alerting_execution_time_seconds", Help: "summary of alert exeuction duration", Namespace: exporterName, }) M_Alerting_Active_Alerts = prometheus.NewGauge(prometheus.GaugeOpts{ Name: "alerting_active_alerts", Help: "amount of active alerts", Namespace: exporterName, }) M_StatTotal_Dashboards = prometheus.NewGauge(prometheus.GaugeOpts{ Name: "stat_totals_dashboard", Help: "total amount of dashboards", Namespace: exporterName, }) M_StatTotal_Users = prometheus.NewGauge(prometheus.GaugeOpts{ Name: "stat_total_users", Help: "total amount of users", Namespace: exporterName, }) M_StatTotal_Orgs = prometheus.NewGauge(prometheus.GaugeOpts{ Name: "stat_total_orgs", Help: "total amount of orgs", Namespace: exporterName, }) M_StatTotal_Playlists = prometheus.NewGauge(prometheus.GaugeOpts{ Name: "stat_total_playlists", Help: "total amount of playlists", Namespace: exporterName, }) } func initMetricVars(settings *MetricSettings) { prometheus.MustRegister( M_Instance_Start, M_Page_Status, M_Api_Status, M_Proxy_Status, M_Http_Request_Total, M_Http_Request_Summary, M_Api_User_SignUpStarted, M_Api_User_SignUpCompleted, M_Api_User_SignUpInvite, M_Api_Dashboard_Save, M_Api_Dashboard_Get, M_Api_Dashboard_Search, M_DataSource_ProxyReq_Timer, M_Alerting_Execution_Time, M_Api_Admin_User_Create, M_Api_Login_Post, M_Api_Login_OAuth, 
M_Api_Org_Create, M_Api_Dashboard_Snapshot_Create, M_Api_Dashboard_Snapshot_External, M_Api_Dashboard_Snapshot_Get, M_Api_Dashboard_Insert, M_Alerting_Result_State, M_Alerting_Notification_Sent, M_Aws_CloudWatch_GetMetricStatistics, M_Aws_CloudWatch_ListMetrics, M_DB_DataSource_QueryById, M_Alerting_Active_Alerts, M_StatTotal_Dashboards, M_StatTotal_Users, M_StatTotal_Orgs, M_StatTotal_Playlists) go instrumentationLoop(settings) } func instrumentationLoop(settings *MetricSettings) chan struct{} { M_Instance_Start.Inc() onceEveryDayTick := time.NewTicker(time.Hour * 24) secondTicker := time.NewTicker(time.Second * time.Duration(settings.IntervalSeconds)) for { select { case <-onceEveryDayTick.C: sendUsageStats() case <-secondTicker.C: updateTotalStats() } } } var metricPublishCounter int64 = 0 func updateTotalStats() { metricPublishCounter++ if metricPublishCounter == 1 || metricPublishCounter%10 == 0 { statsQuery := models.GetSystemStatsQuery{} if err := bus.Dispatch(&statsQuery); err != nil { metricsLogger.Error("Failed to get system stats", "error", err) return } M_StatTotal_Dashboards.Set(float64(statsQuery.Result.Dashboards)) M_StatTotal_Users.Set(float64(statsQuery.Result.Users)) M_StatTotal_Playlists.Set(float64(statsQuery.Result.Playlists)) M_StatTotal_Orgs.Set(float64(statsQuery.Result.Orgs)) } } func sendUsageStats() { if !setting.ReportingEnabled { return } metricsLogger.Debug("Sending anonymous usage stats to stats.grafana.org") version := strings.Replace(setting.BuildVersion, ".", "_", -1) metrics := map[string]interface{}{} report := map[string]interface{}{ "version": version, "metrics": metrics, "os": runtime.GOOS, "arch": runtime.GOARCH, } statsQuery := models.GetSystemStatsQuery{} if err := bus.Dispatch(&statsQuery); err != nil { metricsLogger.Error("Failed to get system stats", "error", err) return } metrics["stats.dashboards.count"] = statsQuery.Result.Dashboards metrics["stats.users.count"] = statsQuery.Result.Users metrics["stats.orgs.count"] = 
statsQuery.Result.Orgs metrics["stats.playlist.count"] = statsQuery.Result.Playlists metrics["stats.plugins.apps.count"] = len(plugins.Apps) metrics["stats.plugins.panels.count"] = len(plugins.Panels) metrics["stats.plugins.datasources.count"] = len(plugins.DataSources) metrics["stats.alerts.count"] = statsQuery.Result.Alerts metrics["stats.active_users.count"] = statsQuery.Result.ActiveUsers metrics["stats.datasources.count"] = statsQuery.Result.Datasources dsStats := models.GetDataSourceStatsQuery{} if err := bus.Dispatch(&dsStats); err != nil { metricsLogger.Error("Failed to get datasource stats", "error", err) return } // send counters for each data source // but ignore any custom data sources // as sending that name could be sensitive information dsOtherCount := 0 for _, dsStat := range dsStats.Result { if models.IsKnownDataSourcePlugin(dsStat.Type) { metrics["stats.ds."+dsStat.Type+".count"] = dsStat.Count } else { dsOtherCount += dsStat.Count } } metrics["stats.ds.other.count"] = dsOtherCount out, _ := json.MarshalIndent(report, "", " ") data := bytes.NewBuffer(out) client := http.Client{Timeout: time.Duration(5 * time.Second)} go client.Post("https://stats.grafana.org/grafana-usage-report", "application/json", data) }
pkg/metrics/metrics.go
1
https://github.com/grafana/grafana/commit/491e6897e756e06f39cd912ec01a79fcb0cf31c4
[ 0.9981704950332642, 0.12469713389873505, 0.00016679760301485658, 0.011400427669286728, 0.2709105908870697 ]
{ "id": 3, "code_window": [ "\t})\n", "\n", "\tM_Api_Dashboard_Search = prometheus.NewSummary(prometheus.SummaryOpts{\n", "\t\tName: \"api_dashboard_search\",\n", "\t\tHelp: \"summary for dashboard search duration\",\n", "\t\tNamespace: exporterName,\n", "\t})\n" ], "labels": [ "keep", "keep", "keep", "replace", "keep", "keep", "keep" ], "after_edit": [ "\t\tName: \"api_dashboard_search_milleseconds\",\n" ], "file_path": "pkg/metrics/metrics.go", "type": "replace", "edit_start_line_idx": 141 }
Copyright (C) 2013 Blake Mizerany Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
vendor/github.com/beorn7/perks/LICENSE
0
https://github.com/grafana/grafana/commit/491e6897e756e06f39cd912ec01a79fcb0cf31c4
[ 0.00017839696374721825, 0.00017413341265637428, 0.00016993995814118534, 0.00017406335973646492, 0.000003452913460932905 ]
{ "id": 3, "code_window": [ "\t})\n", "\n", "\tM_Api_Dashboard_Search = prometheus.NewSummary(prometheus.SummaryOpts{\n", "\t\tName: \"api_dashboard_search\",\n", "\t\tHelp: \"summary for dashboard search duration\",\n", "\t\tNamespace: exporterName,\n", "\t})\n" ], "labels": [ "keep", "keep", "keep", "replace", "keep", "keep", "keep" ], "after_edit": [ "\t\tName: \"api_dashboard_search_milleseconds\",\n" ], "file_path": "pkg/metrics/metrics.go", "type": "replace", "edit_start_line_idx": 141 }
@import "variables"; @import "variables.light"; @import "grafana";
public/sass/grafana.light.scss
0
https://github.com/grafana/grafana/commit/491e6897e756e06f39cd912ec01a79fcb0cf31c4
[ 0.00017738432507030666, 0.00017738432507030666, 0.00017738432507030666, 0.00017738432507030666, 0 ]
{ "id": 3, "code_window": [ "\t})\n", "\n", "\tM_Api_Dashboard_Search = prometheus.NewSummary(prometheus.SummaryOpts{\n", "\t\tName: \"api_dashboard_search\",\n", "\t\tHelp: \"summary for dashboard search duration\",\n", "\t\tNamespace: exporterName,\n", "\t})\n" ], "labels": [ "keep", "keep", "keep", "replace", "keep", "keep", "keep" ], "after_edit": [ "\t\tName: \"api_dashboard_search_milleseconds\",\n" ], "file_path": "pkg/metrics/metrics.go", "type": "replace", "edit_start_line_idx": 141 }
// Copyright 2016 The Go Authors. All rights reserved. // Use of this source code is governed by a BSD-style // license that can be found in the LICENSE file. // +build go1.7 // Package ctxhttp provides helper functions for performing context-aware HTTP requests. package ctxhttp // import "golang.org/x/net/context/ctxhttp" import ( "io" "net/http" "net/url" "strings" "golang.org/x/net/context" ) // Do sends an HTTP request with the provided http.Client and returns // an HTTP response. // // If the client is nil, http.DefaultClient is used. // // The provided ctx must be non-nil. If it is canceled or times out, // ctx.Err() will be returned. func Do(ctx context.Context, client *http.Client, req *http.Request) (*http.Response, error) { if client == nil { client = http.DefaultClient } resp, err := client.Do(req.WithContext(ctx)) // If we got an error, and the context has been canceled, // the context's error is probably more useful. if err != nil { select { case <-ctx.Done(): err = ctx.Err() default: } } return resp, err } // Get issues a GET request via the Do function. func Get(ctx context.Context, client *http.Client, url string) (*http.Response, error) { req, err := http.NewRequest("GET", url, nil) if err != nil { return nil, err } return Do(ctx, client, req) } // Head issues a HEAD request via the Do function. func Head(ctx context.Context, client *http.Client, url string) (*http.Response, error) { req, err := http.NewRequest("HEAD", url, nil) if err != nil { return nil, err } return Do(ctx, client, req) } // Post issues a POST request via the Do function. func Post(ctx context.Context, client *http.Client, url string, bodyType string, body io.Reader) (*http.Response, error) { req, err := http.NewRequest("POST", url, body) if err != nil { return nil, err } req.Header.Set("Content-Type", bodyType) return Do(ctx, client, req) } // PostForm issues a POST request via the Do function. 
func PostForm(ctx context.Context, client *http.Client, url string, data url.Values) (*http.Response, error) { return Post(ctx, client, url, "application/x-www-form-urlencoded", strings.NewReader(data.Encode())) }
vendor/golang.org/x/net/context/ctxhttp/ctxhttp.go
0
https://github.com/grafana/grafana/commit/491e6897e756e06f39cd912ec01a79fcb0cf31c4
[ 0.00017555213707964867, 0.000170546438312158, 0.00016723426233511418, 0.0001694055536063388, 0.000003010982254636474 ]
{ "id": 4, "code_window": [ "\t\tHelp: \"counter for getting datasource by id\",\n", "\t\tNamespace: exporterName,\n", "\t})\n", "\n", "\tM_DataSource_ProxyReq_Timer = prometheus.NewSummary(prometheus.SummaryOpts{\n", "\t\tName: \"api_dataproxy_request_all\",\n", "\t\tHelp: \"summary for dashboard search duration\",\n", "\t\tNamespace: exporterName,\n", "\t})\n", "\n", "\tM_Alerting_Execution_Time = prometheus.NewSummary(prometheus.SummaryOpts{\n" ], "labels": [ "keep", "keep", "keep", "keep", "keep", "replace", "keep", "keep", "keep", "keep", "keep" ], "after_edit": [ "\t\tName: \"api_dataproxy_request_all_milleseconds\",\n" ], "file_path": "pkg/metrics/metrics.go", "type": "replace", "edit_start_line_idx": 225 }
package alerting import ( "strconv" "strings" "time" "github.com/grafana/grafana/pkg/log" "github.com/grafana/grafana/pkg/metrics" "github.com/grafana/grafana/pkg/models" ) type DefaultEvalHandler struct { log log.Logger alertJobTimeout time.Duration } func NewEvalHandler() *DefaultEvalHandler { return &DefaultEvalHandler{ log: log.New("alerting.evalHandler"), alertJobTimeout: time.Second * 5, } } func (e *DefaultEvalHandler) Eval(context *EvalContext) { firing := true noDataFound := true conditionEvals := "" for i := 0; i < len(context.Rule.Conditions); i++ { condition := context.Rule.Conditions[i] cr, err := condition.Eval(context) if err != nil { context.Error = err } // break if condition could not be evaluated if context.Error != nil { break } // calculating Firing based on operator if cr.Operator == "or" { firing = firing || cr.Firing noDataFound = noDataFound || cr.NoDataFound } else { firing = firing && cr.Firing noDataFound = noDataFound && cr.NoDataFound } if i > 0 { conditionEvals = "[" + conditionEvals + " " + strings.ToUpper(cr.Operator) + " " + strconv.FormatBool(cr.Firing) + "]" } else { conditionEvals = strconv.FormatBool(firing) } context.EvalMatches = append(context.EvalMatches, cr.EvalMatches...) } context.ConditionEvals = conditionEvals + " = " + strconv.FormatBool(firing) context.Firing = firing context.NoDataFound = noDataFound context.EndTime = time.Now() context.Rule.State = e.getNewState(context) elapsedTime := context.EndTime.Sub(context.StartTime).Seconds() metrics.M_Alerting_Execution_Time.Observe(elapsedTime) } // This should be move into evalContext once its been refactored. 
func (handler *DefaultEvalHandler) getNewState(evalContext *EvalContext) models.AlertStateType { if evalContext.Error != nil { handler.log.Error("Alert Rule Result Error", "ruleId", evalContext.Rule.Id, "name", evalContext.Rule.Name, "error", evalContext.Error, "changing state to", evalContext.Rule.ExecutionErrorState.ToAlertState()) if evalContext.Rule.ExecutionErrorState == models.ExecutionErrorKeepState { return evalContext.PrevAlertState } else { return evalContext.Rule.ExecutionErrorState.ToAlertState() } } else if evalContext.Firing { return models.AlertStateAlerting } else if evalContext.NoDataFound { handler.log.Info("Alert Rule returned no data", "ruleId", evalContext.Rule.Id, "name", evalContext.Rule.Name, "changing state to", evalContext.Rule.NoDataState.ToAlertState()) if evalContext.Rule.NoDataState == models.NoDataKeepState { return evalContext.PrevAlertState } else { return evalContext.Rule.NoDataState.ToAlertState() } } return models.AlertStateOK }
pkg/services/alerting/eval_handler.go
1
https://github.com/grafana/grafana/commit/491e6897e756e06f39cd912ec01a79fcb0cf31c4
[ 0.9963659048080444, 0.09073320031166077, 0.00016335656982846558, 0.0001707590854493901, 0.2863862216472626 ]
{ "id": 4, "code_window": [ "\t\tHelp: \"counter for getting datasource by id\",\n", "\t\tNamespace: exporterName,\n", "\t})\n", "\n", "\tM_DataSource_ProxyReq_Timer = prometheus.NewSummary(prometheus.SummaryOpts{\n", "\t\tName: \"api_dataproxy_request_all\",\n", "\t\tHelp: \"summary for dashboard search duration\",\n", "\t\tNamespace: exporterName,\n", "\t})\n", "\n", "\tM_Alerting_Execution_Time = prometheus.NewSummary(prometheus.SummaryOpts{\n" ], "labels": [ "keep", "keep", "keep", "keep", "keep", "replace", "keep", "keep", "keep", "keep", "keep" ], "after_edit": [ "\t\tName: \"api_dataproxy_request_all_milleseconds\",\n" ], "file_path": "pkg/metrics/metrics.go", "type": "replace", "edit_start_line_idx": 225 }
foo.badkey.{nokey: nokey, alsonokey: alsonokey}
vendor/github.com/jmespath/go-jmespath/fuzz/corpus/expr-429
0
https://github.com/grafana/grafana/commit/491e6897e756e06f39cd912ec01a79fcb0cf31c4
[ 0.00017513064085505903, 0.00017513064085505903, 0.00017513064085505903, 0.00017513064085505903, 0 ]
{ "id": 4, "code_window": [ "\t\tHelp: \"counter for getting datasource by id\",\n", "\t\tNamespace: exporterName,\n", "\t})\n", "\n", "\tM_DataSource_ProxyReq_Timer = prometheus.NewSummary(prometheus.SummaryOpts{\n", "\t\tName: \"api_dataproxy_request_all\",\n", "\t\tHelp: \"summary for dashboard search duration\",\n", "\t\tNamespace: exporterName,\n", "\t})\n", "\n", "\tM_Alerting_Execution_Time = prometheus.NewSummary(prometheus.SummaryOpts{\n" ], "labels": [ "keep", "keep", "keep", "keep", "keep", "replace", "keep", "keep", "keep", "keep", "keep" ], "after_edit": [ "\t\tName: \"api_dataproxy_request_all_milleseconds\",\n" ], "file_path": "pkg/metrics/metrics.go", "type": "replace", "edit_start_line_idx": 225 }
{ "name": "jquery", "main": "dist/jquery.js", "license": "MIT", "ignore": [ "package.json" ], "keywords": [ "jquery", "javascript", "browser", "library" ], "homepage": "https://github.com/jquery/jquery-dist", "version": "3.1.0", "_release": "3.1.0", "_resolution": { "type": "version", "tag": "3.1.0", "commit": "6f02bc382c0529d3b4f68f6b2ad21876642dbbfe" }, "_source": "https://github.com/jquery/jquery-dist.git", "_target": "3.1.0", "_originalSource": "jquery" }
public/vendor/jquery/.bower.json
0
https://github.com/grafana/grafana/commit/491e6897e756e06f39cd912ec01a79fcb0cf31c4
[ 0.00017555798694957048, 0.00017481537361163646, 0.00017399679927621037, 0.000174891363712959, 6.396126082108822e-7 ]
{ "id": 4, "code_window": [ "\t\tHelp: \"counter for getting datasource by id\",\n", "\t\tNamespace: exporterName,\n", "\t})\n", "\n", "\tM_DataSource_ProxyReq_Timer = prometheus.NewSummary(prometheus.SummaryOpts{\n", "\t\tName: \"api_dataproxy_request_all\",\n", "\t\tHelp: \"summary for dashboard search duration\",\n", "\t\tNamespace: exporterName,\n", "\t})\n", "\n", "\tM_Alerting_Execution_Time = prometheus.NewSummary(prometheus.SummaryOpts{\n" ], "labels": [ "keep", "keep", "keep", "keep", "keep", "replace", "keep", "keep", "keep", "keep", "keep" ], "after_edit": [ "\t\tName: \"api_dataproxy_request_all_milleseconds\",\n" ], "file_path": "pkg/metrics/metrics.go", "type": "replace", "edit_start_line_idx": 225 }
foo[10:5:-1]
vendor/github.com/jmespath/go-jmespath/fuzz/corpus/expr-520
0
https://github.com/grafana/grafana/commit/491e6897e756e06f39cd912ec01a79fcb0cf31c4
[ 0.00017269962700083852, 0.00017269962700083852, 0.00017269962700083852, 0.00017269962700083852, 0 ]
{ "id": 5, "code_window": [ "\t\tNamespace: exporterName,\n", "\t})\n", "\n", "\tM_Alerting_Execution_Time = prometheus.NewSummary(prometheus.SummaryOpts{\n", "\t\tName: \"alerting_execution_time_seconds\",\n", "\t\tHelp: \"summary of alert exeuction duration\",\n", "\t\tNamespace: exporterName,\n", "\t})\n" ], "labels": [ "keep", "keep", "keep", "keep", "replace", "keep", "keep", "keep" ], "after_edit": [ "\t\tName: \"alerting_execution_time_milliseconds\",\n" ], "file_path": "pkg/metrics/metrics.go", "type": "replace", "edit_start_line_idx": 231 }
package middleware import ( "net/http" "strconv" "strings" "time" "github.com/grafana/grafana/pkg/metrics" "gopkg.in/macaron.v1" ) func RequestMetrics(handler string) macaron.Handler { return func(res http.ResponseWriter, req *http.Request, c *macaron.Context) { rw := res.(macaron.ResponseWriter) now := time.Now() c.Next() status := rw.Status() code := sanitizeCode(status) method := sanitizeMethod(req.Method) metrics.M_Http_Request_Total.WithLabelValues(handler, code, method).Inc() metrics.M_Http_Request_Summary.WithLabelValues(handler, code, method).Observe(time.Since(now).Seconds()) if strings.HasPrefix(req.RequestURI, "/api/datasources/proxy") { countProxyRequests(status) } else if strings.HasPrefix(req.RequestURI, "/api/") { countApiRequests(status) } else { countPageRequests(status) } } } func countApiRequests(status int) { switch status { case 200: metrics.M_Api_Status.WithLabelValues("200").Inc() case 404: metrics.M_Api_Status.WithLabelValues("404").Inc() case 500: metrics.M_Api_Status.WithLabelValues("500").Inc() default: metrics.M_Api_Status.WithLabelValues("unknown").Inc() } } func countPageRequests(status int) { switch status { case 200: metrics.M_Page_Status.WithLabelValues("200").Inc() case 404: metrics.M_Page_Status.WithLabelValues("404").Inc() case 500: metrics.M_Page_Status.WithLabelValues("500").Inc() default: metrics.M_Page_Status.WithLabelValues("unknown").Inc() } } func countProxyRequests(status int) { switch status { case 200: metrics.M_Proxy_Status.WithLabelValues("200").Inc() case 404: metrics.M_Proxy_Status.WithLabelValues("400").Inc() case 500: metrics.M_Proxy_Status.WithLabelValues("500").Inc() default: metrics.M_Proxy_Status.WithLabelValues("unknown").Inc() } } func sanitizeMethod(m string) string { switch m { case "GET", "get": return "get" case "PUT", "put": return "put" case "HEAD", "head": return "head" case "POST", "post": return "post" case "DELETE", "delete": return "delete" case "CONNECT", "connect": return "connect" case 
"OPTIONS", "options": return "options" case "NOTIFY", "notify": return "notify" default: return strings.ToLower(m) } } // If the wrapped http.Handler has not set a status code, i.e. the value is // currently 0, santizeCode will return 200, for consistency with behavior in // the stdlib. func sanitizeCode(s int) string { switch s { case 100: return "100" case 101: return "101" case 200, 0: return "200" case 201: return "201" case 202: return "202" case 203: return "203" case 204: return "204" case 205: return "205" case 206: return "206" case 300: return "300" case 301: return "301" case 302: return "302" case 304: return "304" case 305: return "305" case 307: return "307" case 400: return "400" case 401: return "401" case 402: return "402" case 403: return "403" case 404: return "404" case 405: return "405" case 406: return "406" case 407: return "407" case 408: return "408" case 409: return "409" case 410: return "410" case 411: return "411" case 412: return "412" case 413: return "413" case 414: return "414" case 415: return "415" case 416: return "416" case 417: return "417" case 418: return "418" case 500: return "500" case 501: return "501" case 502: return "502" case 503: return "503" case 504: return "504" case 505: return "505" case 428: return "428" case 429: return "429" case 431: return "431" case 511: return "511" default: return strconv.Itoa(s) } }
pkg/middleware/request_metrics.go
1
https://github.com/grafana/grafana/commit/491e6897e756e06f39cd912ec01a79fcb0cf31c4
[ 0.00034114194568246603, 0.00018038999405689538, 0.00016361253801733255, 0.0001745010813465342, 0.000036236266168998554 ]
{ "id": 5, "code_window": [ "\t\tNamespace: exporterName,\n", "\t})\n", "\n", "\tM_Alerting_Execution_Time = prometheus.NewSummary(prometheus.SummaryOpts{\n", "\t\tName: \"alerting_execution_time_seconds\",\n", "\t\tHelp: \"summary of alert exeuction duration\",\n", "\t\tNamespace: exporterName,\n", "\t})\n" ], "labels": [ "keep", "keep", "keep", "keep", "replace", "keep", "keep", "keep" ], "after_edit": [ "\t\tName: \"alerting_execution_time_milliseconds\",\n" ], "file_path": "pkg/metrics/metrics.go", "type": "replace", "edit_start_line_idx": 231 }
' [foo] '
vendor/github.com/jmespath/go-jmespath/fuzz/corpus/expr-416
0
https://github.com/grafana/grafana/commit/491e6897e756e06f39cd912ec01a79fcb0cf31c4
[ 0.00017612436204217374, 0.00017612436204217374, 0.00017612436204217374, 0.00017612436204217374, 0 ]
{ "id": 5, "code_window": [ "\t\tNamespace: exporterName,\n", "\t})\n", "\n", "\tM_Alerting_Execution_Time = prometheus.NewSummary(prometheus.SummaryOpts{\n", "\t\tName: \"alerting_execution_time_seconds\",\n", "\t\tHelp: \"summary of alert exeuction duration\",\n", "\t\tNamespace: exporterName,\n", "\t})\n" ], "labels": [ "keep", "keep", "keep", "keep", "replace", "keep", "keep", "keep" ], "after_edit": [ "\t\tName: \"alerting_execution_time_milliseconds\",\n" ], "file_path": "pkg/metrics/metrics.go", "type": "replace", "edit_start_line_idx": 231 }
define( function() { "use strict"; // [[Class]] -> type pairs return {}; } );
public/vendor/jquery/src/var/class2type.js
0
https://github.com/grafana/grafana/commit/491e6897e756e06f39cd912ec01a79fcb0cf31c4
[ 0.0001751211384544149, 0.0001751211384544149, 0.0001751211384544149, 0.0001751211384544149, 0 ]
{ "id": 5, "code_window": [ "\t\tNamespace: exporterName,\n", "\t})\n", "\n", "\tM_Alerting_Execution_Time = prometheus.NewSummary(prometheus.SummaryOpts{\n", "\t\tName: \"alerting_execution_time_seconds\",\n", "\t\tHelp: \"summary of alert exeuction duration\",\n", "\t\tNamespace: exporterName,\n", "\t})\n" ], "labels": [ "keep", "keep", "keep", "keep", "replace", "keep", "keep", "keep" ], "after_edit": [ "\t\tName: \"alerting_execution_time_milliseconds\",\n" ], "file_path": "pkg/metrics/metrics.go", "type": "replace", "edit_start_line_idx": 231 }
outer.bar || outer.baz
vendor/github.com/jmespath/go-jmespath/fuzz/corpus/expr-473
0
https://github.com/grafana/grafana/commit/491e6897e756e06f39cd912ec01a79fcb0cf31c4
[ 0.00017307316011283547, 0.00017307316011283547, 0.00017307316011283547, 0.00017307316011283547, 0 ]
{ "id": 6, "code_window": [ "\t\tstatus := rw.Status()\n", "\n", "\t\tcode := sanitizeCode(status)\n", "\t\tmethod := sanitizeMethod(req.Method)\n", "\t\tmetrics.M_Http_Request_Total.WithLabelValues(handler, code, method).Inc()\n", "\t\tmetrics.M_Http_Request_Summary.WithLabelValues(handler, code, method).Observe(time.Since(now).Seconds())\n", "\n", "\t\tif strings.HasPrefix(req.RequestURI, \"/api/datasources/proxy\") {\n", "\t\t\tcountProxyRequests(status)\n", "\t\t} else if strings.HasPrefix(req.RequestURI, \"/api/\") {\n" ], "labels": [ "keep", "keep", "keep", "keep", "keep", "replace", "keep", "keep", "keep", "keep" ], "after_edit": [ "\t\tduration := time.Since(now).Nanoseconds() / int64(time.Millisecond)\n", "\t\tmetrics.M_Http_Request_Summary.WithLabelValues(handler, code, method).Observe(float64(duration))\n" ], "file_path": "pkg/middleware/request_metrics.go", "type": "replace", "edit_start_line_idx": 23 }
package metrics import ( "bytes" "encoding/json" "net/http" "runtime" "strings" "time" "github.com/grafana/grafana/pkg/bus" "github.com/grafana/grafana/pkg/models" "github.com/grafana/grafana/pkg/plugins" "github.com/grafana/grafana/pkg/setting" "github.com/prometheus/client_golang/prometheus" ) const exporterName = "grafana" var ( M_Instance_Start prometheus.Counter M_Page_Status *prometheus.CounterVec M_Api_Status *prometheus.CounterVec M_Proxy_Status *prometheus.CounterVec M_Http_Request_Total *prometheus.CounterVec M_Http_Request_Summary *prometheus.SummaryVec M_Api_User_SignUpStarted prometheus.Counter M_Api_User_SignUpCompleted prometheus.Counter M_Api_User_SignUpInvite prometheus.Counter M_Api_Dashboard_Save prometheus.Summary M_Api_Dashboard_Get prometheus.Summary M_Api_Dashboard_Search prometheus.Summary M_Api_Admin_User_Create prometheus.Counter M_Api_Login_Post prometheus.Counter M_Api_Login_OAuth prometheus.Counter M_Api_Org_Create prometheus.Counter M_Api_Dashboard_Snapshot_Create prometheus.Counter M_Api_Dashboard_Snapshot_External prometheus.Counter M_Api_Dashboard_Snapshot_Get prometheus.Counter M_Api_Dashboard_Insert prometheus.Counter M_Alerting_Result_State *prometheus.CounterVec M_Alerting_Notification_Sent *prometheus.CounterVec M_Aws_CloudWatch_GetMetricStatistics prometheus.Counter M_Aws_CloudWatch_ListMetrics prometheus.Counter M_DB_DataSource_QueryById prometheus.Counter // Timers M_DataSource_ProxyReq_Timer prometheus.Summary M_Alerting_Execution_Time prometheus.Summary // StatTotals M_Alerting_Active_Alerts prometheus.Gauge M_StatTotal_Dashboards prometheus.Gauge M_StatTotal_Users prometheus.Gauge M_StatTotal_Orgs prometheus.Gauge M_StatTotal_Playlists prometheus.Gauge ) func init() { M_Instance_Start = prometheus.NewCounter(prometheus.CounterOpts{ Name: "instance_start_total", Help: "counter for started instances", Namespace: exporterName, }) M_Page_Status = prometheus.NewCounterVec( prometheus.CounterOpts{ Name: 
"page_response_status_total", Help: "page http response status", Namespace: exporterName, }, []string{"code"}, ) M_Api_Status = prometheus.NewCounterVec( prometheus.CounterOpts{ Name: "api_response_status_total", Help: "api http response status", Namespace: exporterName, }, []string{"code"}, ) M_Proxy_Status = prometheus.NewCounterVec( prometheus.CounterOpts{ Name: "proxy_response_status_total", Help: "proxy http response status", Namespace: exporterName, }, []string{"code"}, ) M_Http_Request_Total = prometheus.NewCounterVec( prometheus.CounterOpts{ Name: "http_request_total", Help: "http request counter", }, []string{"handler", "statuscode", "method"}, ) M_Http_Request_Summary = prometheus.NewSummaryVec( prometheus.SummaryOpts{ Name: "http_request_duration", Help: "http request summary", }, []string{"handler", "statuscode", "method"}, ) M_Api_User_SignUpStarted = prometheus.NewCounter(prometheus.CounterOpts{ Name: "api_user_signup_started_total", Help: "amount of users who started the signup flow", Namespace: exporterName, }) M_Api_User_SignUpCompleted = prometheus.NewCounter(prometheus.CounterOpts{ Name: "api_user_signup_completed_total", Help: "amount of users who completed the signup flow", Namespace: exporterName, }) M_Api_User_SignUpInvite = prometheus.NewCounter(prometheus.CounterOpts{ Name: "api_user_signup_invite_total", Help: "amount of users who have been invited", Namespace: exporterName, }) M_Api_Dashboard_Save = prometheus.NewSummary(prometheus.SummaryOpts{ Name: "api_dashboard_save", Help: "summary for dashboard save duration", Namespace: exporterName, }) M_Api_Dashboard_Get = prometheus.NewSummary(prometheus.SummaryOpts{ Name: "api_dashboard_get", Help: "summary for dashboard get duration", Namespace: exporterName, }) M_Api_Dashboard_Search = prometheus.NewSummary(prometheus.SummaryOpts{ Name: "api_dashboard_search", Help: "summary for dashboard search duration", Namespace: exporterName, }) M_Api_Admin_User_Create = 
prometheus.NewCounter(prometheus.CounterOpts{ Name: "api_admin_user_created_total", Help: "api admin user created counter", Namespace: exporterName, }) M_Api_Login_Post = prometheus.NewCounter(prometheus.CounterOpts{ Name: "api_login_post_total", Help: "api login post counter", Namespace: exporterName, }) M_Api_Login_OAuth = prometheus.NewCounter(prometheus.CounterOpts{ Name: "api_login_oauth_total", Help: "api login oauth counter", Namespace: exporterName, }) M_Api_Org_Create = prometheus.NewCounter(prometheus.CounterOpts{ Name: "api_org_create_total", Help: "api org created counter", Namespace: exporterName, }) M_Api_Dashboard_Snapshot_Create = prometheus.NewCounter(prometheus.CounterOpts{ Name: "api_dashboard_snapshot_create_total", Help: "dashboard snapshots created", Namespace: exporterName, }) M_Api_Dashboard_Snapshot_External = prometheus.NewCounter(prometheus.CounterOpts{ Name: "api_dashboard_snapshot_external_total", Help: "external dashboard snapshots created", Namespace: exporterName, }) M_Api_Dashboard_Snapshot_Get = prometheus.NewCounter(prometheus.CounterOpts{ Name: "api_dashboard_snapshot_get_total", Help: "loaded dashboards", Namespace: exporterName, }) M_Api_Dashboard_Insert = prometheus.NewCounter(prometheus.CounterOpts{ Name: "api_models_dashboard_insert_total", Help: "dashboards inserted ", Namespace: exporterName, }) M_Alerting_Result_State = prometheus.NewCounterVec(prometheus.CounterOpts{ Name: "alerting_result_total", Help: "alert execution result counter", Namespace: exporterName, }, []string{"state"}) M_Alerting_Notification_Sent = prometheus.NewCounterVec(prometheus.CounterOpts{ Name: "alerting_notification_sent_total", Help: "counter for how many alert notifications been sent", Namespace: exporterName, }, []string{"type"}) M_Aws_CloudWatch_GetMetricStatistics = prometheus.NewCounter(prometheus.CounterOpts{ Name: "aws_cloudwatch_get_metric_statistics_total", Help: "counter for getting metric statistics from aws", Namespace: exporterName, 
}) M_Aws_CloudWatch_ListMetrics = prometheus.NewCounter(prometheus.CounterOpts{ Name: "aws_cloudwatch_list_metrics_total", Help: "counter for getting list of metrics from aws", Namespace: exporterName, }) M_DB_DataSource_QueryById = prometheus.NewCounter(prometheus.CounterOpts{ Name: "db_datasource_query_by_id_total", Help: "counter for getting datasource by id", Namespace: exporterName, }) M_DataSource_ProxyReq_Timer = prometheus.NewSummary(prometheus.SummaryOpts{ Name: "api_dataproxy_request_all", Help: "summary for dashboard search duration", Namespace: exporterName, }) M_Alerting_Execution_Time = prometheus.NewSummary(prometheus.SummaryOpts{ Name: "alerting_execution_time_seconds", Help: "summary of alert exeuction duration", Namespace: exporterName, }) M_Alerting_Active_Alerts = prometheus.NewGauge(prometheus.GaugeOpts{ Name: "alerting_active_alerts", Help: "amount of active alerts", Namespace: exporterName, }) M_StatTotal_Dashboards = prometheus.NewGauge(prometheus.GaugeOpts{ Name: "stat_totals_dashboard", Help: "total amount of dashboards", Namespace: exporterName, }) M_StatTotal_Users = prometheus.NewGauge(prometheus.GaugeOpts{ Name: "stat_total_users", Help: "total amount of users", Namespace: exporterName, }) M_StatTotal_Orgs = prometheus.NewGauge(prometheus.GaugeOpts{ Name: "stat_total_orgs", Help: "total amount of orgs", Namespace: exporterName, }) M_StatTotal_Playlists = prometheus.NewGauge(prometheus.GaugeOpts{ Name: "stat_total_playlists", Help: "total amount of playlists", Namespace: exporterName, }) } func initMetricVars(settings *MetricSettings) { prometheus.MustRegister( M_Instance_Start, M_Page_Status, M_Api_Status, M_Proxy_Status, M_Http_Request_Total, M_Http_Request_Summary, M_Api_User_SignUpStarted, M_Api_User_SignUpCompleted, M_Api_User_SignUpInvite, M_Api_Dashboard_Save, M_Api_Dashboard_Get, M_Api_Dashboard_Search, M_DataSource_ProxyReq_Timer, M_Alerting_Execution_Time, M_Api_Admin_User_Create, M_Api_Login_Post, M_Api_Login_OAuth, 
M_Api_Org_Create, M_Api_Dashboard_Snapshot_Create, M_Api_Dashboard_Snapshot_External, M_Api_Dashboard_Snapshot_Get, M_Api_Dashboard_Insert, M_Alerting_Result_State, M_Alerting_Notification_Sent, M_Aws_CloudWatch_GetMetricStatistics, M_Aws_CloudWatch_ListMetrics, M_DB_DataSource_QueryById, M_Alerting_Active_Alerts, M_StatTotal_Dashboards, M_StatTotal_Users, M_StatTotal_Orgs, M_StatTotal_Playlists) go instrumentationLoop(settings) } func instrumentationLoop(settings *MetricSettings) chan struct{} { M_Instance_Start.Inc() onceEveryDayTick := time.NewTicker(time.Hour * 24) secondTicker := time.NewTicker(time.Second * time.Duration(settings.IntervalSeconds)) for { select { case <-onceEveryDayTick.C: sendUsageStats() case <-secondTicker.C: updateTotalStats() } } } var metricPublishCounter int64 = 0 func updateTotalStats() { metricPublishCounter++ if metricPublishCounter == 1 || metricPublishCounter%10 == 0 { statsQuery := models.GetSystemStatsQuery{} if err := bus.Dispatch(&statsQuery); err != nil { metricsLogger.Error("Failed to get system stats", "error", err) return } M_StatTotal_Dashboards.Set(float64(statsQuery.Result.Dashboards)) M_StatTotal_Users.Set(float64(statsQuery.Result.Users)) M_StatTotal_Playlists.Set(float64(statsQuery.Result.Playlists)) M_StatTotal_Orgs.Set(float64(statsQuery.Result.Orgs)) } } func sendUsageStats() { if !setting.ReportingEnabled { return } metricsLogger.Debug("Sending anonymous usage stats to stats.grafana.org") version := strings.Replace(setting.BuildVersion, ".", "_", -1) metrics := map[string]interface{}{} report := map[string]interface{}{ "version": version, "metrics": metrics, "os": runtime.GOOS, "arch": runtime.GOARCH, } statsQuery := models.GetSystemStatsQuery{} if err := bus.Dispatch(&statsQuery); err != nil { metricsLogger.Error("Failed to get system stats", "error", err) return } metrics["stats.dashboards.count"] = statsQuery.Result.Dashboards metrics["stats.users.count"] = statsQuery.Result.Users metrics["stats.orgs.count"] = 
statsQuery.Result.Orgs metrics["stats.playlist.count"] = statsQuery.Result.Playlists metrics["stats.plugins.apps.count"] = len(plugins.Apps) metrics["stats.plugins.panels.count"] = len(plugins.Panels) metrics["stats.plugins.datasources.count"] = len(plugins.DataSources) metrics["stats.alerts.count"] = statsQuery.Result.Alerts metrics["stats.active_users.count"] = statsQuery.Result.ActiveUsers metrics["stats.datasources.count"] = statsQuery.Result.Datasources dsStats := models.GetDataSourceStatsQuery{} if err := bus.Dispatch(&dsStats); err != nil { metricsLogger.Error("Failed to get datasource stats", "error", err) return } // send counters for each data source // but ignore any custom data sources // as sending that name could be sensitive information dsOtherCount := 0 for _, dsStat := range dsStats.Result { if models.IsKnownDataSourcePlugin(dsStat.Type) { metrics["stats.ds."+dsStat.Type+".count"] = dsStat.Count } else { dsOtherCount += dsStat.Count } } metrics["stats.ds.other.count"] = dsOtherCount out, _ := json.MarshalIndent(report, "", " ") data := bytes.NewBuffer(out) client := http.Client{Timeout: time.Duration(5 * time.Second)} go client.Post("https://stats.grafana.org/grafana-usage-report", "application/json", data) }
pkg/metrics/metrics.go
1
https://github.com/grafana/grafana/commit/491e6897e756e06f39cd912ec01a79fcb0cf31c4
[ 0.0025848252698779106, 0.0003903837641701102, 0.00016278063412755728, 0.00017003968241624534, 0.0005657139699906111 ]
{ "id": 6, "code_window": [ "\t\tstatus := rw.Status()\n", "\n", "\t\tcode := sanitizeCode(status)\n", "\t\tmethod := sanitizeMethod(req.Method)\n", "\t\tmetrics.M_Http_Request_Total.WithLabelValues(handler, code, method).Inc()\n", "\t\tmetrics.M_Http_Request_Summary.WithLabelValues(handler, code, method).Observe(time.Since(now).Seconds())\n", "\n", "\t\tif strings.HasPrefix(req.RequestURI, \"/api/datasources/proxy\") {\n", "\t\t\tcountProxyRequests(status)\n", "\t\t} else if strings.HasPrefix(req.RequestURI, \"/api/\") {\n" ], "labels": [ "keep", "keep", "keep", "keep", "keep", "replace", "keep", "keep", "keep", "keep" ], "after_edit": [ "\t\tduration := time.Since(now).Nanoseconds() / int64(time.Millisecond)\n", "\t\tmetrics.M_Http_Request_Summary.WithLabelValues(handler, code, method).Observe(float64(duration))\n" ], "file_path": "pkg/middleware/request_metrics.go", "type": "replace", "edit_start_line_idx": 23 }
"E4"
vendor/github.com/jmespath/go-jmespath/fuzz/corpus/expr-212
0
https://github.com/grafana/grafana/commit/491e6897e756e06f39cd912ec01a79fcb0cf31c4
[ 0.00017413323803339154, 0.00017413323803339154, 0.00017413323803339154, 0.00017413323803339154, 0 ]
{ "id": 6, "code_window": [ "\t\tstatus := rw.Status()\n", "\n", "\t\tcode := sanitizeCode(status)\n", "\t\tmethod := sanitizeMethod(req.Method)\n", "\t\tmetrics.M_Http_Request_Total.WithLabelValues(handler, code, method).Inc()\n", "\t\tmetrics.M_Http_Request_Summary.WithLabelValues(handler, code, method).Observe(time.Since(now).Seconds())\n", "\n", "\t\tif strings.HasPrefix(req.RequestURI, \"/api/datasources/proxy\") {\n", "\t\t\tcountProxyRequests(status)\n", "\t\t} else if strings.HasPrefix(req.RequestURI, \"/api/\") {\n" ], "labels": [ "keep", "keep", "keep", "keep", "keep", "replace", "keep", "keep", "keep", "keep" ], "after_edit": [ "\t\tduration := time.Since(now).Nanoseconds() / int64(time.Millisecond)\n", "\t\tmetrics.M_Http_Request_Summary.WithLabelValues(handler, code, method).Observe(float64(duration))\n" ], "file_path": "pkg/middleware/request_metrics.go", "type": "replace", "edit_start_line_idx": 23 }
all: deps build deps-go: go run build.go setup deps-js: yarn install --pure-lockfile --no-progress deps: deps-js build-go: go run build.go build build-js: npm run build build: build-go build-js test-go: go test -v ./pkg/... test-js: npm test test: test-go test-js run: ./bin/grafana-server
Makefile
0
https://github.com/grafana/grafana/commit/491e6897e756e06f39cd912ec01a79fcb0cf31c4
[ 0.00017282662156503648, 0.00017207128985319287, 0.00017102387209888548, 0.00017236336134374142, 7.643985782124219e-7 ]
{ "id": 6, "code_window": [ "\t\tstatus := rw.Status()\n", "\n", "\t\tcode := sanitizeCode(status)\n", "\t\tmethod := sanitizeMethod(req.Method)\n", "\t\tmetrics.M_Http_Request_Total.WithLabelValues(handler, code, method).Inc()\n", "\t\tmetrics.M_Http_Request_Summary.WithLabelValues(handler, code, method).Observe(time.Since(now).Seconds())\n", "\n", "\t\tif strings.HasPrefix(req.RequestURI, \"/api/datasources/proxy\") {\n", "\t\t\tcountProxyRequests(status)\n", "\t\t} else if strings.HasPrefix(req.RequestURI, \"/api/\") {\n" ], "labels": [ "keep", "keep", "keep", "keep", "keep", "replace", "keep", "keep", "keep", "keep" ], "after_edit": [ "\t\tduration := time.Since(now).Nanoseconds() / int64(time.Millisecond)\n", "\t\tmetrics.M_Http_Request_Summary.WithLabelValues(handler, code, method).Observe(float64(duration))\n" ], "file_path": "pkg/middleware/request_metrics.go", "type": "replace", "edit_start_line_idx": 23 }
Copyright (c) 2013 The Gorilla WebSocket Authors. All rights reserved. Redistribution and use in source and binary forms, with or without modification, are permitted provided that the following conditions are met: Redistributions of source code must retain the above copyright notice, this list of conditions and the following disclaimer. Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following disclaimer in the documentation and/or other materials provided with the distribution. THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
vendor/github.com/gorilla/websocket/LICENSE
0
https://github.com/grafana/grafana/commit/491e6897e756e06f39cd912ec01a79fcb0cf31c4
[ 0.00017307349480688572, 0.00017164263408631086, 0.00016929092817008495, 0.00017256349383387715, 0.000001675894395702926 ]
{ "id": 7, "code_window": [ "\tcontext.NoDataFound = noDataFound\n", "\tcontext.EndTime = time.Now()\n", "\tcontext.Rule.State = e.getNewState(context)\n", "\n", "\telapsedTime := context.EndTime.Sub(context.StartTime).Seconds()\n", "\tmetrics.M_Alerting_Execution_Time.Observe(elapsedTime)\n", "}\n", "\n", "// This should be move into evalContext once its been refactored.\n", "func (handler *DefaultEvalHandler) getNewState(evalContext *EvalContext) models.AlertStateType {\n" ], "labels": [ "keep", "keep", "keep", "keep", "replace", "replace", "keep", "keep", "keep", "keep" ], "after_edit": [ "\telapsedTime := context.EndTime.Sub(context.StartTime).Nanoseconds() / int64(time.Millisecond)\n", "\tmetrics.M_Alerting_Execution_Time.Observe(float64(elapsedTime))\n" ], "file_path": "pkg/services/alerting/eval_handler.go", "type": "replace", "edit_start_line_idx": 65 }
package metrics import ( "bytes" "encoding/json" "net/http" "runtime" "strings" "time" "github.com/grafana/grafana/pkg/bus" "github.com/grafana/grafana/pkg/models" "github.com/grafana/grafana/pkg/plugins" "github.com/grafana/grafana/pkg/setting" "github.com/prometheus/client_golang/prometheus" ) const exporterName = "grafana" var ( M_Instance_Start prometheus.Counter M_Page_Status *prometheus.CounterVec M_Api_Status *prometheus.CounterVec M_Proxy_Status *prometheus.CounterVec M_Http_Request_Total *prometheus.CounterVec M_Http_Request_Summary *prometheus.SummaryVec M_Api_User_SignUpStarted prometheus.Counter M_Api_User_SignUpCompleted prometheus.Counter M_Api_User_SignUpInvite prometheus.Counter M_Api_Dashboard_Save prometheus.Summary M_Api_Dashboard_Get prometheus.Summary M_Api_Dashboard_Search prometheus.Summary M_Api_Admin_User_Create prometheus.Counter M_Api_Login_Post prometheus.Counter M_Api_Login_OAuth prometheus.Counter M_Api_Org_Create prometheus.Counter M_Api_Dashboard_Snapshot_Create prometheus.Counter M_Api_Dashboard_Snapshot_External prometheus.Counter M_Api_Dashboard_Snapshot_Get prometheus.Counter M_Api_Dashboard_Insert prometheus.Counter M_Alerting_Result_State *prometheus.CounterVec M_Alerting_Notification_Sent *prometheus.CounterVec M_Aws_CloudWatch_GetMetricStatistics prometheus.Counter M_Aws_CloudWatch_ListMetrics prometheus.Counter M_DB_DataSource_QueryById prometheus.Counter // Timers M_DataSource_ProxyReq_Timer prometheus.Summary M_Alerting_Execution_Time prometheus.Summary // StatTotals M_Alerting_Active_Alerts prometheus.Gauge M_StatTotal_Dashboards prometheus.Gauge M_StatTotal_Users prometheus.Gauge M_StatTotal_Orgs prometheus.Gauge M_StatTotal_Playlists prometheus.Gauge ) func init() { M_Instance_Start = prometheus.NewCounter(prometheus.CounterOpts{ Name: "instance_start_total", Help: "counter for started instances", Namespace: exporterName, }) M_Page_Status = prometheus.NewCounterVec( prometheus.CounterOpts{ Name: 
"page_response_status_total", Help: "page http response status", Namespace: exporterName, }, []string{"code"}, ) M_Api_Status = prometheus.NewCounterVec( prometheus.CounterOpts{ Name: "api_response_status_total", Help: "api http response status", Namespace: exporterName, }, []string{"code"}, ) M_Proxy_Status = prometheus.NewCounterVec( prometheus.CounterOpts{ Name: "proxy_response_status_total", Help: "proxy http response status", Namespace: exporterName, }, []string{"code"}, ) M_Http_Request_Total = prometheus.NewCounterVec( prometheus.CounterOpts{ Name: "http_request_total", Help: "http request counter", }, []string{"handler", "statuscode", "method"}, ) M_Http_Request_Summary = prometheus.NewSummaryVec( prometheus.SummaryOpts{ Name: "http_request_duration", Help: "http request summary", }, []string{"handler", "statuscode", "method"}, ) M_Api_User_SignUpStarted = prometheus.NewCounter(prometheus.CounterOpts{ Name: "api_user_signup_started_total", Help: "amount of users who started the signup flow", Namespace: exporterName, }) M_Api_User_SignUpCompleted = prometheus.NewCounter(prometheus.CounterOpts{ Name: "api_user_signup_completed_total", Help: "amount of users who completed the signup flow", Namespace: exporterName, }) M_Api_User_SignUpInvite = prometheus.NewCounter(prometheus.CounterOpts{ Name: "api_user_signup_invite_total", Help: "amount of users who have been invited", Namespace: exporterName, }) M_Api_Dashboard_Save = prometheus.NewSummary(prometheus.SummaryOpts{ Name: "api_dashboard_save", Help: "summary for dashboard save duration", Namespace: exporterName, }) M_Api_Dashboard_Get = prometheus.NewSummary(prometheus.SummaryOpts{ Name: "api_dashboard_get", Help: "summary for dashboard get duration", Namespace: exporterName, }) M_Api_Dashboard_Search = prometheus.NewSummary(prometheus.SummaryOpts{ Name: "api_dashboard_search", Help: "summary for dashboard search duration", Namespace: exporterName, }) M_Api_Admin_User_Create = 
prometheus.NewCounter(prometheus.CounterOpts{ Name: "api_admin_user_created_total", Help: "api admin user created counter", Namespace: exporterName, }) M_Api_Login_Post = prometheus.NewCounter(prometheus.CounterOpts{ Name: "api_login_post_total", Help: "api login post counter", Namespace: exporterName, }) M_Api_Login_OAuth = prometheus.NewCounter(prometheus.CounterOpts{ Name: "api_login_oauth_total", Help: "api login oauth counter", Namespace: exporterName, }) M_Api_Org_Create = prometheus.NewCounter(prometheus.CounterOpts{ Name: "api_org_create_total", Help: "api org created counter", Namespace: exporterName, }) M_Api_Dashboard_Snapshot_Create = prometheus.NewCounter(prometheus.CounterOpts{ Name: "api_dashboard_snapshot_create_total", Help: "dashboard snapshots created", Namespace: exporterName, }) M_Api_Dashboard_Snapshot_External = prometheus.NewCounter(prometheus.CounterOpts{ Name: "api_dashboard_snapshot_external_total", Help: "external dashboard snapshots created", Namespace: exporterName, }) M_Api_Dashboard_Snapshot_Get = prometheus.NewCounter(prometheus.CounterOpts{ Name: "api_dashboard_snapshot_get_total", Help: "loaded dashboards", Namespace: exporterName, }) M_Api_Dashboard_Insert = prometheus.NewCounter(prometheus.CounterOpts{ Name: "api_models_dashboard_insert_total", Help: "dashboards inserted ", Namespace: exporterName, }) M_Alerting_Result_State = prometheus.NewCounterVec(prometheus.CounterOpts{ Name: "alerting_result_total", Help: "alert execution result counter", Namespace: exporterName, }, []string{"state"}) M_Alerting_Notification_Sent = prometheus.NewCounterVec(prometheus.CounterOpts{ Name: "alerting_notification_sent_total", Help: "counter for how many alert notifications been sent", Namespace: exporterName, }, []string{"type"}) M_Aws_CloudWatch_GetMetricStatistics = prometheus.NewCounter(prometheus.CounterOpts{ Name: "aws_cloudwatch_get_metric_statistics_total", Help: "counter for getting metric statistics from aws", Namespace: exporterName, 
}) M_Aws_CloudWatch_ListMetrics = prometheus.NewCounter(prometheus.CounterOpts{ Name: "aws_cloudwatch_list_metrics_total", Help: "counter for getting list of metrics from aws", Namespace: exporterName, }) M_DB_DataSource_QueryById = prometheus.NewCounter(prometheus.CounterOpts{ Name: "db_datasource_query_by_id_total", Help: "counter for getting datasource by id", Namespace: exporterName, }) M_DataSource_ProxyReq_Timer = prometheus.NewSummary(prometheus.SummaryOpts{ Name: "api_dataproxy_request_all", Help: "summary for dashboard search duration", Namespace: exporterName, }) M_Alerting_Execution_Time = prometheus.NewSummary(prometheus.SummaryOpts{ Name: "alerting_execution_time_seconds", Help: "summary of alert exeuction duration", Namespace: exporterName, }) M_Alerting_Active_Alerts = prometheus.NewGauge(prometheus.GaugeOpts{ Name: "alerting_active_alerts", Help: "amount of active alerts", Namespace: exporterName, }) M_StatTotal_Dashboards = prometheus.NewGauge(prometheus.GaugeOpts{ Name: "stat_totals_dashboard", Help: "total amount of dashboards", Namespace: exporterName, }) M_StatTotal_Users = prometheus.NewGauge(prometheus.GaugeOpts{ Name: "stat_total_users", Help: "total amount of users", Namespace: exporterName, }) M_StatTotal_Orgs = prometheus.NewGauge(prometheus.GaugeOpts{ Name: "stat_total_orgs", Help: "total amount of orgs", Namespace: exporterName, }) M_StatTotal_Playlists = prometheus.NewGauge(prometheus.GaugeOpts{ Name: "stat_total_playlists", Help: "total amount of playlists", Namespace: exporterName, }) } func initMetricVars(settings *MetricSettings) { prometheus.MustRegister( M_Instance_Start, M_Page_Status, M_Api_Status, M_Proxy_Status, M_Http_Request_Total, M_Http_Request_Summary, M_Api_User_SignUpStarted, M_Api_User_SignUpCompleted, M_Api_User_SignUpInvite, M_Api_Dashboard_Save, M_Api_Dashboard_Get, M_Api_Dashboard_Search, M_DataSource_ProxyReq_Timer, M_Alerting_Execution_Time, M_Api_Admin_User_Create, M_Api_Login_Post, M_Api_Login_OAuth, 
M_Api_Org_Create, M_Api_Dashboard_Snapshot_Create, M_Api_Dashboard_Snapshot_External, M_Api_Dashboard_Snapshot_Get, M_Api_Dashboard_Insert, M_Alerting_Result_State, M_Alerting_Notification_Sent, M_Aws_CloudWatch_GetMetricStatistics, M_Aws_CloudWatch_ListMetrics, M_DB_DataSource_QueryById, M_Alerting_Active_Alerts, M_StatTotal_Dashboards, M_StatTotal_Users, M_StatTotal_Orgs, M_StatTotal_Playlists) go instrumentationLoop(settings) } func instrumentationLoop(settings *MetricSettings) chan struct{} { M_Instance_Start.Inc() onceEveryDayTick := time.NewTicker(time.Hour * 24) secondTicker := time.NewTicker(time.Second * time.Duration(settings.IntervalSeconds)) for { select { case <-onceEveryDayTick.C: sendUsageStats() case <-secondTicker.C: updateTotalStats() } } } var metricPublishCounter int64 = 0 func updateTotalStats() { metricPublishCounter++ if metricPublishCounter == 1 || metricPublishCounter%10 == 0 { statsQuery := models.GetSystemStatsQuery{} if err := bus.Dispatch(&statsQuery); err != nil { metricsLogger.Error("Failed to get system stats", "error", err) return } M_StatTotal_Dashboards.Set(float64(statsQuery.Result.Dashboards)) M_StatTotal_Users.Set(float64(statsQuery.Result.Users)) M_StatTotal_Playlists.Set(float64(statsQuery.Result.Playlists)) M_StatTotal_Orgs.Set(float64(statsQuery.Result.Orgs)) } } func sendUsageStats() { if !setting.ReportingEnabled { return } metricsLogger.Debug("Sending anonymous usage stats to stats.grafana.org") version := strings.Replace(setting.BuildVersion, ".", "_", -1) metrics := map[string]interface{}{} report := map[string]interface{}{ "version": version, "metrics": metrics, "os": runtime.GOOS, "arch": runtime.GOARCH, } statsQuery := models.GetSystemStatsQuery{} if err := bus.Dispatch(&statsQuery); err != nil { metricsLogger.Error("Failed to get system stats", "error", err) return } metrics["stats.dashboards.count"] = statsQuery.Result.Dashboards metrics["stats.users.count"] = statsQuery.Result.Users metrics["stats.orgs.count"] = 
statsQuery.Result.Orgs metrics["stats.playlist.count"] = statsQuery.Result.Playlists metrics["stats.plugins.apps.count"] = len(plugins.Apps) metrics["stats.plugins.panels.count"] = len(plugins.Panels) metrics["stats.plugins.datasources.count"] = len(plugins.DataSources) metrics["stats.alerts.count"] = statsQuery.Result.Alerts metrics["stats.active_users.count"] = statsQuery.Result.ActiveUsers metrics["stats.datasources.count"] = statsQuery.Result.Datasources dsStats := models.GetDataSourceStatsQuery{} if err := bus.Dispatch(&dsStats); err != nil { metricsLogger.Error("Failed to get datasource stats", "error", err) return } // send counters for each data source // but ignore any custom data sources // as sending that name could be sensitive information dsOtherCount := 0 for _, dsStat := range dsStats.Result { if models.IsKnownDataSourcePlugin(dsStat.Type) { metrics["stats.ds."+dsStat.Type+".count"] = dsStat.Count } else { dsOtherCount += dsStat.Count } } metrics["stats.ds.other.count"] = dsOtherCount out, _ := json.MarshalIndent(report, "", " ") data := bytes.NewBuffer(out) client := http.Client{Timeout: time.Duration(5 * time.Second)} go client.Post("https://stats.grafana.org/grafana-usage-report", "application/json", data) }
pkg/metrics/metrics.go
1
https://github.com/grafana/grafana/commit/491e6897e756e06f39cd912ec01a79fcb0cf31c4
[ 0.010768194682896137, 0.0007292194059118629, 0.00016218601376749575, 0.00016869240789674222, 0.0018502629827708006 ]
{ "id": 7, "code_window": [ "\tcontext.NoDataFound = noDataFound\n", "\tcontext.EndTime = time.Now()\n", "\tcontext.Rule.State = e.getNewState(context)\n", "\n", "\telapsedTime := context.EndTime.Sub(context.StartTime).Seconds()\n", "\tmetrics.M_Alerting_Execution_Time.Observe(elapsedTime)\n", "}\n", "\n", "// This should be move into evalContext once its been refactored.\n", "func (handler *DefaultEvalHandler) getNewState(evalContext *EvalContext) models.AlertStateType {\n" ], "labels": [ "keep", "keep", "keep", "keep", "replace", "replace", "keep", "keep", "keep", "keep" ], "after_edit": [ "\telapsedTime := context.EndTime.Sub(context.StartTime).Nanoseconds() / int64(time.Millisecond)\n", "\tmetrics.M_Alerting_Execution_Time.Observe(float64(elapsedTime))\n" ], "file_path": "pkg/services/alerting/eval_handler.go", "type": "replace", "edit_start_line_idx": 65 }
package migrator import ( "time" _ "github.com/go-sql-driver/mysql" "github.com/go-xorm/xorm" "github.com/grafana/grafana/pkg/log" _ "github.com/lib/pq" _ "github.com/mattn/go-sqlite3" ) type Migrator struct { x *xorm.Engine dialect Dialect migrations []Migration Logger log.Logger } type MigrationLog struct { Id int64 MigrationId string Sql string Success bool Error string Timestamp time.Time } func NewMigrator(engine *xorm.Engine) *Migrator { mg := &Migrator{} mg.x = engine mg.Logger = log.New("migrator") mg.migrations = make([]Migration, 0) mg.dialect = NewDialect(mg.x.DriverName()) return mg } func (mg *Migrator) AddMigration(id string, m Migration) { m.SetId(id) mg.migrations = append(mg.migrations, m) } func (mg *Migrator) GetMigrationLog() (map[string]MigrationLog, error) { logMap := make(map[string]MigrationLog) logItems := make([]MigrationLog, 0) exists, err := mg.x.IsTableExist(new(MigrationLog)) if err != nil { return nil, err } if !exists { return logMap, nil } if err = mg.x.Find(&logItems); err != nil { return nil, err } for _, logItem := range logItems { if !logItem.Success { continue } logMap[logItem.MigrationId] = logItem } return logMap, nil } func (mg *Migrator) Start() error { mg.Logger.Info("Starting DB migration") logMap, err := mg.GetMigrationLog() if err != nil { return err } for _, m := range mg.migrations { _, exists := logMap[m.Id()] if exists { mg.Logger.Debug("Skipping migration: Already executed", "id", m.Id()) continue } sql := m.Sql(mg.dialect) record := MigrationLog{ MigrationId: m.Id(), Sql: sql, Timestamp: time.Now(), } mg.Logger.Debug("Executing", "sql", sql) err := mg.inTransaction(func(sess *xorm.Session) error { if err := mg.exec(m, sess); err != nil { mg.Logger.Error("Exec failed", "error", err, "sql", sql) record.Error = err.Error() sess.Insert(&record) return err } else { record.Success = true sess.Insert(&record) } return nil }) if err != nil { return err } } return nil } func (mg *Migrator) exec(m Migration, sess 
*xorm.Session) error { mg.Logger.Info("Executing migration", "id", m.Id()) condition := m.GetCondition() if condition != nil { sql, args := condition.Sql(mg.dialect) results, err := sess.Query(sql, args...) if err != nil || len(results) == 0 { mg.Logger.Info("Skipping migration condition not fulfilled", "id", m.Id()) return sess.Rollback() } } _, err := sess.Exec(m.Sql(mg.dialect)) if err != nil { mg.Logger.Error("Executing migration failed", "id", m.Id(), "error", err) return err } return nil } type dbTransactionFunc func(sess *xorm.Session) error func (mg *Migrator) inTransaction(callback dbTransactionFunc) error { var err error sess := mg.x.NewSession() defer sess.Close() if err = sess.Begin(); err != nil { return err } err = callback(sess) if err != nil { sess.Rollback() return err } else if err = sess.Commit(); err != nil { return err } return nil }
pkg/services/sqlstore/migrator/migrator.go
0
https://github.com/grafana/grafana/commit/491e6897e756e06f39cd912ec01a79fcb0cf31c4
[ 0.00017297799058724195, 0.00016931061691138893, 0.00016483063518535346, 0.0001690905774012208, 0.000001974050746866851 ]
{ "id": 7, "code_window": [ "\tcontext.NoDataFound = noDataFound\n", "\tcontext.EndTime = time.Now()\n", "\tcontext.Rule.State = e.getNewState(context)\n", "\n", "\telapsedTime := context.EndTime.Sub(context.StartTime).Seconds()\n", "\tmetrics.M_Alerting_Execution_Time.Observe(elapsedTime)\n", "}\n", "\n", "// This should be move into evalContext once its been refactored.\n", "func (handler *DefaultEvalHandler) getNewState(evalContext *EvalContext) models.AlertStateType {\n" ], "labels": [ "keep", "keep", "keep", "keep", "replace", "replace", "keep", "keep", "keep", "keep" ], "after_edit": [ "\telapsedTime := context.EndTime.Sub(context.StartTime).Nanoseconds() / int64(time.Millisecond)\n", "\tmetrics.M_Alerting_Execution_Time.Observe(float64(elapsedTime))\n" ], "file_path": "pkg/services/alerting/eval_handler.go", "type": "replace", "edit_start_line_idx": 65 }
package xmlutil import ( "encoding/base64" "encoding/xml" "fmt" "io" "reflect" "strconv" "strings" "time" ) // UnmarshalXML deserializes an xml.Decoder into the container v. V // needs to match the shape of the XML expected to be decoded. // If the shape doesn't match unmarshaling will fail. func UnmarshalXML(v interface{}, d *xml.Decoder, wrapper string) error { n, err := XMLToStruct(d, nil) if err != nil { return err } if n.Children != nil { for _, root := range n.Children { for _, c := range root { if wrappedChild, ok := c.Children[wrapper]; ok { c = wrappedChild[0] // pull out wrapped element } err = parse(reflect.ValueOf(v), c, "") if err != nil { if err == io.EOF { return nil } return err } } } return nil } return nil } // parse deserializes any value from the XMLNode. The type tag is used to infer the type, or reflect // will be used to determine the type from r. func parse(r reflect.Value, node *XMLNode, tag reflect.StructTag) error { rtype := r.Type() if rtype.Kind() == reflect.Ptr { rtype = rtype.Elem() // check kind of actual element type } t := tag.Get("type") if t == "" { switch rtype.Kind() { case reflect.Struct: t = "structure" case reflect.Slice: t = "list" case reflect.Map: t = "map" } } switch t { case "structure": if field, ok := rtype.FieldByName("_"); ok { tag = field.Tag } return parseStruct(r, node, tag) case "list": return parseList(r, node, tag) case "map": return parseMap(r, node, tag) default: return parseScalar(r, node, tag) } } // parseStruct deserializes a structure and its fields from an XMLNode. Any nested // types in the structure will also be deserialized. 
func parseStruct(r reflect.Value, node *XMLNode, tag reflect.StructTag) error { t := r.Type() if r.Kind() == reflect.Ptr { if r.IsNil() { // create the structure if it's nil s := reflect.New(r.Type().Elem()) r.Set(s) r = s } r = r.Elem() t = t.Elem() } // unwrap any payloads if payload := tag.Get("payload"); payload != "" { field, _ := t.FieldByName(payload) return parseStruct(r.FieldByName(payload), node, field.Tag) } for i := 0; i < t.NumField(); i++ { field := t.Field(i) if c := field.Name[0:1]; strings.ToLower(c) == c { continue // ignore unexported fields } // figure out what this field is called name := field.Name if field.Tag.Get("flattened") != "" && field.Tag.Get("locationNameList") != "" { name = field.Tag.Get("locationNameList") } else if locName := field.Tag.Get("locationName"); locName != "" { name = locName } // try to find the field by name in elements elems := node.Children[name] if elems == nil { // try to find the field in attributes if val, ok := node.findElem(name); ok { elems = []*XMLNode{{Text: val}} } } member := r.FieldByName(field.Name) for _, elem := range elems { err := parse(member, elem, field.Tag) if err != nil { return err } } } return nil } // parseList deserializes a list of values from an XML node. Each list entry // will also be deserialized. 
func parseList(r reflect.Value, node *XMLNode, tag reflect.StructTag) error { t := r.Type() if tag.Get("flattened") == "" { // look at all item entries mname := "member" if name := tag.Get("locationNameList"); name != "" { mname = name } if Children, ok := node.Children[mname]; ok { if r.IsNil() { r.Set(reflect.MakeSlice(t, len(Children), len(Children))) } for i, c := range Children { err := parse(r.Index(i), c, "") if err != nil { return err } } } } else { // flattened list means this is a single element if r.IsNil() { r.Set(reflect.MakeSlice(t, 0, 0)) } childR := reflect.Zero(t.Elem()) r.Set(reflect.Append(r, childR)) err := parse(r.Index(r.Len()-1), node, "") if err != nil { return err } } return nil } // parseMap deserializes a map from an XMLNode. The direct children of the XMLNode // will also be deserialized as map entries. func parseMap(r reflect.Value, node *XMLNode, tag reflect.StructTag) error { if r.IsNil() { r.Set(reflect.MakeMap(r.Type())) } if tag.Get("flattened") == "" { // look at all child entries for _, entry := range node.Children["entry"] { parseMapEntry(r, entry, tag) } } else { // this element is itself an entry parseMapEntry(r, node, tag) } return nil } // parseMapEntry deserializes a map entry from a XML node. func parseMapEntry(r reflect.Value, node *XMLNode, tag reflect.StructTag) error { kname, vname := "key", "value" if n := tag.Get("locationNameKey"); n != "" { kname = n } if n := tag.Get("locationNameValue"); n != "" { vname = n } keys, ok := node.Children[kname] values := node.Children[vname] if ok { for i, key := range keys { keyR := reflect.ValueOf(key.Text) value := values[i] valueR := reflect.New(r.Type().Elem()).Elem() parse(valueR, value, "") r.SetMapIndex(keyR, valueR) } } return nil } // parseScaller deserializes an XMLNode value into a concrete type based on the // interface type of r. // // Error is returned if the deserialization fails due to invalid type conversion, // or unsupported interface type. 
func parseScalar(r reflect.Value, node *XMLNode, tag reflect.StructTag) error { switch r.Interface().(type) { case *string: r.Set(reflect.ValueOf(&node.Text)) return nil case []byte: b, err := base64.StdEncoding.DecodeString(node.Text) if err != nil { return err } r.Set(reflect.ValueOf(b)) case *bool: v, err := strconv.ParseBool(node.Text) if err != nil { return err } r.Set(reflect.ValueOf(&v)) case *int64: v, err := strconv.ParseInt(node.Text, 10, 64) if err != nil { return err } r.Set(reflect.ValueOf(&v)) case *float64: v, err := strconv.ParseFloat(node.Text, 64) if err != nil { return err } r.Set(reflect.ValueOf(&v)) case *time.Time: const ISO8601UTC = "2006-01-02T15:04:05Z" t, err := time.Parse(ISO8601UTC, node.Text) if err != nil { return err } r.Set(reflect.ValueOf(&t)) default: return fmt.Errorf("unsupported value: %v (%s)", r.Interface(), r.Type()) } return nil }
vendor/github.com/aws/aws-sdk-go/private/protocol/xml/xmlutil/unmarshal.go
0
https://github.com/grafana/grafana/commit/491e6897e756e06f39cd912ec01a79fcb0cf31c4
[ 0.00017771073908079416, 0.0001706470357021317, 0.0001643392606638372, 0.00017153765656985343, 0.0000031760478123032954 ]
{ "id": 7, "code_window": [ "\tcontext.NoDataFound = noDataFound\n", "\tcontext.EndTime = time.Now()\n", "\tcontext.Rule.State = e.getNewState(context)\n", "\n", "\telapsedTime := context.EndTime.Sub(context.StartTime).Seconds()\n", "\tmetrics.M_Alerting_Execution_Time.Observe(elapsedTime)\n", "}\n", "\n", "// This should be move into evalContext once its been refactored.\n", "func (handler *DefaultEvalHandler) getNewState(evalContext *EvalContext) models.AlertStateType {\n" ], "labels": [ "keep", "keep", "keep", "keep", "replace", "replace", "keep", "keep", "keep", "keep" ], "after_edit": [ "\telapsedTime := context.EndTime.Sub(context.StartTime).Nanoseconds() / int64(time.Millisecond)\n", "\tmetrics.M_Alerting_Execution_Time.Observe(float64(elapsedTime))\n" ], "file_path": "pkg/services/alerting/eval_handler.go", "type": "replace", "edit_start_line_idx": 65 }
+++ title = "AWS CloudWatch" description = "Guide for using CloudWatch in Grafana" keywords = ["grafana", "cloudwatch", "guide"] type = "docs" aliases = ["/datasources/cloudwatch"] [menu.docs] name = "AWS Cloudwatch" identifier = "cloudwatch" parent = "datasources" weight = 10 +++ # Using AWS CloudWatch in Grafana Grafana ships with built in support for CloudWatch. You just have to add it as a data source and you will be ready to build dashboards for you CloudWatch metrics. ## Adding the data source to Grafana 1. Open the side menu by clicking the Grafana icon in the top header. 2. In the side menu under the `Dashboards` link you should find a link named `Data Sources`. 3. Click the `+ Add data source` button in the top header. 4. Select `Cloudwatch` from the *Type* dropdown. > NOTE: If at any moment you have issues with getting this datasource to work and Grafana is giving you undescriptive errors then don't forget to check your log file (try looking in /var/log/grafana/grafana.log). Name | Description ------------ | ------------- *Name* | The data source name. This is how you refer to the data source in panels & queries. *Default* | Default data source means that it will be pre-selected for new panels. *Credentials* profile name | Specify the name of the profile to use (if you use `~/aws/credentials` file), leave blank for default. *Default Region* | Used in query editor to set region (can be changed on per query basis) *Custom Metrics namespace* | Specify the CloudWatch namespace of Custom metrics *Assume Role Arn* | Specify the ARN of the role to assume ## Authentication ### IAM Roles Currently all access to CloudWatch is done server side by the Grafana backend using the official AWS SDK. If you grafana server is running on AWS you can use IAM Roles and authentication will be handled automatically. 
Checkout AWS docs on [IAM Roles](http://docs.aws.amazon.com/AWSEC2/latest/UserGuide/iam-roles-for-amazon-ec2.html) ### AWS credentials file Create a file at `~/.aws/credentials`. That is the `HOME` path for user running grafana-server. > NOTE: If you think you have the credentials file in the right place but it is still not working then you might try moving your .aws file to '/usr/share/grafana/' and make sure your credentials file has at most 0644 permissions. Example content: [default] aws_access_key_id = asdsadasdasdasd aws_secret_access_key = dasdasdsadasdasdasdsa region = us-west-2 ## Metric Query Editor ![](/img/docs/v43/cloudwatch_editor.png) You need to specify a namespace, metric, at least one stat, and at least one dimension. ## Templated queries Instead of hard-coding things like server, application and sensor name in you metric queries you can use variables in their place. Variables are shown as dropdown select boxes at the top of the dashboard. These dropdowns makes it easy to change the data being displayed in your dashboard. Checkout the [Templating]({{< relref "reference/templating.md" >}}) documentation for an introduction to the templating feature and the different types of template variables. ### Query variable CloudWatch Datasource Plugin provides the following queries you can specify in the `Query` field in the Variable edit view. They allow you to fill a variable's options list with things like `region`, `namespaces`, `metric names` and `dimension keys/values`. Name | Description ------- | -------- *regions()* | Returns a list of regions AWS provides their service. *namespaces()* | Returns a list of namespaces CloudWatch support. *metrics(namespace, [region])* | Returns a list of metrics in the namespace. (specify region for custom metrics) *dimension_keys(namespace)* | Returns a list of dimension keys in the namespace. 
*dimension_values(region, namespace, metric, dimension_key)* | Returns a list of dimension values matching the specified `region`, `namespace`, `metric` and `dimension_key`. *ebs_volume_ids(region, instance_id)* | Returns a list of volume ids matching the specified `region`, `instance_id`. *ec2_instance_attribute(region, attribute_name, filters)* | Returns a list of attributes matching the specified `region`, `attribute_name`, `filters`. For details about the metrics CloudWatch provides, please refer to the [CloudWatch documentation](https://docs.aws.amazon.com/AmazonCloudWatch/latest/DeveloperGuide/CW_Support_For_AWS.html). #### Examples templated Queries Example dimension queries which will return list of resources for individual AWS Services: Query | Service ------- | ----- *dimension_values(us-east-1,AWS/ELB,RequestCount,LoadBalancerName)* | ELB *dimension_values(us-east-1,AWS/ElastiCache,CPUUtilization,CacheClusterId)* | ElastiCache *dimension_values(us-east-1,AWS/Redshift,CPUUtilization,ClusterIdentifier)* | RedShift *dimension_values(us-east-1,AWS/RDS,CPUUtilization,DBInstanceIdentifier)* | RDS *dimension_values(us-east-1,AWS/S3,BucketSizeBytes,BucketName)* | S3 ## ec2_instance_attribute examples ### JSON filters The `ec2_instance_attribute` query takes `filters` in JSON format. You can specify [pre-defined filters of ec2:DescribeInstances](http://docs.aws.amazon.com/AWSEC2/latest/APIReference/API_DescribeInstances.html). Note that the actual filtering takes place on Amazon's servers, not in Grafana. Filters syntax: ```javascript { filter_name1: [ filter_value1 ], filter_name2: [ filter_value2 ] } ``` Example `ec2_instance_attribute()` query ec2_instance_attribute(us-east-1, InstanceId, { "tag:Environment": [ "production" ] }) ### Selecting Attributes Only 1 attribute per instance can be returned. Any flat attribute can be selected (i.e. if the attribute has a single value and isn't an object or array). 
Below is a list of available flat attributes: * `AmiLaunchIndex` * `Architecture` * `ClientToken` * `EbsOptimized` * `EnaSupport` * `Hypervisor` * `IamInstanceProfile` * `ImageId` * `InstanceId` * `InstanceLifecycle` * `InstanceType` * `KernelId` * `KeyName` * `LaunchTime` * `Platform` * `PrivateDnsName` * `PrivateIpAddress` * `PublicDnsName` * `PublicIpAddress` * `RamdiskId` * `RootDeviceName` * `RootDeviceType` * `SourceDestCheck` * `SpotInstanceRequestId` * `SriovNetSupport` * `SubnetId` * `VirtualizationType` * `VpcId` Tags can be selected by prepending the tag name with `Tags.` Example `ec2_instance_attribute()` query ec2_instance_attribute(us-east-1, Tags.Name, { "tag:Team": [ "sysops" ] }) ## Cost Amazon provides 1 million CloudWatch API requests each month at no additional charge. Past this, it costs $0.01 per 1,000 GetMetricStatistics or ListMetrics requests. For each query Grafana will issue a GetMetricStatistics request and every time you pick a dimension in the query editor Grafana will issue a ListMetrics request.
docs/sources/features/datasources/cloudwatch.md
0
https://github.com/grafana/grafana/commit/491e6897e756e06f39cd912ec01a79fcb0cf31c4
[ 0.00017643262981437147, 0.00016999297076836228, 0.000164009106811136, 0.00016973137098830193, 0.000003001699269589153 ]
{ "id": 0, "code_window": [ " const index = array.lastIndexOf(item);\n", " array.splice(index, 1);\n", "}\n", "\n", "function RegionParserError(message, lineNum) {\n", " this.message = `regionParser: ${message} (at line ${lineNum}).`;\n", " this.lineNum = lineNum;\n", " this.stack = (new Error()).stack;\n", "}\n" ], "labels": [ "keep", "keep", "keep", "keep", "replace", "keep", "keep", "keep", "keep" ], "after_edit": [ "function RegionParserError(message, index) {\n", " const lineNum = index + 1;\n" ], "file_path": "aio/transforms/examples-package/services/region-parser.js", "type": "replace", "edit_start_line_idx": 116 }
const blockC = require('./region-matchers/block-c'); const html = require('./region-matchers/html'); const inlineC = require('./region-matchers/inline-c'); const inlineCOnly = require('./region-matchers/inline-c-only'); const inlineHash = require('./region-matchers/inline-hash'); const NO_NAME_REGION = ''; const DEFAULT_PLASTER = '. . .'; const {mapObject} = require('../utils'); module.exports = function regionParser() { return regionParserImpl; }; regionParserImpl.regionMatchers = { ts: inlineC, js: inlineC, es6: inlineC, dart: inlineC, html: html, css: blockC, yaml: inlineHash, jade: inlineCOnly }; /** * @param contents string * @param fileType string * @returns {contents: string, regions: {[regionName: string]: string}} */ function regionParserImpl(contents, fileType) { const regionMatcher = regionParserImpl.regionMatchers[fileType]; const openRegions = []; const regionMap = {}; if (regionMatcher) { let plaster = regionMatcher.createPlasterComment(DEFAULT_PLASTER); const lines = contents.split(/\r?\n/).filter((line, index) => { const startRegion = line.match(regionMatcher.regionStartMatcher); const endRegion = line.match(regionMatcher.regionEndMatcher); const updatePlaster = line.match(regionMatcher.plasterMatcher); // start region processing if (startRegion) { // open up the specified region const regionNames = getRegionNames(startRegion[1]); if (regionNames.length === 0) { regionNames.push(''); } regionNames.forEach(regionName => { const region = regionMap[regionName]; if (region) { if (region.open) { throw new RegionParserError( `Tried to open a region, named "${regionName}", that is already open`, index); } region.open = true; region.lines.push(plaster); } else { regionMap[regionName] = {lines: [], open: true}; } openRegions.push(regionName); }); // end region processing } else if (endRegion) { if (openRegions.length === 0) { throw new RegionParserError('Tried to close a region when none are open', index); } // close down the specified region (or most recent 
if no name is given) const regionNames = getRegionNames(endRegion[1]); if (regionNames.length === 0) { regionNames.push(openRegions[openRegions.length - 1]); } regionNames.forEach(regionName => { const region = regionMap[regionName]; if (!region || !region.open) { throw new RegionParserError( `Tried to close a region, named "${regionName}", that is not open`, index); } region.open = false; removeLast(openRegions, regionName); }); // doc plaster processing } else if (updatePlaster) { plaster = regionMatcher.createPlasterComment(updatePlaster[1].trim()); // simple line of content processing } else { openRegions.forEach(regionName => regionMap[regionName].lines.push(line)); // do not filter out this line from the content return true; } // this line contained an annotation so let's filter it out return false; }); return { contents: lines.join('\n'), regions: mapObject(regionMap, (regionName, region) => region.lines.join('\n')) }; } else { return {contents, regions: {}}; } } function getRegionNames(input) { return (input.trim() === '') ? [] : input.split(',').map(name => name.trim()); } function removeLast(array, item) { const index = array.lastIndexOf(item); array.splice(index, 1); } function RegionParserError(message, lineNum) { this.message = `regionParser: ${message} (at line ${lineNum}).`; this.lineNum = lineNum; this.stack = (new Error()).stack; } RegionParserError.prototype = Object.create(Error.prototype); RegionParserError.prototype.constructor = RegionParserError;
aio/transforms/examples-package/services/region-parser.js
1
https://github.com/angular/angular/commit/6a9251874b20bff5c3324c8e5a6a1757ff4a140b
[ 0.9992037415504456, 0.5908290147781372, 0.0023197478149086237, 0.8650766015052795, 0.46142277121543884 ]
{ "id": 0, "code_window": [ " const index = array.lastIndexOf(item);\n", " array.splice(index, 1);\n", "}\n", "\n", "function RegionParserError(message, lineNum) {\n", " this.message = `regionParser: ${message} (at line ${lineNum}).`;\n", " this.lineNum = lineNum;\n", " this.stack = (new Error()).stack;\n", "}\n" ], "labels": [ "keep", "keep", "keep", "keep", "replace", "keep", "keep", "keep", "keep" ], "after_edit": [ "function RegionParserError(message, index) {\n", " const lineNum = index + 1;\n" ], "file_path": "aio/transforms/examples-package/services/region-parser.js", "type": "replace", "edit_start_line_idx": 116 }
/** * @license * Copyright Google Inc. All Rights Reserved. * * Use of this source code is governed by an MIT-style license that can be * found in the LICENSE file at https://angular.io/license */ import {scheduleMicroTask} from '../util'; import {AnimationPlayer} from './animation_player'; export class AnimationGroupPlayer implements AnimationPlayer { private _onDoneFns: Function[] = []; private _onStartFns: Function[] = []; private _finished = false; private _started = false; private _destroyed = false; private _onDestroyFns: Function[] = []; public parentPlayer: AnimationPlayer = null; constructor(private _players: AnimationPlayer[]) { let count = 0; const total = this._players.length; if (total == 0) { scheduleMicroTask(() => this._onFinish()); } else { this._players.forEach(player => { player.parentPlayer = this; player.onDone(() => { if (++count >= total) { this._onFinish(); } }); }); } } private _onFinish() { if (!this._finished) { this._finished = true; this._onDoneFns.forEach(fn => fn()); this._onDoneFns = []; } } init(): void { this._players.forEach(player => player.init()); } onStart(fn: () => void): void { this._onStartFns.push(fn); } onDone(fn: () => void): void { this._onDoneFns.push(fn); } onDestroy(fn: () => void): void { this._onDestroyFns.push(fn); } hasStarted() { return this._started; } play() { if (!this.parentPlayer) { this.init(); } if (!this.hasStarted()) { this._onStartFns.forEach(fn => fn()); this._onStartFns = []; this._started = true; } this._players.forEach(player => player.play()); } pause(): void { this._players.forEach(player => player.pause()); } restart(): void { this._players.forEach(player => player.restart()); } finish(): void { this._onFinish(); this._players.forEach(player => player.finish()); } destroy(): void { if (!this._destroyed) { this._onFinish(); this._players.forEach(player => player.destroy()); this._destroyed = true; this._onDestroyFns.forEach(fn => fn()); this._onDestroyFns = []; } } reset(): void { 
this._players.forEach(player => player.reset()); this._destroyed = false; this._finished = false; this._started = false; } setPosition(p: number): void { this._players.forEach(player => { player.setPosition(p); }); } getPosition(): number { let min = 0; this._players.forEach(player => { const p = player.getPosition(); min = Math.min(p, min); }); return min; } get players(): AnimationPlayer[] { return this._players; } }
modules/@angular/animations/src/players/animation_group_player.ts
0
https://github.com/angular/angular/commit/6a9251874b20bff5c3324c8e5a6a1757ff4a140b
[ 0.00017913230112753808, 0.00017030454182531685, 0.00016706644964870065, 0.00016946488176472485, 0.0000030709536531503545 ]
{ "id": 0, "code_window": [ " const index = array.lastIndexOf(item);\n", " array.splice(index, 1);\n", "}\n", "\n", "function RegionParserError(message, lineNum) {\n", " this.message = `regionParser: ${message} (at line ${lineNum}).`;\n", " this.lineNum = lineNum;\n", " this.stack = (new Error()).stack;\n", "}\n" ], "labels": [ "keep", "keep", "keep", "keep", "replace", "keep", "keep", "keep", "keep" ], "after_edit": [ "function RegionParserError(message, index) {\n", " const lineNum = index + 1;\n" ], "file_path": "aio/transforms/examples-package/services/region-parser.js", "type": "replace", "edit_start_line_idx": 116 }
/** * @license * Copyright Google Inc. All Rights Reserved. * * Use of this source code is governed by an MIT-style license that can be * found in the LICENSE file at https://angular.io/license */ /* DO NOT DELETE THIS FILE ======================= The purpose of this file is to allow `main-dynamic.ts` to be tsc-compiled BEFORE it is copied over to each of the associated example directories within `dist/examples`. */ export class AppModule {};
modules/@angular/examples/_common/module.d.ts
0
https://github.com/angular/angular/commit/6a9251874b20bff5c3324c8e5a6a1757ff4a140b
[ 0.0001788385707186535, 0.0001760210725478828, 0.00017320358892902732, 0.0001760210725478828, 0.0000028174908948130906 ]
{ "id": 0, "code_window": [ " const index = array.lastIndexOf(item);\n", " array.splice(index, 1);\n", "}\n", "\n", "function RegionParserError(message, lineNum) {\n", " this.message = `regionParser: ${message} (at line ${lineNum}).`;\n", " this.lineNum = lineNum;\n", " this.stack = (new Error()).stack;\n", "}\n" ], "labels": [ "keep", "keep", "keep", "keep", "replace", "keep", "keep", "keep", "keep" ], "after_edit": [ "function RegionParserError(message, index) {\n", " const lineNum = index + 1;\n" ], "file_path": "aio/transforms/examples-package/services/region-parser.js", "type": "replace", "edit_start_line_idx": 116 }
/** * @license * Copyright Google Inc. All Rights Reserved. * * Use of this source code is governed by an MIT-style license that can be * found in the LICENSE file at https://angular.io/license */ /** * @module * @description * Entry point for all public APIs of the platform-browser package. */ export * from './src/platform-webworker'; // This file only reexports content of the `src` folder. Keep it that way.
modules/@angular/platform-webworker/public_api.ts
0
https://github.com/angular/angular/commit/6a9251874b20bff5c3324c8e5a6a1757ff4a140b
[ 0.0001783136249287054, 0.00017590753850527108, 0.00017350145208183676, 0.00017590753850527108, 0.000002406086423434317 ]
{ "id": 1, "code_window": [ "\n", " it('should error if we attempt to open an already open region', () => {\n", " expect(() => regionParser(t('/* #docregion */', 'abc', '/* #docregion */', 'def'), 'test-type'))\n", " .toThrowError(\n", " 'regionParser: Tried to open a region, named \"\", that is already open (at line 2).');\n", "\n", " expect(\n", " () =>\n" ], "labels": [ "keep", "keep", "keep", "keep", "replace", "keep", "keep", "keep" ], "after_edit": [ " 'regionParser: Tried to open a region, named \"\", that is already open (at line 3).');\n" ], "file_path": "aio/transforms/examples-package/services/region-parser.spec.js", "type": "replace", "edit_start_line_idx": 98 }
var testPackage = require('../../helpers/test-package'); var Dgeni = require('dgeni'); const testRegionMatcher = { regionStartMatcher: /^\s*\/\*\s*#docregion\s+(.*)\s*\*\/\s*$/, regionEndMatcher: /^\s*\/\*\s*#enddocregion\s+(.*)\s*\*\/\s*$/, plasterMatcher: /^\s*\/\*\s*#docplaster\s+(.*)\s*\*\/\s*$/, createPlasterComment: plaster => `/* ${plaster} */` }; describe('regionParser service', () => { var dgeni, injector, regionParser; beforeEach(function() { dgeni = new Dgeni([testPackage('examples-package', true)]); injector = dgeni.configureInjector(); regionParser = injector.get('regionParser'); regionParser.regionMatchers = {'test-type': testRegionMatcher}; }); it('should return just the contents if there is no region-matcher for the file type', () => { const output = regionParser('some contents', 'unknown'); expect(output).toEqual({contents: 'some contents', regions: {}}); }); it('should return just the contents if there is a region-matcher but no regions', () => { const output = regionParser('some contents', 'test-type'); expect(output).toEqual({contents: 'some contents', regions: {}}); }); it('should remove start region annotations from the contents', () => { const output = regionParser( t('/* #docregion */', 'abc', '/* #docregion X */', 'def', '/* #docregion Y */', 'ghi'), 'test-type'); expect(output.contents).toEqual(t('abc', 'def', 'ghi')); }); it('should remove end region annotations from the contents', () => { const output = regionParser( t('/* #docregion */', 'abc', '/* #docregion X */', 'def', '/* #enddocregion X */', '/* #docregion Y */', 'ghi', '/* #enddocregion Y */', '/* #enddocregion */'), 'test-type'); expect(output.contents).toEqual(t('abc', 'def', 'ghi')); }); it('should remove doc plaster annotations from the contents', () => { const output = regionParser(t('/* #docplaster ... elided ... 
*/', 'abc', 'def', 'ghi'), 'test-type'); expect(output.contents).toEqual(t('abc', 'def', 'ghi')); }); it('should capture the rest of the contents for a region with no end region annotation', () => { const output = regionParser( t('/* #docregion */', 'abc', '/* #docregion X */', 'def', '/* #docregion Y */', 'ghi'), 'test-type'); expect(output.regions['']).toEqual(t('abc', 'def', 'ghi')); expect(output.regions['X']).toEqual(t('def', 'ghi')); expect(output.regions['Y']).toEqual(t('ghi')); }); it('should capture the contents for a region up to the end region annotation', () => { const output = regionParser( t('/* #docregion */', 'abc', '/* #enddocregion */', '/* #docregion X */', 'def', '/* #enddocregion X */', '/* #docregion Y */', 'ghi', '/* #enddocregion Y */'), 'test-type'); expect(output.regions['']).toEqual(t('abc')); expect(output.regions['X']).toEqual(t('def')); expect(output.regions['Y']).toEqual(t('ghi')); }); it('should open a region with a null name if there is no region name', () => { const output = regionParser(t('/* #docregion */', 'abc', '/* #enddocregion */'), 'test-type'); expect(output.regions['']).toEqual('abc'); }); it('should close the most recently opened region if there is no region name', () => { const output = regionParser( t('/* #docregion X*/', 'abc', '/* #docregion Y */', 'def', '/* #enddocregion */', 'ghi', '/* #enddocregion */'), 'test-type'); expect(output.regions['X']).toEqual(t('abc', 'def', 'ghi')); expect(output.regions['Y']).toEqual(t('def')); }); it('should handle overlapping regions', () => { const output = regionParser( t('/* #docregion X*/', 'abc', '/* #docregion Y */', 'def', '/* #enddocregion X */', 'ghi', '/* #enddocregion Y */'), 'test-type'); expect(output.regions['X']).toEqual(t('abc', 'def')); expect(output.regions['Y']).toEqual(t('def', 'ghi')); }); it('should error if we attempt to open an already open region', () => { expect(() => regionParser(t('/* #docregion */', 'abc', '/* #docregion */', 'def'), 'test-type')) 
.toThrowError( 'regionParser: Tried to open a region, named "", that is already open (at line 2).'); expect( () => regionParser(t('/* #docregion X */', 'abc', '/* #docregion X */', 'def'), 'test-type')) .toThrowError( 'regionParser: Tried to open a region, named "X", that is already open (at line 2).'); }); it('should error if we attempt to close an already closed region', () => { expect(() => regionParser(t('abc', '/* #enddocregion */', 'def'), 'test-type')) .toThrowError('regionParser: Tried to close a region when none are open (at line 1).'); expect( () => regionParser(t('/* #docregion */', 'abc', '/* #enddocregion X */', 'def'), 'test-type')) .toThrowError( 'regionParser: Tried to close a region, named "X", that is not open (at line 2).'); }); it('should handle whitespace in region names on single annotation', () => { const output = regionParser(t('/* #docregion A B*/', 'abc', '/* #docregion A C */', 'def'), 'test-type'); expect(output.regions['A B']).toEqual(t('abc', 'def')); expect(output.regions['A C']).toEqual(t('def')); }); it('should join multiple regions with the default plaster string (". . .")', () => { const output = regionParser( t('/* #docregion */', 'abc', '/* #enddocregion */', 'def', '/* #docregion */', 'ghi', '/* #enddocregion */'), 'test-type'); expect(output.regions['']).toEqual(t('abc', '/* . . . */', 'ghi')); }); it('should join multiple regions with the current plaster string', () => { const output = regionParser( t('/* #docregion */', 'abc', '/* #enddocregion */', 'def', '/* #docregion */', 'ghi', '/* #enddocregion */', '/* #docplaster ... elided ... */', '/* #docregion A */', 'jkl', '/* #enddocregion A */', 'mno', '/* #docregion A */', 'pqr', '/* #enddocregion A */'), 'test-type'); expect(output.regions['']).toEqual(t('abc', '/* . . . */', 'ghi')); expect(output.regions['A']).toEqual(t('jkl', '/* ... elided ... 
*/', 'pqr')); }); it('should parse multiple region names separated by commas', () => { const output = regionParser( t('/* #docregion , A, B */', 'abc', '/* #enddocregion B */', '/* #docregion C */', 'xyz', '/* #enddocregion A, C */', '123', '/* #enddocregion */'), 'test-type'); expect(output.regions['']).toEqual(t('abc', 'xyz', '123')); expect(output.regions['A']).toEqual(t('abc', 'xyz')); expect(output.regions['B']).toEqual(t('abc')); expect(output.regions['C']).toEqual(t('xyz')); }) }); function t() { return Array.prototype.join.call(arguments, '\n'); }
aio/transforms/examples-package/services/region-parser.spec.js
1
https://github.com/angular/angular/commit/6a9251874b20bff5c3324c8e5a6a1757ff4a140b
[ 0.9478139877319336, 0.09790406376123428, 0.00021626589295919985, 0.0013572818133980036, 0.26089945435523987 ]
{ "id": 1, "code_window": [ "\n", " it('should error if we attempt to open an already open region', () => {\n", " expect(() => regionParser(t('/* #docregion */', 'abc', '/* #docregion */', 'def'), 'test-type'))\n", " .toThrowError(\n", " 'regionParser: Tried to open a region, named \"\", that is already open (at line 2).');\n", "\n", " expect(\n", " () =>\n" ], "labels": [ "keep", "keep", "keep", "keep", "replace", "keep", "keep", "keep" ], "after_edit": [ " 'regionParser: Tried to open a region, named \"\", that is already open (at line 3).');\n" ], "file_path": "aio/transforms/examples-package/services/region-parser.spec.js", "type": "replace", "edit_start_line_idx": 98 }
/** * @license * Copyright Google Inc. All Rights Reserved. * * Use of this source code is governed by an MIT-style license that can be * found in the LICENSE file at https://angular.io/license */ import 'reflect-metadata'; import * as ts from 'typescript'; import {createLanguageService} from '../src/language_service'; import {Hover} from '../src/types'; import {TypeScriptServiceHost} from '../src/typescript_host'; import {toh} from './test_data'; import {MockTypescriptHost} from './test_utils'; describe('hover', () => { let documentRegistry = ts.createDocumentRegistry(); let mockHost = new MockTypescriptHost(['/app/main.ts', '/app/parsing-cases.ts'], toh); let service = ts.createLanguageService(mockHost, documentRegistry); let ngHost = new TypeScriptServiceHost(mockHost, service); let ngService = createLanguageService(ngHost); ngHost.setSite(ngService); it('should be able to find field in an interpolation', () => { hover( ` @Component({template: '{{«name»}}'}) export class MyComponent { name: string; }`, 'property name of MyComponent'); }); it('should be able to find a field in a attribute reference', () => { hover( ` @Component({template: '<input [(ngModel)]="«name»">'}) export class MyComponent { name: string; }`, 'property name of MyComponent'); }); it('should be able to find a method from a call', () => { hover( ` @Component({template: '<div (click)="«∆myClick∆()»;"></div>'}) export class MyComponent { myClick() { }}`, 'method myClick of MyComponent'); }); it('should be able to find a field reference in an *ngIf', () => { hover( ` @Component({template: '<div *ngIf="«include»"></div>'}) export class MyComponent { include = true;}`, 'property include of MyComponent'); }); it('should be able to find a reference to a component', () => { hover( ` @Component({template: '«<∆test∆-comp></test-comp>»'}) export class MyComponent { }`, 'component TestComponent'); }); it('should be able to find an event provider', () => { hover( ` @Component({template: '<test-comp 
«(∆test∆)="myHandler()"»></div>'}) export class MyComponent { myHandler() {} }`, 'event testEvent of TestComponent'); }); it('should be able to find an input provider', () => { hover( ` @Component({template: '<test-comp «[∆tcName∆]="name"»></div>'}) export class MyComponent { name = 'my name'; }`, 'property name of TestComponent'); }); function hover(code: string, hoverText: string) { addCode(code, fileName => { let tests = 0; const markers = mockHost.getReferenceMarkers(fileName); const keys = Object.keys(markers.references).concat(Object.keys(markers.definitions)); for (const referenceName of keys) { const references = (markers.references[referenceName] || []).concat(markers.definitions[referenceName] || []); for (const reference of references) { tests++; const hover = ngService.getHoverAt(fileName, reference.start); if (!hover) throw new Error(`Expected a hover at location ${reference.start}`); expect(hover.span).toEqual(reference); expect(toText(hover)).toEqual(hoverText); } } expect(tests).toBeGreaterThan(0); // If this fails the test is wrong. }); } function addCode(code: string, cb: (fileName: string, content?: string) => void) { const fileName = '/app/app.component.ts'; const originalContent = mockHost.getFileContent(fileName); const newContent = originalContent + code; mockHost.override(fileName, originalContent + code); try { cb(fileName, newContent); } finally { mockHost.override(fileName, undefined); } } function toText(hover: Hover): string { return hover.text.map(h => h.text).join(''); } });
modules/@angular/language-service/test/hover_spec.ts
0
https://github.com/angular/angular/commit/6a9251874b20bff5c3324c8e5a6a1757ff4a140b
[ 0.00017678372387308627, 0.00017373412265442312, 0.0001673879160080105, 0.0001740684820106253, 0.0000025634371922933497 ]
{ "id": 1, "code_window": [ "\n", " it('should error if we attempt to open an already open region', () => {\n", " expect(() => regionParser(t('/* #docregion */', 'abc', '/* #docregion */', 'def'), 'test-type'))\n", " .toThrowError(\n", " 'regionParser: Tried to open a region, named \"\", that is already open (at line 2).');\n", "\n", " expect(\n", " () =>\n" ], "labels": [ "keep", "keep", "keep", "keep", "replace", "keep", "keep", "keep" ], "after_edit": [ " 'regionParser: Tried to open a region, named \"\", that is already open (at line 3).');\n" ], "file_path": "aio/transforms/examples-package/services/region-parser.spec.js", "type": "replace", "edit_start_line_idx": 98 }
#!/bin/bash set -e CPUPATH=/sys/devices/system/cpu WAKE_LOCK_NAME=ngperf set_governor() { echo "Setting CPU frequency governor to \"$1\"" adb shell 'for f in '$CPUPATH'/cpu*/cpufreq/scaling_governor ; do echo '$1' > $f; done' } wake_lock() { echo "Setting wake lock $WAKE_LOCK_NAME" adb shell "echo $WAKE_LOCK_NAME > /sys/power/wake_lock" } wake_unlock() { echo "Removing wake lock $WAKE_LOCK_NAME" adb shell "echo $WAKE_LOCK_NAME > /sys/power/wake_unlock" } case "$1" in (performance) set_governor "performance" ;; (powersave) set_governor "powersave" ;; (ondemand) set_governor "ondemand" ;; (wakelock) wake_lock ;; (wakeunlock) wake_unlock ;; (*) echo "Usage: $0 performance|powersave|ondemand|wakelock|wakeunlock" exit 1 ;; esac
scripts/ci/android_cpu.sh
0
https://github.com/angular/angular/commit/6a9251874b20bff5c3324c8e5a6a1757ff4a140b
[ 0.00017836822371464223, 0.00017473625484853983, 0.0001718949351925403, 0.00017316560843028128, 0.000002862474730136455 ]
{ "id": 1, "code_window": [ "\n", " it('should error if we attempt to open an already open region', () => {\n", " expect(() => regionParser(t('/* #docregion */', 'abc', '/* #docregion */', 'def'), 'test-type'))\n", " .toThrowError(\n", " 'regionParser: Tried to open a region, named \"\", that is already open (at line 2).');\n", "\n", " expect(\n", " () =>\n" ], "labels": [ "keep", "keep", "keep", "keep", "replace", "keep", "keep", "keep" ], "after_edit": [ " 'regionParser: Tried to open a region, named \"\", that is already open (at line 3).');\n" ], "file_path": "aio/transforms/examples-package/services/region-parser.spec.js", "type": "replace", "edit_start_line_idx": 98 }
<!-- #docregion --> <div class={{class}}> {{type}}<br>{{path}} </div>
aio/content/examples/cb-component-relative-paths/ts/app/some.component.html
0
https://github.com/angular/angular/commit/6a9251874b20bff5c3324c8e5a6a1757ff4a140b
[ 0.00019844324560835958, 0.00019844324560835958, 0.00019844324560835958, 0.00019844324560835958, 0 ]
{ "id": 2, "code_window": [ " expect(\n", " () =>\n", " regionParser(t('/* #docregion X */', 'abc', '/* #docregion X */', 'def'), 'test-type'))\n", " .toThrowError(\n", " 'regionParser: Tried to open a region, named \"X\", that is already open (at line 2).');\n", " });\n", "\n", " it('should error if we attempt to close an already closed region', () => {\n" ], "labels": [ "keep", "keep", "keep", "keep", "replace", "keep", "keep", "keep" ], "after_edit": [ " 'regionParser: Tried to open a region, named \"X\", that is already open (at line 3).');\n" ], "file_path": "aio/transforms/examples-package/services/region-parser.spec.js", "type": "replace", "edit_start_line_idx": 104 }
var testPackage = require('../../helpers/test-package'); var Dgeni = require('dgeni'); const testRegionMatcher = { regionStartMatcher: /^\s*\/\*\s*#docregion\s+(.*)\s*\*\/\s*$/, regionEndMatcher: /^\s*\/\*\s*#enddocregion\s+(.*)\s*\*\/\s*$/, plasterMatcher: /^\s*\/\*\s*#docplaster\s+(.*)\s*\*\/\s*$/, createPlasterComment: plaster => `/* ${plaster} */` }; describe('regionParser service', () => { var dgeni, injector, regionParser; beforeEach(function() { dgeni = new Dgeni([testPackage('examples-package', true)]); injector = dgeni.configureInjector(); regionParser = injector.get('regionParser'); regionParser.regionMatchers = {'test-type': testRegionMatcher}; }); it('should return just the contents if there is no region-matcher for the file type', () => { const output = regionParser('some contents', 'unknown'); expect(output).toEqual({contents: 'some contents', regions: {}}); }); it('should return just the contents if there is a region-matcher but no regions', () => { const output = regionParser('some contents', 'test-type'); expect(output).toEqual({contents: 'some contents', regions: {}}); }); it('should remove start region annotations from the contents', () => { const output = regionParser( t('/* #docregion */', 'abc', '/* #docregion X */', 'def', '/* #docregion Y */', 'ghi'), 'test-type'); expect(output.contents).toEqual(t('abc', 'def', 'ghi')); }); it('should remove end region annotations from the contents', () => { const output = regionParser( t('/* #docregion */', 'abc', '/* #docregion X */', 'def', '/* #enddocregion X */', '/* #docregion Y */', 'ghi', '/* #enddocregion Y */', '/* #enddocregion */'), 'test-type'); expect(output.contents).toEqual(t('abc', 'def', 'ghi')); }); it('should remove doc plaster annotations from the contents', () => { const output = regionParser(t('/* #docplaster ... elided ... 
*/', 'abc', 'def', 'ghi'), 'test-type'); expect(output.contents).toEqual(t('abc', 'def', 'ghi')); }); it('should capture the rest of the contents for a region with no end region annotation', () => { const output = regionParser( t('/* #docregion */', 'abc', '/* #docregion X */', 'def', '/* #docregion Y */', 'ghi'), 'test-type'); expect(output.regions['']).toEqual(t('abc', 'def', 'ghi')); expect(output.regions['X']).toEqual(t('def', 'ghi')); expect(output.regions['Y']).toEqual(t('ghi')); }); it('should capture the contents for a region up to the end region annotation', () => { const output = regionParser( t('/* #docregion */', 'abc', '/* #enddocregion */', '/* #docregion X */', 'def', '/* #enddocregion X */', '/* #docregion Y */', 'ghi', '/* #enddocregion Y */'), 'test-type'); expect(output.regions['']).toEqual(t('abc')); expect(output.regions['X']).toEqual(t('def')); expect(output.regions['Y']).toEqual(t('ghi')); }); it('should open a region with a null name if there is no region name', () => { const output = regionParser(t('/* #docregion */', 'abc', '/* #enddocregion */'), 'test-type'); expect(output.regions['']).toEqual('abc'); }); it('should close the most recently opened region if there is no region name', () => { const output = regionParser( t('/* #docregion X*/', 'abc', '/* #docregion Y */', 'def', '/* #enddocregion */', 'ghi', '/* #enddocregion */'), 'test-type'); expect(output.regions['X']).toEqual(t('abc', 'def', 'ghi')); expect(output.regions['Y']).toEqual(t('def')); }); it('should handle overlapping regions', () => { const output = regionParser( t('/* #docregion X*/', 'abc', '/* #docregion Y */', 'def', '/* #enddocregion X */', 'ghi', '/* #enddocregion Y */'), 'test-type'); expect(output.regions['X']).toEqual(t('abc', 'def')); expect(output.regions['Y']).toEqual(t('def', 'ghi')); }); it('should error if we attempt to open an already open region', () => { expect(() => regionParser(t('/* #docregion */', 'abc', '/* #docregion */', 'def'), 'test-type')) 
.toThrowError( 'regionParser: Tried to open a region, named "", that is already open (at line 2).'); expect( () => regionParser(t('/* #docregion X */', 'abc', '/* #docregion X */', 'def'), 'test-type')) .toThrowError( 'regionParser: Tried to open a region, named "X", that is already open (at line 2).'); }); it('should error if we attempt to close an already closed region', () => { expect(() => regionParser(t('abc', '/* #enddocregion */', 'def'), 'test-type')) .toThrowError('regionParser: Tried to close a region when none are open (at line 1).'); expect( () => regionParser(t('/* #docregion */', 'abc', '/* #enddocregion X */', 'def'), 'test-type')) .toThrowError( 'regionParser: Tried to close a region, named "X", that is not open (at line 2).'); }); it('should handle whitespace in region names on single annotation', () => { const output = regionParser(t('/* #docregion A B*/', 'abc', '/* #docregion A C */', 'def'), 'test-type'); expect(output.regions['A B']).toEqual(t('abc', 'def')); expect(output.regions['A C']).toEqual(t('def')); }); it('should join multiple regions with the default plaster string (". . .")', () => { const output = regionParser( t('/* #docregion */', 'abc', '/* #enddocregion */', 'def', '/* #docregion */', 'ghi', '/* #enddocregion */'), 'test-type'); expect(output.regions['']).toEqual(t('abc', '/* . . . */', 'ghi')); }); it('should join multiple regions with the current plaster string', () => { const output = regionParser( t('/* #docregion */', 'abc', '/* #enddocregion */', 'def', '/* #docregion */', 'ghi', '/* #enddocregion */', '/* #docplaster ... elided ... */', '/* #docregion A */', 'jkl', '/* #enddocregion A */', 'mno', '/* #docregion A */', 'pqr', '/* #enddocregion A */'), 'test-type'); expect(output.regions['']).toEqual(t('abc', '/* . . . */', 'ghi')); expect(output.regions['A']).toEqual(t('jkl', '/* ... elided ... 
*/', 'pqr')); }); it('should parse multiple region names separated by commas', () => { const output = regionParser( t('/* #docregion , A, B */', 'abc', '/* #enddocregion B */', '/* #docregion C */', 'xyz', '/* #enddocregion A, C */', '123', '/* #enddocregion */'), 'test-type'); expect(output.regions['']).toEqual(t('abc', 'xyz', '123')); expect(output.regions['A']).toEqual(t('abc', 'xyz')); expect(output.regions['B']).toEqual(t('abc')); expect(output.regions['C']).toEqual(t('xyz')); }) }); function t() { return Array.prototype.join.call(arguments, '\n'); }
aio/transforms/examples-package/services/region-parser.spec.js
1
https://github.com/angular/angular/commit/6a9251874b20bff5c3324c8e5a6a1757ff4a140b
[ 0.9954962730407715, 0.0894375592470169, 0.00023318121384363621, 0.0014186720363795757, 0.25216856598854065 ]
{ "id": 2, "code_window": [ " expect(\n", " () =>\n", " regionParser(t('/* #docregion X */', 'abc', '/* #docregion X */', 'def'), 'test-type'))\n", " .toThrowError(\n", " 'regionParser: Tried to open a region, named \"X\", that is already open (at line 2).');\n", " });\n", "\n", " it('should error if we attempt to close an already closed region', () => {\n" ], "labels": [ "keep", "keep", "keep", "keep", "replace", "keep", "keep", "keep" ], "after_edit": [ " 'regionParser: Tried to open a region, named \"X\", that is already open (at line 3).');\n" ], "file_path": "aio/transforms/examples-package/services/region-parser.spec.js", "type": "replace", "edit_start_line_idx": 104 }
/** * @license * Copyright Google Inc. All Rights Reserved. * * Use of this source code is governed by an MIT-style license that can be * found in the LICENSE file at https://angular.io/license */ import {AfterContentChecked, AfterContentInit, AfterViewChecked, AfterViewInit, Component, DoCheck, Input, OnChanges, OnDestroy, OnInit, SimpleChanges, Type} from '@angular/core'; import {TestBed} from '@angular/core/testing'; export function main() { describe('lifecycle hooks examples', () => { it('should work with ngOnInit', () => { // #docregion OnInit @Component({selector: 'my-cmp', template: `...`}) class MyComponent implements OnInit { ngOnInit() { // ... } } // #enddocregion expect(createAndLogComponent(MyComponent)).toEqual([['ngOnInit', []]]); }); it('should work with ngDoCheck', () => { // #docregion DoCheck @Component({selector: 'my-cmp', template: `...`}) class MyComponent implements DoCheck { ngDoCheck() { // ... } } // #enddocregion expect(createAndLogComponent(MyComponent)).toEqual([['ngDoCheck', []]]); }); it('should work with ngAfterContentChecked', () => { // #docregion AfterContentChecked @Component({selector: 'my-cmp', template: `...`}) class MyComponent implements AfterContentChecked { ngAfterContentChecked() { // ... } } // #enddocregion expect(createAndLogComponent(MyComponent)).toEqual([['ngAfterContentChecked', []]]); }); it('should work with ngAfterContentInit', () => { // #docregion AfterContentInit @Component({selector: 'my-cmp', template: `...`}) class MyComponent implements AfterContentInit { ngAfterContentInit() { // ... } } // #enddocregion expect(createAndLogComponent(MyComponent)).toEqual([['ngAfterContentInit', []]]); }); it('should work with ngAfterViewChecked', () => { // #docregion AfterViewChecked @Component({selector: 'my-cmp', template: `...`}) class MyComponent implements AfterViewChecked { ngAfterViewChecked() { // ... 
} } // #enddocregion expect(createAndLogComponent(MyComponent)).toEqual([['ngAfterViewChecked', []]]); }); it('should work with ngAfterViewInit', () => { // #docregion AfterViewInit @Component({selector: 'my-cmp', template: `...`}) class MyComponent implements AfterViewInit { ngAfterViewInit() { // ... } } // #enddocregion expect(createAndLogComponent(MyComponent)).toEqual([['ngAfterViewInit', []]]); }); it('should work with ngOnDestroy', () => { // #docregion OnDestroy @Component({selector: 'my-cmp', template: `...`}) class MyComponent implements OnDestroy { ngOnDestroy() { // ... } } // #enddocregion expect(createAndLogComponent(MyComponent)).toEqual([['ngOnDestroy', []]]); }); it('should work with ngOnChanges', () => { // #docregion OnChanges @Component({selector: 'my-cmp', template: `...`}) class MyComponent implements OnChanges { @Input() prop: number; ngOnChanges(changes: SimpleChanges) { // changes.prop contains the old and the new value... } } // #enddocregion const log = createAndLogComponent(MyComponent, ['prop']); expect(log.length).toBe(1); expect(log[0][0]).toBe('ngOnChanges'); const changes: SimpleChanges = log[0][1][0]; expect(changes['prop'].currentValue).toBe(true); }); }); function createAndLogComponent(clazz: Type<any>, inputs: string[] = []): any[] { const log: any[] = []; createLoggingSpiesFromProto(clazz, log); const inputBindings = inputs.map(input => `[${input}] = true`).join(' '); @Component({template: `<my-cmp ${inputBindings}></my-cmp>`}) class ParentComponent { } const fixture = TestBed.configureTestingModule({declarations: [ParentComponent, clazz]}) .createComponent(ParentComponent); fixture.detectChanges(); fixture.destroy(); return log; } function createLoggingSpiesFromProto(clazz: Type<any>, log: any[]) { const proto = clazz.prototype; Object.keys(proto).forEach((method) => { proto[method] = (...args: any[]) => { log.push([method, args]); }; }); } }
modules/@angular/examples/core/ts/metadata/lifecycle_hooks_spec.ts
0
https://github.com/angular/angular/commit/6a9251874b20bff5c3324c8e5a6a1757ff4a140b
[ 0.0004136041970923543, 0.0001890918065328151, 0.00016616373613942415, 0.00017338956240564585, 0.00005874706039321609 ]
{ "id": 2, "code_window": [ " expect(\n", " () =>\n", " regionParser(t('/* #docregion X */', 'abc', '/* #docregion X */', 'def'), 'test-type'))\n", " .toThrowError(\n", " 'regionParser: Tried to open a region, named \"X\", that is already open (at line 2).');\n", " });\n", "\n", " it('should error if we attempt to close an already closed region', () => {\n" ], "labels": [ "keep", "keep", "keep", "keep", "replace", "keep", "keep", "keep" ], "after_edit": [ " 'regionParser: Tried to open a region, named \"X\", that is already open (at line 3).');\n" ], "file_path": "aio/transforms/examples-package/services/region-parser.spec.js", "type": "replace", "edit_start_line_idx": 104 }
/** * @license * Copyright Google Inc. All Rights Reserved. * * Use of this source code is governed by an MIT-style license that can be * found in the LICENSE file at https://angular.io/license */ import {Inject, Injectable, Optional} from '@angular/core'; import {isBlank} from '../facade/lang'; import {Location} from './location'; import {APP_BASE_HREF, LocationStrategy} from './location_strategy'; import {LocationChangeListener, PlatformLocation} from './platform_location'; /** * @whatItDoes Use URL for storing application location data. * @description * `PathLocationStrategy` is a {@link LocationStrategy} used to configure the * {@link Location} service to represent its state in the * [path](https://en.wikipedia.org/wiki/Uniform_Resource_Locator#Syntax) of the * browser's URL. * * If you're using `PathLocationStrategy`, you must provide a {@link APP_BASE_HREF} * or add a base element to the document. This URL prefix that will be preserved * when generating and recognizing URLs. * * For instance, if you provide an `APP_BASE_HREF` of `'/my/app'` and call * `location.go('/foo')`, the browser's URL will become * `example.com/my/app/foo`. * * Similarly, if you add `<base href='/my/app'/>` to the document and call * `location.go('/foo')`, the browser's URL will become * `example.com/my/app/foo`. * * ### Example * * {@example common/location/ts/path_location_component.ts region='LocationComponent'} * * @stable */ @Injectable() export class PathLocationStrategy extends LocationStrategy { private _baseHref: string; constructor( private _platformLocation: PlatformLocation, @Optional() @Inject(APP_BASE_HREF) href?: string) { super(); if (isBlank(href)) { href = this._platformLocation.getBaseHrefFromDOM(); } if (isBlank(href)) { throw new Error( `No base href set. 
Please provide a value for the APP_BASE_HREF token or add a base element to the document.`); } this._baseHref = href; } onPopState(fn: LocationChangeListener): void { this._platformLocation.onPopState(fn); this._platformLocation.onHashChange(fn); } getBaseHref(): string { return this._baseHref; } prepareExternalUrl(internal: string): string { return Location.joinWithSlash(this._baseHref, internal); } path(includeHash: boolean = false): string { const pathname = this._platformLocation.pathname + Location.normalizeQueryParams(this._platformLocation.search); const hash = this._platformLocation.hash; return hash && includeHash ? `${pathname}${hash}` : pathname; } pushState(state: any, title: string, url: string, queryParams: string) { const externalUrl = this.prepareExternalUrl(url + Location.normalizeQueryParams(queryParams)); this._platformLocation.pushState(state, title, externalUrl); } replaceState(state: any, title: string, url: string, queryParams: string) { const externalUrl = this.prepareExternalUrl(url + Location.normalizeQueryParams(queryParams)); this._platformLocation.replaceState(state, title, externalUrl); } forward(): void { this._platformLocation.forward(); } back(): void { this._platformLocation.back(); } }
modules/@angular/common/src/location/path_location_strategy.ts
0
https://github.com/angular/angular/commit/6a9251874b20bff5c3324c8e5a6a1757ff4a140b
[ 0.00017636803386267275, 0.00017054476484190673, 0.00016218384553212672, 0.00017034818301908672, 0.000003865020516968798 ]
{ "id": 2, "code_window": [ " expect(\n", " () =>\n", " regionParser(t('/* #docregion X */', 'abc', '/* #docregion X */', 'def'), 'test-type'))\n", " .toThrowError(\n", " 'regionParser: Tried to open a region, named \"X\", that is already open (at line 2).');\n", " });\n", "\n", " it('should error if we attempt to close an already closed region', () => {\n" ], "labels": [ "keep", "keep", "keep", "keep", "replace", "keep", "keep", "keep" ], "after_edit": [ " 'regionParser: Tried to open a region, named \"X\", that is already open (at line 3).');\n" ], "file_path": "aio/transforms/examples-package/services/region-parser.spec.js", "type": "replace", "edit_start_line_idx": 104 }
export var createElement: Function; export var render: Function; export var createClass: Function;
modules/benchmarks_external/src/tree/react/react.min.d.ts
0
https://github.com/angular/angular/commit/6a9251874b20bff5c3324c8e5a6a1757ff4a140b
[ 0.00017741664487402886, 0.00017741664487402886, 0.00017741664487402886, 0.00017741664487402886, 0 ]
{ "id": 3, "code_window": [ " });\n", "\n", " it('should error if we attempt to close an already closed region', () => {\n", " expect(() => regionParser(t('abc', '/* #enddocregion */', 'def'), 'test-type'))\n", " .toThrowError('regionParser: Tried to close a region when none are open (at line 1).');\n", "\n", " expect(\n", " () =>\n" ], "labels": [ "keep", "keep", "keep", "keep", "replace", "keep", "keep", "keep" ], "after_edit": [ " .toThrowError('regionParser: Tried to close a region when none are open (at line 2).');\n" ], "file_path": "aio/transforms/examples-package/services/region-parser.spec.js", "type": "replace", "edit_start_line_idx": 109 }
var testPackage = require('../../helpers/test-package'); var Dgeni = require('dgeni'); const testRegionMatcher = { regionStartMatcher: /^\s*\/\*\s*#docregion\s+(.*)\s*\*\/\s*$/, regionEndMatcher: /^\s*\/\*\s*#enddocregion\s+(.*)\s*\*\/\s*$/, plasterMatcher: /^\s*\/\*\s*#docplaster\s+(.*)\s*\*\/\s*$/, createPlasterComment: plaster => `/* ${plaster} */` }; describe('regionParser service', () => { var dgeni, injector, regionParser; beforeEach(function() { dgeni = new Dgeni([testPackage('examples-package', true)]); injector = dgeni.configureInjector(); regionParser = injector.get('regionParser'); regionParser.regionMatchers = {'test-type': testRegionMatcher}; }); it('should return just the contents if there is no region-matcher for the file type', () => { const output = regionParser('some contents', 'unknown'); expect(output).toEqual({contents: 'some contents', regions: {}}); }); it('should return just the contents if there is a region-matcher but no regions', () => { const output = regionParser('some contents', 'test-type'); expect(output).toEqual({contents: 'some contents', regions: {}}); }); it('should remove start region annotations from the contents', () => { const output = regionParser( t('/* #docregion */', 'abc', '/* #docregion X */', 'def', '/* #docregion Y */', 'ghi'), 'test-type'); expect(output.contents).toEqual(t('abc', 'def', 'ghi')); }); it('should remove end region annotations from the contents', () => { const output = regionParser( t('/* #docregion */', 'abc', '/* #docregion X */', 'def', '/* #enddocregion X */', '/* #docregion Y */', 'ghi', '/* #enddocregion Y */', '/* #enddocregion */'), 'test-type'); expect(output.contents).toEqual(t('abc', 'def', 'ghi')); }); it('should remove doc plaster annotations from the contents', () => { const output = regionParser(t('/* #docplaster ... elided ... 
*/', 'abc', 'def', 'ghi'), 'test-type'); expect(output.contents).toEqual(t('abc', 'def', 'ghi')); }); it('should capture the rest of the contents for a region with no end region annotation', () => { const output = regionParser( t('/* #docregion */', 'abc', '/* #docregion X */', 'def', '/* #docregion Y */', 'ghi'), 'test-type'); expect(output.regions['']).toEqual(t('abc', 'def', 'ghi')); expect(output.regions['X']).toEqual(t('def', 'ghi')); expect(output.regions['Y']).toEqual(t('ghi')); }); it('should capture the contents for a region up to the end region annotation', () => { const output = regionParser( t('/* #docregion */', 'abc', '/* #enddocregion */', '/* #docregion X */', 'def', '/* #enddocregion X */', '/* #docregion Y */', 'ghi', '/* #enddocregion Y */'), 'test-type'); expect(output.regions['']).toEqual(t('abc')); expect(output.regions['X']).toEqual(t('def')); expect(output.regions['Y']).toEqual(t('ghi')); }); it('should open a region with a null name if there is no region name', () => { const output = regionParser(t('/* #docregion */', 'abc', '/* #enddocregion */'), 'test-type'); expect(output.regions['']).toEqual('abc'); }); it('should close the most recently opened region if there is no region name', () => { const output = regionParser( t('/* #docregion X*/', 'abc', '/* #docregion Y */', 'def', '/* #enddocregion */', 'ghi', '/* #enddocregion */'), 'test-type'); expect(output.regions['X']).toEqual(t('abc', 'def', 'ghi')); expect(output.regions['Y']).toEqual(t('def')); }); it('should handle overlapping regions', () => { const output = regionParser( t('/* #docregion X*/', 'abc', '/* #docregion Y */', 'def', '/* #enddocregion X */', 'ghi', '/* #enddocregion Y */'), 'test-type'); expect(output.regions['X']).toEqual(t('abc', 'def')); expect(output.regions['Y']).toEqual(t('def', 'ghi')); }); it('should error if we attempt to open an already open region', () => { expect(() => regionParser(t('/* #docregion */', 'abc', '/* #docregion */', 'def'), 'test-type')) 
.toThrowError( 'regionParser: Tried to open a region, named "", that is already open (at line 2).'); expect( () => regionParser(t('/* #docregion X */', 'abc', '/* #docregion X */', 'def'), 'test-type')) .toThrowError( 'regionParser: Tried to open a region, named "X", that is already open (at line 2).'); }); it('should error if we attempt to close an already closed region', () => { expect(() => regionParser(t('abc', '/* #enddocregion */', 'def'), 'test-type')) .toThrowError('regionParser: Tried to close a region when none are open (at line 1).'); expect( () => regionParser(t('/* #docregion */', 'abc', '/* #enddocregion X */', 'def'), 'test-type')) .toThrowError( 'regionParser: Tried to close a region, named "X", that is not open (at line 2).'); }); it('should handle whitespace in region names on single annotation', () => { const output = regionParser(t('/* #docregion A B*/', 'abc', '/* #docregion A C */', 'def'), 'test-type'); expect(output.regions['A B']).toEqual(t('abc', 'def')); expect(output.regions['A C']).toEqual(t('def')); }); it('should join multiple regions with the default plaster string (". . .")', () => { const output = regionParser( t('/* #docregion */', 'abc', '/* #enddocregion */', 'def', '/* #docregion */', 'ghi', '/* #enddocregion */'), 'test-type'); expect(output.regions['']).toEqual(t('abc', '/* . . . */', 'ghi')); }); it('should join multiple regions with the current plaster string', () => { const output = regionParser( t('/* #docregion */', 'abc', '/* #enddocregion */', 'def', '/* #docregion */', 'ghi', '/* #enddocregion */', '/* #docplaster ... elided ... */', '/* #docregion A */', 'jkl', '/* #enddocregion A */', 'mno', '/* #docregion A */', 'pqr', '/* #enddocregion A */'), 'test-type'); expect(output.regions['']).toEqual(t('abc', '/* . . . */', 'ghi')); expect(output.regions['A']).toEqual(t('jkl', '/* ... elided ... 
*/', 'pqr')); }); it('should parse multiple region names separated by commas', () => { const output = regionParser( t('/* #docregion , A, B */', 'abc', '/* #enddocregion B */', '/* #docregion C */', 'xyz', '/* #enddocregion A, C */', '123', '/* #enddocregion */'), 'test-type'); expect(output.regions['']).toEqual(t('abc', 'xyz', '123')); expect(output.regions['A']).toEqual(t('abc', 'xyz')); expect(output.regions['B']).toEqual(t('abc')); expect(output.regions['C']).toEqual(t('xyz')); }) }); function t() { return Array.prototype.join.call(arguments, '\n'); }
aio/transforms/examples-package/services/region-parser.spec.js
1
https://github.com/angular/angular/commit/6a9251874b20bff5c3324c8e5a6a1757ff4a140b
[ 0.9880771636962891, 0.0657745972275734, 0.00026730180252343416, 0.0014323925133794546, 0.23823651671409607 ]
{ "id": 3, "code_window": [ " });\n", "\n", " it('should error if we attempt to close an already closed region', () => {\n", " expect(() => regionParser(t('abc', '/* #enddocregion */', 'def'), 'test-type'))\n", " .toThrowError('regionParser: Tried to close a region when none are open (at line 1).');\n", "\n", " expect(\n", " () =>\n" ], "labels": [ "keep", "keep", "keep", "keep", "replace", "keep", "keep", "keep" ], "after_edit": [ " .toThrowError('regionParser: Tried to close a region when none are open (at line 2).');\n" ], "file_path": "aio/transforms/examples-package/services/region-parser.spec.js", "type": "replace", "edit_start_line_idx": 109 }
<header class="hero background-sky"><h1 class="hero-title ">Events</h1> <div class="clear"></div> <h2 class="hero-subtitle">Where we'll be presenting</h2> </header> <article class="l-content "> <table class="is-full-width"> <thead> <tr> <th>Event</th> <th>Location</th> <th>Date</th> </tr> </thead> <tbody><!-- Devoxx Belgium --> <tr> <th><a target="_blank" href="https://www.devoxx.be/">Devoxx Belgium</a></th> <td>Antwerp, Belgium</td> <td>Nov. 7-11, 2016</td> </tr><!-- DEVIntersection --> <tr> <th><a target="_blank" href="https://www.devintersectioneurope.com/">DEVintersection Europe</a></th> <td>Amsterdam, Netherlands</td> <td>Nov 14-16, 2016</td> </tr><!-- dotJS --> <tr> <th><a target="_blank" href="http://www.dotjs.io/">dotJS</a></th> <td>Paris, France</td> <td>Dec. 5, 2016</td> </tr><!-- NG-BE --> <tr> <th><a target="_blank" href="https://ng-be.org/">NG-BE</a></th> <td>Ghent, Belgium</td> <td>Dec. 9, 2016</td> </tr> </tbody> </table> </article>
aio/src/content/events.html
0
https://github.com/angular/angular/commit/6a9251874b20bff5c3324c8e5a6a1757ff4a140b
[ 0.00017905457934830338, 0.0001766989880707115, 0.00017392630979884416, 0.000176907517015934, 0.0000018380983419774566 ]
{ "id": 3, "code_window": [ " });\n", "\n", " it('should error if we attempt to close an already closed region', () => {\n", " expect(() => regionParser(t('abc', '/* #enddocregion */', 'def'), 'test-type'))\n", " .toThrowError('regionParser: Tried to close a region when none are open (at line 1).');\n", "\n", " expect(\n", " () =>\n" ], "labels": [ "keep", "keep", "keep", "keep", "replace", "keep", "keep", "keep" ], "after_edit": [ " .toThrowError('regionParser: Tried to close a region when none are open (at line 2).');\n" ], "file_path": "aio/transforms/examples-package/services/region-parser.spec.js", "type": "replace", "edit_start_line_idx": 109 }
/** * @license * Copyright Google Inc. All Rights Reserved. * * Use of this source code is governed by an MIT-style license that can be * found in the LICENSE file at https://angular.io/license */ import {ActivatedRoute} from './router_state'; import {PRIMARY_OUTLET, Params} from './shared'; import {UrlSegment, UrlSegmentGroup, UrlTree} from './url_tree'; import {forEach, last, shallowEqual} from './utils/collection'; export function createUrlTree( route: ActivatedRoute, urlTree: UrlTree, commands: any[], queryParams: Params, fragment: string): UrlTree { if (commands.length === 0) { return tree(urlTree.root, urlTree.root, urlTree, queryParams, fragment); } const nav = computeNavigation(commands); if (nav.toRoot()) { return tree(urlTree.root, new UrlSegmentGroup([], {}), urlTree, queryParams, fragment); } const startingPosition = findStartingPosition(nav, urlTree, route); const segmentGroup = startingPosition.processChildren ? updateSegmentGroupChildren( startingPosition.segmentGroup, startingPosition.index, nav.commands) : updateSegmentGroup(startingPosition.segmentGroup, startingPosition.index, nav.commands); return tree(startingPosition.segmentGroup, segmentGroup, urlTree, queryParams, fragment); } function isMatrixParams(command: any): boolean { return typeof command === 'object' && command != null && !command.outlets && !command.segmentPath; } function tree( oldSegmentGroup: UrlSegmentGroup, newSegmentGroup: UrlSegmentGroup, urlTree: UrlTree, queryParams: Params, fragment: string): UrlTree { if (urlTree.root === oldSegmentGroup) { return new UrlTree(newSegmentGroup, stringify(queryParams), fragment); } return new UrlTree( replaceSegment(urlTree.root, oldSegmentGroup, newSegmentGroup), stringify(queryParams), fragment); } function replaceSegment( current: UrlSegmentGroup, oldSegment: UrlSegmentGroup, newSegment: UrlSegmentGroup): UrlSegmentGroup { const children: {[key: string]: UrlSegmentGroup} = {}; forEach(current.children, (c: UrlSegmentGroup, outletName: 
string) => { if (c === oldSegment) { children[outletName] = newSegment; } else { children[outletName] = replaceSegment(c, oldSegment, newSegment); } }); return new UrlSegmentGroup(current.segments, children); } class Navigation { constructor( public isAbsolute: boolean, public numberOfDoubleDots: number, public commands: any[]) { if (isAbsolute && commands.length > 0 && isMatrixParams(commands[0])) { throw new Error('Root segment cannot have matrix parameters'); } const cmdWithOutlet = commands.find(c => typeof c === 'object' && c != null && c.outlets); if (cmdWithOutlet && cmdWithOutlet !== last(commands)) { throw new Error('{outlets:{}} has to be the last command'); } } public toRoot(): boolean { return this.isAbsolute && this.commands.length === 1 && this.commands[0] == '/'; } } /** Transforms commands to a normalized `Navigation` */ function computeNavigation(commands: any[]): Navigation { if ((typeof commands[0] === 'string') && commands.length === 1 && commands[0] === '/') { return new Navigation(true, 0, commands); } let numberOfDoubleDots = 0; let isAbsolute = false; const res: any[] = commands.reduce((res, cmd, cmdIdx) => { if (typeof cmd === 'object' && cmd != null) { if (cmd.outlets) { const outlets: {[k: string]: any} = {}; forEach(cmd.outlets, (commands: any, name: string) => { outlets[name] = typeof commands === 'string' ? 
commands.split('/') : commands; }); return [...res, {outlets}]; } if (cmd.segmentPath) { return [...res, cmd.segmentPath]; } } if (!(typeof cmd === 'string')) { return [...res, cmd]; } if (cmdIdx === 0) { cmd.split('/').forEach((urlPart, partIndex) => { if (partIndex == 0 && urlPart === '.') { // skip './a' } else if (partIndex == 0 && urlPart === '') { // '/a' isAbsolute = true; } else if (urlPart === '..') { // '../a' numberOfDoubleDots++; } else if (urlPart != '') { res.push(urlPart); } }); return res; } return [...res, cmd]; }, []); return new Navigation(isAbsolute, numberOfDoubleDots, res); } class Position { constructor( public segmentGroup: UrlSegmentGroup, public processChildren: boolean, public index: number) { } } function findStartingPosition(nav: Navigation, tree: UrlTree, route: ActivatedRoute): Position { if (nav.isAbsolute) { return new Position(tree.root, true, 0); } if (route.snapshot._lastPathIndex === -1) { return new Position(route.snapshot._urlSegment, true, 0); } const modifier = isMatrixParams(nav.commands[0]) ? 
0 : 1; const index = route.snapshot._lastPathIndex + modifier; return createPositionApplyingDoubleDots( route.snapshot._urlSegment, index, nav.numberOfDoubleDots); } function createPositionApplyingDoubleDots( group: UrlSegmentGroup, index: number, numberOfDoubleDots: number): Position { let g = group; let ci = index; let dd = numberOfDoubleDots; while (dd > ci) { dd -= ci; g = g.parent; if (!g) { throw new Error('Invalid number of \'../\''); } ci = g.segments.length; } return new Position(g, false, ci - dd); } function getPath(command: any): any { if (typeof command === 'object' && command != null && command.outlets) { return command.outlets[PRIMARY_OUTLET]; } return `${command}`; } function getOutlets(commands: any[]): {[k: string]: any[]} { if (!(typeof commands[0] === 'object')) return {[PRIMARY_OUTLET]: commands}; if (commands[0].outlets === undefined) return {[PRIMARY_OUTLET]: commands}; return commands[0].outlets; } function updateSegmentGroup( segmentGroup: UrlSegmentGroup, startIndex: number, commands: any[]): UrlSegmentGroup { if (!segmentGroup) { segmentGroup = new UrlSegmentGroup([], {}); } if (segmentGroup.segments.length === 0 && segmentGroup.hasChildren()) { return updateSegmentGroupChildren(segmentGroup, startIndex, commands); } const m = prefixedWith(segmentGroup, startIndex, commands); const slicedCommands = commands.slice(m.commandIndex); if (m.match && m.pathIndex < segmentGroup.segments.length) { const g = new UrlSegmentGroup(segmentGroup.segments.slice(0, m.pathIndex), {}); g.children[PRIMARY_OUTLET] = new UrlSegmentGroup(segmentGroup.segments.slice(m.pathIndex), segmentGroup.children); return updateSegmentGroupChildren(g, 0, slicedCommands); } else if (m.match && slicedCommands.length === 0) { return new UrlSegmentGroup(segmentGroup.segments, {}); } else if (m.match && !segmentGroup.hasChildren()) { return createNewSegmentGroup(segmentGroup, startIndex, commands); } else if (m.match) { return updateSegmentGroupChildren(segmentGroup, 0, 
slicedCommands); } else { return createNewSegmentGroup(segmentGroup, startIndex, commands); } } function updateSegmentGroupChildren( segmentGroup: UrlSegmentGroup, startIndex: number, commands: any[]): UrlSegmentGroup { if (commands.length === 0) { return new UrlSegmentGroup(segmentGroup.segments, {}); } else { const outlets = getOutlets(commands); const children: {[key: string]: UrlSegmentGroup} = {}; forEach(outlets, (commands: any, outlet: string) => { if (commands !== null) { children[outlet] = updateSegmentGroup(segmentGroup.children[outlet], startIndex, commands); } }); forEach(segmentGroup.children, (child: UrlSegmentGroup, childOutlet: string) => { if (outlets[childOutlet] === undefined) { children[childOutlet] = child; } }); return new UrlSegmentGroup(segmentGroup.segments, children); } } function prefixedWith(segmentGroup: UrlSegmentGroup, startIndex: number, commands: any[]) { let currentCommandIndex = 0; let currentPathIndex = startIndex; const noMatch = {match: false, pathIndex: 0, commandIndex: 0}; while (currentPathIndex < segmentGroup.segments.length) { if (currentCommandIndex >= commands.length) return noMatch; const path = segmentGroup.segments[currentPathIndex]; const curr = getPath(commands[currentCommandIndex]); const next = currentCommandIndex < commands.length - 1 ? 
commands[currentCommandIndex + 1] : null; if (currentPathIndex > 0 && curr === undefined) break; if (curr && next && (typeof next === 'object') && next.outlets === undefined) { if (!compare(curr, next, path)) return noMatch; currentCommandIndex += 2; } else { if (!compare(curr, {}, path)) return noMatch; currentCommandIndex++; } currentPathIndex++; } return {match: true, pathIndex: currentPathIndex, commandIndex: currentCommandIndex}; } function createNewSegmentGroup( segmentGroup: UrlSegmentGroup, startIndex: number, commands: any[]): UrlSegmentGroup { const paths = segmentGroup.segments.slice(0, startIndex); let i = 0; while (i < commands.length) { if (typeof commands[i] === 'object' && commands[i].outlets !== undefined) { const children = createNewSegmentChildren(commands[i].outlets); return new UrlSegmentGroup(paths, children); } // if we start with an object literal, we need to reuse the path part from the segment if (i === 0 && isMatrixParams(commands[0])) { const p = segmentGroup.segments[startIndex]; paths.push(new UrlSegment(p.path, commands[0])); i++; continue; } const curr = getPath(commands[i]); const next = (i < commands.length - 1) ? 
commands[i + 1] : null; if (curr && next && isMatrixParams(next)) { paths.push(new UrlSegment(curr, stringify(next))); i += 2; } else { paths.push(new UrlSegment(curr, {})); i++; } } return new UrlSegmentGroup(paths, {}); } function createNewSegmentChildren(outlets: {[name: string]: any}): any { const children: {[key: string]: UrlSegmentGroup} = {}; forEach(outlets, (commands: any, outlet: string) => { if (commands !== null) { children[outlet] = createNewSegmentGroup(new UrlSegmentGroup([], {}), 0, commands); } }); return children; } function stringify(params: {[key: string]: any}): {[key: string]: string} { const res: {[key: string]: string} = {}; forEach(params, (v: any, k: string) => res[k] = `${v}`); return res; } function compare(path: string, params: {[key: string]: any}, segment: UrlSegment): boolean { return path == segment.path && shallowEqual(params, segment.parameters); }
modules/@angular/router/src/create_url_tree.ts
0
https://github.com/angular/angular/commit/6a9251874b20bff5c3324c8e5a6a1757ff4a140b
[ 0.00017845821275841445, 0.0001743850007187575, 0.00016815142589621246, 0.00017470153397880495, 0.0000021994269445713144 ]
{ "id": 3, "code_window": [ " });\n", "\n", " it('should error if we attempt to close an already closed region', () => {\n", " expect(() => regionParser(t('abc', '/* #enddocregion */', 'def'), 'test-type'))\n", " .toThrowError('regionParser: Tried to close a region when none are open (at line 1).');\n", "\n", " expect(\n", " () =>\n" ], "labels": [ "keep", "keep", "keep", "keep", "replace", "keep", "keep", "keep" ], "after_edit": [ " .toThrowError('regionParser: Tried to close a region when none are open (at line 2).');\n" ], "file_path": "aio/transforms/examples-package/services/region-parser.spec.js", "type": "replace", "edit_start_line_idx": 109 }
/** * @license * Copyright Google Inc. All Rights Reserved. * * Use of this source code is governed by an MIT-style license that can be * found in the LICENSE file at https://angular.io/license */ import {AsyncTestCompleter, describe, expect, inject, it} from '@angular/core/testing/testing_internal'; import {MeasureValues, Metric, Options, ReflectiveInjector, Reporter, Sampler, Validator, WebDriverAdapter} from '../index'; import {isBlank, isPresent} from '../src/facade/lang'; export function main() { const EMPTY_EXECUTE = () => {}; describe('sampler', () => { let sampler: Sampler; function createSampler({driver, metric, reporter, validator, prepare, execute}: { driver?: any, metric?: Metric, reporter?: Reporter, validator?: Validator, prepare?: any, execute?: any } = {}) { let time = 1000; if (!metric) { metric = new MockMetric([]); } if (!reporter) { reporter = new MockReporter([]); } if (isBlank(driver)) { driver = new MockDriverAdapter([]); } const providers = [ Options.DEFAULT_PROVIDERS, Sampler.PROVIDERS, {provide: Metric, useValue: metric}, {provide: Reporter, useValue: reporter}, {provide: WebDriverAdapter, useValue: driver}, {provide: Options.EXECUTE, useValue: execute}, {provide: Validator, useValue: validator}, {provide: Options.NOW, useValue: () => new Date(time++)} ]; if (isPresent(prepare)) { providers.push({provide: Options.PREPARE, useValue: prepare}); } sampler = ReflectiveInjector.resolveAndCreate(providers).get(Sampler); } it('should call the prepare and execute callbacks using WebDriverAdapter.waitFor', inject([AsyncTestCompleter], (async: AsyncTestCompleter) => { const log: any[] = []; let count = 0; const driver = new MockDriverAdapter([], (callback: Function) => { const result = callback(); log.push(result); return Promise.resolve(result); }); createSampler({ driver: driver, validator: createCountingValidator(2), prepare: () => count++, execute: () => count++, }); sampler.sample().then((_) => { expect(count).toBe(4); expect(log).toEqual([0, 
1, 2, 3]); async.done(); }); })); it('should call prepare, beginMeasure, execute, endMeasure for every iteration', inject([AsyncTestCompleter], (async: AsyncTestCompleter) => { let workCount = 0; const log: any[] = []; createSampler({ metric: createCountingMetric(log), validator: createCountingValidator(2), prepare: () => { log.push(`p${workCount++}`); }, execute: () => { log.push(`w${workCount++}`); } }); sampler.sample().then((_) => { expect(log).toEqual([ 'p0', ['beginMeasure'], 'w1', ['endMeasure', false, {'script': 0}], 'p2', ['beginMeasure'], 'w3', ['endMeasure', false, {'script': 1}], ]); async.done(); }); })); it('should call execute, endMeasure for every iteration if there is no prepare callback', inject([AsyncTestCompleter], (async: AsyncTestCompleter) => { const log: any[] = []; let workCount = 0; createSampler({ metric: createCountingMetric(log), validator: createCountingValidator(2), execute: () => { log.push(`w${workCount++}`); }, prepare: null }); sampler.sample().then((_) => { expect(log).toEqual([ ['beginMeasure'], 'w0', ['endMeasure', true, {'script': 0}], 'w1', ['endMeasure', true, {'script': 1}], ]); async.done(); }); })); it('should only collect metrics for execute and ignore metrics from prepare', inject([AsyncTestCompleter], (async: AsyncTestCompleter) => { let scriptTime = 0; let iterationCount = 1; createSampler({ validator: createCountingValidator(2), metric: new MockMetric( [], () => { const result = Promise.resolve({'script': scriptTime}); scriptTime = 0; return result; }), prepare: () => { scriptTime = 1 * iterationCount; }, execute: () => { scriptTime = 10 * iterationCount; iterationCount++; } }); sampler.sample().then((state) => { expect(state.completeSample.length).toBe(2); expect(state.completeSample[0]).toEqual(mv(0, 1000, {'script': 10})); expect(state.completeSample[1]).toEqual(mv(1, 1001, {'script': 20})); async.done(); }); })); it('should call the validator for every execution and store the valid sample', 
inject([AsyncTestCompleter], (async: AsyncTestCompleter) => { const log: any[] = []; const validSample = [mv(null, null, {})]; createSampler({ metric: createCountingMetric(), validator: createCountingValidator(2, validSample, log), execute: EMPTY_EXECUTE }); sampler.sample().then((state) => { expect(state.validSample).toBe(validSample); // TODO(tbosch): Why does this fail?? // expect(log).toEqual([ // ['validate', [{'script': 0}], null], // ['validate', [{'script': 0}, {'script': 1}], validSample] // ]); expect(log.length).toBe(2); expect(log[0]).toEqual(['validate', [mv(0, 1000, {'script': 0})], null]); expect(log[1]).toEqual( ['validate', [mv(0, 1000, {'script': 0}), mv(1, 1001, {'script': 1})], validSample]); async.done(); }); })); it('should report the metric values', inject([AsyncTestCompleter], (async: AsyncTestCompleter) => { const log: any[] = []; const validSample = [mv(null, null, {})]; createSampler({ validator: createCountingValidator(2, validSample), metric: createCountingMetric(), reporter: new MockReporter(log), execute: EMPTY_EXECUTE }); sampler.sample().then((_) => { // TODO(tbosch): Why does this fail?? 
// expect(log).toEqual([ // ['reportMeasureValues', 0, {'script': 0}], // ['reportMeasureValues', 1, {'script': 1}], // ['reportSample', [{'script': 0}, {'script': 1}], validSample] // ]); expect(log.length).toBe(3); expect(log[0]).toEqual(['reportMeasureValues', mv(0, 1000, {'script': 0})]); expect(log[1]).toEqual(['reportMeasureValues', mv(1, 1001, {'script': 1})]); expect(log[2]).toEqual([ 'reportSample', [mv(0, 1000, {'script': 0}), mv(1, 1001, {'script': 1})], validSample ]); async.done(); }); })); }); } function mv(runIndex: number, time: number, values: {[key: string]: number}) { return new MeasureValues(runIndex, new Date(time), values); } function createCountingValidator( count: number, validSample: MeasureValues[] = null, log: any[] = []) { return new MockValidator(log, (completeSample: MeasureValues[]) => { count--; if (count === 0) { return validSample || completeSample; } else { return null; } }); } function createCountingMetric(log: any[] = []) { let scriptTime = 0; return new MockMetric(log, () => ({'script': scriptTime++})); } class MockDriverAdapter extends WebDriverAdapter { constructor(private _log: any[] = [], private _waitFor: Function = null) { super(); } waitFor(callback: Function): Promise<any> { if (isPresent(this._waitFor)) { return this._waitFor(callback); } else { return Promise.resolve(callback()); } } } class MockValidator extends Validator { constructor(private _log: any[] = [], private _validate: Function = null) { super(); } validate(completeSample: MeasureValues[]): MeasureValues[] { const stableSample = isPresent(this._validate) ? 
this._validate(completeSample) : completeSample; this._log.push(['validate', completeSample, stableSample]); return stableSample; } } class MockMetric extends Metric { constructor(private _log: any[] = [], private _endMeasure: Function = null) { super(); } beginMeasure() { this._log.push(['beginMeasure']); return Promise.resolve(null); } endMeasure(restart: boolean) { const measureValues = isPresent(this._endMeasure) ? this._endMeasure() : {}; this._log.push(['endMeasure', restart, measureValues]); return Promise.resolve(measureValues); } } class MockReporter extends Reporter { constructor(private _log: any[] = []) { super(); } reportMeasureValues(values: MeasureValues): Promise<any> { this._log.push(['reportMeasureValues', values]); return Promise.resolve(null); } reportSample(completeSample: MeasureValues[], validSample: MeasureValues[]): Promise<any> { this._log.push(['reportSample', completeSample, validSample]); return Promise.resolve(null); } }
modules/@angular/benchpress/test/sampler_spec.ts
0
https://github.com/angular/angular/commit/6a9251874b20bff5c3324c8e5a6a1757ff4a140b
[ 0.00017876509809866548, 0.00017323889187537134, 0.00016218739619944245, 0.0001743780157994479, 0.0000036316539535619086 ]
{ "id": 4, "code_window": [ "\n", " expect(\n", " () =>\n", " regionParser(t('/* #docregion */', 'abc', '/* #enddocregion X */', 'def'), 'test-type'))\n", " .toThrowError(\n", " 'regionParser: Tried to close a region, named \"X\", that is not open (at line 2).');\n", " });\n", "\n", " it('should handle whitespace in region names on single annotation', () => {\n" ], "labels": [ "keep", "keep", "keep", "keep", "keep", "replace", "keep", "keep", "keep" ], "after_edit": [ " 'regionParser: Tried to close a region, named \"X\", that is not open (at line 3).');\n" ], "file_path": "aio/transforms/examples-package/services/region-parser.spec.js", "type": "replace", "edit_start_line_idx": 115 }
var testPackage = require('../../helpers/test-package'); var Dgeni = require('dgeni'); const testRegionMatcher = { regionStartMatcher: /^\s*\/\*\s*#docregion\s+(.*)\s*\*\/\s*$/, regionEndMatcher: /^\s*\/\*\s*#enddocregion\s+(.*)\s*\*\/\s*$/, plasterMatcher: /^\s*\/\*\s*#docplaster\s+(.*)\s*\*\/\s*$/, createPlasterComment: plaster => `/* ${plaster} */` }; describe('regionParser service', () => { var dgeni, injector, regionParser; beforeEach(function() { dgeni = new Dgeni([testPackage('examples-package', true)]); injector = dgeni.configureInjector(); regionParser = injector.get('regionParser'); regionParser.regionMatchers = {'test-type': testRegionMatcher}; }); it('should return just the contents if there is no region-matcher for the file type', () => { const output = regionParser('some contents', 'unknown'); expect(output).toEqual({contents: 'some contents', regions: {}}); }); it('should return just the contents if there is a region-matcher but no regions', () => { const output = regionParser('some contents', 'test-type'); expect(output).toEqual({contents: 'some contents', regions: {}}); }); it('should remove start region annotations from the contents', () => { const output = regionParser( t('/* #docregion */', 'abc', '/* #docregion X */', 'def', '/* #docregion Y */', 'ghi'), 'test-type'); expect(output.contents).toEqual(t('abc', 'def', 'ghi')); }); it('should remove end region annotations from the contents', () => { const output = regionParser( t('/* #docregion */', 'abc', '/* #docregion X */', 'def', '/* #enddocregion X */', '/* #docregion Y */', 'ghi', '/* #enddocregion Y */', '/* #enddocregion */'), 'test-type'); expect(output.contents).toEqual(t('abc', 'def', 'ghi')); }); it('should remove doc plaster annotations from the contents', () => { const output = regionParser(t('/* #docplaster ... elided ... 
*/', 'abc', 'def', 'ghi'), 'test-type'); expect(output.contents).toEqual(t('abc', 'def', 'ghi')); }); it('should capture the rest of the contents for a region with no end region annotation', () => { const output = regionParser( t('/* #docregion */', 'abc', '/* #docregion X */', 'def', '/* #docregion Y */', 'ghi'), 'test-type'); expect(output.regions['']).toEqual(t('abc', 'def', 'ghi')); expect(output.regions['X']).toEqual(t('def', 'ghi')); expect(output.regions['Y']).toEqual(t('ghi')); }); it('should capture the contents for a region up to the end region annotation', () => { const output = regionParser( t('/* #docregion */', 'abc', '/* #enddocregion */', '/* #docregion X */', 'def', '/* #enddocregion X */', '/* #docregion Y */', 'ghi', '/* #enddocregion Y */'), 'test-type'); expect(output.regions['']).toEqual(t('abc')); expect(output.regions['X']).toEqual(t('def')); expect(output.regions['Y']).toEqual(t('ghi')); }); it('should open a region with a null name if there is no region name', () => { const output = regionParser(t('/* #docregion */', 'abc', '/* #enddocregion */'), 'test-type'); expect(output.regions['']).toEqual('abc'); }); it('should close the most recently opened region if there is no region name', () => { const output = regionParser( t('/* #docregion X*/', 'abc', '/* #docregion Y */', 'def', '/* #enddocregion */', 'ghi', '/* #enddocregion */'), 'test-type'); expect(output.regions['X']).toEqual(t('abc', 'def', 'ghi')); expect(output.regions['Y']).toEqual(t('def')); }); it('should handle overlapping regions', () => { const output = regionParser( t('/* #docregion X*/', 'abc', '/* #docregion Y */', 'def', '/* #enddocregion X */', 'ghi', '/* #enddocregion Y */'), 'test-type'); expect(output.regions['X']).toEqual(t('abc', 'def')); expect(output.regions['Y']).toEqual(t('def', 'ghi')); }); it('should error if we attempt to open an already open region', () => { expect(() => regionParser(t('/* #docregion */', 'abc', '/* #docregion */', 'def'), 'test-type')) 
.toThrowError( 'regionParser: Tried to open a region, named "", that is already open (at line 2).'); expect( () => regionParser(t('/* #docregion X */', 'abc', '/* #docregion X */', 'def'), 'test-type')) .toThrowError( 'regionParser: Tried to open a region, named "X", that is already open (at line 2).'); }); it('should error if we attempt to close an already closed region', () => { expect(() => regionParser(t('abc', '/* #enddocregion */', 'def'), 'test-type')) .toThrowError('regionParser: Tried to close a region when none are open (at line 1).'); expect( () => regionParser(t('/* #docregion */', 'abc', '/* #enddocregion X */', 'def'), 'test-type')) .toThrowError( 'regionParser: Tried to close a region, named "X", that is not open (at line 2).'); }); it('should handle whitespace in region names on single annotation', () => { const output = regionParser(t('/* #docregion A B*/', 'abc', '/* #docregion A C */', 'def'), 'test-type'); expect(output.regions['A B']).toEqual(t('abc', 'def')); expect(output.regions['A C']).toEqual(t('def')); }); it('should join multiple regions with the default plaster string (". . .")', () => { const output = regionParser( t('/* #docregion */', 'abc', '/* #enddocregion */', 'def', '/* #docregion */', 'ghi', '/* #enddocregion */'), 'test-type'); expect(output.regions['']).toEqual(t('abc', '/* . . . */', 'ghi')); }); it('should join multiple regions with the current plaster string', () => { const output = regionParser( t('/* #docregion */', 'abc', '/* #enddocregion */', 'def', '/* #docregion */', 'ghi', '/* #enddocregion */', '/* #docplaster ... elided ... */', '/* #docregion A */', 'jkl', '/* #enddocregion A */', 'mno', '/* #docregion A */', 'pqr', '/* #enddocregion A */'), 'test-type'); expect(output.regions['']).toEqual(t('abc', '/* . . . */', 'ghi')); expect(output.regions['A']).toEqual(t('jkl', '/* ... elided ... 
*/', 'pqr')); }); it('should parse multiple region names separated by commas', () => { const output = regionParser( t('/* #docregion , A, B */', 'abc', '/* #enddocregion B */', '/* #docregion C */', 'xyz', '/* #enddocregion A, C */', '123', '/* #enddocregion */'), 'test-type'); expect(output.regions['']).toEqual(t('abc', 'xyz', '123')); expect(output.regions['A']).toEqual(t('abc', 'xyz')); expect(output.regions['B']).toEqual(t('abc')); expect(output.regions['C']).toEqual(t('xyz')); }) }); function t() { return Array.prototype.join.call(arguments, '\n'); }
aio/transforms/examples-package/services/region-parser.spec.js
1
https://github.com/angular/angular/commit/6a9251874b20bff5c3324c8e5a6a1757ff4a140b
[ 0.9957565665245056, 0.12537449598312378, 0.0002490598417352885, 0.002364520914852619, 0.32415634393692017 ]
{ "id": 4, "code_window": [ "\n", " expect(\n", " () =>\n", " regionParser(t('/* #docregion */', 'abc', '/* #enddocregion X */', 'def'), 'test-type'))\n", " .toThrowError(\n", " 'regionParser: Tried to close a region, named \"X\", that is not open (at line 2).');\n", " });\n", "\n", " it('should handle whitespace in region names on single annotation', () => {\n" ], "labels": [ "keep", "keep", "keep", "keep", "keep", "replace", "keep", "keep", "keep" ], "after_edit": [ " 'regionParser: Tried to close a region, named \"X\", that is not open (at line 3).');\n" ], "file_path": "aio/transforms/examples-package/services/region-parser.spec.js", "type": "replace", "edit_start_line_idx": 115 }
/** * @license * Copyright Google Inc. All Rights Reserved. * * Use of this source code is governed by an MIT-style license that can be * found in the LICENSE file at https://angular.io/license */ import {beforeEach, describe, expect, inject, it} from '../../../core/testing/testing_internal'; import {Element} from '../../src/ml_parser/ast'; import {HtmlParser} from '../../src/ml_parser/html_parser'; import {PreparsedElement, PreparsedElementType, preparseElement} from '../../src/template_parser/template_preparser'; export function main() { describe('preparseElement', () => { let htmlParser: HtmlParser; beforeEach(inject([HtmlParser], (_htmlParser: HtmlParser) => { htmlParser = _htmlParser; })); function preparse(html: string): PreparsedElement { return preparseElement(htmlParser.parse(html, 'TestComp').rootNodes[0] as Element); } it('should detect script elements', inject([HtmlParser], (htmlParser: HtmlParser) => { expect(preparse('<script>').type).toBe(PreparsedElementType.SCRIPT); })); it('should detect style elements', inject([HtmlParser], (htmlParser: HtmlParser) => { expect(preparse('<style>').type).toBe(PreparsedElementType.STYLE); })); it('should detect stylesheet elements', inject([HtmlParser], (htmlParser: HtmlParser) => { expect(preparse('<link rel="stylesheet">').type).toBe(PreparsedElementType.STYLESHEET); expect(preparse('<link rel="stylesheet" href="someUrl">').hrefAttr).toEqual('someUrl'); expect(preparse('<link rel="someRel">').type).toBe(PreparsedElementType.OTHER); })); it('should detect ng-content elements', inject([HtmlParser], (htmlParser: HtmlParser) => { expect(preparse('<ng-content>').type).toBe(PreparsedElementType.NG_CONTENT); })); it('should normalize ng-content.select attribute', inject([HtmlParser], (htmlParser: HtmlParser) => { expect(preparse('<ng-content>').selectAttr).toEqual('*'); expect(preparse('<ng-content select>').selectAttr).toEqual('*'); expect(preparse('<ng-content select="*">').selectAttr).toEqual('*'); })); it('should 
extract ngProjectAs value', () => { expect(preparse('<p ngProjectAs="el[attr].class"></p>').projectAs).toEqual('el[attr].class'); }); }); }
modules/@angular/compiler/test/template_parser/template_preparser_spec.ts
0
https://github.com/angular/angular/commit/6a9251874b20bff5c3324c8e5a6a1757ff4a140b
[ 0.00017518342065159231, 0.00017218048742506653, 0.00016657715605106205, 0.00017279094026889652, 0.0000028523643322841963 ]
{ "id": 4, "code_window": [ "\n", " expect(\n", " () =>\n", " regionParser(t('/* #docregion */', 'abc', '/* #enddocregion X */', 'def'), 'test-type'))\n", " .toThrowError(\n", " 'regionParser: Tried to close a region, named \"X\", that is not open (at line 2).');\n", " });\n", "\n", " it('should handle whitespace in region names on single annotation', () => {\n" ], "labels": [ "keep", "keep", "keep", "keep", "keep", "replace", "keep", "keep", "keep" ], "after_edit": [ " 'regionParser: Tried to close a region, named \"X\", that is not open (at line 3).');\n" ], "file_path": "aio/transforms/examples-package/services/region-parser.spec.js", "type": "replace", "edit_start_line_idx": 115 }
#!/usr/bin/env bash cd `dirname $0` while read RAW_PACKAGE || [[ -n "$RAW_PACKAGE" ]] do PACKAGE=${RAW_PACKAGE%$'\r'} DESTDIR=./../../modules/\@angular/${PACKAGE} rm ${DESTDIR}/facade mv ${DESTDIR}/facade.old ${DESTDIR}/facade done < packages.txt
scripts/windows/remove-symlinks.sh
0
https://github.com/angular/angular/commit/6a9251874b20bff5c3324c8e5a6a1757ff4a140b
[ 0.00017559347907081246, 0.0001718435960356146, 0.00016809372755233198, 0.0001718435960356146, 0.00000374987575924024 ]
{ "id": 4, "code_window": [ "\n", " expect(\n", " () =>\n", " regionParser(t('/* #docregion */', 'abc', '/* #enddocregion X */', 'def'), 'test-type'))\n", " .toThrowError(\n", " 'regionParser: Tried to close a region, named \"X\", that is not open (at line 2).');\n", " });\n", "\n", " it('should handle whitespace in region names on single annotation', () => {\n" ], "labels": [ "keep", "keep", "keep", "keep", "keep", "replace", "keep", "keep", "keep" ], "after_edit": [ " 'regionParser: Tried to close a region, named \"X\", that is not open (at line 3).');\n" ], "file_path": "aio/transforms/examples-package/services/region-parser.spec.js", "type": "replace", "edit_start_line_idx": 115 }
#!/bin/bash LOG_FILES=$LOGS_DIR/* for FILE in $LOG_FILES; do echo -e "\n\n\n" echo "================================================================================" echo 'travis_fold:start:cleanup.printfile' echo " $FILE" echo "================================================================================" cat $FILE echo 'travis_fold:end:cleanup.printfile' done
scripts/ci/print-logs.sh
0
https://github.com/angular/angular/commit/6a9251874b20bff5c3324c8e5a6a1757ff4a140b
[ 0.00017458712682127953, 0.00017294977442361414, 0.00017131242202594876, 0.00017294977442361414, 0.0000016373523976653814 ]
{ "id": 0, "code_window": [ "\t\t\tseparator: insertAsImage ? '\\n' : ' ',\n", "\t\t});\n", "\n", "\t\treturn snippet ? new vscode.SnippetTextEdit(selection, snippet) : undefined;\n", "\t}));\n", "\n", "\tconst edit = new vscode.WorkspaceEdit();\n", "\tedit.set(activeEditor.document.uri, snippetEdits);\n" ], "labels": [ "keep", "keep", "keep", "replace", "keep", "keep", "keep", "keep" ], "after_edit": [ "\t\treturn snippet ? new vscode.SnippetTextEdit(selection, snippet.snippet) : undefined;\n" ], "file_path": "extensions/markdown-language-features/src/commands/insertResource.ts", "type": "replace", "edit_start_line_idx": 87 }
/*--------------------------------------------------------------------------------------------- * Copyright (c) Microsoft Corporation. All rights reserved. * Licensed under the MIT License. See License.txt in the project root for license information. *--------------------------------------------------------------------------------------------*/ import * as path from 'path'; import * as vscode from 'vscode'; import * as URI from 'vscode-uri'; import { Schemes } from '../../util/schemes'; export const imageFileExtensions = new Set<string>([ 'bmp', 'gif', 'ico', 'jpe', 'jpeg', 'jpg', 'png', 'psd', 'svg', 'tga', 'tif', 'tiff', 'webp', ]); const videoFileExtensions = new Set<string>([ 'ogg', 'mp4' ]); export function registerDropIntoEditorSupport(selector: vscode.DocumentSelector) { return vscode.languages.registerDocumentDropEditProvider(selector, new class implements vscode.DocumentDropEditProvider { async provideDocumentDropEdits(document: vscode.TextDocument, _position: vscode.Position, dataTransfer: vscode.DataTransfer, token: vscode.CancellationToken): Promise<vscode.DocumentDropEdit | undefined> { const enabled = vscode.workspace.getConfiguration('markdown', document).get('editor.drop.enabled', true); if (!enabled) { return undefined; } const snippet = await tryGetUriListSnippet(document, dataTransfer, token); if (!snippet) { return undefined; } const edit = new vscode.DocumentDropEdit(snippet.snippet); edit.label = snippet.label; return edit; } }, { id: 'insertLink', dropMimeTypes: [ 'text/uri-list' ] }); } export async function tryGetUriListSnippet(document: vscode.TextDocument, dataTransfer: vscode.DataTransfer, token: vscode.CancellationToken): Promise<{ snippet: vscode.SnippetString; label: string } | undefined> { const urlList = await dataTransfer.get('text/uri-list')?.asString(); if (!urlList || token.isCancellationRequested) { return undefined; } const uris: vscode.Uri[] = []; for (const resource of urlList.split(/\r?\n/g)) { try { 
uris.push(vscode.Uri.parse(resource)); } catch { // noop } } const snippet = createUriListSnippet(document, uris); if (!snippet) { return undefined; } return { snippet: snippet, label: uris.length > 1 ? vscode.l10n.t('Insert uri links') : vscode.l10n.t('Insert uri link') }; } interface UriListSnippetOptions { readonly placeholderText?: string; readonly placeholderStartIndex?: number; /** * Should the snippet be for an image? * * If `undefined`, tries to infer this from the uri. */ readonly insertAsImage?: boolean; readonly separator?: string; } export function createUriListSnippet(document: vscode.TextDocument, uris: readonly vscode.Uri[], options?: UriListSnippetOptions): vscode.SnippetString | undefined { if (!uris.length) { return undefined; } const dir = getDocumentDir(document); const snippet = new vscode.SnippetString(); uris.forEach((uri, i) => { const mdPath = getMdPath(dir, uri); const ext = URI.Utils.extname(uri).toLowerCase().replace('.', ''); const insertAsImage = typeof options?.insertAsImage === 'undefined' ? imageFileExtensions.has(ext) : !!options.insertAsImage; const insertAsVideo = videoFileExtensions.has(ext); if (insertAsVideo) { snippet.appendText(`<video src="${mdPath}" controls title="`); snippet.appendPlaceholder('Title'); snippet.appendText('"></video>'); } else { snippet.appendText(insertAsImage ? '![' : '['); const placeholderText = options?.placeholderText ?? (insertAsImage ? 'Alt text' : 'label'); const placeholderIndex = typeof options?.placeholderStartIndex !== 'undefined' ? options?.placeholderStartIndex + i : undefined; snippet.appendPlaceholder(placeholderText, placeholderIndex); snippet.appendText(`](${mdPath})`); } if (i < uris.length - 1 && uris.length > 1) { snippet.appendText(options?.separator ?? 
' '); } }); return snippet; } function getMdPath(dir: vscode.Uri | undefined, file: vscode.Uri) { if (dir && dir.scheme === file.scheme && dir.authority === file.authority) { if (file.scheme === Schemes.file) { // On windows, we must use the native `path.relative` to generate the relative path // so that drive-letters are resolved cast insensitively. However we then want to // convert back to a posix path to insert in to the document. const relativePath = path.relative(dir.fsPath, file.fsPath); return encodeURI(path.posix.normalize(relativePath.split(path.sep).join(path.posix.sep))); } return encodeURI(path.posix.relative(dir.path, file.path)); } return file.toString(false); } function getDocumentDir(document: vscode.TextDocument): vscode.Uri | undefined { const docUri = getParentDocumentUri(document); if (docUri.scheme === Schemes.untitled) { return vscode.workspace.workspaceFolders?.[0]?.uri; } return URI.Utils.dirname(docUri); } export function getParentDocumentUri(document: vscode.TextDocument): vscode.Uri { if (document.uri.scheme === Schemes.notebookCell) { for (const notebook of vscode.workspace.notebookDocuments) { for (const cell of notebook.getCells()) { if (cell.document === document) { return notebook.uri; } } } } return document.uri; }
extensions/markdown-language-features/src/languageFeatures/copyFiles/dropIntoEditor.ts
1
https://github.com/microsoft/vscode/commit/f6de066b4c454569e9d91b8a6937a75b62712c73
[ 0.9882838726043701, 0.055707208812236786, 0.00017167643818538636, 0.0003226989065296948, 0.22618474066257477 ]
{ "id": 0, "code_window": [ "\t\t\tseparator: insertAsImage ? '\\n' : ' ',\n", "\t\t});\n", "\n", "\t\treturn snippet ? new vscode.SnippetTextEdit(selection, snippet) : undefined;\n", "\t}));\n", "\n", "\tconst edit = new vscode.WorkspaceEdit();\n", "\tedit.set(activeEditor.document.uri, snippetEdits);\n" ], "labels": [ "keep", "keep", "keep", "replace", "keep", "keep", "keep", "keep" ], "after_edit": [ "\t\treturn snippet ? new vscode.SnippetTextEdit(selection, snippet.snippet) : undefined;\n" ], "file_path": "extensions/markdown-language-features/src/commands/insertResource.ts", "type": "replace", "edit_start_line_idx": 87 }
/*--------------------------------------------------------------------------------------------- * Copyright (c) Microsoft Corporation. All rights reserved. * Licensed under the MIT License. See License.txt in the project root for license information. *--------------------------------------------------------------------------------------------*/ import * as assert from 'assert'; import { FileAccess } from 'vs/base/common/network'; import { LanguageService } from 'vs/editor/common/services/languageService'; import { TestNotificationService } from 'vs/platform/notification/test/common/testNotificationService'; import { GettingStartedDetailsRenderer } from 'vs/workbench/contrib/welcomeGettingStarted/browser/gettingStartedDetailsRenderer'; import { convertInternalMediaPathToFileURI } from 'vs/workbench/contrib/welcomeGettingStarted/browser/gettingStartedService'; import { TestFileService } from 'vs/workbench/test/browser/workbenchTestServices'; import { TestExtensionService } from 'vs/workbench/test/common/workbenchTestServices'; suite('Getting Started Markdown Renderer', () => { test('renders theme picker markdown with images', async () => { const fileService = new TestFileService(); const languageService = new LanguageService(); const renderer = new GettingStartedDetailsRenderer(fileService, new TestNotificationService(), new TestExtensionService(), languageService); const mdPath = convertInternalMediaPathToFileURI('theme_picker').with({ query: JSON.stringify({ moduleId: 'vs/workbench/contrib/welcomeGettingStarted/common/media/theme_picker' }) }); const mdBase = FileAccess.asFileUri('vs/workbench/contrib/welcomeGettingStarted/common/media/'); const rendered = await renderer.renderMarkdown(mdPath, mdBase); const imageSrcs = [...rendered.matchAll(/img src="[^"]*"/g)].map(match => match[0]); for (const src of imageSrcs) { const targetSrcFormat = /^img 
src="https:\/\/file\+.vscode-resource.vscode-cdn.net\/.*\/vs\/workbench\/contrib\/welcomeGettingStarted\/common\/media\/.*.png"$/; assert(targetSrcFormat.test(src), `${src} didnt match regex`); } languageService.dispose(); }); });
src/vs/workbench/contrib/welcomeGettingStarted/test/browser/gettingStartedMarkdownRenderer.test.ts
0
https://github.com/microsoft/vscode/commit/f6de066b4c454569e9d91b8a6937a75b62712c73
[ 0.00018012913642451167, 0.00017469357408117503, 0.00016880076145753264, 0.00017492221377324313, 0.000004040954536321806 ]
{ "id": 0, "code_window": [ "\t\t\tseparator: insertAsImage ? '\\n' : ' ',\n", "\t\t});\n", "\n", "\t\treturn snippet ? new vscode.SnippetTextEdit(selection, snippet) : undefined;\n", "\t}));\n", "\n", "\tconst edit = new vscode.WorkspaceEdit();\n", "\tedit.set(activeEditor.document.uri, snippetEdits);\n" ], "labels": [ "keep", "keep", "keep", "replace", "keep", "keep", "keep", "keep" ], "after_edit": [ "\t\treturn snippet ? new vscode.SnippetTextEdit(selection, snippet.snippet) : undefined;\n" ], "file_path": "extensions/markdown-language-features/src/commands/insertResource.ts", "type": "replace", "edit_start_line_idx": 87 }
/*--------------------------------------------------------------------------------------------- * Copyright (c) Microsoft Corporation. All rights reserved. * Licensed under the MIT License. See License.txt in the project root for license information. *--------------------------------------------------------------------------------------------*/ import { URI, UriDto } from 'vs/base/common/uri'; import { ContextKeyExpression } from 'vs/platform/contextkey/common/contextkey'; import { ThemeIcon } from 'vs/base/common/themables'; import { Categories } from './actionCommonCategories'; export interface ILocalizedString { /** * The localized value of the string. */ value: string; /** * The original (non localized value of the string) */ original: string; } export interface ICommandActionTitle extends ILocalizedString { /** * The title with a mnemonic designation. && precedes the mnemonic. */ mnemonicTitle?: string; } export type Icon = { dark?: URI; light?: URI } | ThemeIcon; export interface ICommandActionToggleInfo { /** * The condition that marks the action as toggled. */ condition: ContextKeyExpression; icon?: Icon; tooltip?: string; /** * The title that goes well with a a check mark, e.g "(check) Line Numbers" vs "Toggle Line Numbers" */ title?: string; /** * Like title but with a mnemonic designation. */ mnemonicTitle?: string; } export function isICommandActionToggleInfo(thing: ContextKeyExpression | ICommandActionToggleInfo | undefined): thing is ICommandActionToggleInfo { return thing ? 
(<ICommandActionToggleInfo>thing).condition !== undefined : false; } export interface ICommandActionSource { readonly id: string; readonly title: string; } export interface ICommandAction { id: string; title: string | ICommandActionTitle; shortTitle?: string | ICommandActionTitle; category?: keyof typeof Categories | ILocalizedString | string; tooltip?: string | ILocalizedString; icon?: Icon; source?: ICommandActionSource; precondition?: ContextKeyExpression; /** * The action is a toggle action. Define the context key expression that reflects its toggle-state * or define toggle-info including an icon and a title that goes well with a checkmark. */ toggled?: ContextKeyExpression | ICommandActionToggleInfo; } export type ISerializableCommandAction = UriDto<ICommandAction>;
src/vs/platform/action/common/action.ts
0
https://github.com/microsoft/vscode/commit/f6de066b4c454569e9d91b8a6937a75b62712c73
[ 0.00017610908253118396, 0.00017268204828724265, 0.00016825842612888664, 0.00017278955783694983, 0.000002612727030282258 ]
{ "id": 0, "code_window": [ "\t\t\tseparator: insertAsImage ? '\\n' : ' ',\n", "\t\t});\n", "\n", "\t\treturn snippet ? new vscode.SnippetTextEdit(selection, snippet) : undefined;\n", "\t}));\n", "\n", "\tconst edit = new vscode.WorkspaceEdit();\n", "\tedit.set(activeEditor.document.uri, snippetEdits);\n" ], "labels": [ "keep", "keep", "keep", "replace", "keep", "keep", "keep", "keep" ], "after_edit": [ "\t\treturn snippet ? new vscode.SnippetTextEdit(selection, snippet.snippet) : undefined;\n" ], "file_path": "extensions/markdown-language-features/src/commands/insertResource.ts", "type": "replace", "edit_start_line_idx": 87 }
/*--------------------------------------------------------------------------------------------- * Copyright (c) Microsoft Corporation. All rights reserved. * Licensed under the MIT License. See License.txt in the project root for license information. *--------------------------------------------------------------------------------------------*/ // @ts-check const perf = require('./vs/base/common/performance'); const performance = require('perf_hooks').performance; const product = require('../product.json'); const readline = require('readline'); const http = require('http'); perf.mark('code/server/start'); // @ts-ignore global.vscodeServerStartTime = performance.now(); async function start() { const minimist = require('minimist'); // Do a quick parse to determine if a server or the cli needs to be started const parsedArgs = minimist(process.argv.slice(2), { boolean: ['start-server', 'list-extensions', 'print-ip-address', 'help', 'version', 'accept-server-license-terms'], string: ['install-extension', 'install-builtin-extension', 'uninstall-extension', 'locate-extension', 'socket-path', 'host', 'port', 'compatibility'], alias: { help: 'h', version: 'v' } }); ['host', 'port', 'accept-server-license-terms'].forEach(e => { if (!parsedArgs[e]) { const envValue = process.env[`VSCODE_SERVER_${e.toUpperCase().replace('-', '_')}`]; if (envValue) { parsedArgs[e] = envValue; } } }); const extensionLookupArgs = ['list-extensions', 'locate-extension']; const extensionInstallArgs = ['install-extension', 'install-builtin-extension', 'uninstall-extension']; const shouldSpawnCli = parsedArgs.help || parsedArgs.version || extensionLookupArgs.some(a => !!parsedArgs[a]) || (extensionInstallArgs.some(a => !!parsedArgs[a]) && !parsedArgs['start-server']); if (shouldSpawnCli) { loadCode().then((mod) => { mod.spawnCli(); }); return; } if (parsedArgs['compatibility'] === '1.63') { console.warn(`server.sh is being replaced by 'bin/${product.serverApplicationName}'. 
Please migrate to the new command and adopt the following new default behaviors:`); console.warn('* connection token is mandatory unless --without-connection-token is used'); console.warn('* host defaults to `localhost`'); } /** * @typedef { import('./vs/server/node/remoteExtensionHostAgentServer').IServerAPI } IServerAPI */ /** @type {IServerAPI | null} */ let _remoteExtensionHostAgentServer = null; /** @type {Promise<IServerAPI> | null} */ let _remoteExtensionHostAgentServerPromise = null; /** @returns {Promise<IServerAPI>} */ const getRemoteExtensionHostAgentServer = () => { if (!_remoteExtensionHostAgentServerPromise) { _remoteExtensionHostAgentServerPromise = loadCode().then(async (mod) => { const server = await mod.createServer(address); _remoteExtensionHostAgentServer = server; return server; }); } return _remoteExtensionHostAgentServerPromise; }; const http = require('http'); const os = require('os'); if (Array.isArray(product.serverLicense) && product.serverLicense.length) { console.log(product.serverLicense.join('\n')); if (product.serverLicensePrompt && parsedArgs['accept-server-license-terms'] !== true) { if (hasStdinWithoutTty()) { console.log('To accept the license terms, start the server with --accept-server-license-terms'); process.exit(1); } try { const accept = await prompt(product.serverLicensePrompt); if (!accept) { process.exit(1); } } catch (e) { console.log(e); process.exit(1); } } } let firstRequest = true; let firstWebSocket = true; /** @type {string | import('net').AddressInfo | null} */ let address = null; const server = http.createServer(async (req, res) => { if (firstRequest) { firstRequest = false; perf.mark('code/server/firstRequest'); } const remoteExtensionHostAgentServer = await getRemoteExtensionHostAgentServer(); return remoteExtensionHostAgentServer.handleRequest(req, res); }); server.on('upgrade', async (req, socket) => { if (firstWebSocket) { firstWebSocket = false; perf.mark('code/server/firstWebSocket'); } const 
remoteExtensionHostAgentServer = await getRemoteExtensionHostAgentServer(); // @ts-ignore return remoteExtensionHostAgentServer.handleUpgrade(req, socket); }); server.on('error', async (err) => { const remoteExtensionHostAgentServer = await getRemoteExtensionHostAgentServer(); return remoteExtensionHostAgentServer.handleServerError(err); }); const host = sanitizeStringArg(parsedArgs['host']) || (parsedArgs['compatibility'] !== '1.63' ? 'localhost' : undefined); const nodeListenOptions = ( parsedArgs['socket-path'] ? { path: sanitizeStringArg(parsedArgs['socket-path']) } : { host, port: await parsePort(host, sanitizeStringArg(parsedArgs['port'])) } ); server.listen(nodeListenOptions, async () => { let output = Array.isArray(product.serverGreeting) && product.serverGreeting.length ? `\n\n${product.serverGreeting.join('\n')}\n\n` : ``; if (typeof nodeListenOptions.port === 'number' && parsedArgs['print-ip-address']) { const ifaces = os.networkInterfaces(); Object.keys(ifaces).forEach(function (ifname) { ifaces[ifname]?.forEach(function (iface) { if (!iface.internal && iface.family === 'IPv4') { output += `IP Address: ${iface.address}\n`; } }); }); } address = server.address(); if (address === null) { throw new Error('Unexpected server address'); } output += `Server bound to ${typeof address === 'string' ? address : `${address.address}:${address.port} (${address.family})`}\n`; // Do not change this line. VS Code looks for this in the output. output += `Extension host agent listening on ${typeof address === 'string' ? 
address : address.port}\n`; console.log(output); perf.mark('code/server/started'); // @ts-ignore global.vscodeServerListenTime = performance.now(); await getRemoteExtensionHostAgentServer(); }); process.on('exit', () => { server.close(); if (_remoteExtensionHostAgentServer) { _remoteExtensionHostAgentServer.dispose(); } }); } /** * @param {any} val * @returns {string | undefined} */ function sanitizeStringArg(val) { if (Array.isArray(val)) { // if an argument is passed multiple times, minimist creates an array val = val.pop(); // take the last item } return typeof val === 'string' ? val : undefined; } /** * If `--port` is specified and describes a single port, connect to that port. * * If `--port`describes a port range * then find a free port in that range. Throw error if no * free port available in range. * * In absence of specified ports, connect to port 8000. * @param {string | undefined} host * @param {string | undefined} strPort * @returns {Promise<number>} * @throws */ async function parsePort(host, strPort) { if (strPort) { let range; if (strPort.match(/^\d+$/)) { return parseInt(strPort, 10); } else if (range = parseRange(strPort)) { const port = await findFreePort(host, range.start, range.end); if (port !== undefined) { return port; } // Remote-SSH extension relies on this exact port error message, treat as an API console.warn(`--port: Could not find free port in range: ${range.start} - ${range.end} (inclusive).`); process.exit(1); } else { console.warn(`--port "${strPort}" is not a valid number or range. 
Ranges must be in the form 'from-to' with 'from' an integer larger than 0 and not larger than 'end'.`); process.exit(1); } } return 8000; } /** * @param {string} strRange * @returns {{ start: number; end: number } | undefined} */ function parseRange(strRange) { const match = strRange.match(/^(\d+)-(\d+)$/); if (match) { const start = parseInt(match[1], 10), end = parseInt(match[2], 10); if (start > 0 && start <= end && end <= 65535) { return { start, end }; } } return undefined; } /** * Starting at the `start` port, look for a free port incrementing * by 1 until `end` inclusive. If no free port is found, undefined is returned. * * @param {string | undefined} host * @param {number} start * @param {number} end * @returns {Promise<number | undefined>} * @throws */ async function findFreePort(host, start, end) { const testPort = (/** @type {number} */ port) => { return new Promise((resolve) => { const server = http.createServer(); server.listen(port, host, () => { server.close(); resolve(true); }).on('error', () => { resolve(false); }); }); }; for (let port = start; port <= end; port++) { if (await testPort(port)) { return port; } } return undefined; } /** @returns { Promise<typeof import('./vs/server/node/server.main')> } */ function loadCode() { return new Promise((resolve, reject) => { const path = require('path'); delete process.env['ELECTRON_RUN_AS_NODE']; // Keep bootstrap-amd.js from redefining 'fs'. 
// See https://github.com/microsoft/vscode-remote-release/issues/6543 // We would normally install a SIGPIPE listener in bootstrap.js // But in certain situations, the console itself can be in a broken pipe state // so logging SIGPIPE to the console will cause an infinite async loop process.env['VSCODE_HANDLES_SIGPIPE'] = 'true'; if (process.env['VSCODE_DEV']) { // When running out of sources, we need to load node modules from remote/node_modules, // which are compiled against nodejs, not electron process.env['VSCODE_INJECT_NODE_MODULE_LOOKUP_PATH'] = process.env['VSCODE_INJECT_NODE_MODULE_LOOKUP_PATH'] || path.join(__dirname, '..', 'remote', 'node_modules'); require('./bootstrap-node').injectNodeModuleLookupPath(process.env['VSCODE_INJECT_NODE_MODULE_LOOKUP_PATH']); } else { delete process.env['VSCODE_INJECT_NODE_MODULE_LOOKUP_PATH']; } require('./bootstrap-amd').load('vs/server/node/server.main', resolve, reject); }); } function hasStdinWithoutTty() { try { return !process.stdin.isTTY; // Via https://twitter.com/MylesBorins/status/782009479382626304 } catch (error) { // Windows workaround for https://github.com/nodejs/node/issues/11656 } return false; } /** * @param {string} question * @returns { Promise<boolean> } */ function prompt(question) { const rl = readline.createInterface({ input: process.stdin, output: process.stdout }); return new Promise((resolve, reject) => { rl.question(question + ' ', async function (data) { rl.close(); const str = data.toString().trim().toLowerCase(); if (str === '' || str === 'y' || str === 'yes') { resolve(true); } else if (str === 'n' || str === 'no') { resolve(false); } else { process.stdout.write('\nInvalid Response. Answer either yes (y, yes) or no (n, no)\n'); resolve(await prompt(question)); } }); }); } start();
src/server-main.js
0
https://github.com/microsoft/vscode/commit/f6de066b4c454569e9d91b8a6937a75b62712c73
[ 0.000176604138687253, 0.00017298605234827846, 0.00016586044512223452, 0.0001740775624057278, 0.000003043207243536017 ]
{ "id": 1, "code_window": [ "\t\tif (file.uri) {\n", "\t\t\t// If file is already in workspace, we don't want to create a copy of it\n", "\t\t\tconst workspaceFolder = vscode.workspace.getWorkspaceFolder(file.uri);\n", "\t\t\tif (workspaceFolder) {\n", "\t\t\t\tconst snippet = createUriListSnippet(document, [file.uri]);\n", "\t\t\t\treturn snippet ? new vscode.DocumentPasteEdit(snippet) : undefined;\n", "\t\t\t}\n", "\t\t}\n", "\n" ], "labels": [ "keep", "keep", "keep", "keep", "keep", "replace", "keep", "keep", "keep" ], "after_edit": [ "\t\t\t\treturn snippet ? new vscode.DocumentPasteEdit(snippet.snippet) : undefined;\n" ], "file_path": "extensions/markdown-language-features/src/languageFeatures/copyFiles/copyPaste.ts", "type": "replace", "edit_start_line_idx": 57 }
/*--------------------------------------------------------------------------------------------- * Copyright (c) Microsoft Corporation. All rights reserved. * Licensed under the MIT License. See License.txt in the project root for license information. *--------------------------------------------------------------------------------------------*/ import * as vscode from 'vscode'; import { Schemes } from '../../util/schemes'; import { getNewFileName } from './copyFiles'; import { createUriListSnippet, tryGetUriListSnippet } from './dropIntoEditor'; const supportedImageMimes = new Set([ 'image/png', 'image/jpg', ]); class PasteEditProvider implements vscode.DocumentPasteEditProvider { async provideDocumentPasteEdits( document: vscode.TextDocument, _ranges: readonly vscode.Range[], dataTransfer: vscode.DataTransfer, token: vscode.CancellationToken, ): Promise<vscode.DocumentPasteEdit | undefined> { const enabled = vscode.workspace.getConfiguration('markdown', document).get('experimental.editor.pasteLinks.enabled', true); if (!enabled) { return; } if (document.uri.scheme === Schemes.notebookCell) { return; } for (const imageMime of supportedImageMimes) { const item = dataTransfer.get(imageMime); const file = item?.asFile(); if (item && file) { const edit = await this._makeCreateImagePasteEdit(document, file, token); if (token.isCancellationRequested) { return; } if (edit) { return edit; } } } const snippet = await tryGetUriListSnippet(document, dataTransfer, token); return snippet ? 
new vscode.DocumentPasteEdit(snippet.snippet) : undefined; } private async _makeCreateImagePasteEdit(document: vscode.TextDocument, file: vscode.DataTransferFile, token: vscode.CancellationToken): Promise<vscode.DocumentPasteEdit | undefined> { if (file.uri) { // If file is already in workspace, we don't want to create a copy of it const workspaceFolder = vscode.workspace.getWorkspaceFolder(file.uri); if (workspaceFolder) { const snippet = createUriListSnippet(document, [file.uri]); return snippet ? new vscode.DocumentPasteEdit(snippet) : undefined; } } const uri = await getNewFileName(document, file); if (token.isCancellationRequested) { return; } const snippet = createUriListSnippet(document, [uri]); if (!snippet) { return; } // Note that there is currently no way to undo the file creation :/ const workspaceEdit = new vscode.WorkspaceEdit(); workspaceEdit.createFile(uri, { contents: file }); const pasteEdit = new vscode.DocumentPasteEdit(snippet); pasteEdit.additionalEdit = workspaceEdit; return pasteEdit; } } export function registerPasteSupport(selector: vscode.DocumentSelector,) { return vscode.languages.registerDocumentPasteEditProvider(selector, new PasteEditProvider(), { pasteMimeTypes: [ 'text/uri-list', ...supportedImageMimes, ] }); }
extensions/markdown-language-features/src/languageFeatures/copyFiles/copyPaste.ts
1
https://github.com/microsoft/vscode/commit/f6de066b4c454569e9d91b8a6937a75b62712c73
[ 0.99861741065979, 0.22496573626995087, 0.0004288616473786533, 0.006581333000212908, 0.40175604820251465 ]
{ "id": 1, "code_window": [ "\t\tif (file.uri) {\n", "\t\t\t// If file is already in workspace, we don't want to create a copy of it\n", "\t\t\tconst workspaceFolder = vscode.workspace.getWorkspaceFolder(file.uri);\n", "\t\t\tif (workspaceFolder) {\n", "\t\t\t\tconst snippet = createUriListSnippet(document, [file.uri]);\n", "\t\t\t\treturn snippet ? new vscode.DocumentPasteEdit(snippet) : undefined;\n", "\t\t\t}\n", "\t\t}\n", "\n" ], "labels": [ "keep", "keep", "keep", "keep", "keep", "replace", "keep", "keep", "keep" ], "after_edit": [ "\t\t\t\treturn snippet ? new vscode.DocumentPasteEdit(snippet.snippet) : undefined;\n" ], "file_path": "extensions/markdown-language-features/src/languageFeatures/copyFiles/copyPaste.ts", "type": "replace", "edit_start_line_idx": 57 }
/*--------------------------------------------------------------------------------------------- * Copyright (c) Microsoft Corporation. All rights reserved. * Licensed under the MIT License. See License.txt in the project root for license information. *--------------------------------------------------------------------------------------------*/ import { WebviewStyles } from 'vs/workbench/contrib/webview/browser/webview'; const mapping: ReadonlyMap<string, string> = new Map([ ['theme-font-family', 'vscode-font-family'], ['theme-font-weight', 'vscode-font-weight'], ['theme-font-size', 'vscode-font-size'], ['theme-code-font-family', 'vscode-editor-font-family'], ['theme-code-font-weight', 'vscode-editor-font-weight'], ['theme-code-font-size', 'vscode-editor-font-size'], ['theme-scrollbar-background', 'vscode-scrollbarSlider-background'], ['theme-scrollbar-hover-background', 'vscode-scrollbarSlider-hoverBackground'], ['theme-scrollbar-active-background', 'vscode-scrollbarSlider-activeBackground'], ['theme-quote-background', 'vscode-textBlockQuote-background'], ['theme-quote-border', 'vscode-textBlockQuote-border'], ['theme-code-foreground', 'vscode-textPreformat-foreground'], // Editor ['theme-background', 'vscode-editor-background'], ['theme-foreground', 'vscode-editor-foreground'], ['theme-ui-foreground', 'vscode-foreground'], ['theme-link', 'vscode-textLink-foreground'], ['theme-link-active', 'vscode-textLink-activeForeground'], // Buttons ['theme-button-background', 'vscode-button-background'], ['theme-button-hover-background', 'vscode-button-hoverBackground'], ['theme-button-foreground', 'vscode-button-foreground'], ['theme-button-secondary-background', 'vscode-button-secondaryBackground'], ['theme-button-secondary-hover-background', 'vscode-button-secondaryHoverBackground'], ['theme-button-secondary-foreground', 'vscode-button-secondaryForeground'], ['theme-button-hover-foreground', 'vscode-button-foreground'], ['theme-button-focus-foreground', 
'vscode-button-foreground'], ['theme-button-secondary-hover-foreground', 'vscode-button-secondaryForeground'], ['theme-button-secondary-focus-foreground', 'vscode-button-secondaryForeground'], // Inputs ['theme-input-background', 'vscode-input-background'], ['theme-input-foreground', 'vscode-input-foreground'], ['theme-input-placeholder-foreground', 'vscode-input-placeholderForeground'], ['theme-input-focus-border-color', 'vscode-focusBorder'], // Menus ['theme-menu-background', 'vscode-menu-background'], ['theme-menu-foreground', 'vscode-menu-foreground'], ['theme-menu-hover-background', 'vscode-menu-selectionBackground'], ['theme-menu-focus-background', 'vscode-menu-selectionBackground'], ['theme-menu-hover-foreground', 'vscode-menu-selectionForeground'], ['theme-menu-focus-foreground', 'vscode-menu-selectionForeground'], // Errors ['theme-error-background', 'vscode-inputValidation-errorBackground'], ['theme-error-foreground', 'vscode-foreground'], ['theme-warning-background', 'vscode-inputValidation-warningBackground'], ['theme-warning-foreground', 'vscode-foreground'], ['theme-info-background', 'vscode-inputValidation-infoBackground'], ['theme-info-foreground', 'vscode-foreground'], // Notebook: ['theme-notebook-output-background', 'vscode-notebook-outputContainerBackgroundColor'], ['theme-notebook-output-border', 'vscode-notebook-outputContainerBorderColor'], ['theme-notebook-cell-selected-background', 'vscode-notebook-selectedCellBackground'], ['theme-notebook-symbol-highlight-background', 'vscode-notebook-symbolHighlightBackground'], ['theme-notebook-diff-removed-background', 'vscode-diffEditor-removedTextBackground'], ['theme-notebook-diff-inserted-background', 'vscode-diffEditor-insertedTextBackground'], ]); const constants: Readonly<WebviewStyles> = { 'theme-input-border-width': '1px', 'theme-button-primary-hover-shadow': 'none', 'theme-button-secondary-hover-shadow': 'none', 'theme-input-border-color': 'transparent', }; /** * Transforms base vscode theme 
variables into generic variables for notebook * renderers. * @see https://github.com/microsoft/vscode/issues/107985 for context * @deprecated */ export const transformWebviewThemeVars = (s: Readonly<WebviewStyles>): WebviewStyles => { const result = { ...s, ...constants }; for (const [target, src] of mapping) { result[target] = s[src]; } return result; };
src/vs/workbench/contrib/notebook/browser/view/renderers/webviewThemeMapping.ts
0
https://github.com/microsoft/vscode/commit/f6de066b4c454569e9d91b8a6937a75b62712c73
[ 0.00017618649872019887, 0.00017036714416462928, 0.00016478206089232117, 0.00017069738532882184, 0.000003595504722397891 ]
{ "id": 1, "code_window": [ "\t\tif (file.uri) {\n", "\t\t\t// If file is already in workspace, we don't want to create a copy of it\n", "\t\t\tconst workspaceFolder = vscode.workspace.getWorkspaceFolder(file.uri);\n", "\t\t\tif (workspaceFolder) {\n", "\t\t\t\tconst snippet = createUriListSnippet(document, [file.uri]);\n", "\t\t\t\treturn snippet ? new vscode.DocumentPasteEdit(snippet) : undefined;\n", "\t\t\t}\n", "\t\t}\n", "\n" ], "labels": [ "keep", "keep", "keep", "keep", "keep", "replace", "keep", "keep", "keep" ], "after_edit": [ "\t\t\t\treturn snippet ? new vscode.DocumentPasteEdit(snippet.snippet) : undefined;\n" ], "file_path": "extensions/markdown-language-features/src/languageFeatures/copyFiles/copyPaste.ts", "type": "replace", "edit_start_line_idx": 57 }
alias brew_list="brew leaves" alias brew-list="brew leaves"
extensions/vscode-colorize-tests/test/colorize-fixtures/test-173224.sh
0
https://github.com/microsoft/vscode/commit/f6de066b4c454569e9d91b8a6937a75b62712c73
[ 0.00017561977438163012, 0.00017561977438163012, 0.00017561977438163012, 0.00017561977438163012, 0 ]
{ "id": 1, "code_window": [ "\t\tif (file.uri) {\n", "\t\t\t// If file is already in workspace, we don't want to create a copy of it\n", "\t\t\tconst workspaceFolder = vscode.workspace.getWorkspaceFolder(file.uri);\n", "\t\t\tif (workspaceFolder) {\n", "\t\t\t\tconst snippet = createUriListSnippet(document, [file.uri]);\n", "\t\t\t\treturn snippet ? new vscode.DocumentPasteEdit(snippet) : undefined;\n", "\t\t\t}\n", "\t\t}\n", "\n" ], "labels": [ "keep", "keep", "keep", "keep", "keep", "replace", "keep", "keep", "keep" ], "after_edit": [ "\t\t\t\treturn snippet ? new vscode.DocumentPasteEdit(snippet.snippet) : undefined;\n" ], "file_path": "extensions/markdown-language-features/src/languageFeatures/copyFiles/copyPaste.ts", "type": "replace", "edit_start_line_idx": 57 }
/*--------------------------------------------------------------------------------------------- * Copyright (c) Microsoft Corporation. All rights reserved. * Licensed under the MIT License. See License.txt in the project root for license information. *--------------------------------------------------------------------------------------------*/ /** Dialog: Modal Block */ .monaco-dialog-modal-block { position: fixed; height: 100%; width: 100%; left:0; top:0; z-index: 2600; display: flex; justify-content: center; align-items: center; } .monaco-dialog-modal-block.dimmed { background: rgba(0, 0, 0, 0.3); } /** Dialog: Container */ .monaco-dialog-box { display: flex; flex-direction: column-reverse; width: min-content; min-width: 500px; max-width: 90vw; min-height: 75px; padding: 10px; transform: translate3d(0px, 0px, 0px); } /** Dialog: Title Actions Row */ .monaco-dialog-box .dialog-toolbar-row { height: 22px; padding-bottom: 4px; } .monaco-dialog-box .dialog-toolbar-row .actions-container { justify-content: flex-end; } /** Dialog: Message Row */ .monaco-dialog-box .dialog-message-row { display: flex; flex-grow: 1; align-items: center; padding: 0 10px; } .monaco-dialog-box .dialog-message-row > .dialog-icon.codicon { flex: 0 0 48px; height: 48px; align-self: baseline; font-size: 48px; } /** Dialog: Message Container */ .monaco-dialog-box .dialog-message-row .dialog-message-container { display: flex; flex-direction: column; overflow: hidden; text-overflow: ellipsis; padding-left: 24px; user-select: text; -webkit-user-select: text; word-wrap: break-word; /* never overflow long words, but break to next line */ white-space: normal; } /** Dialog: Message */ .monaco-dialog-box .dialog-message-row .dialog-message-container .dialog-message { line-height: 22px; font-size: 18px; flex: 1; /* let the message always grow */ white-space: normal; word-wrap: break-word; /* never overflow long words, but break to next line */ min-height: 48px; /* matches icon height */ margin-bottom: 
8px; display: flex; align-items: center; } /** Dialog: Details */ .monaco-dialog-box .dialog-message-row .dialog-message-container .dialog-message-detail { line-height: 22px; flex: 1; /* let the message always grow */ } .monaco-dialog-box .dialog-message-row .dialog-message-container .dialog-message a:focus { outline-width: 1px; outline-style: solid; } /** Dialog: Checkbox */ .monaco-dialog-box .dialog-message-row .dialog-message-container .dialog-checkbox-row { padding: 15px 0px 0px; display: flex; } .monaco-dialog-box .dialog-message-row .dialog-message-container .dialog-checkbox-row .dialog-checkbox-message { cursor: pointer; user-select: none; -webkit-user-select: none; } /** Dialog: Input */ .monaco-dialog-box .dialog-message-row .dialog-message-container .dialog-message-input { padding: 15px 0px 0px; display: flex; } .monaco-dialog-box .dialog-message-row .dialog-message-container .dialog-message-input .monaco-inputbox { flex: 1; } /** Dialog: File Path */ .monaco-dialog-box code { font-family: var(--monaco-monospace-font); } /** Dialog: Buttons Row */ .monaco-dialog-box > .dialog-buttons-row { display: flex; align-items: center; padding-right: 1px; overflow: hidden; /* buttons row should never overflow */ } .monaco-dialog-box > .dialog-buttons-row { display: flex; white-space: nowrap; padding: 20px 10px 10px; } /** Dialog: Buttons */ .monaco-dialog-box > .dialog-buttons-row > .dialog-buttons { display: flex; width: 100%; justify-content: flex-end; overflow: hidden; margin-left: 67px; /* for long buttons, force align with text */ } .monaco-dialog-box > .dialog-buttons-row > .dialog-buttons > .monaco-button { width: fit-content; padding: 5px 10px; overflow: hidden; text-overflow: ellipsis; margin: 4px 5px; /* allows button focus outline to be visible */ outline-offset: 2px !important; }
src/vs/base/browser/ui/dialog/dialog.css
0
https://github.com/microsoft/vscode/commit/f6de066b4c454569e9d91b8a6937a75b62712c73
[ 0.00017831769946496934, 0.00017519918037578464, 0.00016912442515604198, 0.00017553195357322693, 0.0000021750352061644662 ]
{ "id": 2, "code_window": [ "\t\t// Note that there is currently no way to undo the file creation :/\n", "\t\tconst workspaceEdit = new vscode.WorkspaceEdit();\n", "\t\tworkspaceEdit.createFile(uri, { contents: file });\n", "\n", "\t\tconst pasteEdit = new vscode.DocumentPasteEdit(snippet);\n", "\t\tpasteEdit.additionalEdit = workspaceEdit;\n", "\t\treturn pasteEdit;\n", "\t}\n", "}\n" ], "labels": [ "keep", "keep", "keep", "keep", "replace", "keep", "keep", "keep", "keep" ], "after_edit": [ "\t\tconst pasteEdit = new vscode.DocumentPasteEdit(snippet.snippet);\n" ], "file_path": "extensions/markdown-language-features/src/languageFeatures/copyFiles/copyPaste.ts", "type": "replace", "edit_start_line_idx": 75 }
/*--------------------------------------------------------------------------------------------- * Copyright (c) Microsoft Corporation. All rights reserved. * Licensed under the MIT License. See License.txt in the project root for license information. *--------------------------------------------------------------------------------------------*/ import * as vscode from 'vscode'; import { Schemes } from '../../util/schemes'; import { getNewFileName } from './copyFiles'; import { createUriListSnippet, tryGetUriListSnippet } from './dropIntoEditor'; const supportedImageMimes = new Set([ 'image/png', 'image/jpg', ]); class PasteEditProvider implements vscode.DocumentPasteEditProvider { async provideDocumentPasteEdits( document: vscode.TextDocument, _ranges: readonly vscode.Range[], dataTransfer: vscode.DataTransfer, token: vscode.CancellationToken, ): Promise<vscode.DocumentPasteEdit | undefined> { const enabled = vscode.workspace.getConfiguration('markdown', document).get('experimental.editor.pasteLinks.enabled', true); if (!enabled) { return; } if (document.uri.scheme === Schemes.notebookCell) { return; } for (const imageMime of supportedImageMimes) { const item = dataTransfer.get(imageMime); const file = item?.asFile(); if (item && file) { const edit = await this._makeCreateImagePasteEdit(document, file, token); if (token.isCancellationRequested) { return; } if (edit) { return edit; } } } const snippet = await tryGetUriListSnippet(document, dataTransfer, token); return snippet ? 
new vscode.DocumentPasteEdit(snippet.snippet) : undefined; } private async _makeCreateImagePasteEdit(document: vscode.TextDocument, file: vscode.DataTransferFile, token: vscode.CancellationToken): Promise<vscode.DocumentPasteEdit | undefined> { if (file.uri) { // If file is already in workspace, we don't want to create a copy of it const workspaceFolder = vscode.workspace.getWorkspaceFolder(file.uri); if (workspaceFolder) { const snippet = createUriListSnippet(document, [file.uri]); return snippet ? new vscode.DocumentPasteEdit(snippet) : undefined; } } const uri = await getNewFileName(document, file); if (token.isCancellationRequested) { return; } const snippet = createUriListSnippet(document, [uri]); if (!snippet) { return; } // Note that there is currently no way to undo the file creation :/ const workspaceEdit = new vscode.WorkspaceEdit(); workspaceEdit.createFile(uri, { contents: file }); const pasteEdit = new vscode.DocumentPasteEdit(snippet); pasteEdit.additionalEdit = workspaceEdit; return pasteEdit; } } export function registerPasteSupport(selector: vscode.DocumentSelector,) { return vscode.languages.registerDocumentPasteEditProvider(selector, new PasteEditProvider(), { pasteMimeTypes: [ 'text/uri-list', ...supportedImageMimes, ] }); }
extensions/markdown-language-features/src/languageFeatures/copyFiles/copyPaste.ts
1
https://github.com/microsoft/vscode/commit/f6de066b4c454569e9d91b8a6937a75b62712c73
[ 0.9985675811767578, 0.11449667811393738, 0.00017048706649802625, 0.002209818223491311, 0.3125917613506317 ]
{ "id": 2, "code_window": [ "\t\t// Note that there is currently no way to undo the file creation :/\n", "\t\tconst workspaceEdit = new vscode.WorkspaceEdit();\n", "\t\tworkspaceEdit.createFile(uri, { contents: file });\n", "\n", "\t\tconst pasteEdit = new vscode.DocumentPasteEdit(snippet);\n", "\t\tpasteEdit.additionalEdit = workspaceEdit;\n", "\t\treturn pasteEdit;\n", "\t}\n", "}\n" ], "labels": [ "keep", "keep", "keep", "keep", "replace", "keep", "keep", "keep", "keep" ], "after_edit": [ "\t\tconst pasteEdit = new vscode.DocumentPasteEdit(snippet.snippet);\n" ], "file_path": "extensions/markdown-language-features/src/languageFeatures/copyFiles/copyPaste.ts", "type": "replace", "edit_start_line_idx": 75 }
/*--------------------------------------------------------------------------------------------- * Copyright (c) Microsoft Corporation. All rights reserved. * Licensed under the MIT License. See License.txt in the project root for license information. *--------------------------------------------------------------------------------------------*/ import * as assert from 'assert'; import { URI as uri } from 'vs/base/common/uri'; import { DebugModel, Breakpoint } from 'vs/workbench/contrib/debug/common/debugModel'; import { getExpandedBodySize, getBreakpointMessageAndIcon } from 'vs/workbench/contrib/debug/browser/breakpointsView'; import { DisposableStore, dispose } from 'vs/base/common/lifecycle'; import { Range } from 'vs/editor/common/core/range'; import { IBreakpointData, IBreakpointUpdateData, IDebugService, State } from 'vs/workbench/contrib/debug/common/debug'; import { createBreakpointDecorations } from 'vs/workbench/contrib/debug/browser/breakpointEditorContribution'; import { OverviewRulerLane } from 'vs/editor/common/model'; import { MarkdownString } from 'vs/base/common/htmlContent'; import { createTextModel } from 'vs/editor/test/common/testTextModel'; import { createTestSession } from 'vs/workbench/contrib/debug/test/browser/callStack.test'; import { TestInstantiationService } from 'vs/platform/instantiation/test/common/instantiationServiceMock'; import { ILanguageService } from 'vs/editor/common/languages/language'; import { LanguageService } from 'vs/editor/common/services/languageService'; import { createMockDebugModel } from 'vs/workbench/contrib/debug/test/browser/mockDebugModel'; import { MockDebugService } from 'vs/workbench/contrib/debug/test/common/mockDebug'; function addBreakpointsAndCheckEvents(model: DebugModel, uri: uri, data: IBreakpointData[]): void { let eventCount = 0; const toDispose = model.onDidChangeBreakpoints(e => { assert.strictEqual(e?.sessionOnly, false); assert.strictEqual(e?.changed, undefined); assert.strictEqual(e?.removed, 
undefined); const added = e?.added; assert.notStrictEqual(added, undefined); assert.strictEqual(added!.length, data.length); eventCount++; dispose(toDispose); for (let i = 0; i < data.length; i++) { assert.strictEqual(e!.added![i] instanceof Breakpoint, true); assert.strictEqual((e!.added![i] as Breakpoint).lineNumber, data[i].lineNumber); } }); model.addBreakpoints(uri, data); assert.strictEqual(eventCount, 1); } suite('Debug - Breakpoints', () => { let model: DebugModel; const disposables = new DisposableStore(); setup(() => { model = createMockDebugModel(); }); teardown(() => { disposables.clear(); }); // Breakpoints test('simple', () => { const modelUri = uri.file('/myfolder/myfile.js'); addBreakpointsAndCheckEvents(model, modelUri, [{ lineNumber: 5, enabled: true }, { lineNumber: 10, enabled: false }]); assert.strictEqual(model.areBreakpointsActivated(), true); assert.strictEqual(model.getBreakpoints().length, 2); let eventCount = 0; const toDispose = model.onDidChangeBreakpoints(e => { eventCount++; assert.strictEqual(e?.added, undefined); assert.strictEqual(e?.sessionOnly, false); assert.strictEqual(e?.removed?.length, 2); assert.strictEqual(e?.changed, undefined); dispose(toDispose); }); model.removeBreakpoints(model.getBreakpoints()); assert.strictEqual(eventCount, 1); assert.strictEqual(model.getBreakpoints().length, 0); }); test('toggling', () => { const modelUri = uri.file('/myfolder/myfile.js'); addBreakpointsAndCheckEvents(model, modelUri, [{ lineNumber: 5, enabled: true }, { lineNumber: 10, enabled: false }]); addBreakpointsAndCheckEvents(model, modelUri, [{ lineNumber: 12, enabled: true, condition: 'fake condition' }]); assert.strictEqual(model.getBreakpoints().length, 3); const bp = model.getBreakpoints().pop(); if (bp) { model.removeBreakpoints([bp]); } assert.strictEqual(model.getBreakpoints().length, 2); model.setBreakpointsActivated(false); assert.strictEqual(model.areBreakpointsActivated(), false); model.setBreakpointsActivated(true); 
assert.strictEqual(model.areBreakpointsActivated(), true); }); test('two files', () => { const modelUri1 = uri.file('/myfolder/my file first.js'); const modelUri2 = uri.file('/secondfolder/second/second file.js'); addBreakpointsAndCheckEvents(model, modelUri1, [{ lineNumber: 5, enabled: true }, { lineNumber: 10, enabled: false }]); assert.strictEqual(getExpandedBodySize(model, undefined, 9), 44); addBreakpointsAndCheckEvents(model, modelUri2, [{ lineNumber: 1, enabled: true }, { lineNumber: 2, enabled: true }, { lineNumber: 3, enabled: false }]); assert.strictEqual(getExpandedBodySize(model, undefined, 9), 110); assert.strictEqual(model.getBreakpoints().length, 5); assert.strictEqual(model.getBreakpoints({ uri: modelUri1 }).length, 2); assert.strictEqual(model.getBreakpoints({ uri: modelUri2 }).length, 3); assert.strictEqual(model.getBreakpoints({ lineNumber: 5 }).length, 1); assert.strictEqual(model.getBreakpoints({ column: 5 }).length, 0); const bp = model.getBreakpoints()[0]; const update = new Map<string, IBreakpointUpdateData>(); update.set(bp.getId(), { lineNumber: 100 }); let eventFired = false; const toDispose = model.onDidChangeBreakpoints(e => { eventFired = true; assert.strictEqual(e?.added, undefined); assert.strictEqual(e?.removed, undefined); assert.strictEqual(e?.changed?.length, 1); dispose(toDispose); }); model.updateBreakpoints(update); assert.strictEqual(eventFired, true); assert.strictEqual(bp.lineNumber, 100); assert.strictEqual(model.getBreakpoints({ enabledOnly: true }).length, 3); model.enableOrDisableAllBreakpoints(false); model.getBreakpoints().forEach(bp => { assert.strictEqual(bp.enabled, false); }); assert.strictEqual(model.getBreakpoints({ enabledOnly: true }).length, 0); model.setEnablement(bp, true); assert.strictEqual(bp.enabled, true); model.removeBreakpoints(model.getBreakpoints({ uri: modelUri1 })); assert.strictEqual(getExpandedBodySize(model, undefined, 9), 66); assert.strictEqual(model.getBreakpoints().length, 3); }); 
test('conditions', () => { const modelUri1 = uri.file('/myfolder/my file first.js'); addBreakpointsAndCheckEvents(model, modelUri1, [{ lineNumber: 5, condition: 'i < 5', hitCondition: '17' }, { lineNumber: 10, condition: 'j < 3' }]); const breakpoints = model.getBreakpoints(); assert.strictEqual(breakpoints[0].condition, 'i < 5'); assert.strictEqual(breakpoints[0].hitCondition, '17'); assert.strictEqual(breakpoints[1].condition, 'j < 3'); assert.strictEqual(!!breakpoints[1].hitCondition, false); assert.strictEqual(model.getBreakpoints().length, 2); model.removeBreakpoints(model.getBreakpoints()); assert.strictEqual(model.getBreakpoints().length, 0); }); test('function breakpoints', () => { model.addFunctionBreakpoint('foo', '1'); model.addFunctionBreakpoint('bar', '2'); model.updateFunctionBreakpoint('1', { name: 'fooUpdated' }); model.updateFunctionBreakpoint('2', { name: 'barUpdated' }); const functionBps = model.getFunctionBreakpoints(); assert.strictEqual(functionBps[0].name, 'fooUpdated'); assert.strictEqual(functionBps[1].name, 'barUpdated'); model.removeFunctionBreakpoints(); assert.strictEqual(model.getFunctionBreakpoints().length, 0); }); test('multiple sessions', () => { const modelUri = uri.file('/myfolder/myfile.js'); addBreakpointsAndCheckEvents(model, modelUri, [{ lineNumber: 5, enabled: true, condition: 'x > 5' }, { lineNumber: 10, enabled: false }]); const breakpoints = model.getBreakpoints(); const session = createTestSession(model); const data = new Map<string, DebugProtocol.Breakpoint>(); assert.strictEqual(breakpoints[0].lineNumber, 5); assert.strictEqual(breakpoints[1].lineNumber, 10); data.set(breakpoints[0].getId(), { verified: false, line: 10 }); data.set(breakpoints[1].getId(), { verified: true, line: 50 }); model.setBreakpointSessionData(session.getId(), {}, data); assert.strictEqual(breakpoints[0].lineNumber, 5); assert.strictEqual(breakpoints[1].lineNumber, 50); const session2 = createTestSession(model); const data2 = new Map<string, 
DebugProtocol.Breakpoint>();
data2.set(breakpoints[0].getId(), { verified: true, line: 100 });
data2.set(breakpoints[1].getId(), { verified: true, line: 500 });
model.setBreakpointSessionData(session2.getId(), {}, data2);
// Breakpoint is verified only once, show that line
assert.strictEqual(breakpoints[0].lineNumber, 100);
// Breakpoint is verified two times, show the original line
assert.strictEqual(breakpoints[1].lineNumber, 10);
model.setBreakpointSessionData(session.getId(), {}, undefined);
// No more double session verification
assert.strictEqual(breakpoints[0].lineNumber, 100);
assert.strictEqual(breakpoints[1].lineNumber, 500);
assert.strictEqual(breakpoints[0].supported, false);
const data3 = new Map<string, DebugProtocol.Breakpoint>();
data3.set(breakpoints[0].getId(), { verified: true, line: 500 });
// NOTE(review): data3 is built here but data2 is what gets passed below, leaving data3
// unused — looks like a copy/paste slip; confirm intent before changing test expectations.
model.setBreakpointSessionData(session2.getId(), { supportsConditionalBreakpoints: true }, data2);
assert.strictEqual(breakpoints[0].supported, true);
});

// Verifies per-session exception breakpoints: each setExceptionBreakpointsForSession call
// fires one change event, filters default to enabled only when `default: true` is set,
// and getExceptionBreakpoints() aggregates filters across all sessions.
test('exception breakpoints', () => {
	let eventCount = 0;
	model.onDidChangeBreakpoints(() => eventCount++);
	model.setExceptionBreakpointsForSession("session-id-1", [{ filter: 'uncaught', label: 'UNCAUGHT', default: true }]);
	assert.strictEqual(eventCount, 1);
	let exceptionBreakpoints = model.getExceptionBreakpointsForSession("session-id-1");
	assert.strictEqual(exceptionBreakpoints.length, 1);
	assert.strictEqual(exceptionBreakpoints[0].filter, 'uncaught');
	// `default: true` on the filter makes the breakpoint start out enabled
	assert.strictEqual(exceptionBreakpoints[0].enabled, true);
	model.setExceptionBreakpointsForSession("session-id-2", [{ filter: 'uncaught', label: 'UNCAUGHT' }, { filter: 'caught', label: 'CAUGHT' }]);
	assert.strictEqual(eventCount, 2);
	exceptionBreakpoints = model.getExceptionBreakpointsForSession("session-id-2");
	assert.strictEqual(exceptionBreakpoints.length, 2);
	assert.strictEqual(exceptionBreakpoints[0].filter, 'uncaught');
	assert.strictEqual(exceptionBreakpoints[0].enabled, true);
	assert.strictEqual(exceptionBreakpoints[1].filter, 'caught');
	assert.strictEqual(exceptionBreakpoints[1].label, 'CAUGHT');
	// No `default` on 'caught' -> starts disabled
	assert.strictEqual(exceptionBreakpoints[1].enabled, false);
	model.setExceptionBreakpointsForSession("session-id-3", [{ filter: 'all', label: 'ALL' }]);
	assert.strictEqual(eventCount, 3);
	assert.strictEqual(model.getExceptionBreakpointsForSession("session-id-3").length, 1);
	// The session-less getter returns the union of filters registered by all three sessions
	exceptionBreakpoints = model.getExceptionBreakpoints();
	assert.strictEqual(exceptionBreakpoints[0].filter, 'uncaught');
	assert.strictEqual(exceptionBreakpoints[0].enabled, true);
	assert.strictEqual(exceptionBreakpoints[1].filter, 'caught');
	assert.strictEqual(exceptionBreakpoints[1].label, 'CAUGHT');
	assert.strictEqual(exceptionBreakpoints[1].enabled, false);
	assert.strictEqual(exceptionBreakpoints[2].filter, 'all');
	assert.strictEqual(exceptionBreakpoints[2].label, 'ALL');
});

// Exercises interaction of multiple sessions with the exception-breakpoint fallback
// session: queries with `undefined` resolve against the fallback session's filters,
// and removing/re-pointing the fallback updates what `undefined` resolves to.
test('exception breakpoints multiple sessions', () => {
	let eventCount = 0;
	model.onDidChangeBreakpoints(() => eventCount++);
	model.setExceptionBreakpointsForSession("session-id-4", [{ filter: 'uncaught', label: 'UNCAUGHT', default: true }, { filter: 'caught', label: 'CAUGHT' }]);
	model.setExceptionBreakpointFallbackSession("session-id-4");
	// Only the set call fires a change event; designating the fallback does not
	assert.strictEqual(eventCount, 1);
	let exceptionBreakpointsForSession = model.getExceptionBreakpointsForSession("session-id-4");
	assert.strictEqual(exceptionBreakpointsForSession.length, 2);
	assert.strictEqual(exceptionBreakpointsForSession[0].filter, 'uncaught');
	assert.strictEqual(exceptionBreakpointsForSession[1].filter, 'caught');
	model.setExceptionBreakpointsForSession("session-id-5", [{ filter: 'all', label: 'ALL' }, { filter: 'caught', label: 'CAUGHT' }]);
	assert.strictEqual(eventCount, 2);
	exceptionBreakpointsForSession = model.getExceptionBreakpointsForSession("session-id-5");
	let exceptionBreakpointsForUndefined = model.getExceptionBreakpointsForSession(undefined);
	assert.strictEqual(exceptionBreakpointsForSession.length, 2);
	// NOTE(review): session-id-5 registered [all, caught] but reads back [caught, all] —
	// presumably the model keeps a stable/merged ordering; verify against DebugModel.
	assert.strictEqual(exceptionBreakpointsForSession[0].filter, 'caught');
	assert.strictEqual(exceptionBreakpointsForSession[1].filter, 'all');
	// `undefined` resolves via the fallback session (session-id-4 at this point)
	assert.strictEqual(exceptionBreakpointsForUndefined.length, 2);
	assert.strictEqual(exceptionBreakpointsForUndefined[0].filter, 'uncaught');
	assert.strictEqual(exceptionBreakpointsForUndefined[1].filter, 'caught');
	model.removeExceptionBreakpointsForSession("session-id-4");
	// Removal fires no change event and the fallback's filters remain queryable
	assert.strictEqual(eventCount, 2);
	exceptionBreakpointsForUndefined = model.getExceptionBreakpointsForSession(undefined);
	assert.strictEqual(exceptionBreakpointsForUndefined.length, 2);
	assert.strictEqual(exceptionBreakpointsForUndefined[0].filter, 'uncaught');
	assert.strictEqual(exceptionBreakpointsForUndefined[1].filter, 'caught');
	// Re-pointing the fallback to session-id-5 changes what `undefined` resolves to
	model.setExceptionBreakpointFallbackSession("session-id-5");
	assert.strictEqual(eventCount, 2);
	exceptionBreakpointsForUndefined = model.getExceptionBreakpointsForSession(undefined);
	assert.strictEqual(exceptionBreakpointsForUndefined.length, 2);
	assert.strictEqual(exceptionBreakpointsForUndefined[0].filter, 'caught');
	assert.strictEqual(exceptionBreakpointsForUndefined[1].filter, 'all');
	// Union across sessions: uncaught, caught, all
	const exceptionBreakpoints = model.getExceptionBreakpoints();
	assert.strictEqual(exceptionBreakpoints.length, 3);
});

// Adding instruction breakpoints fires one change event each and preserves the
// instruction reference and offset passed in; insertion order is kept.
test('instruction breakpoints', () => {
	let eventCount = 0;
	model.onDidChangeBreakpoints(() => eventCount++);
	//address: string, offset: number, condition?: string, hitCondition?: string
	model.addInstructionBreakpoint('0xCCCCFFFF', 0);
	assert.strictEqual(eventCount, 1);
	let instructionBreakpoints = model.getInstructionBreakpoints();
	assert.strictEqual(instructionBreakpoints.length, 1);
	assert.strictEqual(instructionBreakpoints[0].instructionReference, '0xCCCCFFFF');
	assert.strictEqual(instructionBreakpoints[0].offset, 0);
	model.addInstructionBreakpoint('0xCCCCEEEE', 1);
	assert.strictEqual(eventCount, 2);
	instructionBreakpoints = model.getInstructionBreakpoints();
	assert.strictEqual(instructionBreakpoints.length, 2);
	assert.strictEqual(instructionBreakpoints[0].instructionReference, '0xCCCCFFFF');
	assert.strictEqual(instructionBreakpoints[0].offset, 0);
	assert.strictEqual(instructionBreakpoints[1].instructionReference, '0xCCCCEEEE');
	assert.strictEqual(instructionBreakpoints[1].offset, 1);
});

// Data breakpoints: add preserves canPersist/dataId/accessType/description, each
// add/remove fires a change event, removal by id removes one, removal with no
// argument clears them all.
test('data breakpoints', () => {
	let eventCount = 0;
	model.onDidChangeBreakpoints(() => eventCount++);
	model.addDataBreakpoint('label', 'id', true, ['read'], 'read');
	model.addDataBreakpoint('second', 'secondId', false, ['readWrite'], 'readWrite');
	const dataBreakpoints = model.getDataBreakpoints();
	assert.strictEqual(dataBreakpoints[0].canPersist, true);
	assert.strictEqual(dataBreakpoints[0].dataId, 'id');
	assert.strictEqual(dataBreakpoints[0].accessType, 'read');
	assert.strictEqual(dataBreakpoints[1].canPersist, false);
	assert.strictEqual(dataBreakpoints[1].description, 'second');
	assert.strictEqual(dataBreakpoints[1].accessType, 'readWrite');
	assert.strictEqual(eventCount, 2);
	// Remove by id -> one left, one more change event
	model.removeDataBreakpoints(dataBreakpoints[0].getId());
	assert.strictEqual(eventCount, 3);
	assert.strictEqual(model.getDataBreakpoints().length, 1);
	// Remove with no id -> clears all remaining data breakpoints
	model.removeDataBreakpoints();
	assert.strictEqual(model.getDataBreakpoints().length, 0);
	assert.strictEqual(eventCount, 4);
});

// Verifies getBreakpointMessageAndIcon for every breakpoint flavor (conditional,
// disabled, logpoint, hit-count, plain, data, function) before and after session
// data is applied, including the unverified and capability-unsupported messages.
test('message and class name', () => {
	const modelUri = uri.file('/myfolder/my file first.js');
	addBreakpointsAndCheckEvents(model, modelUri, [
		{ lineNumber: 5, enabled: true, condition: 'x > 5' },
		{ lineNumber: 10, enabled: false },
		{ lineNumber: 12, enabled: true, logMessage: 'hello' },
		{ lineNumber: 15, enabled: true, hitCondition: '12' },
		{ lineNumber: 500, enabled: true },
	]);
	const breakpoints = model.getBreakpoints();
	// Conditional breakpoint -> condition shown in the message
	let result = getBreakpointMessageAndIcon(State.Stopped, true, breakpoints[0]);
	assert.strictEqual(result.message, 'Expression condition: x > 5');
	assert.strictEqual(result.icon.id, 'debug-breakpoint-conditional');
	// Disabled breakpoint
	result = getBreakpointMessageAndIcon(State.Stopped, true, breakpoints[1]);
	assert.strictEqual(result.message, 'Disabled Breakpoint');
	assert.strictEqual(result.icon.id, 'debug-breakpoint-disabled');
	// Logpoint
	result = getBreakpointMessageAndIcon(State.Stopped, true, breakpoints[2]);
	assert.strictEqual(result.message, 'Log Message: hello');
	assert.strictEqual(result.icon.id, 'debug-breakpoint-log');
	// Hit-count breakpoint shares the conditional icon
	result = getBreakpointMessageAndIcon(State.Stopped, true, breakpoints[3]);
	assert.strictEqual(result.message, 'Hit Count: 12');
	assert.strictEqual(result.icon.id, 'debug-breakpoint-conditional');
	// Plain breakpoint
	result = getBreakpointMessageAndIcon(State.Stopped, true, breakpoints[4]);
	assert.strictEqual(result.message, 'Breakpoint');
	assert.strictEqual(result.icon.id, 'debug-breakpoint');
	// Same logpoint queried with breakpointsActivated=false -> disabled variant
	result = getBreakpointMessageAndIcon(State.Stopped, false, breakpoints[2]);
	assert.strictEqual(result.message, 'Disabled Logpoint');
	assert.strictEqual(result.icon.id, 'debug-breakpoint-log-disabled');
	model.addDataBreakpoint('label', 'id', true, ['read'], 'read');
	const dataBreakpoints = model.getDataBreakpoints();
	result = getBreakpointMessageAndIcon(State.Stopped, true, dataBreakpoints[0]);
	assert.strictEqual(result.message, 'Data Breakpoint');
	assert.strictEqual(result.icon.id, 'debug-breakpoint-data');
	const functionBreakpoint = model.addFunctionBreakpoint('foo', '1');
	result = getBreakpointMessageAndIcon(State.Stopped, true, functionBreakpoint);
	assert.strictEqual(result.message, 'Function Breakpoint');
	assert.strictEqual(result.icon.id, 'debug-breakpoint-function');
	// Apply session data: bp[0] unverified, bp[2] gets an adapter message, and the
	// session reports no function-breakpoint support
	const data = new Map<string, DebugProtocol.Breakpoint>();
	data.set(breakpoints[0].getId(), { verified: false, line: 10 });
	data.set(breakpoints[1].getId(), { verified: true, line: 50 });
	data.set(breakpoints[2].getId(), { verified: true, line: 50, message: 'world' });
	data.set(functionBreakpoint.getId(), { verified: true });
	model.setBreakpointSessionData('mocksessionid', { supportsFunctionBreakpoints: false, supportsDataBreakpoints: true, supportsLogPoints: true }, data);
	result = getBreakpointMessageAndIcon(State.Stopped, true, breakpoints[0]);
	assert.strictEqual(result.message, 'Unverified Breakpoint');
	assert.strictEqual(result.icon.id, 'debug-breakpoint-unverified');
	// Capability says no function breakpoints -> unsupported message + unverified icon
	result = getBreakpointMessageAndIcon(State.Stopped, true, functionBreakpoint);
	assert.strictEqual(result.message, 'Function breakpoints not supported by this debug type');
	assert.strictEqual(result.icon.id, 'debug-breakpoint-function-unverified');
	// Adapter-provided message is appended to the logpoint's own log message
	result = getBreakpointMessageAndIcon(State.Stopped, true, breakpoints[2]);
	assert.strictEqual(result.message, 'Log Message: hello, world');
	assert.strictEqual(result.icon.id, 'debug-breakpoint-log');
});

// Checks createBreakpointDecorations against a real text model: ranges land on the
// first non-whitespace column (or the explicit column), out-of-range breakpoints are
// dropped, and overview-ruler/hover options depend on the showBreakpointsInRuler flag.
test('decorations', () => {
	const modelUri = uri.file('/myfolder/my file first.js');
	const languageId = 'testMode';
	const textModel = createTextModel(
		['this is line one', 'this is line two', '    this is line three it has whitespace at start', 'this is line four', 'this is line five'].join('\n'),
		languageId
	);
	addBreakpointsAndCheckEvents(model, modelUri, [
		{ lineNumber: 1, enabled: true, condition: 'x > 5' },
		{ lineNumber: 2, column: 4, enabled: false },
		{ lineNumber: 3, enabled: true, logMessage: 'hello' },
		{ lineNumber: 500, enabled: true },
	]);
	const breakpoints = model.getBreakpoints();

	const instantiationService = new TestInstantiationService();
	instantiationService.stub(IDebugService, new MockDebugService());
	instantiationService.stub(ILanguageService, disposables.add(new LanguageService()));
	let decorations = instantiationService.invokeFunction(accessor => createBreakpointDecorations(accessor, textModel, breakpoints, State.Running, true, true));
	assert.strictEqual(decorations.length, 3); // last breakpoint filtered out since it has a large line number
	assert.deepStrictEqual(decorations[0].range, new Range(1, 1, 1, 2));
	// Explicit column: 4 is honored for the second breakpoint
	assert.deepStrictEqual(decorations[1].range, new Range(2, 4, 2, 5));
	// Line 3 starts with whitespace -> decoration begins at the first non-whitespace column
	assert.deepStrictEqual(decorations[2].range, new Range(3, 5, 3, 6));
	assert.strictEqual(decorations[0].options.beforeContentClassName, undefined);
	assert.strictEqual(decorations[1].options.before?.inlineClassName, `debug-breakpoint-placeholder`);
	assert.strictEqual(decorations[0].options.overviewRuler?.position, OverviewRulerLane.Left);
	const expected = new MarkdownString(undefined, { isTrusted: true, supportThemeIcons: true }).appendCodeblock(languageId, 'Expression condition: x > 5');
	assert.deepStrictEqual(decorations[0].options.glyphMarginHoverMessage, expected);

	// Same call with showBreakpointsInOverviewRuler=false -> no ruler decoration
	decorations = instantiationService.invokeFunction(accessor => createBreakpointDecorations(accessor, textModel, breakpoints, State.Running, true, false));
	assert.strictEqual(decorations[0].options.overviewRuler, null);

	textModel.dispose();
});
});
src/vs/workbench/contrib/debug/test/browser/breakpoints.test.ts
0
https://github.com/microsoft/vscode/commit/f6de066b4c454569e9d91b8a6937a75b62712c73
[ 0.00018017017282545567, 0.0001763460604706779, 0.00016821462486404926, 0.00017698841111268848, 0.0000027688727186614415 ]