hunk
dict | file
stringlengths 0
11.8M
| file_path
stringlengths 2
234
| label
int64 0
1
| commit_url
stringlengths 74
103
| dependency_score
sequencelengths 5
5
|
---|---|---|---|---|---|
{
"id": 0,
"code_window": [
"\n",
"\tif err := teamguardian.CanAdmin(orgId, teamId, c.SignedInUser); err != nil {\n",
"\t\treturn Error(403, \"Not allowed to update team member\", err)\n",
"\t}\n",
"\n",
"\tcmd.TeamId = teamId\n",
"\tcmd.UserId = c.ParamsInt64(\":userId\")\n",
"\tcmd.OrgId = orgId\n"
],
"labels": [
"keep",
"keep",
"keep",
"keep",
"add",
"keep",
"keep",
"keep"
],
"after_edit": [
"\tif c.OrgRole != m.ROLE_ADMIN {\n",
"\t\tcmd.ProtectLastAdmin = true\n",
"\t}\n",
"\n"
],
"file_path": "pkg/api/team_members.go",
"type": "add",
"edit_start_line_idx": 69
} | import _ from 'lodash';
// PromQL keywords for aggregation modifiers and vector matching.
const keywords = 'by|without|on|ignoring|group_left|group_right';

// Duplicate from mode-prometheus.js, which can't be used in tests due to global ace not being loaded.
// Flattened into one word list: aggregations, literals and functions that must
// NOT be treated as bare metric names by addLabelToQuery below.
const builtInWords = [
  keywords,
  'count|count_values|min|max|avg|sum|stddev|stdvar|bottomk|topk|quantile',
  'true|false|null|__name__|job',
  'abs|absent|ceil|changes|clamp_max|clamp_min|count_scalar|day_of_month|day_of_week|days_in_month|delta|deriv',
  'drop_common_labels|exp|floor|histogram_quantile|holt_winters|hour|idelta|increase|irate|label_replace|ln|log2',
  'log10|minute|month|predict_linear|rate|resets|round|scalar|sort|sort_desc|sqrt|time|vector|year|avg_over_time',
  'min_over_time|max_over_time|sum_over_time|count_over_time|quantile_over_time|stddev_over_time|stdvar_over_time',
]
  .join('|')
  .split('|');

// Identifier (may contain ':') NOT followed by a character that would mark it
// as a function call, selector, matcher or string — i.e. a bare metric name.
const metricNameRegexp = /([A-Za-z:][\w:]*)\b(?![\(\]{=!",])/g;
// Body of a non-nested "{...}" label selector.
const selectorRegexp = /{([^{]*)}/g;
// addLabelToQuery('foo', 'bar', 'baz') => 'foo{bar="baz"}'
export function addLabelToQuery(query: string, key: string, value: string, operator?: string): string {
  if (!key || !value) {
    throw new Error('Need label to add to query.');
  }

  // Pass 1: give every bare metric name an empty selector ("foo" -> "foo{}")
  // so that pass 2 has a place to insert the label.
  let previousWord;
  query = query.replace(metricNameRegexp, (match, word, offset) => {
    const insideSelector = isPositionInsideChars(query, offset, '{', '}');
    // Handle "sum by (key) (metric)": a word right after a keyword is a
    // grouping label, not a metric name, and must not get a selector.
    const previousWordIsKeyWord = previousWord && keywords.split('|').indexOf(previousWord) > -1;
    previousWord = word;
    if (!insideSelector && !previousWordIsKeyWord && builtInWords.indexOf(word) === -1) {
      return `${word}{}`;
    }
    return word;
  });

  // Pass 2: add the label to every existing selector.
  const parts = [];
  let lastIndex = 0;
  let match = selectorRegexp.exec(query);
  while (match) {
    parts.push(query.slice(lastIndex, match.index));
    parts.push(addLabelToSelector(match[1], key, value, operator));
    lastIndex = match.index + match[0].length;
    match = selectorRegexp.exec(query);
  }
  // BUG FIX: the trailing text used to live in a separate `suffix` variable
  // that stayed '' when the query contained no selector at all (e.g. `time()`),
  // so such queries were collapsed to the empty string. Pushing the remainder
  // after the last match keeps the suffix AND returns selector-less queries
  // unchanged.
  parts.push(query.slice(lastIndex));
  return parts.join('');
}
// One label matcher inside a selector: key, operator, quoted value.
const labelRegexp = /(\w+)\s*(=|!=|=~|!~)\s*("[^"]*")/g;

// Parses the labels out of an existing selector body, adds the new label and
// re-serializes the selector with labels deduplicated and stable-sorted by key.
// Rewritten with the standard library: the previous lodash
// uniqWith/compact/sortBy chain was this module's only lodash use and did
// trivial work (dedupe + stable sort of small string-field objects).
export function addLabelToSelector(selector: string, labelKey: string, labelValue: string, labelOperator?: string) {
  const parsedLabels: Array<{ key: string; operator: string; value: string }> = [];

  // Split selector into labels
  if (selector) {
    let match = labelRegexp.exec(selector);
    while (match) {
      parsedLabels.push({ key: match[1], operator: match[2], value: match[3] });
      match = labelRegexp.exec(selector);
    }
  }

  // Add new label ('=' is the default matcher)
  parsedLabels.push({ key: labelKey, operator: labelOperator || '=', value: `"${labelValue}"` });

  // Deduplicate identical (key, operator, value) triples, stable-sort by key
  const seen = new Set<string>();
  const formatted = parsedLabels
    .filter(({ key, operator, value }) => {
      const id = `${key}\u0000${operator}\u0000${value}`;
      if (seen.has(id)) {
        return false;
      }
      seen.add(id);
      return true;
    })
    .sort((a, b) => (a.key < b.key ? -1 : a.key > b.key ? 1 : 0))
    .map(({ key, operator, value }) => `${key}${operator}${value}`)
    .join(',');

  return `{${formatted}}`;
}
// True when `position` falls between an openChar/closeChar pair: looking
// forward from `position`, a closing char appears before any opening char
// (or with no opening char at all).
function isPositionInsideChars(text: string, position: number, openChar: string, closeChar: string) {
  const rest = text.slice(position);
  const nextOpen = rest.indexOf(openChar);
  const nextClose = rest.indexOf(closeChar);
  return nextClose > -1 && (nextOpen === -1 || nextOpen > nextClose);
}
export default addLabelToQuery;
| public/app/plugins/datasource/prometheus/add_label_to_query.ts | 0 | https://github.com/grafana/grafana/commit/c420af16b14e586b96d190e52f13805e0491e16a | [
0.0001760039885994047,
0.00017276844300795346,
0.00016605127893853933,
0.00017374737944919616,
0.000002551524630689528
] |
{
"id": 0,
"code_window": [
"\n",
"\tif err := teamguardian.CanAdmin(orgId, teamId, c.SignedInUser); err != nil {\n",
"\t\treturn Error(403, \"Not allowed to update team member\", err)\n",
"\t}\n",
"\n",
"\tcmd.TeamId = teamId\n",
"\tcmd.UserId = c.ParamsInt64(\":userId\")\n",
"\tcmd.OrgId = orgId\n"
],
"labels": [
"keep",
"keep",
"keep",
"keep",
"add",
"keep",
"keep",
"keep"
],
"after_edit": [
"\tif c.OrgRole != m.ROLE_ADMIN {\n",
"\t\tcmd.ProtectLastAdmin = true\n",
"\t}\n",
"\n"
],
"file_path": "pkg/api/team_members.go",
"type": "add",
"edit_start_line_idx": 69
} | package mssql
import (
"context"
"crypto/tls"
"crypto/x509"
"encoding/binary"
"errors"
"fmt"
"io"
"io/ioutil"
"net"
"net/url"
"os"
"sort"
"strconv"
"strings"
"time"
"unicode"
"unicode/utf16"
"unicode/utf8"
)
// parseInstances decodes a SQL Server Browser response into a map of
// upper-cased instance name -> property map. The payload (after a 3-byte
// header whose first byte must be 5) is a ';'-separated list of key/value
// token pairs; an empty key token closes the current instance record and two
// consecutive empty tokens terminate the list.
func parseInstances(msg []byte) map[string]map[string]string {
	results := map[string]map[string]string{}
	if len(msg) > 3 && msg[0] == 5 {
		tokens := strings.Split(string(msg[3:]), ";")
		current := map[string]string{}
		pendingKey := ""
		havePending := false
		for _, tok := range tokens {
			if havePending {
				current[pendingKey] = tok
				havePending = false
				continue
			}
			if len(tok) == 0 {
				if len(current) == 0 {
					break
				}
				results[strings.ToUpper(current["InstanceName"])] = current
				current = map[string]string{}
				continue
			}
			pendingKey = tok
			havePending = true
		}
	}
	return results
}
// getInstances asks the SQL Server Browser service on the given host (UDP
// port 1434) to enumerate instances and returns instance name -> properties.
// The whole exchange (dial, write, read) is bounded by a 5-second deadline.
func getInstances(ctx context.Context, address string) (map[string]map[string]string, error) {
	maxTime := 5 * time.Second
	dialer := &net.Dialer{
		Timeout: maxTime,
	}
	conn, err := dialer.DialContext(ctx, "udp", address+":1434")
	if err != nil {
		return nil, err
	}
	defer conn.Close()
	// Best-effort deadline; the error return is deliberately ignored.
	conn.SetDeadline(time.Now().Add(maxTime))
	// Single-byte request; presumably the SSRP "enumerate instances" request
	// — verify against the SSRP protocol docs.
	_, err = conn.Write([]byte{3})
	if err != nil {
		return nil, err
	}
	// One datagram response; buffer sized to 16KB-1 (assumed protocol max —
	// NOTE(review): confirm against the spec).
	var resp = make([]byte, 16*1024-1)
	read, err := conn.Read(resp)
	if err != nil {
		return nil, err
	}
	return parseInstances(resp[:read]), nil
}
// tds versions
// TDS protocol version numbers negotiated during login.
const (
	verTDS70     = 0x70000000
	verTDS71     = 0x71000000
	verTDS71rev1 = 0x71000001
	verTDS72     = 0x72090002
	verTDS73A    = 0x730A0003
	verTDS73     = verTDS73A
	verTDS73B    = 0x730B0003
	verTDS74     = 0x74000004
)

// packet types
// https://msdn.microsoft.com/en-us/library/dd304214.aspx
const (
	packSQLBatch   packetType = 1
	packRPCRequest            = 3
	packReply                 = 4

	// 2.2.1.7 Attention: https://msdn.microsoft.com/en-us/library/dd341449.aspx
	// 4.19.2 Out-of-Band Attention Signal: https://msdn.microsoft.com/en-us/library/dd305167.aspx
	packAttention = 6

	packBulkLoadBCP = 7
	packTransMgrReq = 14
	packNormal      = 15
	packLogin7      = 16
	packSSPIMessage = 17
	packPrelogin    = 18
)

// prelogin fields
// http://msdn.microsoft.com/en-us/library/dd357559.aspx
const (
	preloginVERSION    = 0
	preloginENCRYPTION = 1
	preloginINSTOPT    = 2
	preloginTHREADID   = 3
	preloginMARS       = 4
	preloginTRACEID    = 5
	preloginTERMINATOR = 0xff
)

// Values of the preloginENCRYPTION option negotiated between client & server.
const (
	encryptOff    = 0 // Encryption is available but off.
	encryptOn     = 1 // Encryption is available and on.
	encryptNotSup = 2 // Encryption is not available.
	encryptReq    = 3 // Encryption is required.
)
// tdsSession holds per-connection state for one TDS session.
type tdsSession struct {
	buf          *tdsBuffer // packet-framed reader/writer for this connection
	loginAck     loginAckStruct
	database     string
	partner      string
	columns      []columnStruct
	tranid       uint64
	logFlags     uint64 // bitmask of the log* constants below
	log          optionalLogger
	routedServer string // presumably set when the server sends routing info — confirm
	routedPort   uint16
}
// Log flag bits for logFlags (tdsSession / connectParams).
const (
	logErrors      = 1
	logMessages    = 2
	logRows        = 4
	logSQL         = 8
	logParams      = 16
	logTransaction = 32
	logDebug       = 64
)

// columnStruct describes one column of a result set.
type columnStruct struct {
	UserType uint32
	Flags    uint16
	ColName  string
	ti       typeInfo
}

// KeySlice implements sort.Interface for a slice of prelogin option ids so
// writePrelogin can emit the option headers in ascending order.
type KeySlice []uint8

func (p KeySlice) Len() int           { return len(p) }
func (p KeySlice) Less(i, j int) bool { return p[i] < p[j] }
func (p KeySlice) Swap(i, j int)      { p[i], p[j] = p[j], p[i] }
// writePrelogin writes a PRELOGIN packet
// (http://msdn.microsoft.com/en-us/library/dd357559.aspx): a table of
// (option id, offset, length) headers in ascending option order, a 0xff
// terminator, then the option payloads at those offsets.
func writePrelogin(w *tdsBuffer, fields map[uint8][]byte) error {
	var err error

	w.BeginPacket(packPrelogin, false)
	// Payload starts right after the headers: 5 bytes per field + terminator.
	offset := uint16(5*len(fields) + 1)
	keys := make(KeySlice, 0, len(fields))
	// idiom fix: "for k := range" instead of "for k, _ := range"
	for k := range fields {
		keys = append(keys, k)
	}
	sort.Sort(keys)
	// writing header
	for _, k := range keys {
		err = w.WriteByte(k)
		if err != nil {
			return err
		}
		err = binary.Write(w, binary.BigEndian, offset)
		if err != nil {
			return err
		}
		v := fields[k]
		size := uint16(len(v))
		err = binary.Write(w, binary.BigEndian, size)
		if err != nil {
			return err
		}
		offset += size
	}
	err = w.WriteByte(preloginTERMINATOR)
	if err != nil {
		return err
	}
	// writing values (same ascending order the offsets were assigned in)
	for _, k := range keys {
		v := fields[k]
		written, err := w.Write(v)
		if err != nil {
			return err
		}
		if written != len(v) {
			return errors.New("Write method didn't write the whole value")
		}
	}
	return w.FinishPacket()
}
// readPrelogin reads the server's PRELOGIN response and returns a map from
// option id to that option's raw payload bytes.
func readPrelogin(r *tdsBuffer) (map[uint8][]byte, error) {
	packet_type, err := r.BeginRead()
	if err != nil {
		return nil, err
	}
	struct_buf, err := ioutil.ReadAll(r)
	if err != nil {
		return nil, err
	}
	if packet_type != 4 {
		// fix: "respones" -> "response" (typo in the error message)
		return nil, errors.New("Invalid response, expected packet type 4, PRELOGIN RESPONSE")
	}
	offset := 0
	results := map[uint8][]byte{}
	// idiom fix: plain "for" instead of "for true".
	for {
		// Robustness: a truncated/malformed packet previously caused an
		// out-of-range panic; fail with an error instead.
		if offset >= len(struct_buf) {
			return nil, errors.New("Invalid PRELOGIN response: missing terminator")
		}
		rec_type := struct_buf[offset]
		if rec_type == preloginTERMINATOR {
			break
		}
		if offset+5 > len(struct_buf) {
			return nil, errors.New("Invalid PRELOGIN response: truncated option header")
		}
		rec_offset := binary.BigEndian.Uint16(struct_buf[offset+1:])
		rec_len := binary.BigEndian.Uint16(struct_buf[offset+3:])
		if int(rec_offset)+int(rec_len) > len(struct_buf) {
			return nil, errors.New("Invalid PRELOGIN response: option data out of range")
		}
		value := struct_buf[rec_offset : rec_offset+rec_len]
		results[rec_type] = value
		offset += 5
	}
	return results, nil
}
// OptionFlags2
// http://msdn.microsoft.com/en-us/library/dd304019.aspx
const (
	fLanguageFatal = 1
	fODBC          = 2
	fTransBoundary = 4
	fCacheConnect  = 8
	fIntSecurity   = 0x80
)

// TypeFlags
const (
	// 4 bits for fSQLType
	// 1 bit for fOLEDB
	fReadOnlyIntent = 32
)

// login carries all LOGIN7 fields in their natural Go form before they are
// serialized to the wire layout by sendLogin.
type login struct {
	TDSVersion     uint32
	PacketSize     uint32
	ClientProgVer  uint32
	ClientPID      uint32
	ConnectionID   uint32
	OptionFlags1   uint8
	OptionFlags2   uint8
	TypeFlags      uint8
	OptionFlags3   uint8
	ClientTimeZone int32
	ClientLCID     uint32
	HostName       string
	UserName       string
	Password       string
	AppName        string
	ServerName     string
	CtlIntName     string
	Language       string
	Database       string
	ClientID       [6]byte
	SSPI           []byte
	AtchDBFile     string
	ChangePassword string
}
// loginHeader is the fixed-size wire layout of the LOGIN7 header; variable
// fields are referenced by (offset, length) pairs into the packet body.
// NOTE(review): "ExtensionLenght" is a typo in the field name; renaming it is
// an interface change for any code using it, so it is only flagged here.
type loginHeader struct {
	Length               uint32
	TDSVersion           uint32
	PacketSize           uint32
	ClientProgVer        uint32
	ClientPID            uint32
	ConnectionID         uint32
	OptionFlags1         uint8
	OptionFlags2         uint8
	TypeFlags            uint8
	OptionFlags3         uint8
	ClientTimeZone       int32
	ClientLCID           uint32
	HostNameOffset       uint16
	HostNameLength       uint16
	UserNameOffset       uint16
	UserNameLength       uint16
	PasswordOffset       uint16
	PasswordLength       uint16
	AppNameOffset        uint16
	AppNameLength        uint16
	ServerNameOffset     uint16
	ServerNameLength     uint16
	ExtensionOffset      uint16
	ExtensionLenght      uint16
	CtlIntNameOffset     uint16
	CtlIntNameLength     uint16
	LanguageOffset       uint16
	LanguageLength       uint16
	DatabaseOffset       uint16
	DatabaseLength       uint16
	ClientID             [6]byte
	SSPIOffset           uint16
	SSPILength           uint16
	AtchDBFileOffset     uint16
	AtchDBFileLength     uint16
	ChangePasswordOffset uint16
	ChangePasswordLength uint16
	SSPILongLength       uint32
}
// str2ucs2 converts a Go string to a little-endian UTF-16 byte slice.
// Encoding is done by hand rather than via the bytes/binary packages for
// performance reasons (hot path: every login string goes through here).
func str2ucs2(s string) []byte {
	codes := utf16.Encode([]rune(s))
	out := make([]byte, 2*len(codes))
	for i, c := range codes {
		out[2*i] = byte(c)
		out[2*i+1] = byte(c >> 8)
	}
	return out
}
// ucs22str decodes a little-endian UTF-16 byte slice into a Go string.
// The input must contain an even number of bytes.
func ucs22str(s []byte) (string, error) {
	if len(s)%2 != 0 {
		return "", fmt.Errorf("Illegal UCS2 string length: %d", len(s))
	}
	codes := make([]uint16, 0, len(s)/2)
	for i := 0; i+1 < len(s); i += 2 {
		codes = append(codes, uint16(s[i])|uint16(s[i+1])<<8)
	}
	return string(utf16.Decode(codes)), nil
}
// manglePassword applies the LOGIN7 password obfuscation to the UTF-16
// encoded password: swap the nibbles of each byte, then XOR with 0xA5.
// (This is obfuscation, not encryption.)
func manglePassword(password string) []byte {
	buf := str2ucs2(password)
	for i := range buf {
		b := buf[i]
		buf[i] = ((b << 4) | (b >> 4)) ^ 0xA5
	}
	return buf
}
// sendLogin serializes and sends a LOGIN7 packet.
// http://msdn.microsoft.com/en-us/library/dd304019.aspx
// The fixed header carries (offset, length) pairs for every variable-length
// field; the field payloads must then be written in exactly the order in
// which their offsets were assigned below.
func sendLogin(w *tdsBuffer, login login) error {
	w.BeginPacket(packLogin7, false)
	hostname := str2ucs2(login.HostName)
	username := str2ucs2(login.UserName)
	// Obfuscated (nibble-swap XOR 0xA5) per the LOGIN7 spec, not encrypted.
	password := manglePassword(login.Password)
	appname := str2ucs2(login.AppName)
	servername := str2ucs2(login.ServerName)
	ctlintname := str2ucs2(login.CtlIntName)
	language := str2ucs2(login.Language)
	database := str2ucs2(login.Database)
	atchdbfile := str2ucs2(login.AtchDBFile)
	changepassword := str2ucs2(login.ChangePassword)
	// String lengths are in characters (UTF-8 rune count), not bytes;
	// SSPI is raw bytes.
	hdr := loginHeader{
		TDSVersion:           login.TDSVersion,
		PacketSize:           login.PacketSize,
		ClientProgVer:        login.ClientProgVer,
		ClientPID:            login.ClientPID,
		ConnectionID:         login.ConnectionID,
		OptionFlags1:         login.OptionFlags1,
		OptionFlags2:         login.OptionFlags2,
		TypeFlags:            login.TypeFlags,
		OptionFlags3:         login.OptionFlags3,
		ClientTimeZone:       login.ClientTimeZone,
		ClientLCID:           login.ClientLCID,
		HostNameLength:       uint16(utf8.RuneCountInString(login.HostName)),
		UserNameLength:       uint16(utf8.RuneCountInString(login.UserName)),
		PasswordLength:       uint16(utf8.RuneCountInString(login.Password)),
		AppNameLength:        uint16(utf8.RuneCountInString(login.AppName)),
		ServerNameLength:     uint16(utf8.RuneCountInString(login.ServerName)),
		CtlIntNameLength:     uint16(utf8.RuneCountInString(login.CtlIntName)),
		LanguageLength:       uint16(utf8.RuneCountInString(login.Language)),
		DatabaseLength:       uint16(utf8.RuneCountInString(login.Database)),
		ClientID:             login.ClientID,
		SSPILength:           uint16(len(login.SSPI)),
		AtchDBFileLength:     uint16(utf8.RuneCountInString(login.AtchDBFile)),
		ChangePasswordLength: uint16(utf8.RuneCountInString(login.ChangePassword)),
	}
	// Lay out the variable section: each field starts where the previous ended,
	// beginning right after the fixed header.
	offset := uint16(binary.Size(hdr))
	hdr.HostNameOffset = offset
	offset += uint16(len(hostname))
	hdr.UserNameOffset = offset
	offset += uint16(len(username))
	hdr.PasswordOffset = offset
	offset += uint16(len(password))
	hdr.AppNameOffset = offset
	offset += uint16(len(appname))
	hdr.ServerNameOffset = offset
	offset += uint16(len(servername))
	hdr.CtlIntNameOffset = offset
	offset += uint16(len(ctlintname))
	hdr.LanguageOffset = offset
	offset += uint16(len(language))
	hdr.DatabaseOffset = offset
	offset += uint16(len(database))
	hdr.SSPIOffset = offset
	offset += uint16(len(login.SSPI))
	hdr.AtchDBFileOffset = offset
	offset += uint16(len(atchdbfile))
	hdr.ChangePasswordOffset = offset
	offset += uint16(len(changepassword))
	hdr.Length = uint32(offset)
	var err error
	err = binary.Write(w, binary.LittleEndian, &hdr)
	if err != nil {
		return err
	}
	// Field payloads, in the exact order the offsets were assigned above.
	_, err = w.Write(hostname)
	if err != nil {
		return err
	}
	_, err = w.Write(username)
	if err != nil {
		return err
	}
	_, err = w.Write(password)
	if err != nil {
		return err
	}
	_, err = w.Write(appname)
	if err != nil {
		return err
	}
	_, err = w.Write(servername)
	if err != nil {
		return err
	}
	_, err = w.Write(ctlintname)
	if err != nil {
		return err
	}
	_, err = w.Write(language)
	if err != nil {
		return err
	}
	_, err = w.Write(database)
	if err != nil {
		return err
	}
	_, err = w.Write(login.SSPI)
	if err != nil {
		return err
	}
	_, err = w.Write(atchdbfile)
	if err != nil {
		return err
	}
	_, err = w.Write(changepassword)
	if err != nil {
		return err
	}
	return w.FinishPacket()
}
// readUcs2 reads numchars UTF-16 characters (2*numchars bytes) from r and
// decodes them into a Go string.
func readUcs2(r io.Reader, numchars int) (string, error) {
	raw := make([]byte, numchars*2)
	if _, err := io.ReadFull(r, raw); err != nil {
		return "", err
	}
	return ucs22str(raw)
}
// readUsVarChar reads a US_VARCHAR: a little-endian uint16 character count
// followed by that many UTF-16 characters.
func readUsVarChar(r io.Reader) (string, error) {
	var n uint16
	if err := binary.Read(r, binary.LittleEndian, &n); err != nil {
		return "", err
	}
	return readUcs2(r, int(n))
}
// writeUsVarChar writes a US_VARCHAR: a little-endian uint16 character count
// followed by the UTF-16 encoded string.
// Panics if the string exceeds 65535 characters.
func writeUsVarChar(w io.Writer, s string) error {
	encoded := str2ucs2(s)
	n := len(encoded) / 2
	if n > 0xffff {
		panic("invalid size for US_VARCHAR")
	}
	if err := binary.Write(w, binary.LittleEndian, uint16(n)); err != nil {
		return err
	}
	_, err := w.Write(encoded)
	return err
}
// readBVarChar reads a B_VARCHAR: a one-byte character count followed by
// that many UTF-16 characters.
func readBVarChar(r io.Reader) (string, error) {
	var n uint8
	if err := binary.Read(r, binary.LittleEndian, &n); err != nil {
		return "", err
	}
	// A zero length could be returned, return an empty string
	if n == 0 {
		return "", nil
	}
	return readUcs2(r, int(n))
}
// writeBVarChar writes a B_VARCHAR: a one-byte character count followed by
// the UTF-16 encoded string. Panics if the string exceeds 255 characters.
func writeBVarChar(w io.Writer, s string) error {
	encoded := str2ucs2(s)
	n := len(encoded) / 2
	if n > 0xff {
		panic("invalid size for B_VARCHAR")
	}
	if err := binary.Write(w, binary.LittleEndian, uint8(n)); err != nil {
		return err
	}
	_, err := w.Write(encoded)
	return err
}
func readBVarByte(r io.Reader) (res []byte, err error) {
var length uint8
err = binary.Read(r, binary.LittleEndian, &length)
if err != nil {
return
}
res = make([]byte, length)
_, err = io.ReadFull(r, res)
return
}
func readUshort(r io.Reader) (res uint16, err error) {
err = binary.Read(r, binary.LittleEndian, &res)
return
}
func readByte(r io.Reader) (res byte, err error) {
var b [1]byte
_, err = r.Read(b[:])
res = b[0]
return
}
// Packet Data Stream Headers
// http://msdn.microsoft.com/en-us/library/dd304953.aspx
// headerStruct is one ALL_HEADERS entry: a header type code plus its packed payload.
type headerStruct struct {
	hdrtype uint16
	data    []byte
}

// ALL_HEADERS header type codes.
const (
	dataStmHdrQueryNotif    = 1 // query notifications
	dataStmHdrTransDescr    = 2 // MARS transaction descriptor (required)
	dataStmHdrTraceActivity = 3
)
// Query Notifications Header
// http://msdn.microsoft.com/en-us/library/dd304949.aspx
type queryNotifHdr struct {
	notifyId      string
	ssbDeployment string
	notifyTimeout uint32
}

// pack serializes the header as: uint16 byte-length of notifyId, notifyId
// (UTF-16), uint16 byte-length of ssbDeployment, ssbDeployment (UTF-16),
// uint32 timeout — all little-endian.
func (hdr queryNotifHdr) pack() []byte {
	notifyId := str2ucs2(hdr.notifyId)
	ssbDeployment := str2ucs2(hdr.ssbDeployment)

	res := make([]byte, 2+len(notifyId)+2+len(ssbDeployment)+4)
	pos := 0
	binary.LittleEndian.PutUint16(res[pos:], uint16(len(notifyId)))
	pos += 2
	pos += copy(res[pos:], notifyId)
	binary.LittleEndian.PutUint16(res[pos:], uint16(len(ssbDeployment)))
	pos += 2
	pos += copy(res[pos:], ssbDeployment)
	binary.LittleEndian.PutUint32(res[pos:], hdr.notifyTimeout)
	return res
}
// MARS Transaction Descriptor Header
// http://msdn.microsoft.com/en-us/library/dd340515.aspx
type transDescrHdr struct {
	transDescr        uint64 // transaction descriptor returned from ENVCHANGE
	outstandingReqCnt uint32 // outstanding request count
}

// pack serializes the header as little-endian uint64 descriptor followed by
// little-endian uint32 outstanding request count (12 bytes total).
func (hdr transDescrHdr) pack() []byte {
	buf := make([]byte, 12)
	binary.LittleEndian.PutUint64(buf[:8], hdr.transDescr)
	binary.LittleEndian.PutUint32(buf[8:], hdr.outstandingReqCnt)
	return buf
}
// writeAllHeaders writes the ALL_HEADERS stream: a little-endian uint32 total
// length (including the 4 bytes of the length field itself), then for each
// header its own length (4+2+payload), its type code and its payload.
func writeAllHeaders(w io.Writer, headers []headerStruct) error {
	// Calculating total length.
	total := uint32(4)
	for _, h := range headers {
		total += 4 + 2 + uint32(len(h.data))
	}
	// writing
	if err := binary.Write(w, binary.LittleEndian, total); err != nil {
		return err
	}
	for _, h := range headers {
		if err := binary.Write(w, binary.LittleEndian, 4+2+uint32(len(h.data))); err != nil {
			return err
		}
		if err := binary.Write(w, binary.LittleEndian, h.hdrtype); err != nil {
			return err
		}
		if _, err := w.Write(h.data); err != nil {
			return err
		}
	}
	return nil
}
// sendSqlBatch72 sends a SQLBatch request (TDS 7.2+ layout): the ALL_HEADERS
// block followed by the UTF-16 encoded SQL text.
func sendSqlBatch72(buf *tdsBuffer, sqltext string, headers []headerStruct, resetSession bool) (err error) {
	buf.BeginPacket(packSQLBatch, resetSession)
	if err = writeAllHeaders(buf, headers); err != nil {
		return
	}
	_, err = buf.Write(str2ucs2(sqltext))
	if err != nil {
		return
	}
	return buf.FinishPacket()
}
// sendAttention sends an out-of-band Attention packet, asking the server to
// cancel the currently executing request.
// 2.2.1.7 Attention: https://msdn.microsoft.com/en-us/library/dd341449.aspx
// 4.19.2 Out-of-Band Attention Signal: https://msdn.microsoft.com/en-us/library/dd305167.aspx
func sendAttention(buf *tdsBuffer) error {
	buf.BeginPacket(packAttention, false)
	return buf.FinishPacket()
}
// connectParams holds every option understood by the driver, as parsed from
// the connection string by parseConnectParams (which also applies defaults).
type connectParams struct {
	logFlags               uint64        // bitmask of the log* constants
	port                   uint64        // defaults to 1433
	host                   string
	instance               string
	database               string
	user                   string
	password               string
	dial_timeout           time.Duration // "dial timeout" (default 15s)
	conn_timeout           time.Duration // "connection timeout" (default 30s)
	keepAlive              time.Duration // "keepalive" (default 30s)
	encrypt                bool
	disableEncryption      bool // set by "encrypt=disable"
	trustServerCertificate bool
	certificate            string
	hostInCertificate      string // defaults to host
	serverSPN              string
	workstation            string
	appname                string // defaults to "go-mssqldb"
	typeFlags              uint8
	failOverPartner        string
	failOverPort           uint64
	packetSize             uint16 // clamped to [512, 32767]
}
// splitConnectionString parses an ADO-style connection string
// ("key=value;key=value;...") into a map. Keys are lower-cased and trimmed;
// values are trimmed. Empty segments and empty keys are skipped; a segment
// without '=' becomes a key with an empty value.
func splitConnectionString(dsn string) map[string]string {
	res := map[string]string{}
	for _, part := range strings.Split(dsn, ";") {
		if part == "" {
			continue
		}
		kv := strings.SplitN(part, "=", 2)
		key := strings.TrimSpace(strings.ToLower(kv[0]))
		if key == "" {
			continue
		}
		value := ""
		if len(kv) == 2 {
			value = strings.TrimSpace(kv[1])
		}
		res[key] = value
	}
	return res
}
// Splits a URL in the ODBC format ("key=value;key={braced value};...").
// Keys are lower-cased with trailing whitespace trimmed; values may be bare
// (trailing whitespace trimmed) or brace-quoted, where "}}" escapes a literal
// "}". Implemented as a character-by-character state machine.
func splitConnectionStringOdbc(dsn string) (map[string]string, error) {
	res := map[string]string{}

	type parserState int
	const (
		// Before the start of a key
		parserStateBeforeKey parserState = iota

		// Inside a key
		parserStateKey

		// Beginning of a value. May be bare or braced
		parserStateBeginValue

		// Inside a bare value
		parserStateBareValue

		// Inside a braced value
		parserStateBracedValue

		// A closing brace inside a braced value.
		// May be the end of the value or an escaped closing brace, depending on the next character
		parserStateBracedValueClosingBrace

		// After a value. Next character should be a semicolon or whitespace.
		parserStateEndValue
	)

	var state = parserStateBeforeKey
	var key string
	var value string

	for i, c := range dsn {
		switch state {
		case parserStateBeforeKey:
			switch {
			case c == '=':
				return res, fmt.Errorf("Unexpected character = at index %d. Expected start of key or semi-colon or whitespace.", i)
			case !unicode.IsSpace(c) && c != ';':
				state = parserStateKey
				key += string(c)
			}
		case parserStateKey:
			switch c {
			case '=':
				key = normalizeOdbcKey(key)
				if len(key) == 0 {
					return res, fmt.Errorf("Unexpected end of key at index %d.", i)
				}
				state = parserStateBeginValue
			case ';':
				// Key without value
				key = normalizeOdbcKey(key)
				if len(key) == 0 {
					return res, fmt.Errorf("Unexpected end of key at index %d.", i)
				}
				res[key] = value
				key = ""
				value = ""
				state = parserStateBeforeKey
			default:
				key += string(c)
			}
		case parserStateBeginValue:
			switch {
			case c == '{':
				state = parserStateBracedValue
			case c == ';':
				// Empty value
				res[key] = value
				key = ""
				state = parserStateBeforeKey
			case unicode.IsSpace(c):
				// Ignore whitespace
			default:
				state = parserStateBareValue
				value += string(c)
			}
		case parserStateBareValue:
			if c == ';' {
				res[key] = strings.TrimRightFunc(value, unicode.IsSpace)
				key = ""
				value = ""
				state = parserStateBeforeKey
			} else {
				value += string(c)
			}
		case parserStateBracedValue:
			if c == '}' {
				state = parserStateBracedValueClosingBrace
			} else {
				value += string(c)
			}
		case parserStateBracedValueClosingBrace:
			if c == '}' {
				// Escaped closing brace
				value += string(c)
				state = parserStateBracedValue
				continue
			}

			// End of braced value
			res[key] = value
			key = ""
			value = ""

			// This character is the first character past the end,
			// so it needs to be parsed like the parserStateEndValue state.
			state = parserStateEndValue
			switch {
			case c == ';':
				state = parserStateBeforeKey
			case unicode.IsSpace(c):
				// Ignore whitespace
			default:
				return res, fmt.Errorf("Unexpected character %c at index %d. Expected semi-colon or whitespace.", c, i)
			}
		case parserStateEndValue:
			switch {
			case c == ';':
				state = parserStateBeforeKey
			case unicode.IsSpace(c):
				// Ignore whitespace
			default:
				return res, fmt.Errorf("Unexpected character %c at index %d. Expected semi-colon or whitespace.", c, i)
			}
		}
	}

	// Flush whatever state the string ended in.
	switch state {
	case parserStateBeforeKey: // Okay
	case parserStateKey: // Unfinished key. Treat as key without value.
		key = normalizeOdbcKey(key)
		if len(key) == 0 {
			return res, fmt.Errorf("Unexpected end of key at index %d.", len(dsn))
		}
		res[key] = value
	case parserStateBeginValue: // Empty value
		res[key] = value
	case parserStateBareValue:
		res[key] = strings.TrimRightFunc(value, unicode.IsSpace)
	case parserStateBracedValue:
		return res, fmt.Errorf("Unexpected end of braced value at index %d.", len(dsn))
	case parserStateBracedValueClosingBrace: // End of braced value
		res[key] = value
	case parserStateEndValue: // Okay
	}

	return res, nil
}
// normalizeOdbcKey normalizes an ODBC-format key: lower-cased with trailing
// whitespace removed (leading whitespace is already skipped by the parser).
func normalizeOdbcKey(s string) string {
	trimmed := strings.TrimRightFunc(s, unicode.IsSpace)
	return strings.ToLower(trimmed)
}
// splitConnectionStringURL splits a URL of the form
// sqlserver://username:password@host:port/instance?param1=value&param2=value
// into the same key/value map the other connection-string parsers produce.
// (Also repairs the mojibake "&para;m2" in the original comment.)
func splitConnectionStringURL(dsn string) (map[string]string, error) {
	res := map[string]string{}

	u, err := url.Parse(dsn)
	if err != nil {
		return res, err
	}

	if u.Scheme != "sqlserver" {
		return res, fmt.Errorf("scheme %s is not recognized", u.Scheme)
	}

	if u.User != nil {
		res["user id"] = u.User.Username()
		if pwd, ok := u.User.Password(); ok {
			res["password"] = pwd
		}
	}

	// Without an explicit port, SplitHostPort fails and the whole Host is the host.
	host, port, err := net.SplitHostPort(u.Host)
	if err != nil {
		host = u.Host
	}

	// A non-empty path names the instance: "host\instance".
	if len(u.Path) > 0 {
		res["server"] = host + "\\" + u.Path[1:]
	} else {
		res["server"] = host
	}

	if len(port) > 0 {
		res["port"] = port
	}

	for k, v := range u.Query() {
		if len(v) > 1 {
			return res, fmt.Errorf("key %s provided more than once", k)
		}
		res[strings.ToLower(k)] = v[0]
	}

	return res, nil
}
// parseConnectParams parses a connection string in any of the three supported
// syntaxes — "odbc:...", "sqlserver://..." URL, or ADO "key=value;..." — into
// a connectParams, applying driver defaults for anything left unspecified.
func parseConnectParams(dsn string) (connectParams, error) {
	var p connectParams

	// Dispatch on the dsn prefix to the matching splitter.
	var params map[string]string
	if strings.HasPrefix(dsn, "odbc:") {
		parameters, err := splitConnectionStringOdbc(dsn[len("odbc:"):])
		if err != nil {
			return p, err
		}
		params = parameters
	} else if strings.HasPrefix(dsn, "sqlserver://") {
		parameters, err := splitConnectionStringURL(dsn)
		if err != nil {
			return p, err
		}
		params = parameters
	} else {
		params = splitConnectionString(dsn)
	}

	strlog, ok := params["log"]
	if ok {
		var err error
		p.logFlags, err = strconv.ParseUint(strlog, 10, 64)
		if err != nil {
			return p, fmt.Errorf("Invalid log parameter '%s': %s", strlog, err.Error())
		}
	}

	// "server" may be "host" or "host\instance"; ".", "(local)" and an empty
	// host all mean localhost.
	server := params["server"]
	parts := strings.SplitN(server, `\`, 2)
	p.host = parts[0]
	if p.host == "." || strings.ToUpper(p.host) == "(LOCAL)" || p.host == "" {
		p.host = "localhost"
	}
	if len(parts) > 1 {
		p.instance = parts[1]
	}
	p.database = params["database"]
	p.user = params["user id"]
	p.password = params["password"]

	p.port = 1433
	strport, ok := params["port"]
	if ok {
		var err error
		p.port, err = strconv.ParseUint(strport, 10, 16)
		if err != nil {
			f := "Invalid tcp port '%v': %v"
			return p, fmt.Errorf(f, strport, err.Error())
		}
	}

	// https://docs.microsoft.com/en-us/sql/database-engine/configure-windows/configure-the-network-packet-size-server-configuration-option
	// Default packet size remains at 4096 bytes
	p.packetSize = 4096
	strpsize, ok := params["packet size"]
	if ok {
		var err error
		psize, err := strconv.ParseUint(strpsize, 0, 16)
		if err != nil {
			f := "Invalid packet size '%v': %v"
			return p, fmt.Errorf(f, strpsize, err.Error())
		}

		// Ensure packet size falls within the TDS protocol range of 512 to 32767 bytes
		// NOTE: Encrypted connections have a maximum size of 16383 bytes.  If you request
		// a higher packet size, the server will respond with an ENVCHANGE request to
		// alter the packet size to 16383 bytes.
		p.packetSize = uint16(psize)
		if p.packetSize < 512 {
			p.packetSize = 512
		} else if p.packetSize > 32767 {
			p.packetSize = 32767
		}
	}

	// https://msdn.microsoft.com/en-us/library/dd341108.aspx
	p.dial_timeout = 15 * time.Second
	p.conn_timeout = 30 * time.Second
	strconntimeout, ok := params["connection timeout"]
	if ok {
		timeout, err := strconv.ParseUint(strconntimeout, 10, 64)
		if err != nil {
			f := "Invalid connection timeout '%v': %v"
			return p, fmt.Errorf(f, strconntimeout, err.Error())
		}
		p.conn_timeout = time.Duration(timeout) * time.Second
	}
	strdialtimeout, ok := params["dial timeout"]
	if ok {
		timeout, err := strconv.ParseUint(strdialtimeout, 10, 64)
		if err != nil {
			f := "Invalid dial timeout '%v': %v"
			return p, fmt.Errorf(f, strdialtimeout, err.Error())
		}
		p.dial_timeout = time.Duration(timeout) * time.Second
	}

	// default keep alive should be 30 seconds according to spec:
	// https://msdn.microsoft.com/en-us/library/dd341108.aspx
	p.keepAlive = 30 * time.Second
	if keepAlive, ok := params["keepalive"]; ok {
		timeout, err := strconv.ParseUint(keepAlive, 10, 64)
		if err != nil {
			f := "Invalid keepAlive value '%s': %s"
			return p, fmt.Errorf(f, keepAlive, err.Error())
		}
		p.keepAlive = time.Duration(timeout) * time.Second
	}

	// "encrypt" accepts "disable" plus anything strconv.ParseBool takes; when
	// the key is absent the server certificate is implicitly trusted.
	encrypt, ok := params["encrypt"]
	if ok {
		if strings.EqualFold(encrypt, "DISABLE") {
			p.disableEncryption = true
		} else {
			var err error
			p.encrypt, err = strconv.ParseBool(encrypt)
			if err != nil {
				f := "Invalid encrypt '%s': %s"
				return p, fmt.Errorf(f, encrypt, err.Error())
			}
		}
	} else {
		p.trustServerCertificate = true
	}
	trust, ok := params["trustservercertificate"]
	if ok {
		var err error
		p.trustServerCertificate, err = strconv.ParseBool(trust)
		if err != nil {
			f := "Invalid trust server certificate '%s': %s"
			return p, fmt.Errorf(f, trust, err.Error())
		}
	}
	p.certificate = params["certificate"]
	p.hostInCertificate, ok = params["hostnameincertificate"]
	if !ok {
		p.hostInCertificate = p.host
	}

	serverSPN, ok := params["serverspn"]
	if ok {
		p.serverSPN = serverSPN
	} else {
		p.serverSPN = fmt.Sprintf("MSSQLSvc/%s:%d", p.host, p.port)
	}

	// Workstation id defaults to the local hostname (best effort).
	workstation, ok := params["workstation id"]
	if ok {
		p.workstation = workstation
	} else {
		workstation, err := os.Hostname()
		if err == nil {
			p.workstation = workstation
		}
	}

	appname, ok := params["app name"]
	if !ok {
		appname = "go-mssqldb"
	}
	p.appname = appname

	// NOTE(review): comparison is case-sensitive; "readonly" would be ignored
	// — confirm intended.
	appintent, ok := params["applicationintent"]
	if ok {
		if appintent == "ReadOnly" {
			p.typeFlags |= fReadOnlyIntent
		}
	}

	failOverPartner, ok := params["failoverpartner"]
	if ok {
		p.failOverPartner = failOverPartner
	}

	failOverPort, ok := params["failoverport"]
	if ok {
		var err error
		p.failOverPort, err = strconv.ParseUint(failOverPort, 0, 16)
		if err != nil {
			f := "Invalid tcp port '%v': %v"
			return p, fmt.Errorf(f, failOverPort, err.Error())
		}
	}

	return p, nil
}
// auth abstracts a challenge/response authentication mechanism:
// InitialBytes produces the first client token, NextBytes answers a server
// challenge, and Free releases any underlying resources.
type auth interface {
	InitialBytes() ([]byte, error)
	NextBytes([]byte) ([]byte, error)
	Free()
}
// SQL Server AlwaysOn Availability Group Listeners are bound by DNS to a
// list of IP addresses. So if there is more than one, try them all and
// use the first one that allows a connection.
func dialConnection(ctx context.Context, p connectParams) (conn net.Conn, err error) {
	var ips []net.IP
	ips, err = net.LookupIP(p.host)
	if err != nil {
		// Resolution failed; the host may already be a literal IP.
		ip := net.ParseIP(p.host)
		if ip == nil {
			return nil, err
		}
		ips = []net.IP{ip}
	}
	if len(ips) == 1 {
		d := createDialer(&p)
		addr := net.JoinHostPort(ips[0].String(), strconv.Itoa(int(p.port)))
		conn, err = d.Dial(ctx, addr)

	} else {
		//Try Dials in parallel to avoid waiting for timeouts.
		connChan := make(chan net.Conn, len(ips))
		errChan := make(chan error, len(ips))
		portStr := strconv.Itoa(int(p.port))
		for _, ip := range ips {
			go func(ip net.IP) {
				d := createDialer(&p)
				addr := net.JoinHostPort(ip.String(), portStr)
				conn, err := d.Dial(ctx, addr)
				if err == nil {
					connChan <- conn
				} else {
					errChan <- err
				}
			}(ip)
		}
		// Wait for either the *first* successful connection, or all the errors
	wait_loop:
		for i := range ips { // idiom fix: was "for i, _ := range ips"
			select {
			case conn = <-connChan:
				// Got a connection to use, close any others
				go func(n int) {
					for i := 0; i < n; i++ {
						select {
						case conn := <-connChan:
							conn.Close()
						case <-errChan:
						}
					}
				}(len(ips) - i - 1)
				// Remove any earlier errors we may have collected
				err = nil
				break wait_loop
			case err = <-errChan:
			}
		}
	}
	// Can't do the usual err != nil check, as it is possible to have gotten an error before a successful connection
	if conn == nil {
		// BUG FIX: err can be nil here (e.g. LookupIP succeeded with an empty
		// list, so neither dial branch ran); calling err.Error() then panicked.
		if err == nil {
			return nil, fmt.Errorf("Unable to open tcp connection with host '%v:%v'", p.host, p.port)
		}
		f := "Unable to open tcp connection with host '%v:%v': %v"
		return nil, fmt.Errorf(f, p.host, p.port, err.Error())
	}
	return conn, err
}
func connect(ctx context.Context, log optionalLogger, p connectParams) (res *tdsSession, err error) {
dialCtx := ctx
if p.dial_timeout > 0 {
var cancel func()
dialCtx, cancel = context.WithTimeout(ctx, p.dial_timeout)
defer cancel()
}
// if instance is specified use instance resolution service
if p.instance != "" {
p.instance = strings.ToUpper(p.instance)
instances, err := getInstances(dialCtx, p.host)
if err != nil {
f := "Unable to get instances from Sql Server Browser on host %v: %v"
return nil, fmt.Errorf(f, p.host, err.Error())
}
strport, ok := instances[p.instance]["tcp"]
if !ok {
f := "No instance matching '%v' returned from host '%v'"
return nil, fmt.Errorf(f, p.instance, p.host)
}
p.port, err = strconv.ParseUint(strport, 0, 16)
if err != nil {
f := "Invalid tcp port returned from Sql Server Browser '%v': %v"
return nil, fmt.Errorf(f, strport, err.Error())
}
}
initiate_connection:
conn, err := dialConnection(dialCtx, p)
if err != nil {
return nil, err
}
toconn := NewTimeoutConn(conn, p.conn_timeout)
outbuf := newTdsBuffer(p.packetSize, toconn)
sess := tdsSession{
buf: outbuf,
log: log,
logFlags: p.logFlags,
}
instance_buf := []byte(p.instance)
instance_buf = append(instance_buf, 0) // zero terminate instance name
var encrypt byte
if p.disableEncryption {
encrypt = encryptNotSup
} else if p.encrypt {
encrypt = encryptOn
} else {
encrypt = encryptOff
}
fields := map[uint8][]byte{
preloginVERSION: {0, 0, 0, 0, 0, 0},
preloginENCRYPTION: {encrypt},
preloginINSTOPT: instance_buf,
preloginTHREADID: {0, 0, 0, 0},
preloginMARS: {0}, // MARS disabled
}
err = writePrelogin(outbuf, fields)
if err != nil {
return nil, err
}
fields, err = readPrelogin(outbuf)
if err != nil {
return nil, err
}
encryptBytes, ok := fields[preloginENCRYPTION]
if !ok {
return nil, fmt.Errorf("Encrypt negotiation failed")
}
encrypt = encryptBytes[0]
if p.encrypt && (encrypt == encryptNotSup || encrypt == encryptOff) {
return nil, fmt.Errorf("Server does not support encryption")
}
if encrypt != encryptNotSup {
var config tls.Config
if p.certificate != "" {
pem, err := ioutil.ReadFile(p.certificate)
if err != nil {
return nil, fmt.Errorf("Cannot read certificate %q: %v", p.certificate, err)
}
certs := x509.NewCertPool()
certs.AppendCertsFromPEM(pem)
config.RootCAs = certs
}
if p.trustServerCertificate {
config.InsecureSkipVerify = true
}
config.ServerName = p.hostInCertificate
// fix for https://github.com/denisenkom/go-mssqldb/issues/166
// Go implementation of TLS payload size heuristic algorithm splits single TDS package to multiple TCP segments,
// while SQL Server seems to expect one TCP segment per encrypted TDS package.
// Setting DynamicRecordSizingDisabled to true disables that algorithm and uses 16384 bytes per TLS package
config.DynamicRecordSizingDisabled = true
outbuf.transport = conn
toconn.buf = outbuf
tlsConn := tls.Client(toconn, &config)
err = tlsConn.Handshake()
toconn.buf = nil
outbuf.transport = tlsConn
if err != nil {
return nil, fmt.Errorf("TLS Handshake failed: %v", err)
}
if encrypt == encryptOff {
outbuf.afterFirst = func() {
outbuf.transport = toconn
}
}
}
login := login{
TDSVersion: verTDS74,
PacketSize: uint32(outbuf.PackageSize()),
Database: p.database,
OptionFlags2: fODBC, // to get unlimited TEXTSIZE
HostName: p.workstation,
ServerName: p.host,
AppName: p.appname,
TypeFlags: p.typeFlags,
}
auth, auth_ok := getAuth(p.user, p.password, p.serverSPN, p.workstation)
if auth_ok {
login.SSPI, err = auth.InitialBytes()
if err != nil {
return nil, err
}
login.OptionFlags2 |= fIntSecurity
defer auth.Free()
} else {
login.UserName = p.user
login.Password = p.password
}
err = sendLogin(outbuf, login)
if err != nil {
return nil, err
}
// processing login response
var sspi_msg []byte
continue_login:
tokchan := make(chan tokenStruct, 5)
go processResponse(context.Background(), &sess, tokchan, nil)
success := false
for tok := range tokchan {
switch token := tok.(type) {
case sspiMsg:
sspi_msg, err = auth.NextBytes(token)
if err != nil {
return nil, err
}
case loginAckStruct:
success = true
sess.loginAck = token
case error:
return nil, fmt.Errorf("Login error: %s", token.Error())
case doneStruct:
if token.isError() {
return nil, fmt.Errorf("Login error: %s", token.getError())
}
}
}
if sspi_msg != nil {
outbuf.BeginPacket(packSSPIMessage, false)
_, err = outbuf.Write(sspi_msg)
if err != nil {
return nil, err
}
err = outbuf.FinishPacket()
if err != nil {
return nil, err
}
sspi_msg = nil
goto continue_login
}
if !success {
return nil, fmt.Errorf("Login failed")
}
if sess.routedServer != "" {
toconn.Close()
p.host = sess.routedServer
p.port = uint64(sess.routedPort)
goto initiate_connection
}
return &sess, nil
}
| vendor/github.com/denisenkom/go-mssqldb/tds.go | 0 | https://github.com/grafana/grafana/commit/c420af16b14e586b96d190e52f13805e0491e16a | [
0.001461186446249485,
0.0001821067853597924,
0.0001617068483028561,
0.00016923800285439938,
0.00011281273327767849
] |
{
"id": 1,
"code_window": [
"\t\treturn Error(403, \"Not allowed to remove team member\", err)\n",
"\t}\n",
"\n",
"\tprotectLastAdmin := false\n",
"\tif c.OrgRole == m.ROLE_EDITOR {\n",
"\t\tprotectLastAdmin = true\n",
"\t}\n",
"\n"
],
"labels": [
"keep",
"keep",
"keep",
"keep",
"replace",
"keep",
"keep",
"keep"
],
"after_edit": [
"\tif c.OrgRole != m.ROLE_ADMIN {\n"
],
"file_path": "pkg/api/team_members.go",
"type": "replace",
"edit_start_line_idx": 93
} | package sqlstore
import (
"bytes"
"fmt"
"time"
"github.com/grafana/grafana/pkg/bus"
m "github.com/grafana/grafana/pkg/models"
)
func init() {
bus.AddHandler("sql", CreateTeam)
bus.AddHandler("sql", UpdateTeam)
bus.AddHandler("sql", DeleteTeam)
bus.AddHandler("sql", SearchTeams)
bus.AddHandler("sql", GetTeamById)
bus.AddHandler("sql", GetTeamsByUser)
bus.AddHandler("sql", AddTeamMember)
bus.AddHandler("sql", UpdateTeamMember)
bus.AddHandler("sql", RemoveTeamMember)
bus.AddHandler("sql", GetTeamMembers)
}
func getTeamSelectSqlBase() string {
return `SELECT
team.id as id,
team.org_id,
team.name as name,
team.email as email,
(SELECT COUNT(*) from team_member where team_member.team_id = team.id) as member_count
FROM team as team `
}
func CreateTeam(cmd *m.CreateTeamCommand) error {
return inTransaction(func(sess *DBSession) error {
if isNameTaken, err := isTeamNameTaken(cmd.OrgId, cmd.Name, 0, sess); err != nil {
return err
} else if isNameTaken {
return m.ErrTeamNameTaken
}
team := m.Team{
Name: cmd.Name,
Email: cmd.Email,
OrgId: cmd.OrgId,
Created: time.Now(),
Updated: time.Now(),
}
_, err := sess.Insert(&team)
cmd.Result = team
return err
})
}
func UpdateTeam(cmd *m.UpdateTeamCommand) error {
return inTransaction(func(sess *DBSession) error {
if isNameTaken, err := isTeamNameTaken(cmd.OrgId, cmd.Name, cmd.Id, sess); err != nil {
return err
} else if isNameTaken {
return m.ErrTeamNameTaken
}
team := m.Team{
Name: cmd.Name,
Email: cmd.Email,
Updated: time.Now(),
}
sess.MustCols("email")
affectedRows, err := sess.ID(cmd.Id).Update(&team)
if err != nil {
return err
}
if affectedRows == 0 {
return m.ErrTeamNotFound
}
return nil
})
}
// DeleteTeam will delete a team, its member and any permissions connected to the team
func DeleteTeam(cmd *m.DeleteTeamCommand) error {
return inTransaction(func(sess *DBSession) error {
if _, err := teamExists(cmd.OrgId, cmd.Id, sess); err != nil {
return err
}
deletes := []string{
"DELETE FROM team_member WHERE org_id=? and team_id = ?",
"DELETE FROM team WHERE org_id=? and id = ?",
"DELETE FROM dashboard_acl WHERE org_id=? and team_id = ?",
}
for _, sql := range deletes {
_, err := sess.Exec(sql, cmd.OrgId, cmd.Id)
if err != nil {
return err
}
}
return nil
})
}
func teamExists(orgId int64, teamId int64, sess *DBSession) (bool, error) {
if res, err := sess.Query("SELECT 1 from team WHERE org_id=? and id=?", orgId, teamId); err != nil {
return false, err
} else if len(res) != 1 {
return false, m.ErrTeamNotFound
}
return true, nil
}
func isTeamNameTaken(orgId int64, name string, existingId int64, sess *DBSession) (bool, error) {
var team m.Team
exists, err := sess.Where("org_id=? and name=?", orgId, name).Get(&team)
if err != nil {
return false, nil
}
if exists && existingId != team.Id {
return true, nil
}
return false, nil
}
func SearchTeams(query *m.SearchTeamsQuery) error {
query.Result = m.SearchTeamQueryResult{
Teams: make([]*m.TeamDTO, 0),
}
queryWithWildcards := "%" + query.Query + "%"
var sql bytes.Buffer
params := make([]interface{}, 0)
sql.WriteString(getTeamSelectSqlBase())
if query.UserIdFilter > 0 {
sql.WriteString(`INNER JOIN team_member on team.id = team_member.team_id AND team_member.user_id = ?`)
params = append(params, query.UserIdFilter)
}
sql.WriteString(` WHERE team.org_id = ?`)
params = append(params, query.OrgId)
if query.Query != "" {
sql.WriteString(` and team.name ` + dialect.LikeStr() + ` ?`)
params = append(params, queryWithWildcards)
}
if query.Name != "" {
sql.WriteString(` and team.name = ?`)
params = append(params, query.Name)
}
sql.WriteString(` order by team.name asc`)
if query.Limit != 0 {
offset := query.Limit * (query.Page - 1)
sql.WriteString(dialect.LimitOffset(int64(query.Limit), int64(offset)))
}
if err := x.SQL(sql.String(), params...).Find(&query.Result.Teams); err != nil {
return err
}
team := m.Team{}
countSess := x.Table("team")
if query.Query != "" {
countSess.Where(`name `+dialect.LikeStr()+` ?`, queryWithWildcards)
}
if query.Name != "" {
countSess.Where("name=?", query.Name)
}
count, err := countSess.Count(&team)
query.Result.TotalCount = count
return err
}
func GetTeamById(query *m.GetTeamByIdQuery) error {
var sql bytes.Buffer
sql.WriteString(getTeamSelectSqlBase())
sql.WriteString(` WHERE team.org_id = ? and team.id = ?`)
var team m.TeamDTO
exists, err := x.SQL(sql.String(), query.OrgId, query.Id).Get(&team)
if err != nil {
return err
}
if !exists {
return m.ErrTeamNotFound
}
query.Result = &team
return nil
}
// GetTeamsByUser is used by the Guardian when checking a users' permissions
func GetTeamsByUser(query *m.GetTeamsByUserQuery) error {
query.Result = make([]*m.TeamDTO, 0)
var sql bytes.Buffer
sql.WriteString(getTeamSelectSqlBase())
sql.WriteString(` INNER JOIN team_member on team.id = team_member.team_id`)
sql.WriteString(` WHERE team.org_id = ? and team_member.user_id = ?`)
err := x.SQL(sql.String(), query.OrgId, query.UserId).Find(&query.Result)
return err
}
// AddTeamMember adds a user to a team
func AddTeamMember(cmd *m.AddTeamMemberCommand) error {
return inTransaction(func(sess *DBSession) error {
if res, err := sess.Query("SELECT 1 from team_member WHERE org_id=? and team_id=? and user_id=?", cmd.OrgId, cmd.TeamId, cmd.UserId); err != nil {
return err
} else if len(res) == 1 {
return m.ErrTeamMemberAlreadyAdded
}
if _, err := teamExists(cmd.OrgId, cmd.TeamId, sess); err != nil {
return err
}
entity := m.TeamMember{
OrgId: cmd.OrgId,
TeamId: cmd.TeamId,
UserId: cmd.UserId,
External: cmd.External,
Created: time.Now(),
Updated: time.Now(),
Permission: cmd.Permission,
}
_, err := sess.Insert(&entity)
return err
})
}
// UpdateTeamMember updates a team member
func UpdateTeamMember(cmd *m.UpdateTeamMemberCommand) error {
return inTransaction(func(sess *DBSession) error {
rawSql := `SELECT * FROM team_member WHERE org_id=? and team_id=? and user_id=?`
var member m.TeamMember
exists, err := sess.SQL(rawSql, cmd.OrgId, cmd.TeamId, cmd.UserId).Get(&member)
if err != nil {
return err
}
if !exists {
return m.ErrTeamMemberNotFound
}
if cmd.Permission != m.PERMISSION_ADMIN {
cmd.Permission = 0
}
member.Permission = cmd.Permission
_, err = sess.Cols("permission").Where("org_id=? and team_id=? and user_id=?", cmd.OrgId, cmd.TeamId, cmd.UserId).Update(member)
return err
})
}
// RemoveTeamMember removes a member from a team
func RemoveTeamMember(cmd *m.RemoveTeamMemberCommand) error {
return inTransaction(func(sess *DBSession) error {
if _, err := teamExists(cmd.OrgId, cmd.TeamId, sess); err != nil {
return err
}
if cmd.ProtectLastAdmin {
lastAdmin, err := isLastAdmin(sess, cmd.OrgId, cmd.TeamId, cmd.UserId)
if err != nil {
return err
}
if lastAdmin {
return m.ErrLastTeamAdmin
}
}
var rawSql = "DELETE FROM team_member WHERE org_id=? and team_id=? and user_id=?"
res, err := sess.Exec(rawSql, cmd.OrgId, cmd.TeamId, cmd.UserId)
if err != nil {
return err
}
rows, err := res.RowsAffected()
if rows == 0 {
return m.ErrTeamMemberNotFound
}
return err
})
}
func isLastAdmin(sess *DBSession, orgId int64, teamId int64, userId int64) (bool, error) {
rawSql := "SELECT user_id FROM team_member WHERE org_id=? and team_id=? and permission=?"
userIds := []*int64{}
err := sess.SQL(rawSql, orgId, teamId, m.PERMISSION_ADMIN).Find(&userIds)
if err != nil {
return false, err
}
isAdmin := false
for _, adminId := range userIds {
if userId == *adminId {
isAdmin = true
break
}
}
if isAdmin && len(userIds) == 1 {
return true, nil
}
return false, err
}
// GetTeamMembers return a list of members for the specified team
func GetTeamMembers(query *m.GetTeamMembersQuery) error {
query.Result = make([]*m.TeamMemberDTO, 0)
sess := x.Table("team_member")
sess.Join("INNER", x.Dialect().Quote("user"), fmt.Sprintf("team_member.user_id=%s.id", x.Dialect().Quote("user")))
if query.OrgId != 0 {
sess.Where("team_member.org_id=?", query.OrgId)
}
if query.TeamId != 0 {
sess.Where("team_member.team_id=?", query.TeamId)
}
if query.UserId != 0 {
sess.Where("team_member.user_id=?", query.UserId)
}
if query.External {
sess.Where("team_member.external=?", dialect.BooleanStr(true))
}
sess.Cols("team_member.org_id", "team_member.team_id", "team_member.user_id", "user.email", "user.login", "team_member.external", "team_member.permission")
sess.Asc("user.login", "user.email")
err := sess.Find(&query.Result)
return err
}
| pkg/services/sqlstore/team.go | 1 | https://github.com/grafana/grafana/commit/c420af16b14e586b96d190e52f13805e0491e16a | [
0.02573862113058567,
0.002131585730239749,
0.00016148141003213823,
0.0013007728848606348,
0.004183514975011349
] |
{
"id": 1,
"code_window": [
"\t\treturn Error(403, \"Not allowed to remove team member\", err)\n",
"\t}\n",
"\n",
"\tprotectLastAdmin := false\n",
"\tif c.OrgRole == m.ROLE_EDITOR {\n",
"\t\tprotectLastAdmin = true\n",
"\t}\n",
"\n"
],
"labels": [
"keep",
"keep",
"keep",
"keep",
"replace",
"keep",
"keep",
"keep"
],
"after_edit": [
"\tif c.OrgRole != m.ROLE_ADMIN {\n"
],
"file_path": "pkg/api/team_members.go",
"type": "replace",
"edit_start_line_idx": 93
} | package migrator
import (
"fmt"
"strconv"
"strings"
"github.com/go-xorm/xorm"
"github.com/lib/pq"
)
type Postgres struct {
BaseDialect
}
func NewPostgresDialect(engine *xorm.Engine) *Postgres {
d := Postgres{}
d.BaseDialect.dialect = &d
d.BaseDialect.engine = engine
d.BaseDialect.driverName = POSTGRES
return &d
}
func (db *Postgres) SupportEngine() bool {
return false
}
func (db *Postgres) Quote(name string) string {
return "\"" + name + "\""
}
func (b *Postgres) LikeStr() string {
return "ILIKE"
}
func (db *Postgres) AutoIncrStr() string {
return ""
}
func (db *Postgres) BooleanStr(value bool) string {
return strconv.FormatBool(value)
}
func (b *Postgres) Default(col *Column) string {
if col.Type == DB_Bool {
if col.Default == "0" {
return "FALSE"
}
return "TRUE"
}
return col.Default
}
func (db *Postgres) SqlType(c *Column) string {
var res string
switch t := c.Type; t {
case DB_TinyInt:
res = DB_SmallInt
return res
case DB_MediumInt, DB_Int, DB_Integer:
if c.IsAutoIncrement {
return DB_Serial
}
return DB_Integer
case DB_Serial, DB_BigSerial:
c.IsAutoIncrement = true
c.Nullable = false
res = t
case DB_Binary, DB_VarBinary:
return DB_Bytea
case DB_DateTime:
res = DB_TimeStamp
case DB_TimeStampz:
return "timestamp with time zone"
case DB_Float:
res = DB_Real
case DB_TinyText, DB_MediumText, DB_LongText:
res = DB_Text
case DB_NVarchar:
res = DB_Varchar
case DB_Uuid:
res = DB_Uuid
case DB_Blob, DB_TinyBlob, DB_MediumBlob, DB_LongBlob:
return DB_Bytea
case DB_Double:
return "DOUBLE PRECISION"
default:
if c.IsAutoIncrement {
return DB_Serial
}
res = t
}
var hasLen1 = (c.Length > 0)
var hasLen2 = (c.Length2 > 0)
if hasLen2 {
res += "(" + strconv.Itoa(c.Length) + "," + strconv.Itoa(c.Length2) + ")"
} else if hasLen1 {
res += "(" + strconv.Itoa(c.Length) + ")"
}
return res
}
func (db *Postgres) IndexCheckSql(tableName, indexName string) (string, []interface{}) {
args := []interface{}{tableName, indexName}
sql := "SELECT 1 FROM " + db.Quote("pg_indexes") + " WHERE" + db.Quote("tablename") + "=? AND " + db.Quote("indexname") + "=?"
return sql, args
}
func (db *Postgres) DropIndexSql(tableName string, index *Index) string {
quote := db.Quote
idxName := index.XName(tableName)
return fmt.Sprintf("DROP INDEX %v", quote(idxName))
}
func (db *Postgres) UpdateTableSql(tableName string, columns []*Column) string {
var statements = []string{}
for _, col := range columns {
statements = append(statements, "ALTER "+db.Quote(col.Name)+" TYPE "+db.SqlType(col))
}
return "ALTER TABLE " + db.Quote(tableName) + " " + strings.Join(statements, ", ") + ";"
}
func (db *Postgres) CleanDB() error {
sess := db.engine.NewSession()
defer sess.Close()
if _, err := sess.Exec("DROP SCHEMA public CASCADE;"); err != nil {
return fmt.Errorf("Failed to drop schema public")
}
if _, err := sess.Exec("CREATE SCHEMA public;"); err != nil {
return fmt.Errorf("Failed to create schema public")
}
return nil
}
func (db *Postgres) IsUniqueConstraintViolation(err error) bool {
if driverErr, ok := err.(*pq.Error); ok {
if driverErr.Code == "23505" {
return true
}
}
return false
}
| pkg/services/sqlstore/migrator/postgres_dialect.go | 0 | https://github.com/grafana/grafana/commit/c420af16b14e586b96d190e52f13805e0491e16a | [
0.00034948159009218216,
0.0002032493648584932,
0.0001657210086705163,
0.0001727325579850003,
0.00006548025703523308
] |
{
"id": 1,
"code_window": [
"\t\treturn Error(403, \"Not allowed to remove team member\", err)\n",
"\t}\n",
"\n",
"\tprotectLastAdmin := false\n",
"\tif c.OrgRole == m.ROLE_EDITOR {\n",
"\t\tprotectLastAdmin = true\n",
"\t}\n",
"\n"
],
"labels": [
"keep",
"keep",
"keep",
"keep",
"replace",
"keep",
"keep",
"keep"
],
"after_edit": [
"\tif c.OrgRole != m.ROLE_ADMIN {\n"
],
"file_path": "pkg/api/team_members.go",
"type": "replace",
"edit_start_line_idx": 93
} | import _ from 'lodash';
import { PanelCtrl } from 'app/plugins/sdk';
import impressionSrv from 'app/core/services/impression_srv';
class DashListCtrl extends PanelCtrl {
static templateUrl = 'module.html';
static scrollable = true;
groups: any[];
modes: any[];
panelDefaults = {
query: '',
limit: 10,
tags: [],
recent: false,
search: false,
starred: true,
headings: true,
folderId: null,
};
/** @ngInject */
constructor($scope, $injector, private backendSrv, private dashboardSrv) {
super($scope, $injector);
_.defaults(this.panel, this.panelDefaults);
if (this.panel.tag) {
this.panel.tags = [this.panel.tag];
delete this.panel.tag;
}
this.events.on('refresh', this.onRefresh.bind(this));
this.events.on('init-edit-mode', this.onInitEditMode.bind(this));
this.groups = [
{ list: [], show: false, header: 'Starred dashboards' },
{ list: [], show: false, header: 'Recently viewed dashboards' },
{ list: [], show: false, header: 'Search' },
];
// update capability
if (this.panel.mode) {
if (this.panel.mode === 'starred') {
this.panel.starred = true;
this.panel.headings = false;
}
if (this.panel.mode === 'recently viewed') {
this.panel.recent = true;
this.panel.starred = false;
this.panel.headings = false;
}
if (this.panel.mode === 'search') {
this.panel.search = true;
this.panel.starred = false;
this.panel.headings = false;
}
delete this.panel.mode;
}
}
onInitEditMode() {
this.modes = ['starred', 'search', 'recently viewed'];
this.addEditorTab('Options', 'public/app/plugins/panel/dashlist/editor.html');
}
onRefresh() {
const promises = [];
promises.push(this.getRecentDashboards());
promises.push(this.getStarred());
promises.push(this.getSearch());
return Promise.all(promises).then(this.renderingCompleted.bind(this));
}
getSearch() {
this.groups[2].show = this.panel.search;
if (!this.panel.search) {
return Promise.resolve();
}
const params = {
limit: this.panel.limit,
query: this.panel.query,
tag: this.panel.tags,
folderIds: this.panel.folderId,
type: 'dash-db',
};
return this.backendSrv.search(params).then(result => {
this.groups[2].list = result;
});
}
getStarred() {
this.groups[0].show = this.panel.starred;
if (!this.panel.starred) {
return Promise.resolve();
}
const params = { limit: this.panel.limit, starred: 'true' };
return this.backendSrv.search(params).then(result => {
this.groups[0].list = result;
});
}
starDashboard(dash, evt) {
this.dashboardSrv.starDashboard(dash.id, dash.isStarred).then(newState => {
dash.isStarred = newState;
});
if (evt) {
evt.stopPropagation();
evt.preventDefault();
}
}
getRecentDashboards() {
this.groups[1].show = this.panel.recent;
if (!this.panel.recent) {
return Promise.resolve();
}
const dashIds = _.take(impressionSrv.getDashboardOpened(), this.panel.limit);
return this.backendSrv.search({ dashboardIds: dashIds, limit: this.panel.limit }).then(result => {
this.groups[1].list = dashIds
.map(orderId => {
return _.find(result, dashboard => {
return dashboard.id === orderId;
});
})
.filter(el => {
return el !== undefined;
});
});
}
onFolderChange(folder: any) {
this.panel.folderId = folder.id;
this.refresh();
}
}
export { DashListCtrl, DashListCtrl as PanelCtrl };
| public/app/plugins/panel/dashlist/module.ts | 0 | https://github.com/grafana/grafana/commit/c420af16b14e586b96d190e52f13805e0491e16a | [
0.0001775909768184647,
0.00017307678353972733,
0.00016925460658967495,
0.0001731013908283785,
0.0000024079317881842144
] |
{
"id": 1,
"code_window": [
"\t\treturn Error(403, \"Not allowed to remove team member\", err)\n",
"\t}\n",
"\n",
"\tprotectLastAdmin := false\n",
"\tif c.OrgRole == m.ROLE_EDITOR {\n",
"\t\tprotectLastAdmin = true\n",
"\t}\n",
"\n"
],
"labels": [
"keep",
"keep",
"keep",
"keep",
"replace",
"keep",
"keep",
"keep"
],
"after_edit": [
"\tif c.OrgRole != m.ROLE_ADMIN {\n"
],
"file_path": "pkg/api/team_members.go",
"type": "replace",
"edit_start_line_idx": 93
} | #!/bin/sh
set -u
# User params
USER_PARAMS=$@
# Internal params
RUN_CMD="snmpd -f ${USER_PARAMS}"
#######################################
# Echo/log function
# Arguments:
# String: value to log
#######################################
log() {
if [[ "$@" ]]; then echo "[`date +'%Y-%m-%d %T'`] $@";
else echo; fi
}
# Launch
log $RUN_CMD
$RUN_CMD
# Exit immediately in case of any errors or when we have interactive terminal
if [[ $? != 0 ]] || test -t 0; then exit $?; fi
log
| devenv/docker/blocks/smtp/bootstrap.sh | 0 | https://github.com/grafana/grafana/commit/c420af16b14e586b96d190e52f13805e0491e16a | [
0.00017108845349866897,
0.00016941886860877275,
0.00016826596402097493,
0.0001689022028585896,
0.00000120880736176332
] |
{
"id": 2,
"code_window": [
"\tExternal bool `json:\"-\"`\n",
"\tPermission PermissionType `json:\"-\"`\n",
"}\n",
"\n",
"type UpdateTeamMemberCommand struct {\n",
"\tUserId int64 `json:\"-\"`\n",
"\tOrgId int64 `json:\"-\"`\n",
"\tTeamId int64 `json:\"-\"`\n",
"\tPermission PermissionType `json:\"permission\"`\n",
"}\n",
"\n",
"type RemoveTeamMemberCommand struct {\n",
"\tOrgId int64 `json:\"-\"`\n"
],
"labels": [
"keep",
"keep",
"keep",
"keep",
"keep",
"replace",
"replace",
"replace",
"replace",
"keep",
"keep",
"keep",
"keep"
],
"after_edit": [
"\tUserId int64 `json:\"-\"`\n",
"\tOrgId int64 `json:\"-\"`\n",
"\tTeamId int64 `json:\"-\"`\n",
"\tPermission PermissionType `json:\"permission\"`\n",
"\tProtectLastAdmin bool `json:\"-\"`\n"
],
"file_path": "pkg/models/team_member.go",
"type": "replace",
"edit_start_line_idx": 37
} | package sqlstore
import (
"bytes"
"fmt"
"time"
"github.com/grafana/grafana/pkg/bus"
m "github.com/grafana/grafana/pkg/models"
)
func init() {
bus.AddHandler("sql", CreateTeam)
bus.AddHandler("sql", UpdateTeam)
bus.AddHandler("sql", DeleteTeam)
bus.AddHandler("sql", SearchTeams)
bus.AddHandler("sql", GetTeamById)
bus.AddHandler("sql", GetTeamsByUser)
bus.AddHandler("sql", AddTeamMember)
bus.AddHandler("sql", UpdateTeamMember)
bus.AddHandler("sql", RemoveTeamMember)
bus.AddHandler("sql", GetTeamMembers)
}
func getTeamSelectSqlBase() string {
return `SELECT
team.id as id,
team.org_id,
team.name as name,
team.email as email,
(SELECT COUNT(*) from team_member where team_member.team_id = team.id) as member_count
FROM team as team `
}
func CreateTeam(cmd *m.CreateTeamCommand) error {
return inTransaction(func(sess *DBSession) error {
if isNameTaken, err := isTeamNameTaken(cmd.OrgId, cmd.Name, 0, sess); err != nil {
return err
} else if isNameTaken {
return m.ErrTeamNameTaken
}
team := m.Team{
Name: cmd.Name,
Email: cmd.Email,
OrgId: cmd.OrgId,
Created: time.Now(),
Updated: time.Now(),
}
_, err := sess.Insert(&team)
cmd.Result = team
return err
})
}
func UpdateTeam(cmd *m.UpdateTeamCommand) error {
return inTransaction(func(sess *DBSession) error {
if isNameTaken, err := isTeamNameTaken(cmd.OrgId, cmd.Name, cmd.Id, sess); err != nil {
return err
} else if isNameTaken {
return m.ErrTeamNameTaken
}
team := m.Team{
Name: cmd.Name,
Email: cmd.Email,
Updated: time.Now(),
}
sess.MustCols("email")
affectedRows, err := sess.ID(cmd.Id).Update(&team)
if err != nil {
return err
}
if affectedRows == 0 {
return m.ErrTeamNotFound
}
return nil
})
}
// DeleteTeam will delete a team, its member and any permissions connected to the team
func DeleteTeam(cmd *m.DeleteTeamCommand) error {
return inTransaction(func(sess *DBSession) error {
if _, err := teamExists(cmd.OrgId, cmd.Id, sess); err != nil {
return err
}
deletes := []string{
"DELETE FROM team_member WHERE org_id=? and team_id = ?",
"DELETE FROM team WHERE org_id=? and id = ?",
"DELETE FROM dashboard_acl WHERE org_id=? and team_id = ?",
}
for _, sql := range deletes {
_, err := sess.Exec(sql, cmd.OrgId, cmd.Id)
if err != nil {
return err
}
}
return nil
})
}
func teamExists(orgId int64, teamId int64, sess *DBSession) (bool, error) {
if res, err := sess.Query("SELECT 1 from team WHERE org_id=? and id=?", orgId, teamId); err != nil {
return false, err
} else if len(res) != 1 {
return false, m.ErrTeamNotFound
}
return true, nil
}
func isTeamNameTaken(orgId int64, name string, existingId int64, sess *DBSession) (bool, error) {
var team m.Team
exists, err := sess.Where("org_id=? and name=?", orgId, name).Get(&team)
if err != nil {
return false, nil
}
if exists && existingId != team.Id {
return true, nil
}
return false, nil
}
func SearchTeams(query *m.SearchTeamsQuery) error {
query.Result = m.SearchTeamQueryResult{
Teams: make([]*m.TeamDTO, 0),
}
queryWithWildcards := "%" + query.Query + "%"
var sql bytes.Buffer
params := make([]interface{}, 0)
sql.WriteString(getTeamSelectSqlBase())
if query.UserIdFilter > 0 {
sql.WriteString(`INNER JOIN team_member on team.id = team_member.team_id AND team_member.user_id = ?`)
params = append(params, query.UserIdFilter)
}
sql.WriteString(` WHERE team.org_id = ?`)
params = append(params, query.OrgId)
if query.Query != "" {
sql.WriteString(` and team.name ` + dialect.LikeStr() + ` ?`)
params = append(params, queryWithWildcards)
}
if query.Name != "" {
sql.WriteString(` and team.name = ?`)
params = append(params, query.Name)
}
sql.WriteString(` order by team.name asc`)
if query.Limit != 0 {
offset := query.Limit * (query.Page - 1)
sql.WriteString(dialect.LimitOffset(int64(query.Limit), int64(offset)))
}
if err := x.SQL(sql.String(), params...).Find(&query.Result.Teams); err != nil {
return err
}
team := m.Team{}
countSess := x.Table("team")
if query.Query != "" {
countSess.Where(`name `+dialect.LikeStr()+` ?`, queryWithWildcards)
}
if query.Name != "" {
countSess.Where("name=?", query.Name)
}
count, err := countSess.Count(&team)
query.Result.TotalCount = count
return err
}
func GetTeamById(query *m.GetTeamByIdQuery) error {
var sql bytes.Buffer
sql.WriteString(getTeamSelectSqlBase())
sql.WriteString(` WHERE team.org_id = ? and team.id = ?`)
var team m.TeamDTO
exists, err := x.SQL(sql.String(), query.OrgId, query.Id).Get(&team)
if err != nil {
return err
}
if !exists {
return m.ErrTeamNotFound
}
query.Result = &team
return nil
}
// GetTeamsByUser is used by the Guardian when checking a users' permissions
func GetTeamsByUser(query *m.GetTeamsByUserQuery) error {
query.Result = make([]*m.TeamDTO, 0)
var sql bytes.Buffer
sql.WriteString(getTeamSelectSqlBase())
sql.WriteString(` INNER JOIN team_member on team.id = team_member.team_id`)
sql.WriteString(` WHERE team.org_id = ? and team_member.user_id = ?`)
err := x.SQL(sql.String(), query.OrgId, query.UserId).Find(&query.Result)
return err
}
// AddTeamMember adds a user to a team
func AddTeamMember(cmd *m.AddTeamMemberCommand) error {
return inTransaction(func(sess *DBSession) error {
if res, err := sess.Query("SELECT 1 from team_member WHERE org_id=? and team_id=? and user_id=?", cmd.OrgId, cmd.TeamId, cmd.UserId); err != nil {
return err
} else if len(res) == 1 {
return m.ErrTeamMemberAlreadyAdded
}
if _, err := teamExists(cmd.OrgId, cmd.TeamId, sess); err != nil {
return err
}
entity := m.TeamMember{
OrgId: cmd.OrgId,
TeamId: cmd.TeamId,
UserId: cmd.UserId,
External: cmd.External,
Created: time.Now(),
Updated: time.Now(),
Permission: cmd.Permission,
}
_, err := sess.Insert(&entity)
return err
})
}
// UpdateTeamMember updates a team member
func UpdateTeamMember(cmd *m.UpdateTeamMemberCommand) error {
return inTransaction(func(sess *DBSession) error {
rawSql := `SELECT * FROM team_member WHERE org_id=? and team_id=? and user_id=?`
var member m.TeamMember
exists, err := sess.SQL(rawSql, cmd.OrgId, cmd.TeamId, cmd.UserId).Get(&member)
if err != nil {
return err
}
if !exists {
return m.ErrTeamMemberNotFound
}
if cmd.Permission != m.PERMISSION_ADMIN {
cmd.Permission = 0
}
member.Permission = cmd.Permission
_, err = sess.Cols("permission").Where("org_id=? and team_id=? and user_id=?", cmd.OrgId, cmd.TeamId, cmd.UserId).Update(member)
return err
})
}
// RemoveTeamMember removes a member from a team
func RemoveTeamMember(cmd *m.RemoveTeamMemberCommand) error {
return inTransaction(func(sess *DBSession) error {
if _, err := teamExists(cmd.OrgId, cmd.TeamId, sess); err != nil {
return err
}
if cmd.ProtectLastAdmin {
lastAdmin, err := isLastAdmin(sess, cmd.OrgId, cmd.TeamId, cmd.UserId)
if err != nil {
return err
}
if lastAdmin {
return m.ErrLastTeamAdmin
}
}
var rawSql = "DELETE FROM team_member WHERE org_id=? and team_id=? and user_id=?"
res, err := sess.Exec(rawSql, cmd.OrgId, cmd.TeamId, cmd.UserId)
if err != nil {
return err
}
rows, err := res.RowsAffected()
if rows == 0 {
return m.ErrTeamMemberNotFound
}
return err
})
}
func isLastAdmin(sess *DBSession, orgId int64, teamId int64, userId int64) (bool, error) {
rawSql := "SELECT user_id FROM team_member WHERE org_id=? and team_id=? and permission=?"
userIds := []*int64{}
err := sess.SQL(rawSql, orgId, teamId, m.PERMISSION_ADMIN).Find(&userIds)
if err != nil {
return false, err
}
isAdmin := false
for _, adminId := range userIds {
if userId == *adminId {
isAdmin = true
break
}
}
if isAdmin && len(userIds) == 1 {
return true, nil
}
return false, err
}
// GetTeamMembers return a list of members for the specified team
func GetTeamMembers(query *m.GetTeamMembersQuery) error {
query.Result = make([]*m.TeamMemberDTO, 0)
sess := x.Table("team_member")
sess.Join("INNER", x.Dialect().Quote("user"), fmt.Sprintf("team_member.user_id=%s.id", x.Dialect().Quote("user")))
if query.OrgId != 0 {
sess.Where("team_member.org_id=?", query.OrgId)
}
if query.TeamId != 0 {
sess.Where("team_member.team_id=?", query.TeamId)
}
if query.UserId != 0 {
sess.Where("team_member.user_id=?", query.UserId)
}
if query.External {
sess.Where("team_member.external=?", dialect.BooleanStr(true))
}
sess.Cols("team_member.org_id", "team_member.team_id", "team_member.user_id", "user.email", "user.login", "team_member.external", "team_member.permission")
sess.Asc("user.login", "user.email")
err := sess.Find(&query.Result)
return err
}
| pkg/services/sqlstore/team.go | 1 | https://github.com/grafana/grafana/commit/c420af16b14e586b96d190e52f13805e0491e16a | [
0.9961013793945312,
0.10083955526351929,
0.00016479323676321656,
0.0002504989388398826,
0.28828707337379456
] |
{
"id": 2,
"code_window": [
"\tExternal bool `json:\"-\"`\n",
"\tPermission PermissionType `json:\"-\"`\n",
"}\n",
"\n",
"type UpdateTeamMemberCommand struct {\n",
"\tUserId int64 `json:\"-\"`\n",
"\tOrgId int64 `json:\"-\"`\n",
"\tTeamId int64 `json:\"-\"`\n",
"\tPermission PermissionType `json:\"permission\"`\n",
"}\n",
"\n",
"type RemoveTeamMemberCommand struct {\n",
"\tOrgId int64 `json:\"-\"`\n"
],
"labels": [
"keep",
"keep",
"keep",
"keep",
"keep",
"replace",
"replace",
"replace",
"replace",
"keep",
"keep",
"keep",
"keep"
],
"after_edit": [
"\tUserId int64 `json:\"-\"`\n",
"\tOrgId int64 `json:\"-\"`\n",
"\tTeamId int64 `json:\"-\"`\n",
"\tPermission PermissionType `json:\"permission\"`\n",
"\tProtectLastAdmin bool `json:\"-\"`\n"
],
"file_path": "pkg/models/team_member.go",
"type": "replace",
"edit_start_line_idx": 37
} | // Copyright 2011 The Go Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.
package norm
import "unicode/utf8"
const (
maxNonStarters = 30
// The maximum number of characters needed for a buffer is
// maxNonStarters + 1 for the starter + 1 for the GCJ
maxBufferSize = maxNonStarters + 2
maxNFCExpansion = 3 // NFC(0x1D160)
maxNFKCExpansion = 18 // NFKC(0xFDFA)
maxByteBufferSize = utf8.UTFMax * maxBufferSize // 128
)
// ssState is used for reporting the segment state after inserting a rune.
// It is returned by streamSafe.next.
type ssState int
const (
// Indicates a rune was successfully added to the segment.
ssSuccess ssState = iota
// Indicates a rune starts a new segment and should not be added.
ssStarter
// Indicates a rune caused a segment overflow and a CGJ should be inserted.
ssOverflow
)
// streamSafe implements the policy of when a CGJ should be inserted.
type streamSafe uint8
// first inserts the first rune of a segment. It is a faster version of next if
// it is known p represents the first rune in a segment.
func (ss *streamSafe) first(p Properties) {
*ss = streamSafe(p.nTrailingNonStarters())
}
// insert returns a ssState value to indicate whether a rune represented by p
// can be inserted.
func (ss *streamSafe) next(p Properties) ssState {
if *ss > maxNonStarters {
panic("streamSafe was not reset")
}
n := p.nLeadingNonStarters()
if *ss += streamSafe(n); *ss > maxNonStarters {
*ss = 0
return ssOverflow
}
// The Stream-Safe Text Processing prescribes that the counting can stop
// as soon as a starter is encountered. However, there are some starters,
// like Jamo V and T, that can combine with other runes, leaving their
// successive non-starters appended to the previous, possibly causing an
// overflow. We will therefore consider any rune with a non-zero nLead to
// be a non-starter. Note that it always hold that if nLead > 0 then
// nLead == nTrail.
if n == 0 {
*ss = streamSafe(p.nTrailingNonStarters())
return ssStarter
}
return ssSuccess
}
// backwards is used for checking for overflow and segment starts
// when traversing a string backwards. Users do not need to call first
// for the first rune. The state of the streamSafe retains the count of
// the non-starters loaded.
func (ss *streamSafe) backwards(p Properties) ssState {
if *ss > maxNonStarters {
panic("streamSafe was not reset")
}
c := *ss + streamSafe(p.nTrailingNonStarters())
if c > maxNonStarters {
return ssOverflow
}
*ss = c
if p.nLeadingNonStarters() == 0 {
return ssStarter
}
return ssSuccess
}
func (ss streamSafe) isMax() bool {
return ss == maxNonStarters
}
// GraphemeJoiner is inserted after maxNonStarters non-starter runes.
const GraphemeJoiner = "\u034F"
// reorderBuffer is used to normalize a single segment. Characters inserted with
// insert are decomposed and reordered based on CCC. The compose method can
// be used to recombine characters. Note that the byte buffer does not hold
// the UTF-8 characters in order. Only the rune array is maintained in sorted
// order. flush writes the resulting segment to a byte array.
type reorderBuffer struct {
rune [maxBufferSize]Properties // Per character info.
byte [maxByteBufferSize]byte // UTF-8 buffer. Referenced by runeInfo.pos.
nbyte uint8 // Number or bytes.
ss streamSafe // For limiting length of non-starter sequence.
nrune int // Number of runeInfos.
f formInfo
src input
nsrc int
tmpBytes input
out []byte
flushF func(*reorderBuffer) bool
}
func (rb *reorderBuffer) init(f Form, src []byte) {
rb.f = *formTable[f]
rb.src.setBytes(src)
rb.nsrc = len(src)
rb.ss = 0
}
func (rb *reorderBuffer) initString(f Form, src string) {
rb.f = *formTable[f]
rb.src.setString(src)
rb.nsrc = len(src)
rb.ss = 0
}
func (rb *reorderBuffer) setFlusher(out []byte, f func(*reorderBuffer) bool) {
rb.out = out
rb.flushF = f
}
// reset discards all characters from the buffer.
func (rb *reorderBuffer) reset() {
rb.nrune = 0
rb.nbyte = 0
}
func (rb *reorderBuffer) doFlush() bool {
if rb.f.composing {
rb.compose()
}
res := rb.flushF(rb)
rb.reset()
return res
}
// appendFlush appends the normalized segment to rb.out.
func appendFlush(rb *reorderBuffer) bool {
for i := 0; i < rb.nrune; i++ {
start := rb.rune[i].pos
end := start + rb.rune[i].size
rb.out = append(rb.out, rb.byte[start:end]...)
}
return true
}
// flush appends the normalized segment to out and resets rb.
func (rb *reorderBuffer) flush(out []byte) []byte {
for i := 0; i < rb.nrune; i++ {
start := rb.rune[i].pos
end := start + rb.rune[i].size
out = append(out, rb.byte[start:end]...)
}
rb.reset()
return out
}
// flushCopy copies the normalized segment to buf and resets rb.
// It returns the number of bytes written to buf.
func (rb *reorderBuffer) flushCopy(buf []byte) int {
p := 0
for i := 0; i < rb.nrune; i++ {
runep := rb.rune[i]
p += copy(buf[p:], rb.byte[runep.pos:runep.pos+runep.size])
}
rb.reset()
return p
}
// insertOrdered inserts a rune in the buffer, ordered by Canonical Combining Class.
// It returns false if the buffer is not large enough to hold the rune.
// It is used internally by insert and insertString only.
func (rb *reorderBuffer) insertOrdered(info Properties) {
n := rb.nrune
b := rb.rune[:]
cc := info.ccc
if cc > 0 {
// Find insertion position + move elements to make room.
for ; n > 0; n-- {
if b[n-1].ccc <= cc {
break
}
b[n] = b[n-1]
}
}
rb.nrune += 1
pos := uint8(rb.nbyte)
rb.nbyte += utf8.UTFMax
info.pos = pos
b[n] = info
}
// insertErr is an error code returned by insert. Using this type instead
// of error improves performance up to 20% for many of the benchmarks.
type insertErr int
const (
iSuccess insertErr = -iota
iShortDst
iShortSrc
)
// insertFlush inserts the given rune in the buffer ordered by CCC.
// If a decomposition with multiple segments are encountered, they leading
// ones are flushed.
// It returns a non-zero error code if the rune was not inserted.
func (rb *reorderBuffer) insertFlush(src input, i int, info Properties) insertErr {
if rune := src.hangul(i); rune != 0 {
rb.decomposeHangul(rune)
return iSuccess
}
if info.hasDecomposition() {
return rb.insertDecomposed(info.Decomposition())
}
rb.insertSingle(src, i, info)
return iSuccess
}
// insertUnsafe inserts the given rune in the buffer ordered by CCC.
// It is assumed there is sufficient space to hold the runes. It is the
// responsibility of the caller to ensure this. This can be done by checking
// the state returned by the streamSafe type.
func (rb *reorderBuffer) insertUnsafe(src input, i int, info Properties) {
if rune := src.hangul(i); rune != 0 {
rb.decomposeHangul(rune)
}
if info.hasDecomposition() {
// TODO: inline.
rb.insertDecomposed(info.Decomposition())
} else {
rb.insertSingle(src, i, info)
}
}
// insertDecomposed inserts an entry in to the reorderBuffer for each rune
// in dcomp. dcomp must be a sequence of decomposed UTF-8-encoded runes.
// It flushes the buffer on each new segment start.
func (rb *reorderBuffer) insertDecomposed(dcomp []byte) insertErr {
rb.tmpBytes.setBytes(dcomp)
// As the streamSafe accounting already handles the counting for modifiers,
// we don't have to call next. However, we do need to keep the accounting
// intact when flushing the buffer.
for i := 0; i < len(dcomp); {
info := rb.f.info(rb.tmpBytes, i)
if info.BoundaryBefore() && rb.nrune > 0 && !rb.doFlush() {
return iShortDst
}
i += copy(rb.byte[rb.nbyte:], dcomp[i:i+int(info.size)])
rb.insertOrdered(info)
}
return iSuccess
}
// insertSingle inserts an entry in the reorderBuffer for the rune at
// position i. info is the runeInfo for the rune at position i.
func (rb *reorderBuffer) insertSingle(src input, i int, info Properties) {
src.copySlice(rb.byte[rb.nbyte:], i, i+int(info.size))
rb.insertOrdered(info)
}
// insertCGJ inserts a Combining Grapheme Joiner (0x034f) into rb.
func (rb *reorderBuffer) insertCGJ() {
rb.insertSingle(input{str: GraphemeJoiner}, 0, Properties{size: uint8(len(GraphemeJoiner))})
}
// appendRune inserts a rune at the end of the buffer. It is used for Hangul.
func (rb *reorderBuffer) appendRune(r rune) {
bn := rb.nbyte
sz := utf8.EncodeRune(rb.byte[bn:], rune(r))
rb.nbyte += utf8.UTFMax
rb.rune[rb.nrune] = Properties{pos: bn, size: uint8(sz)}
rb.nrune++
}
// assignRune sets a rune at position pos. It is used for Hangul and recomposition.
func (rb *reorderBuffer) assignRune(pos int, r rune) {
bn := rb.rune[pos].pos
sz := utf8.EncodeRune(rb.byte[bn:], rune(r))
rb.rune[pos] = Properties{pos: bn, size: uint8(sz)}
}
// runeAt returns the rune at position n. It is used for Hangul and recomposition.
func (rb *reorderBuffer) runeAt(n int) rune {
inf := rb.rune[n]
r, _ := utf8.DecodeRune(rb.byte[inf.pos : inf.pos+inf.size])
return r
}
// bytesAt returns the UTF-8 encoding of the rune at position n.
// It is used for Hangul and recomposition.
func (rb *reorderBuffer) bytesAt(n int) []byte {
inf := rb.rune[n]
return rb.byte[inf.pos : int(inf.pos)+int(inf.size)]
}
// For Hangul we combine algorithmically, instead of using tables.
const (
hangulBase = 0xAC00 // UTF-8(hangulBase) -> EA B0 80
hangulBase0 = 0xEA
hangulBase1 = 0xB0
hangulBase2 = 0x80
hangulEnd = hangulBase + jamoLVTCount // UTF-8(0xD7A4) -> ED 9E A4
hangulEnd0 = 0xED
hangulEnd1 = 0x9E
hangulEnd2 = 0xA4
jamoLBase = 0x1100 // UTF-8(jamoLBase) -> E1 84 00
jamoLBase0 = 0xE1
jamoLBase1 = 0x84
jamoLEnd = 0x1113
jamoVBase = 0x1161
jamoVEnd = 0x1176
jamoTBase = 0x11A7
jamoTEnd = 0x11C3
jamoTCount = 28
jamoVCount = 21
jamoVTCount = 21 * 28
jamoLVTCount = 19 * 21 * 28
)
const hangulUTF8Size = 3
func isHangul(b []byte) bool {
if len(b) < hangulUTF8Size {
return false
}
b0 := b[0]
if b0 < hangulBase0 {
return false
}
b1 := b[1]
switch {
case b0 == hangulBase0:
return b1 >= hangulBase1
case b0 < hangulEnd0:
return true
case b0 > hangulEnd0:
return false
case b1 < hangulEnd1:
return true
}
return b1 == hangulEnd1 && b[2] < hangulEnd2
}
func isHangulString(b string) bool {
if len(b) < hangulUTF8Size {
return false
}
b0 := b[0]
if b0 < hangulBase0 {
return false
}
b1 := b[1]
switch {
case b0 == hangulBase0:
return b1 >= hangulBase1
case b0 < hangulEnd0:
return true
case b0 > hangulEnd0:
return false
case b1 < hangulEnd1:
return true
}
return b1 == hangulEnd1 && b[2] < hangulEnd2
}
// Caller must ensure len(b) >= 2.
func isJamoVT(b []byte) bool {
// True if (rune & 0xff00) == jamoLBase
return b[0] == jamoLBase0 && (b[1]&0xFC) == jamoLBase1
}
func isHangulWithoutJamoT(b []byte) bool {
c, _ := utf8.DecodeRune(b)
c -= hangulBase
return c < jamoLVTCount && c%jamoTCount == 0
}
// decomposeHangul writes the decomposed Hangul to buf and returns the number
// of bytes written. len(buf) should be at least 9.
func decomposeHangul(buf []byte, r rune) int {
const JamoUTF8Len = 3
r -= hangulBase
x := r % jamoTCount
r /= jamoTCount
utf8.EncodeRune(buf, jamoLBase+r/jamoVCount)
utf8.EncodeRune(buf[JamoUTF8Len:], jamoVBase+r%jamoVCount)
if x != 0 {
utf8.EncodeRune(buf[2*JamoUTF8Len:], jamoTBase+x)
return 3 * JamoUTF8Len
}
return 2 * JamoUTF8Len
}
// decomposeHangul algorithmically decomposes a Hangul rune into
// its Jamo components.
// See http://unicode.org/reports/tr15/#Hangul for details on decomposing Hangul.
func (rb *reorderBuffer) decomposeHangul(r rune) {
r -= hangulBase
x := r % jamoTCount
r /= jamoTCount
rb.appendRune(jamoLBase + r/jamoVCount)
rb.appendRune(jamoVBase + r%jamoVCount)
if x != 0 {
rb.appendRune(jamoTBase + x)
}
}
// combineHangul algorithmically combines Jamo character components into Hangul.
// See http://unicode.org/reports/tr15/#Hangul for details on combining Hangul.
func (rb *reorderBuffer) combineHangul(s, i, k int) {
b := rb.rune[:]
bn := rb.nrune
for ; i < bn; i++ {
cccB := b[k-1].ccc
cccC := b[i].ccc
if cccB == 0 {
s = k - 1
}
if s != k-1 && cccB >= cccC {
// b[i] is blocked by greater-equal cccX below it
b[k] = b[i]
k++
} else {
l := rb.runeAt(s) // also used to compare to hangulBase
v := rb.runeAt(i) // also used to compare to jamoT
switch {
case jamoLBase <= l && l < jamoLEnd &&
jamoVBase <= v && v < jamoVEnd:
// 11xx plus 116x to LV
rb.assignRune(s, hangulBase+
(l-jamoLBase)*jamoVTCount+(v-jamoVBase)*jamoTCount)
case hangulBase <= l && l < hangulEnd &&
jamoTBase < v && v < jamoTEnd &&
((l-hangulBase)%jamoTCount) == 0:
// ACxx plus 11Ax to LVT
rb.assignRune(s, l+v-jamoTBase)
default:
b[k] = b[i]
k++
}
}
}
rb.nrune = k
}
// compose recombines the runes in the buffer.
// It should only be used to recompose a single segment, as it will not
// handle alternations between Hangul and non-Hangul characters correctly.
func (rb *reorderBuffer) compose() {
// UAX #15, section X5 , including Corrigendum #5
// "In any character sequence beginning with starter S, a character C is
// blocked from S if and only if there is some character B between S
// and C, and either B is a starter or it has the same or higher
// combining class as C."
bn := rb.nrune
if bn == 0 {
return
}
k := 1
b := rb.rune[:]
for s, i := 0, 1; i < bn; i++ {
if isJamoVT(rb.bytesAt(i)) {
// Redo from start in Hangul mode. Necessary to support
// U+320E..U+321E in NFKC mode.
rb.combineHangul(s, i, k)
return
}
ii := b[i]
// We can only use combineForward as a filter if we later
// get the info for the combined character. This is more
// expensive than using the filter. Using combinesBackward()
// is safe.
if ii.combinesBackward() {
cccB := b[k-1].ccc
cccC := ii.ccc
blocked := false // b[i] blocked by starter or greater or equal CCC?
if cccB == 0 {
s = k - 1
} else {
blocked = s != k-1 && cccB >= cccC
}
if !blocked {
combined := combine(rb.runeAt(s), rb.runeAt(i))
if combined != 0 {
rb.assignRune(s, combined)
continue
}
}
}
b[k] = b[i]
k++
}
rb.nrune = k
}
| vendor/golang.org/x/text/unicode/norm/composition.go | 0 | https://github.com/grafana/grafana/commit/c420af16b14e586b96d190e52f13805e0491e16a | [
0.00017636433767620474,
0.00017175477114506066,
0.00016440807667095214,
0.0001717511040624231,
0.0000024434696115349652
] |
{
"id": 2,
"code_window": [
"\tExternal bool `json:\"-\"`\n",
"\tPermission PermissionType `json:\"-\"`\n",
"}\n",
"\n",
"type UpdateTeamMemberCommand struct {\n",
"\tUserId int64 `json:\"-\"`\n",
"\tOrgId int64 `json:\"-\"`\n",
"\tTeamId int64 `json:\"-\"`\n",
"\tPermission PermissionType `json:\"permission\"`\n",
"}\n",
"\n",
"type RemoveTeamMemberCommand struct {\n",
"\tOrgId int64 `json:\"-\"`\n"
],
"labels": [
"keep",
"keep",
"keep",
"keep",
"keep",
"replace",
"replace",
"replace",
"replace",
"keep",
"keep",
"keep",
"keep"
],
"after_edit": [
"\tUserId int64 `json:\"-\"`\n",
"\tOrgId int64 `json:\"-\"`\n",
"\tTeamId int64 `json:\"-\"`\n",
"\tPermission PermissionType `json:\"permission\"`\n",
"\tProtectLastAdmin bool `json:\"-\"`\n"
],
"file_path": "pkg/models/team_member.go",
"type": "replace",
"edit_start_line_idx": 37
} | // linux/mksysnum.pl -Wall -Werror -static -I/tmp/include /tmp/include/asm/unistd.h
// Code generated by the command above; see README.md. DO NOT EDIT.
// +build ppc64,linux
package unix
const (
SYS_RESTART_SYSCALL = 0
SYS_EXIT = 1
SYS_FORK = 2
SYS_READ = 3
SYS_WRITE = 4
SYS_OPEN = 5
SYS_CLOSE = 6
SYS_WAITPID = 7
SYS_CREAT = 8
SYS_LINK = 9
SYS_UNLINK = 10
SYS_EXECVE = 11
SYS_CHDIR = 12
SYS_TIME = 13
SYS_MKNOD = 14
SYS_CHMOD = 15
SYS_LCHOWN = 16
SYS_BREAK = 17
SYS_OLDSTAT = 18
SYS_LSEEK = 19
SYS_GETPID = 20
SYS_MOUNT = 21
SYS_UMOUNT = 22
SYS_SETUID = 23
SYS_GETUID = 24
SYS_STIME = 25
SYS_PTRACE = 26
SYS_ALARM = 27
SYS_OLDFSTAT = 28
SYS_PAUSE = 29
SYS_UTIME = 30
SYS_STTY = 31
SYS_GTTY = 32
SYS_ACCESS = 33
SYS_NICE = 34
SYS_FTIME = 35
SYS_SYNC = 36
SYS_KILL = 37
SYS_RENAME = 38
SYS_MKDIR = 39
SYS_RMDIR = 40
SYS_DUP = 41
SYS_PIPE = 42
SYS_TIMES = 43
SYS_PROF = 44
SYS_BRK = 45
SYS_SETGID = 46
SYS_GETGID = 47
SYS_SIGNAL = 48
SYS_GETEUID = 49
SYS_GETEGID = 50
SYS_ACCT = 51
SYS_UMOUNT2 = 52
SYS_LOCK = 53
SYS_IOCTL = 54
SYS_FCNTL = 55
SYS_MPX = 56
SYS_SETPGID = 57
SYS_ULIMIT = 58
SYS_OLDOLDUNAME = 59
SYS_UMASK = 60
SYS_CHROOT = 61
SYS_USTAT = 62
SYS_DUP2 = 63
SYS_GETPPID = 64
SYS_GETPGRP = 65
SYS_SETSID = 66
SYS_SIGACTION = 67
SYS_SGETMASK = 68
SYS_SSETMASK = 69
SYS_SETREUID = 70
SYS_SETREGID = 71
SYS_SIGSUSPEND = 72
SYS_SIGPENDING = 73
SYS_SETHOSTNAME = 74
SYS_SETRLIMIT = 75
SYS_GETRLIMIT = 76
SYS_GETRUSAGE = 77
SYS_GETTIMEOFDAY = 78
SYS_SETTIMEOFDAY = 79
SYS_GETGROUPS = 80
SYS_SETGROUPS = 81
SYS_SELECT = 82
SYS_SYMLINK = 83
SYS_OLDLSTAT = 84
SYS_READLINK = 85
SYS_USELIB = 86
SYS_SWAPON = 87
SYS_REBOOT = 88
SYS_READDIR = 89
SYS_MMAP = 90
SYS_MUNMAP = 91
SYS_TRUNCATE = 92
SYS_FTRUNCATE = 93
SYS_FCHMOD = 94
SYS_FCHOWN = 95
SYS_GETPRIORITY = 96
SYS_SETPRIORITY = 97
SYS_PROFIL = 98
SYS_STATFS = 99
SYS_FSTATFS = 100
SYS_IOPERM = 101
SYS_SOCKETCALL = 102
SYS_SYSLOG = 103
SYS_SETITIMER = 104
SYS_GETITIMER = 105
SYS_STAT = 106
SYS_LSTAT = 107
SYS_FSTAT = 108
SYS_OLDUNAME = 109
SYS_IOPL = 110
SYS_VHANGUP = 111
SYS_IDLE = 112
SYS_VM86 = 113
SYS_WAIT4 = 114
SYS_SWAPOFF = 115
SYS_SYSINFO = 116
SYS_IPC = 117
SYS_FSYNC = 118
SYS_SIGRETURN = 119
SYS_CLONE = 120
SYS_SETDOMAINNAME = 121
SYS_UNAME = 122
SYS_MODIFY_LDT = 123
SYS_ADJTIMEX = 124
SYS_MPROTECT = 125
SYS_SIGPROCMASK = 126
SYS_CREATE_MODULE = 127
SYS_INIT_MODULE = 128
SYS_DELETE_MODULE = 129
SYS_GET_KERNEL_SYMS = 130
SYS_QUOTACTL = 131
SYS_GETPGID = 132
SYS_FCHDIR = 133
SYS_BDFLUSH = 134
SYS_SYSFS = 135
SYS_PERSONALITY = 136
SYS_AFS_SYSCALL = 137
SYS_SETFSUID = 138
SYS_SETFSGID = 139
SYS__LLSEEK = 140
SYS_GETDENTS = 141
SYS__NEWSELECT = 142
SYS_FLOCK = 143
SYS_MSYNC = 144
SYS_READV = 145
SYS_WRITEV = 146
SYS_GETSID = 147
SYS_FDATASYNC = 148
SYS__SYSCTL = 149
SYS_MLOCK = 150
SYS_MUNLOCK = 151
SYS_MLOCKALL = 152
SYS_MUNLOCKALL = 153
SYS_SCHED_SETPARAM = 154
SYS_SCHED_GETPARAM = 155
SYS_SCHED_SETSCHEDULER = 156
SYS_SCHED_GETSCHEDULER = 157
SYS_SCHED_YIELD = 158
SYS_SCHED_GET_PRIORITY_MAX = 159
SYS_SCHED_GET_PRIORITY_MIN = 160
SYS_SCHED_RR_GET_INTERVAL = 161
SYS_NANOSLEEP = 162
SYS_MREMAP = 163
SYS_SETRESUID = 164
SYS_GETRESUID = 165
SYS_QUERY_MODULE = 166
SYS_POLL = 167
SYS_NFSSERVCTL = 168
SYS_SETRESGID = 169
SYS_GETRESGID = 170
SYS_PRCTL = 171
SYS_RT_SIGRETURN = 172
SYS_RT_SIGACTION = 173
SYS_RT_SIGPROCMASK = 174
SYS_RT_SIGPENDING = 175
SYS_RT_SIGTIMEDWAIT = 176
SYS_RT_SIGQUEUEINFO = 177
SYS_RT_SIGSUSPEND = 178
SYS_PREAD64 = 179
SYS_PWRITE64 = 180
SYS_CHOWN = 181
SYS_GETCWD = 182
SYS_CAPGET = 183
SYS_CAPSET = 184
SYS_SIGALTSTACK = 185
SYS_SENDFILE = 186
SYS_GETPMSG = 187
SYS_PUTPMSG = 188
SYS_VFORK = 189
SYS_UGETRLIMIT = 190
SYS_READAHEAD = 191
SYS_PCICONFIG_READ = 198
SYS_PCICONFIG_WRITE = 199
SYS_PCICONFIG_IOBASE = 200
SYS_MULTIPLEXER = 201
SYS_GETDENTS64 = 202
SYS_PIVOT_ROOT = 203
SYS_MADVISE = 205
SYS_MINCORE = 206
SYS_GETTID = 207
SYS_TKILL = 208
SYS_SETXATTR = 209
SYS_LSETXATTR = 210
SYS_FSETXATTR = 211
SYS_GETXATTR = 212
SYS_LGETXATTR = 213
SYS_FGETXATTR = 214
SYS_LISTXATTR = 215
SYS_LLISTXATTR = 216
SYS_FLISTXATTR = 217
SYS_REMOVEXATTR = 218
SYS_LREMOVEXATTR = 219
SYS_FREMOVEXATTR = 220
SYS_FUTEX = 221
SYS_SCHED_SETAFFINITY = 222
SYS_SCHED_GETAFFINITY = 223
SYS_TUXCALL = 225
SYS_IO_SETUP = 227
SYS_IO_DESTROY = 228
SYS_IO_GETEVENTS = 229
SYS_IO_SUBMIT = 230
SYS_IO_CANCEL = 231
SYS_SET_TID_ADDRESS = 232
SYS_FADVISE64 = 233
SYS_EXIT_GROUP = 234
SYS_LOOKUP_DCOOKIE = 235
SYS_EPOLL_CREATE = 236
SYS_EPOLL_CTL = 237
SYS_EPOLL_WAIT = 238
SYS_REMAP_FILE_PAGES = 239
SYS_TIMER_CREATE = 240
SYS_TIMER_SETTIME = 241
SYS_TIMER_GETTIME = 242
SYS_TIMER_GETOVERRUN = 243
SYS_TIMER_DELETE = 244
SYS_CLOCK_SETTIME = 245
SYS_CLOCK_GETTIME = 246
SYS_CLOCK_GETRES = 247
SYS_CLOCK_NANOSLEEP = 248
SYS_SWAPCONTEXT = 249
SYS_TGKILL = 250
SYS_UTIMES = 251
SYS_STATFS64 = 252
SYS_FSTATFS64 = 253
SYS_RTAS = 255
SYS_SYS_DEBUG_SETCONTEXT = 256
SYS_MIGRATE_PAGES = 258
SYS_MBIND = 259
SYS_GET_MEMPOLICY = 260
SYS_SET_MEMPOLICY = 261
SYS_MQ_OPEN = 262
SYS_MQ_UNLINK = 263
SYS_MQ_TIMEDSEND = 264
SYS_MQ_TIMEDRECEIVE = 265
SYS_MQ_NOTIFY = 266
SYS_MQ_GETSETATTR = 267
SYS_KEXEC_LOAD = 268
SYS_ADD_KEY = 269
SYS_REQUEST_KEY = 270
SYS_KEYCTL = 271
SYS_WAITID = 272
SYS_IOPRIO_SET = 273
SYS_IOPRIO_GET = 274
SYS_INOTIFY_INIT = 275
SYS_INOTIFY_ADD_WATCH = 276
SYS_INOTIFY_RM_WATCH = 277
SYS_SPU_RUN = 278
SYS_SPU_CREATE = 279
SYS_PSELECT6 = 280
SYS_PPOLL = 281
SYS_UNSHARE = 282
SYS_SPLICE = 283
SYS_TEE = 284
SYS_VMSPLICE = 285
SYS_OPENAT = 286
SYS_MKDIRAT = 287
SYS_MKNODAT = 288
SYS_FCHOWNAT = 289
SYS_FUTIMESAT = 290
SYS_NEWFSTATAT = 291
SYS_UNLINKAT = 292
SYS_RENAMEAT = 293
SYS_LINKAT = 294
SYS_SYMLINKAT = 295
SYS_READLINKAT = 296
SYS_FCHMODAT = 297
SYS_FACCESSAT = 298
SYS_GET_ROBUST_LIST = 299
SYS_SET_ROBUST_LIST = 300
SYS_MOVE_PAGES = 301
SYS_GETCPU = 302
SYS_EPOLL_PWAIT = 303
SYS_UTIMENSAT = 304
SYS_SIGNALFD = 305
SYS_TIMERFD_CREATE = 306
SYS_EVENTFD = 307
SYS_SYNC_FILE_RANGE2 = 308
SYS_FALLOCATE = 309
SYS_SUBPAGE_PROT = 310
SYS_TIMERFD_SETTIME = 311
SYS_TIMERFD_GETTIME = 312
SYS_SIGNALFD4 = 313
SYS_EVENTFD2 = 314
SYS_EPOLL_CREATE1 = 315
SYS_DUP3 = 316
SYS_PIPE2 = 317
SYS_INOTIFY_INIT1 = 318
SYS_PERF_EVENT_OPEN = 319
SYS_PREADV = 320
SYS_PWRITEV = 321
SYS_RT_TGSIGQUEUEINFO = 322
SYS_FANOTIFY_INIT = 323
SYS_FANOTIFY_MARK = 324
SYS_PRLIMIT64 = 325
SYS_SOCKET = 326
SYS_BIND = 327
SYS_CONNECT = 328
SYS_LISTEN = 329
SYS_ACCEPT = 330
SYS_GETSOCKNAME = 331
SYS_GETPEERNAME = 332
SYS_SOCKETPAIR = 333
SYS_SEND = 334
SYS_SENDTO = 335
SYS_RECV = 336
SYS_RECVFROM = 337
SYS_SHUTDOWN = 338
SYS_SETSOCKOPT = 339
SYS_GETSOCKOPT = 340
SYS_SENDMSG = 341
SYS_RECVMSG = 342
SYS_RECVMMSG = 343
SYS_ACCEPT4 = 344
SYS_NAME_TO_HANDLE_AT = 345
SYS_OPEN_BY_HANDLE_AT = 346
SYS_CLOCK_ADJTIME = 347
SYS_SYNCFS = 348
SYS_SENDMMSG = 349
SYS_SETNS = 350
SYS_PROCESS_VM_READV = 351
SYS_PROCESS_VM_WRITEV = 352
SYS_FINIT_MODULE = 353
SYS_KCMP = 354
SYS_SCHED_SETATTR = 355
SYS_SCHED_GETATTR = 356
SYS_RENAMEAT2 = 357
SYS_SECCOMP = 358
SYS_GETRANDOM = 359
SYS_MEMFD_CREATE = 360
SYS_BPF = 361
SYS_EXECVEAT = 362
SYS_SWITCH_ENDIAN = 363
SYS_USERFAULTFD = 364
SYS_MEMBARRIER = 365
SYS_MLOCK2 = 378
SYS_COPY_FILE_RANGE = 379
SYS_PREADV2 = 380
SYS_PWRITEV2 = 381
SYS_KEXEC_FILE_LOAD = 382
SYS_STATX = 383
SYS_PKEY_ALLOC = 384
SYS_PKEY_FREE = 385
SYS_PKEY_MPROTECT = 386
)
| vendor/golang.org/x/sys/unix/zsysnum_linux_ppc64.go | 0 | https://github.com/grafana/grafana/commit/c420af16b14e586b96d190e52f13805e0491e16a | [
0.00029645583708770573,
0.00020962250709999353,
0.0001647200115257874,
0.00020548475731629878,
0.000036879642721032724
] |
{
"id": 2,
"code_window": [
"\tExternal bool `json:\"-\"`\n",
"\tPermission PermissionType `json:\"-\"`\n",
"}\n",
"\n",
"type UpdateTeamMemberCommand struct {\n",
"\tUserId int64 `json:\"-\"`\n",
"\tOrgId int64 `json:\"-\"`\n",
"\tTeamId int64 `json:\"-\"`\n",
"\tPermission PermissionType `json:\"permission\"`\n",
"}\n",
"\n",
"type RemoveTeamMemberCommand struct {\n",
"\tOrgId int64 `json:\"-\"`\n"
],
"labels": [
"keep",
"keep",
"keep",
"keep",
"keep",
"replace",
"replace",
"replace",
"replace",
"keep",
"keep",
"keep",
"keep"
],
"after_edit": [
"\tUserId int64 `json:\"-\"`\n",
"\tOrgId int64 `json:\"-\"`\n",
"\tTeamId int64 `json:\"-\"`\n",
"\tPermission PermissionType `json:\"permission\"`\n",
"\tProtectLastAdmin bool `json:\"-\"`\n"
],
"file_path": "pkg/models/team_member.go",
"type": "replace",
"edit_start_line_idx": 37
} | import _ from 'lodash';
import moment from 'moment';
import tinycolor from 'tinycolor2';
import {
OK_COLOR,
ALERTING_COLOR,
NO_DATA_COLOR,
PENDING_COLOR,
DEFAULT_ANNOTATION_COLOR,
REGION_FILL_ALPHA,
} from '@grafana/ui';
import { MetricsPanelCtrl } from 'app/plugins/sdk';
import { AnnotationEvent } from './event';
export class EventManager {
event: AnnotationEvent;
editorOpen: boolean;
constructor(private panelCtrl: MetricsPanelCtrl) {}
editorClosed() {
this.event = null;
this.editorOpen = false;
this.panelCtrl.render();
}
editorOpened() {
this.editorOpen = true;
}
updateTime(range) {
if (!this.event) {
this.event = new AnnotationEvent();
this.event.dashboardId = this.panelCtrl.dashboard.id;
this.event.panelId = this.panelCtrl.panel.id;
}
// update time
this.event.time = moment(range.from);
this.event.isRegion = false;
if (range.to) {
this.event.timeEnd = moment(range.to);
this.event.isRegion = true;
}
this.panelCtrl.render();
}
editEvent(event, elem?) {
this.event = event;
this.panelCtrl.render();
}
addFlotEvents(annotations, flotOptions) {
if (!this.event && annotations.length === 0) {
return;
}
const types = {
$__alerting: {
color: ALERTING_COLOR,
position: 'BOTTOM',
markerSize: 5,
},
$__ok: {
color: OK_COLOR,
position: 'BOTTOM',
markerSize: 5,
},
$__no_data: {
color: NO_DATA_COLOR,
position: 'BOTTOM',
markerSize: 5,
},
$__pending: {
color: PENDING_COLOR,
position: 'BOTTOM',
markerSize: 5,
},
$__editing: {
color: DEFAULT_ANNOTATION_COLOR,
position: 'BOTTOM',
markerSize: 5,
},
};
if (this.event) {
if (this.event.isRegion) {
annotations = [
{
isRegion: true,
min: this.event.time.valueOf(),
timeEnd: this.event.timeEnd.valueOf(),
text: this.event.text,
eventType: '$__editing',
editModel: this.event,
},
];
} else {
annotations = [
{
min: this.event.time.valueOf(),
text: this.event.text,
editModel: this.event,
eventType: '$__editing',
},
];
}
} else {
// annotations from query
for (let i = 0; i < annotations.length; i++) {
const item = annotations[i];
// add properties used by jquery flot events
item.min = item.time;
item.max = item.time;
item.eventType = item.source.name;
if (item.newState) {
item.eventType = '$__' + item.newState;
continue;
}
if (!types[item.source.name]) {
types[item.source.name] = {
color: item.source.iconColor,
position: 'BOTTOM',
markerSize: 5,
};
}
}
}
const regions = getRegions(annotations);
addRegionMarking(regions, flotOptions);
const eventSectionHeight = 20;
const eventSectionMargin = 7;
flotOptions.grid.eventSectionHeight = eventSectionMargin;
flotOptions.xaxis.eventSectionHeight = eventSectionHeight;
flotOptions.events = {
levels: _.keys(types).length + 1,
data: annotations,
types: types,
manager: this,
};
}
}
function getRegions(events) {
return _.filter(events, 'isRegion');
}
function addRegionMarking(regions, flotOptions) {
const markings = flotOptions.grid.markings;
const defaultColor = DEFAULT_ANNOTATION_COLOR;
let fillColor;
_.each(regions, region => {
if (region.source) {
fillColor = region.source.iconColor || defaultColor;
} else {
fillColor = defaultColor;
}
fillColor = addAlphaToRGB(fillColor, REGION_FILL_ALPHA);
markings.push({
xaxis: { from: region.min, to: region.timeEnd },
color: fillColor,
});
});
}
function addAlphaToRGB(colorString: string, alpha: number): string {
const color = tinycolor(colorString);
if (color.isValid()) {
color.setAlpha(alpha);
return color.toRgbString();
} else {
return colorString;
}
}
| public/app/features/annotations/event_manager.ts | 0 | https://github.com/grafana/grafana/commit/c420af16b14e586b96d190e52f13805e0491e16a | [
0.0001754572440404445,
0.0001699421991361305,
0.0001662913418840617,
0.0001700480788713321,
0.000002277666908412357
] |
{
"id": 3,
"code_window": [
"\n",
"\t\tif !exists {\n",
"\t\t\treturn m.ErrTeamMemberNotFound\n",
"\t\t}\n",
"\n",
"\t\tif cmd.Permission != m.PERMISSION_ADMIN {\n",
"\t\t\tcmd.Permission = 0\n",
"\t\t}\n",
"\n"
],
"labels": [
"keep",
"keep",
"keep",
"keep",
"add",
"keep",
"keep",
"keep",
"keep"
],
"after_edit": [
"\t\tif cmd.ProtectLastAdmin {\n",
"\t\t\tlastAdmin, err := isLastAdmin(sess, cmd.OrgId, cmd.TeamId, cmd.UserId)\n",
"\t\t\tif err != nil {\n",
"\t\t\t\treturn err\n",
"\t\t\t}\n",
"\n",
"\t\t\tif lastAdmin {\n",
"\t\t\t\treturn m.ErrLastTeamAdmin\n",
"\t\t\t}\n",
"\n",
"\t\t}\n",
"\n"
],
"file_path": "pkg/services/sqlstore/team.go",
"type": "add",
"edit_start_line_idx": 273
} | package sqlstore
import (
"context"
"fmt"
"testing"
. "github.com/smartystreets/goconvey/convey"
m "github.com/grafana/grafana/pkg/models"
)
func TestTeamCommandsAndQueries(t *testing.T) {
Convey("Testing Team commands & queries", t, func() {
InitTestDB(t)
Convey("Given saved users and two teams", func() {
var userIds []int64
for i := 0; i < 5; i++ {
userCmd := &m.CreateUserCommand{
Email: fmt.Sprint("user", i, "@test.com"),
Name: fmt.Sprint("user", i),
Login: fmt.Sprint("loginuser", i),
}
err := CreateUser(context.Background(), userCmd)
So(err, ShouldBeNil)
userIds = append(userIds, userCmd.Result.Id)
}
var testOrgId int64 = 1
group1 := m.CreateTeamCommand{OrgId: testOrgId, Name: "group1 name", Email: "[email protected]"}
group2 := m.CreateTeamCommand{OrgId: testOrgId, Name: "group2 name", Email: "[email protected]"}
err := CreateTeam(&group1)
So(err, ShouldBeNil)
err = CreateTeam(&group2)
So(err, ShouldBeNil)
Convey("Should be able to create teams and add users", func() {
query := &m.SearchTeamsQuery{OrgId: testOrgId, Name: "group1 name", Page: 1, Limit: 10}
err = SearchTeams(query)
So(err, ShouldBeNil)
So(query.Page, ShouldEqual, 1)
team1 := query.Result.Teams[0]
So(team1.Name, ShouldEqual, "group1 name")
So(team1.Email, ShouldEqual, "[email protected]")
So(team1.OrgId, ShouldEqual, testOrgId)
err = AddTeamMember(&m.AddTeamMemberCommand{OrgId: testOrgId, TeamId: team1.Id, UserId: userIds[0]})
So(err, ShouldBeNil)
err = AddTeamMember(&m.AddTeamMemberCommand{OrgId: testOrgId, TeamId: team1.Id, UserId: userIds[1], External: true})
So(err, ShouldBeNil)
q1 := &m.GetTeamMembersQuery{OrgId: testOrgId, TeamId: team1.Id}
err = GetTeamMembers(q1)
So(err, ShouldBeNil)
So(q1.Result, ShouldHaveLength, 2)
So(q1.Result[0].TeamId, ShouldEqual, team1.Id)
So(q1.Result[0].Login, ShouldEqual, "loginuser0")
So(q1.Result[0].OrgId, ShouldEqual, testOrgId)
So(q1.Result[1].TeamId, ShouldEqual, team1.Id)
So(q1.Result[1].Login, ShouldEqual, "loginuser1")
So(q1.Result[1].OrgId, ShouldEqual, testOrgId)
So(q1.Result[1].External, ShouldEqual, true)
q2 := &m.GetTeamMembersQuery{OrgId: testOrgId, TeamId: team1.Id, External: true}
err = GetTeamMembers(q2)
So(err, ShouldBeNil)
So(q2.Result, ShouldHaveLength, 1)
So(q2.Result[0].TeamId, ShouldEqual, team1.Id)
So(q2.Result[0].Login, ShouldEqual, "loginuser1")
So(q2.Result[0].OrgId, ShouldEqual, testOrgId)
So(q2.Result[0].External, ShouldEqual, true)
})
Convey("Should be able to update users in a team", func() {
userId := userIds[0]
team := group1.Result
addMemberCmd := m.AddTeamMemberCommand{OrgId: testOrgId, TeamId: team.Id, UserId: userId}
err = AddTeamMember(&addMemberCmd)
So(err, ShouldBeNil)
qBeforeUpdate := &m.GetTeamMembersQuery{OrgId: testOrgId, TeamId: team.Id}
err = GetTeamMembers(qBeforeUpdate)
So(err, ShouldBeNil)
So(qBeforeUpdate.Result[0].Permission, ShouldEqual, 0)
err = UpdateTeamMember(&m.UpdateTeamMemberCommand{
UserId: userId,
OrgId: testOrgId,
TeamId: team.Id,
Permission: m.PERMISSION_ADMIN,
})
So(err, ShouldBeNil)
qAfterUpdate := &m.GetTeamMembersQuery{OrgId: testOrgId, TeamId: team.Id}
err = GetTeamMembers(qAfterUpdate)
So(err, ShouldBeNil)
So(qAfterUpdate.Result[0].Permission, ShouldEqual, m.PERMISSION_ADMIN)
})
Convey("Should default to member permission level when updating a user with invalid permission level", func() {
userID := userIds[0]
team := group1.Result
addMemberCmd := m.AddTeamMemberCommand{OrgId: testOrgId, TeamId: team.Id, UserId: userID}
err = AddTeamMember(&addMemberCmd)
So(err, ShouldBeNil)
qBeforeUpdate := &m.GetTeamMembersQuery{OrgId: testOrgId, TeamId: team.Id}
err = GetTeamMembers(qBeforeUpdate)
So(err, ShouldBeNil)
So(qBeforeUpdate.Result[0].Permission, ShouldEqual, 0)
invalidPermissionLevel := m.PERMISSION_EDIT
err = UpdateTeamMember(&m.UpdateTeamMemberCommand{
UserId: userID,
OrgId: testOrgId,
TeamId: team.Id,
Permission: invalidPermissionLevel,
})
So(err, ShouldBeNil)
qAfterUpdate := &m.GetTeamMembersQuery{OrgId: testOrgId, TeamId: team.Id}
err = GetTeamMembers(qAfterUpdate)
So(err, ShouldBeNil)
So(qAfterUpdate.Result[0].Permission, ShouldEqual, 0)
})
Convey("Shouldn't be able to update a user not in the team.", func() {
err = UpdateTeamMember(&m.UpdateTeamMemberCommand{
UserId: 1,
OrgId: testOrgId,
TeamId: group1.Result.Id,
Permission: m.PERMISSION_ADMIN,
})
So(err, ShouldEqual, m.ErrTeamMemberNotFound)
})
Convey("Should be able to search for teams", func() {
query := &m.SearchTeamsQuery{OrgId: testOrgId, Query: "group", Page: 1}
err = SearchTeams(query)
So(err, ShouldBeNil)
So(len(query.Result.Teams), ShouldEqual, 2)
So(query.Result.TotalCount, ShouldEqual, 2)
query2 := &m.SearchTeamsQuery{OrgId: testOrgId, Query: ""}
err = SearchTeams(query2)
So(err, ShouldBeNil)
So(len(query2.Result.Teams), ShouldEqual, 2)
})
Convey("Should be able to return all teams a user is member of", func() {
groupId := group2.Result.Id
err := AddTeamMember(&m.AddTeamMemberCommand{OrgId: testOrgId, TeamId: groupId, UserId: userIds[0]})
So(err, ShouldBeNil)
query := &m.GetTeamsByUserQuery{OrgId: testOrgId, UserId: userIds[0]}
err = GetTeamsByUser(query)
So(err, ShouldBeNil)
So(len(query.Result), ShouldEqual, 1)
So(query.Result[0].Name, ShouldEqual, "group2 name")
So(query.Result[0].Email, ShouldEqual, "[email protected]")
})
Convey("Should be able to remove users from a group", func() {
err = AddTeamMember(&m.AddTeamMemberCommand{OrgId: testOrgId, TeamId: group1.Result.Id, UserId: userIds[0]})
So(err, ShouldBeNil)
err = RemoveTeamMember(&m.RemoveTeamMemberCommand{OrgId: testOrgId, TeamId: group1.Result.Id, UserId: userIds[0]})
So(err, ShouldBeNil)
q2 := &m.GetTeamMembersQuery{OrgId: testOrgId, TeamId: group1.Result.Id}
err = GetTeamMembers(q2)
So(err, ShouldBeNil)
So(len(q2.Result), ShouldEqual, 0)
})
Convey("When ProtectLastAdmin is set to true", func() {
err = AddTeamMember(&m.AddTeamMemberCommand{OrgId: testOrgId, TeamId: group1.Result.Id, UserId: userIds[0], Permission: m.PERMISSION_ADMIN})
So(err, ShouldBeNil)
Convey("A user should not be able to remove the last admin", func() {
err = RemoveTeamMember(&m.RemoveTeamMemberCommand{OrgId: testOrgId, TeamId: group1.Result.Id, UserId: userIds[0], ProtectLastAdmin: true})
So(err, ShouldEqual, m.ErrLastTeamAdmin)
})
Convey("A user should be able to remove an admin if there are other admins", func() {
err = AddTeamMember(&m.AddTeamMemberCommand{OrgId: testOrgId, TeamId: group1.Result.Id, UserId: userIds[1], Permission: m.PERMISSION_ADMIN})
err = RemoveTeamMember(&m.RemoveTeamMemberCommand{OrgId: testOrgId, TeamId: group1.Result.Id, UserId: userIds[0], ProtectLastAdmin: true})
So(err, ShouldEqual, nil)
})
})
Convey("Should be able to remove a group with users and permissions", func() {
groupId := group2.Result.Id
err := AddTeamMember(&m.AddTeamMemberCommand{OrgId: testOrgId, TeamId: groupId, UserId: userIds[1]})
So(err, ShouldBeNil)
err = AddTeamMember(&m.AddTeamMemberCommand{OrgId: testOrgId, TeamId: groupId, UserId: userIds[2]})
So(err, ShouldBeNil)
err = testHelperUpdateDashboardAcl(1, m.DashboardAcl{DashboardId: 1, OrgId: testOrgId, Permission: m.PERMISSION_EDIT, TeamId: groupId})
So(err, ShouldBeNil)
err = DeleteTeam(&m.DeleteTeamCommand{OrgId: testOrgId, Id: groupId})
So(err, ShouldBeNil)
query := &m.GetTeamByIdQuery{OrgId: testOrgId, Id: groupId}
err = GetTeamById(query)
So(err, ShouldEqual, m.ErrTeamNotFound)
permQuery := &m.GetDashboardAclInfoListQuery{DashboardId: 1, OrgId: testOrgId}
err = GetDashboardAclInfoList(permQuery)
So(err, ShouldBeNil)
So(len(permQuery.Result), ShouldEqual, 0)
})
})
})
}
| pkg/services/sqlstore/team_test.go | 1 | https://github.com/grafana/grafana/commit/c420af16b14e586b96d190e52f13805e0491e16a | [
0.007426316384226084,
0.0010414874413982034,
0.00016329926438629627,
0.00031092038261704147,
0.0017639980651438236
] |
{
"id": 3,
"code_window": [
"\n",
"\t\tif !exists {\n",
"\t\t\treturn m.ErrTeamMemberNotFound\n",
"\t\t}\n",
"\n",
"\t\tif cmd.Permission != m.PERMISSION_ADMIN {\n",
"\t\t\tcmd.Permission = 0\n",
"\t\t}\n",
"\n"
],
"labels": [
"keep",
"keep",
"keep",
"keep",
"add",
"keep",
"keep",
"keep",
"keep"
],
"after_edit": [
"\t\tif cmd.ProtectLastAdmin {\n",
"\t\t\tlastAdmin, err := isLastAdmin(sess, cmd.OrgId, cmd.TeamId, cmd.UserId)\n",
"\t\t\tif err != nil {\n",
"\t\t\t\treturn err\n",
"\t\t\t}\n",
"\n",
"\t\t\tif lastAdmin {\n",
"\t\t\t\treturn m.ErrLastTeamAdmin\n",
"\t\t\t}\n",
"\n",
"\t\t}\n",
"\n"
],
"file_path": "pkg/services/sqlstore/team.go",
"type": "add",
"edit_start_line_idx": 273
} | // Copyright 2017 The Xorm Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.
package xorm
import (
"fmt"
"reflect"
"strconv"
"strings"
"time"
"github.com/go-xorm/builder"
"github.com/go-xorm/core"
)
func (session *Session) genQuerySQL(sqlorArgs ...interface{}) (string, []interface{}, error) {
if len(sqlorArgs) > 0 {
return convertSQLOrArgs(sqlorArgs...)
}
if session.statement.RawSQL != "" {
return session.statement.RawSQL, session.statement.RawParams, nil
}
if len(session.statement.TableName()) <= 0 {
return "", nil, ErrTableNotFound
}
var columnStr = session.statement.ColumnStr
if len(session.statement.selectStr) > 0 {
columnStr = session.statement.selectStr
} else {
if session.statement.JoinStr == "" {
if columnStr == "" {
if session.statement.GroupByStr != "" {
columnStr = session.engine.quoteColumns(session.statement.GroupByStr)
} else {
columnStr = session.statement.genColumnStr()
}
}
} else {
if columnStr == "" {
if session.statement.GroupByStr != "" {
columnStr = session.engine.quoteColumns(session.statement.GroupByStr)
} else {
columnStr = "*"
}
}
}
if columnStr == "" {
columnStr = "*"
}
}
if err := session.statement.processIDParam(); err != nil {
return "", nil, err
}
condSQL, condArgs, err := builder.ToSQL(session.statement.cond)
if err != nil {
return "", nil, err
}
args := append(session.statement.joinArgs, condArgs...)
sqlStr, err := session.statement.genSelectSQL(columnStr, condSQL, true, true)
if err != nil {
return "", nil, err
}
// for mssql and use limit
qs := strings.Count(sqlStr, "?")
if len(args)*2 == qs {
args = append(args, args...)
}
return sqlStr, args, nil
}
// Query runs a raw sql and return records as []map[string][]byte
func (session *Session) Query(sqlorArgs ...interface{}) ([]map[string][]byte, error) {
if session.isAutoClose {
defer session.Close()
}
sqlStr, args, err := session.genQuerySQL(sqlorArgs...)
if err != nil {
return nil, err
}
return session.queryBytes(sqlStr, args...)
}
func value2String(rawValue *reflect.Value) (str string, err error) {
aa := reflect.TypeOf((*rawValue).Interface())
vv := reflect.ValueOf((*rawValue).Interface())
switch aa.Kind() {
case reflect.Int, reflect.Int8, reflect.Int16, reflect.Int32, reflect.Int64:
str = strconv.FormatInt(vv.Int(), 10)
case reflect.Uint, reflect.Uint8, reflect.Uint16, reflect.Uint32, reflect.Uint64:
str = strconv.FormatUint(vv.Uint(), 10)
case reflect.Float32, reflect.Float64:
str = strconv.FormatFloat(vv.Float(), 'f', -1, 64)
case reflect.String:
str = vv.String()
case reflect.Array, reflect.Slice:
switch aa.Elem().Kind() {
case reflect.Uint8:
data := rawValue.Interface().([]byte)
str = string(data)
if str == "\x00" {
str = "0"
}
default:
err = fmt.Errorf("Unsupported struct type %v", vv.Type().Name())
}
// time type
case reflect.Struct:
if aa.ConvertibleTo(core.TimeType) {
str = vv.Convert(core.TimeType).Interface().(time.Time).Format(time.RFC3339Nano)
} else {
err = fmt.Errorf("Unsupported struct type %v", vv.Type().Name())
}
case reflect.Bool:
str = strconv.FormatBool(vv.Bool())
case reflect.Complex128, reflect.Complex64:
str = fmt.Sprintf("%v", vv.Complex())
/* TODO: unsupported types below
case reflect.Map:
case reflect.Ptr:
case reflect.Uintptr:
case reflect.UnsafePointer:
case reflect.Chan, reflect.Func, reflect.Interface:
*/
default:
err = fmt.Errorf("Unsupported struct type %v", vv.Type().Name())
}
return
}
func row2mapStr(rows *core.Rows, fields []string) (resultsMap map[string]string, err error) {
result := make(map[string]string)
scanResultContainers := make([]interface{}, len(fields))
for i := 0; i < len(fields); i++ {
var scanResultContainer interface{}
scanResultContainers[i] = &scanResultContainer
}
if err := rows.Scan(scanResultContainers...); err != nil {
return nil, err
}
for ii, key := range fields {
rawValue := reflect.Indirect(reflect.ValueOf(scanResultContainers[ii]))
// if row is null then as empty string
if rawValue.Interface() == nil {
result[key] = ""
continue
}
if data, err := value2String(&rawValue); err == nil {
result[key] = data
} else {
return nil, err
}
}
return result, nil
}
func row2sliceStr(rows *core.Rows, fields []string) (results []string, err error) {
result := make([]string, 0, len(fields))
scanResultContainers := make([]interface{}, len(fields))
for i := 0; i < len(fields); i++ {
var scanResultContainer interface{}
scanResultContainers[i] = &scanResultContainer
}
if err := rows.Scan(scanResultContainers...); err != nil {
return nil, err
}
for i := 0; i < len(fields); i++ {
rawValue := reflect.Indirect(reflect.ValueOf(scanResultContainers[i]))
// if row is null then as empty string
if rawValue.Interface() == nil {
result = append(result, "")
continue
}
if data, err := value2String(&rawValue); err == nil {
result = append(result, data)
} else {
return nil, err
}
}
return result, nil
}
func rows2Strings(rows *core.Rows) (resultsSlice []map[string]string, err error) {
fields, err := rows.Columns()
if err != nil {
return nil, err
}
for rows.Next() {
result, err := row2mapStr(rows, fields)
if err != nil {
return nil, err
}
resultsSlice = append(resultsSlice, result)
}
return resultsSlice, nil
}
func rows2SliceString(rows *core.Rows) (resultsSlice [][]string, err error) {
fields, err := rows.Columns()
if err != nil {
return nil, err
}
for rows.Next() {
record, err := row2sliceStr(rows, fields)
if err != nil {
return nil, err
}
resultsSlice = append(resultsSlice, record)
}
return resultsSlice, nil
}
// QueryString runs a raw sql and return records as []map[string]string
func (session *Session) QueryString(sqlorArgs ...interface{}) ([]map[string]string, error) {
if session.isAutoClose {
defer session.Close()
}
sqlStr, args, err := session.genQuerySQL(sqlorArgs...)
if err != nil {
return nil, err
}
rows, err := session.queryRows(sqlStr, args...)
if err != nil {
return nil, err
}
defer rows.Close()
return rows2Strings(rows)
}
// QuerySliceString runs a raw sql and return records as [][]string
func (session *Session) QuerySliceString(sqlorArgs ...interface{}) ([][]string, error) {
if session.isAutoClose {
defer session.Close()
}
sqlStr, args, err := session.genQuerySQL(sqlorArgs...)
if err != nil {
return nil, err
}
rows, err := session.queryRows(sqlStr, args...)
if err != nil {
return nil, err
}
defer rows.Close()
return rows2SliceString(rows)
}
func row2mapInterface(rows *core.Rows, fields []string) (resultsMap map[string]interface{}, err error) {
resultsMap = make(map[string]interface{}, len(fields))
scanResultContainers := make([]interface{}, len(fields))
for i := 0; i < len(fields); i++ {
var scanResultContainer interface{}
scanResultContainers[i] = &scanResultContainer
}
if err := rows.Scan(scanResultContainers...); err != nil {
return nil, err
}
for ii, key := range fields {
resultsMap[key] = reflect.Indirect(reflect.ValueOf(scanResultContainers[ii])).Interface()
}
return
}
func rows2Interfaces(rows *core.Rows) (resultsSlice []map[string]interface{}, err error) {
fields, err := rows.Columns()
if err != nil {
return nil, err
}
for rows.Next() {
result, err := row2mapInterface(rows, fields)
if err != nil {
return nil, err
}
resultsSlice = append(resultsSlice, result)
}
return resultsSlice, nil
}
// QueryInterface runs a raw sql and return records as []map[string]interface{}
func (session *Session) QueryInterface(sqlorArgs ...interface{}) ([]map[string]interface{}, error) {
if session.isAutoClose {
defer session.Close()
}
sqlStr, args, err := session.genQuerySQL(sqlorArgs...)
if err != nil {
return nil, err
}
rows, err := session.queryRows(sqlStr, args...)
if err != nil {
return nil, err
}
defer rows.Close()
return rows2Interfaces(rows)
}
| vendor/github.com/go-xorm/xorm/session_query.go | 0 | https://github.com/grafana/grafana/commit/c420af16b14e586b96d190e52f13805e0491e16a | [
0.0002017914957832545,
0.0001721900189295411,
0.00016414256242569536,
0.00017088695312850177,
0.000006827973265899345
] |
{
"id": 3,
"code_window": [
"\n",
"\t\tif !exists {\n",
"\t\t\treturn m.ErrTeamMemberNotFound\n",
"\t\t}\n",
"\n",
"\t\tif cmd.Permission != m.PERMISSION_ADMIN {\n",
"\t\t\tcmd.Permission = 0\n",
"\t\t}\n",
"\n"
],
"labels": [
"keep",
"keep",
"keep",
"keep",
"add",
"keep",
"keep",
"keep",
"keep"
],
"after_edit": [
"\t\tif cmd.ProtectLastAdmin {\n",
"\t\t\tlastAdmin, err := isLastAdmin(sess, cmd.OrgId, cmd.TeamId, cmd.UserId)\n",
"\t\t\tif err != nil {\n",
"\t\t\t\treturn err\n",
"\t\t\t}\n",
"\n",
"\t\t\tif lastAdmin {\n",
"\t\t\t\treturn m.ErrLastTeamAdmin\n",
"\t\t\t}\n",
"\n",
"\t\t}\n",
"\n"
],
"file_path": "pkg/services/sqlstore/team.go",
"type": "add",
"edit_start_line_idx": 273
} | import React from 'react';
import { shallow } from 'enzyme';
import { DataSourcesListPage, Props } from './DataSourcesListPage';
import { NavModel } from 'app/types';
import { DataSourceSettings } from '@grafana/ui/src/types';
import { LayoutModes } from '../../core/components/LayoutSelector/LayoutSelector';
import { getMockDataSources } from './__mocks__/dataSourcesMocks';
import { setDataSourcesSearchQuery, setDataSourcesLayoutMode } from './state/actions';
const setup = (propOverrides?: object) => {
const props: Props = {
dataSources: [] as DataSourceSettings[],
layoutMode: LayoutModes.Grid,
loadDataSources: jest.fn(),
navModel: {
main: {
text: 'Configuration',
},
node: {
text: 'Data Sources',
},
} as NavModel,
dataSourcesCount: 0,
searchQuery: '',
setDataSourcesSearchQuery,
setDataSourcesLayoutMode,
hasFetched: false,
};
Object.assign(props, propOverrides);
return shallow(<DataSourcesListPage {...props} />);
};
describe('Render', () => {
it('should render component', () => {
const wrapper = setup();
expect(wrapper).toMatchSnapshot();
});
it('should render action bar and datasources', () => {
const wrapper = setup({
dataSources: getMockDataSources(5),
dataSourcesCount: 5,
hasFetched: true,
});
expect(wrapper).toMatchSnapshot();
});
});
| public/app/features/datasources/DataSourcesListPage.test.tsx | 0 | https://github.com/grafana/grafana/commit/c420af16b14e586b96d190e52f13805e0491e16a | [
0.00017513064085505903,
0.00017359845514874905,
0.00017184659373015165,
0.00017375077004544437,
0.0000010684750577638624
] |
{
"id": 3,
"code_window": [
"\n",
"\t\tif !exists {\n",
"\t\t\treturn m.ErrTeamMemberNotFound\n",
"\t\t}\n",
"\n",
"\t\tif cmd.Permission != m.PERMISSION_ADMIN {\n",
"\t\t\tcmd.Permission = 0\n",
"\t\t}\n",
"\n"
],
"labels": [
"keep",
"keep",
"keep",
"keep",
"add",
"keep",
"keep",
"keep",
"keep"
],
"after_edit": [
"\t\tif cmd.ProtectLastAdmin {\n",
"\t\t\tlastAdmin, err := isLastAdmin(sess, cmd.OrgId, cmd.TeamId, cmd.UserId)\n",
"\t\t\tif err != nil {\n",
"\t\t\t\treturn err\n",
"\t\t\t}\n",
"\n",
"\t\t\tif lastAdmin {\n",
"\t\t\t\treturn m.ErrLastTeamAdmin\n",
"\t\t\t}\n",
"\n",
"\t\t}\n",
"\n"
],
"file_path": "pkg/services/sqlstore/team.go",
"type": "add",
"edit_start_line_idx": 273
} | export { FolderPickerCtrl } from './FolderPickerCtrl';
| public/app/features/dashboard/components/FolderPicker/index.ts | 0 | https://github.com/grafana/grafana/commit/c420af16b14e586b96d190e52f13805e0491e16a | [
0.00017299281898885965,
0.00017299281898885965,
0.00017299281898885965,
0.00017299281898885965,
0
] |
{
"id": 4,
"code_window": [
"\t\t\t\t\tSo(err, ShouldEqual, m.ErrLastTeamAdmin)\n",
"\t\t\t\t})\n",
"\n",
"\t\t\t\tConvey(\"A user should be able to remove an admin if there are other admins\", func() {\n",
"\t\t\t\t\terr = AddTeamMember(&m.AddTeamMemberCommand{OrgId: testOrgId, TeamId: group1.Result.Id, UserId: userIds[1], Permission: m.PERMISSION_ADMIN})\n",
"\t\t\t\t\terr = RemoveTeamMember(&m.RemoveTeamMemberCommand{OrgId: testOrgId, TeamId: group1.Result.Id, UserId: userIds[0], ProtectLastAdmin: true})\n",
"\t\t\t\t\tSo(err, ShouldEqual, nil)\n",
"\t\t\t\t})\n",
"\n"
],
"labels": [
"keep",
"keep",
"keep",
"keep",
"replace",
"keep",
"keep",
"keep",
"keep"
],
"after_edit": [
"\t\t\t\t\tAddTeamMember(&m.AddTeamMemberCommand{OrgId: testOrgId, TeamId: group1.Result.Id, UserId: userIds[1], Permission: m.PERMISSION_ADMIN})\n"
],
"file_path": "pkg/services/sqlstore/team_test.go",
"type": "replace",
"edit_start_line_idx": 192
} | package sqlstore
import (
"context"
"fmt"
"testing"
. "github.com/smartystreets/goconvey/convey"
m "github.com/grafana/grafana/pkg/models"
)
func TestTeamCommandsAndQueries(t *testing.T) {
Convey("Testing Team commands & queries", t, func() {
InitTestDB(t)
Convey("Given saved users and two teams", func() {
var userIds []int64
for i := 0; i < 5; i++ {
userCmd := &m.CreateUserCommand{
Email: fmt.Sprint("user", i, "@test.com"),
Name: fmt.Sprint("user", i),
Login: fmt.Sprint("loginuser", i),
}
err := CreateUser(context.Background(), userCmd)
So(err, ShouldBeNil)
userIds = append(userIds, userCmd.Result.Id)
}
var testOrgId int64 = 1
group1 := m.CreateTeamCommand{OrgId: testOrgId, Name: "group1 name", Email: "[email protected]"}
group2 := m.CreateTeamCommand{OrgId: testOrgId, Name: "group2 name", Email: "[email protected]"}
err := CreateTeam(&group1)
So(err, ShouldBeNil)
err = CreateTeam(&group2)
So(err, ShouldBeNil)
Convey("Should be able to create teams and add users", func() {
query := &m.SearchTeamsQuery{OrgId: testOrgId, Name: "group1 name", Page: 1, Limit: 10}
err = SearchTeams(query)
So(err, ShouldBeNil)
So(query.Page, ShouldEqual, 1)
team1 := query.Result.Teams[0]
So(team1.Name, ShouldEqual, "group1 name")
So(team1.Email, ShouldEqual, "[email protected]")
So(team1.OrgId, ShouldEqual, testOrgId)
err = AddTeamMember(&m.AddTeamMemberCommand{OrgId: testOrgId, TeamId: team1.Id, UserId: userIds[0]})
So(err, ShouldBeNil)
err = AddTeamMember(&m.AddTeamMemberCommand{OrgId: testOrgId, TeamId: team1.Id, UserId: userIds[1], External: true})
So(err, ShouldBeNil)
q1 := &m.GetTeamMembersQuery{OrgId: testOrgId, TeamId: team1.Id}
err = GetTeamMembers(q1)
So(err, ShouldBeNil)
So(q1.Result, ShouldHaveLength, 2)
So(q1.Result[0].TeamId, ShouldEqual, team1.Id)
So(q1.Result[0].Login, ShouldEqual, "loginuser0")
So(q1.Result[0].OrgId, ShouldEqual, testOrgId)
So(q1.Result[1].TeamId, ShouldEqual, team1.Id)
So(q1.Result[1].Login, ShouldEqual, "loginuser1")
So(q1.Result[1].OrgId, ShouldEqual, testOrgId)
So(q1.Result[1].External, ShouldEqual, true)
q2 := &m.GetTeamMembersQuery{OrgId: testOrgId, TeamId: team1.Id, External: true}
err = GetTeamMembers(q2)
So(err, ShouldBeNil)
So(q2.Result, ShouldHaveLength, 1)
So(q2.Result[0].TeamId, ShouldEqual, team1.Id)
So(q2.Result[0].Login, ShouldEqual, "loginuser1")
So(q2.Result[0].OrgId, ShouldEqual, testOrgId)
So(q2.Result[0].External, ShouldEqual, true)
})
Convey("Should be able to update users in a team", func() {
userId := userIds[0]
team := group1.Result
addMemberCmd := m.AddTeamMemberCommand{OrgId: testOrgId, TeamId: team.Id, UserId: userId}
err = AddTeamMember(&addMemberCmd)
So(err, ShouldBeNil)
qBeforeUpdate := &m.GetTeamMembersQuery{OrgId: testOrgId, TeamId: team.Id}
err = GetTeamMembers(qBeforeUpdate)
So(err, ShouldBeNil)
So(qBeforeUpdate.Result[0].Permission, ShouldEqual, 0)
err = UpdateTeamMember(&m.UpdateTeamMemberCommand{
UserId: userId,
OrgId: testOrgId,
TeamId: team.Id,
Permission: m.PERMISSION_ADMIN,
})
So(err, ShouldBeNil)
qAfterUpdate := &m.GetTeamMembersQuery{OrgId: testOrgId, TeamId: team.Id}
err = GetTeamMembers(qAfterUpdate)
So(err, ShouldBeNil)
So(qAfterUpdate.Result[0].Permission, ShouldEqual, m.PERMISSION_ADMIN)
})
Convey("Should default to member permission level when updating a user with invalid permission level", func() {
userID := userIds[0]
team := group1.Result
addMemberCmd := m.AddTeamMemberCommand{OrgId: testOrgId, TeamId: team.Id, UserId: userID}
err = AddTeamMember(&addMemberCmd)
So(err, ShouldBeNil)
qBeforeUpdate := &m.GetTeamMembersQuery{OrgId: testOrgId, TeamId: team.Id}
err = GetTeamMembers(qBeforeUpdate)
So(err, ShouldBeNil)
So(qBeforeUpdate.Result[0].Permission, ShouldEqual, 0)
invalidPermissionLevel := m.PERMISSION_EDIT
err = UpdateTeamMember(&m.UpdateTeamMemberCommand{
UserId: userID,
OrgId: testOrgId,
TeamId: team.Id,
Permission: invalidPermissionLevel,
})
So(err, ShouldBeNil)
qAfterUpdate := &m.GetTeamMembersQuery{OrgId: testOrgId, TeamId: team.Id}
err = GetTeamMembers(qAfterUpdate)
So(err, ShouldBeNil)
So(qAfterUpdate.Result[0].Permission, ShouldEqual, 0)
})
Convey("Shouldn't be able to update a user not in the team.", func() {
err = UpdateTeamMember(&m.UpdateTeamMemberCommand{
UserId: 1,
OrgId: testOrgId,
TeamId: group1.Result.Id,
Permission: m.PERMISSION_ADMIN,
})
So(err, ShouldEqual, m.ErrTeamMemberNotFound)
})
Convey("Should be able to search for teams", func() {
query := &m.SearchTeamsQuery{OrgId: testOrgId, Query: "group", Page: 1}
err = SearchTeams(query)
So(err, ShouldBeNil)
So(len(query.Result.Teams), ShouldEqual, 2)
So(query.Result.TotalCount, ShouldEqual, 2)
query2 := &m.SearchTeamsQuery{OrgId: testOrgId, Query: ""}
err = SearchTeams(query2)
So(err, ShouldBeNil)
So(len(query2.Result.Teams), ShouldEqual, 2)
})
Convey("Should be able to return all teams a user is member of", func() {
groupId := group2.Result.Id
err := AddTeamMember(&m.AddTeamMemberCommand{OrgId: testOrgId, TeamId: groupId, UserId: userIds[0]})
So(err, ShouldBeNil)
query := &m.GetTeamsByUserQuery{OrgId: testOrgId, UserId: userIds[0]}
err = GetTeamsByUser(query)
So(err, ShouldBeNil)
So(len(query.Result), ShouldEqual, 1)
So(query.Result[0].Name, ShouldEqual, "group2 name")
So(query.Result[0].Email, ShouldEqual, "[email protected]")
})
Convey("Should be able to remove users from a group", func() {
err = AddTeamMember(&m.AddTeamMemberCommand{OrgId: testOrgId, TeamId: group1.Result.Id, UserId: userIds[0]})
So(err, ShouldBeNil)
err = RemoveTeamMember(&m.RemoveTeamMemberCommand{OrgId: testOrgId, TeamId: group1.Result.Id, UserId: userIds[0]})
So(err, ShouldBeNil)
q2 := &m.GetTeamMembersQuery{OrgId: testOrgId, TeamId: group1.Result.Id}
err = GetTeamMembers(q2)
So(err, ShouldBeNil)
So(len(q2.Result), ShouldEqual, 0)
})
Convey("When ProtectLastAdmin is set to true", func() {
err = AddTeamMember(&m.AddTeamMemberCommand{OrgId: testOrgId, TeamId: group1.Result.Id, UserId: userIds[0], Permission: m.PERMISSION_ADMIN})
So(err, ShouldBeNil)
Convey("A user should not be able to remove the last admin", func() {
err = RemoveTeamMember(&m.RemoveTeamMemberCommand{OrgId: testOrgId, TeamId: group1.Result.Id, UserId: userIds[0], ProtectLastAdmin: true})
So(err, ShouldEqual, m.ErrLastTeamAdmin)
})
Convey("A user should be able to remove an admin if there are other admins", func() {
err = AddTeamMember(&m.AddTeamMemberCommand{OrgId: testOrgId, TeamId: group1.Result.Id, UserId: userIds[1], Permission: m.PERMISSION_ADMIN})
err = RemoveTeamMember(&m.RemoveTeamMemberCommand{OrgId: testOrgId, TeamId: group1.Result.Id, UserId: userIds[0], ProtectLastAdmin: true})
So(err, ShouldEqual, nil)
})
})
Convey("Should be able to remove a group with users and permissions", func() {
groupId := group2.Result.Id
err := AddTeamMember(&m.AddTeamMemberCommand{OrgId: testOrgId, TeamId: groupId, UserId: userIds[1]})
So(err, ShouldBeNil)
err = AddTeamMember(&m.AddTeamMemberCommand{OrgId: testOrgId, TeamId: groupId, UserId: userIds[2]})
So(err, ShouldBeNil)
err = testHelperUpdateDashboardAcl(1, m.DashboardAcl{DashboardId: 1, OrgId: testOrgId, Permission: m.PERMISSION_EDIT, TeamId: groupId})
So(err, ShouldBeNil)
err = DeleteTeam(&m.DeleteTeamCommand{OrgId: testOrgId, Id: groupId})
So(err, ShouldBeNil)
query := &m.GetTeamByIdQuery{OrgId: testOrgId, Id: groupId}
err = GetTeamById(query)
So(err, ShouldEqual, m.ErrTeamNotFound)
permQuery := &m.GetDashboardAclInfoListQuery{DashboardId: 1, OrgId: testOrgId}
err = GetDashboardAclInfoList(permQuery)
So(err, ShouldBeNil)
So(len(permQuery.Result), ShouldEqual, 0)
})
})
})
}
| pkg/services/sqlstore/team_test.go | 1 | https://github.com/grafana/grafana/commit/c420af16b14e586b96d190e52f13805e0491e16a | [
0.9971634745597839,
0.33743104338645935,
0.0001689078490016982,
0.015388011001050472,
0.4312041699886322
] |
{
"id": 4,
"code_window": [
"\t\t\t\t\tSo(err, ShouldEqual, m.ErrLastTeamAdmin)\n",
"\t\t\t\t})\n",
"\n",
"\t\t\t\tConvey(\"A user should be able to remove an admin if there are other admins\", func() {\n",
"\t\t\t\t\terr = AddTeamMember(&m.AddTeamMemberCommand{OrgId: testOrgId, TeamId: group1.Result.Id, UserId: userIds[1], Permission: m.PERMISSION_ADMIN})\n",
"\t\t\t\t\terr = RemoveTeamMember(&m.RemoveTeamMemberCommand{OrgId: testOrgId, TeamId: group1.Result.Id, UserId: userIds[0], ProtectLastAdmin: true})\n",
"\t\t\t\t\tSo(err, ShouldEqual, nil)\n",
"\t\t\t\t})\n",
"\n"
],
"labels": [
"keep",
"keep",
"keep",
"keep",
"replace",
"keep",
"keep",
"keep",
"keep"
],
"after_edit": [
"\t\t\t\t\tAddTeamMember(&m.AddTeamMemberCommand{OrgId: testOrgId, TeamId: group1.Result.Id, UserId: userIds[1], Permission: m.PERMISSION_ADMIN})\n"
],
"file_path": "pkg/services/sqlstore/team_test.go",
"type": "replace",
"edit_start_line_idx": 192
} | Mozilla Public License Version 2.0
==================================
1. Definitions
--------------
1.1. "Contributor"
means each individual or legal entity that creates, contributes to
the creation of, or owns Covered Software.
1.2. "Contributor Version"
means the combination of the Contributions of others (if any) used
by a Contributor and that particular Contributor's Contribution.
1.3. "Contribution"
means Covered Software of a particular Contributor.
1.4. "Covered Software"
means Source Code Form to which the initial Contributor has attached
the notice in Exhibit A, the Executable Form of such Source Code
Form, and Modifications of such Source Code Form, in each case
including portions thereof.
1.5. "Incompatible With Secondary Licenses"
means
(a) that the initial Contributor has attached the notice described
in Exhibit B to the Covered Software; or
(b) that the Covered Software was made available under the terms of
version 1.1 or earlier of the License, but not also under the
terms of a Secondary License.
1.6. "Executable Form"
means any form of the work other than Source Code Form.
1.7. "Larger Work"
means a work that combines Covered Software with other material, in
a separate file or files, that is not Covered Software.
1.8. "License"
means this document.
1.9. "Licensable"
means having the right to grant, to the maximum extent possible,
whether at the time of the initial grant or subsequently, any and
all of the rights conveyed by this License.
1.10. "Modifications"
means any of the following:
(a) any file in Source Code Form that results from an addition to,
deletion from, or modification of the contents of Covered
Software; or
(b) any new file in Source Code Form that contains any Covered
Software.
1.11. "Patent Claims" of a Contributor
means any patent claim(s), including without limitation, method,
process, and apparatus claims, in any patent Licensable by such
Contributor that would be infringed, but for the grant of the
License, by the making, using, selling, offering for sale, having
made, import, or transfer of either its Contributions or its
Contributor Version.
1.12. "Secondary License"
means either the GNU General Public License, Version 2.0, the GNU
Lesser General Public License, Version 2.1, the GNU Affero General
Public License, Version 3.0, or any later versions of those
licenses.
1.13. "Source Code Form"
means the form of the work preferred for making modifications.
1.14. "You" (or "Your")
means an individual or a legal entity exercising rights under this
License. For legal entities, "You" includes any entity that
controls, is controlled by, or is under common control with You. For
purposes of this definition, "control" means (a) the power, direct
or indirect, to cause the direction or management of such entity,
whether by contract or otherwise, or (b) ownership of more than
fifty percent (50%) of the outstanding shares or beneficial
ownership of such entity.
2. License Grants and Conditions
--------------------------------
2.1. Grants
Each Contributor hereby grants You a world-wide, royalty-free,
non-exclusive license:
(a) under intellectual property rights (other than patent or trademark)
Licensable by such Contributor to use, reproduce, make available,
modify, display, perform, distribute, and otherwise exploit its
Contributions, either on an unmodified basis, with Modifications, or
as part of a Larger Work; and
(b) under Patent Claims of such Contributor to make, use, sell, offer
for sale, have made, import, and otherwise transfer either its
Contributions or its Contributor Version.
2.2. Effective Date
The licenses granted in Section 2.1 with respect to any Contribution
become effective for each Contribution on the date the Contributor first
distributes such Contribution.
2.3. Limitations on Grant Scope
The licenses granted in this Section 2 are the only rights granted under
this License. No additional rights or licenses will be implied from the
distribution or licensing of Covered Software under this License.
Notwithstanding Section 2.1(b) above, no patent license is granted by a
Contributor:
(a) for any code that a Contributor has removed from Covered Software;
or
(b) for infringements caused by: (i) Your and any other third party's
modifications of Covered Software, or (ii) the combination of its
Contributions with other software (except as part of its Contributor
Version); or
(c) under Patent Claims infringed by Covered Software in the absence of
its Contributions.
This License does not grant any rights in the trademarks, service marks,
or logos of any Contributor (except as may be necessary to comply with
the notice requirements in Section 3.4).
2.4. Subsequent Licenses
No Contributor makes additional grants as a result of Your choice to
distribute the Covered Software under a subsequent version of this
License (see Section 10.2) or under the terms of a Secondary License (if
permitted under the terms of Section 3.3).
2.5. Representation
Each Contributor represents that the Contributor believes its
Contributions are its original creation(s) or it has sufficient rights
to grant the rights to its Contributions conveyed by this License.
2.6. Fair Use
This License is not intended to limit any rights You have under
applicable copyright doctrines of fair use, fair dealing, or other
equivalents.
2.7. Conditions
Sections 3.1, 3.2, 3.3, and 3.4 are conditions of the licenses granted
in Section 2.1.
3. Responsibilities
-------------------
3.1. Distribution of Source Form
All distribution of Covered Software in Source Code Form, including any
Modifications that You create or to which You contribute, must be under
the terms of this License. You must inform recipients that the Source
Code Form of the Covered Software is governed by the terms of this
License, and how they can obtain a copy of this License. You may not
attempt to alter or restrict the recipients' rights in the Source Code
Form.
3.2. Distribution of Executable Form
If You distribute Covered Software in Executable Form then:
(a) such Covered Software must also be made available in Source Code
Form, as described in Section 3.1, and You must inform recipients of
the Executable Form how they can obtain a copy of such Source Code
Form by reasonable means in a timely manner, at a charge no more
than the cost of distribution to the recipient; and
(b) You may distribute such Executable Form under the terms of this
License, or sublicense it under different terms, provided that the
license for the Executable Form does not attempt to limit or alter
the recipients' rights in the Source Code Form under this License.
3.3. Distribution of a Larger Work
You may create and distribute a Larger Work under terms of Your choice,
provided that You also comply with the requirements of this License for
the Covered Software. If the Larger Work is a combination of Covered
Software with a work governed by one or more Secondary Licenses, and the
Covered Software is not Incompatible With Secondary Licenses, this
License permits You to additionally distribute such Covered Software
under the terms of such Secondary License(s), so that the recipient of
the Larger Work may, at their option, further distribute the Covered
Software under the terms of either this License or such Secondary
License(s).
3.4. Notices
You may not remove or alter the substance of any license notices
(including copyright notices, patent notices, disclaimers of warranty,
or limitations of liability) contained within the Source Code Form of
the Covered Software, except that You may alter any license notices to
the extent required to remedy known factual inaccuracies.
3.5. Application of Additional Terms
You may choose to offer, and to charge a fee for, warranty, support,
indemnity or liability obligations to one or more recipients of Covered
Software. However, You may do so only on Your own behalf, and not on
behalf of any Contributor. You must make it absolutely clear that any
such warranty, support, indemnity, or liability obligation is offered by
You alone, and You hereby agree to indemnify every Contributor for any
liability incurred by such Contributor as a result of warranty, support,
indemnity or liability terms You offer. You may include additional
disclaimers of warranty and limitations of liability specific to any
jurisdiction.
4. Inability to Comply Due to Statute or Regulation
---------------------------------------------------
If it is impossible for You to comply with any of the terms of this
License with respect to some or all of the Covered Software due to
statute, judicial order, or regulation then You must: (a) comply with
the terms of this License to the maximum extent possible; and (b)
describe the limitations and the code they affect. Such description must
be placed in a text file included with all distributions of the Covered
Software under this License. Except to the extent prohibited by statute
or regulation, such description must be sufficiently detailed for a
recipient of ordinary skill to be able to understand it.
5. Termination
--------------
5.1. The rights granted under this License will terminate automatically
if You fail to comply with any of its terms. However, if You become
compliant, then the rights granted under this License from a particular
Contributor are reinstated (a) provisionally, unless and until such
Contributor explicitly and finally terminates Your grants, and (b) on an
ongoing basis, if such Contributor fails to notify You of the
non-compliance by some reasonable means prior to 60 days after You have
come back into compliance. Moreover, Your grants from a particular
Contributor are reinstated on an ongoing basis if such Contributor
notifies You of the non-compliance by some reasonable means, this is the
first time You have received notice of non-compliance with this License
from such Contributor, and You become compliant prior to 30 days after
Your receipt of the notice.
5.2. If You initiate litigation against any entity by asserting a patent
infringement claim (excluding declaratory judgment actions,
counter-claims, and cross-claims) alleging that a Contributor Version
directly or indirectly infringes any patent, then the rights granted to
You by any and all Contributors for the Covered Software under Section
2.1 of this License shall terminate.
5.3. In the event of termination under Sections 5.1 or 5.2 above, all
end user license agreements (excluding distributors and resellers) which
have been validly granted by You or Your distributors under this License
prior to termination shall survive termination.
************************************************************************
* *
* 6. Disclaimer of Warranty *
* ------------------------- *
* *
* Covered Software is provided under this License on an "as is" *
* basis, without warranty of any kind, either expressed, implied, or *
* statutory, including, without limitation, warranties that the *
* Covered Software is free of defects, merchantable, fit for a *
* particular purpose or non-infringing. The entire risk as to the *
* quality and performance of the Covered Software is with You. *
* Should any Covered Software prove defective in any respect, You *
* (not any Contributor) assume the cost of any necessary servicing, *
* repair, or correction. This disclaimer of warranty constitutes an *
* essential part of this License. No use of any Covered Software is *
* authorized under this License except under this disclaimer. *
* *
************************************************************************
************************************************************************
* *
* 7. Limitation of Liability *
* -------------------------- *
* *
* Under no circumstances and under no legal theory, whether tort *
* (including negligence), contract, or otherwise, shall any *
* Contributor, or anyone who distributes Covered Software as *
* permitted above, be liable to You for any direct, indirect, *
* special, incidental, or consequential damages of any character *
* including, without limitation, damages for lost profits, loss of *
* goodwill, work stoppage, computer failure or malfunction, or any *
* and all other commercial damages or losses, even if such party *
* shall have been informed of the possibility of such damages. This *
* limitation of liability shall not apply to liability for death or *
* personal injury resulting from such party's negligence to the *
* extent applicable law prohibits such limitation. Some *
* jurisdictions do not allow the exclusion or limitation of *
* incidental or consequential damages, so this exclusion and *
* limitation may not apply to You. *
* *
************************************************************************
8. Litigation
-------------
Any litigation relating to this License may be brought only in the
courts of a jurisdiction where the defendant maintains its principal
place of business and such litigation shall be governed by laws of that
jurisdiction, without reference to its conflict-of-law provisions.
Nothing in this Section shall prevent a party's ability to bring
cross-claims or counter-claims.
9. Miscellaneous
----------------
This License represents the complete agreement concerning the subject
matter hereof. If any provision of this License is held to be
unenforceable, such provision shall be reformed only to the extent
necessary to make it enforceable. Any law or regulation which provides
that the language of a contract shall be construed against the drafter
shall not be used to construe this License against a Contributor.
10. Versions of the License
---------------------------
10.1. New Versions
Mozilla Foundation is the license steward. Except as provided in Section
10.3, no one other than the license steward has the right to modify or
publish new versions of this License. Each version will be given a
distinguishing version number.
10.2. Effect of New Versions
You may distribute the Covered Software under the terms of the version
of the License under which You originally received the Covered Software,
or under the terms of any subsequent version published by the license
steward.
10.3. Modified Versions
If you create software not governed by this License, and you want to
create a new license for such software, you may create and use a
modified version of this License if you rename the license and remove
any references to the name of the license steward (except to note that
such modified license differs from this License).
10.4. Distributing Source Code Form that is Incompatible With Secondary
Licenses
If You choose to distribute Source Code Form that is Incompatible With
Secondary Licenses under the terms of this version of the License, the
notice described in Exhibit B of this License must be attached.
Exhibit A - Source Code Form License Notice
-------------------------------------------
This Source Code Form is subject to the terms of the Mozilla Public
License, v. 2.0. If a copy of the MPL was not distributed with this
file, You can obtain one at http://mozilla.org/MPL/2.0/.
If it is not possible or desirable to put the notice in a particular
file, then You may include the notice in a location (such as a LICENSE
file in a relevant directory) where a recipient would be likely to look
for such a notice.
You may add additional accurate notices of copyright ownership.
Exhibit B - "Incompatible With Secondary Licenses" Notice
---------------------------------------------------------
This Source Code Form is "Incompatible With Secondary Licenses", as
defined by the Mozilla Public License, v. 2.0.
| vendor/github.com/gosimple/slug/LICENSE | 0 | https://github.com/grafana/grafana/commit/c420af16b14e586b96d190e52f13805e0491e16a | [
0.00018066303164232522,
0.0001762515603331849,
0.00017082536942325532,
0.000176172616193071,
0.000002376008296778309
] |
{
"id": 4,
"code_window": [
"\t\t\t\t\tSo(err, ShouldEqual, m.ErrLastTeamAdmin)\n",
"\t\t\t\t})\n",
"\n",
"\t\t\t\tConvey(\"A user should be able to remove an admin if there are other admins\", func() {\n",
"\t\t\t\t\terr = AddTeamMember(&m.AddTeamMemberCommand{OrgId: testOrgId, TeamId: group1.Result.Id, UserId: userIds[1], Permission: m.PERMISSION_ADMIN})\n",
"\t\t\t\t\terr = RemoveTeamMember(&m.RemoveTeamMemberCommand{OrgId: testOrgId, TeamId: group1.Result.Id, UserId: userIds[0], ProtectLastAdmin: true})\n",
"\t\t\t\t\tSo(err, ShouldEqual, nil)\n",
"\t\t\t\t})\n",
"\n"
],
"labels": [
"keep",
"keep",
"keep",
"keep",
"replace",
"keep",
"keep",
"keep",
"keep"
],
"after_edit": [
"\t\t\t\t\tAddTeamMember(&m.AddTeamMemberCommand{OrgId: testOrgId, TeamId: group1.Result.Id, UserId: userIds[1], Permission: m.PERMISSION_ADMIN})\n"
],
"file_path": "pkg/services/sqlstore/team_test.go",
"type": "replace",
"edit_start_line_idx": 192
} | package yamux
import (
"fmt"
"io"
"log"
"os"
"time"
)
// Config is used to tune the Yamux session
type Config struct {
// AcceptBacklog is used to limit how many streams may be
// waiting an accept.
AcceptBacklog int
// EnableKeepalive is used to do a period keep alive
// messages using a ping.
EnableKeepAlive bool
// KeepAliveInterval is how often to perform the keep alive
KeepAliveInterval time.Duration
// ConnectionWriteTimeout is meant to be a "safety valve" timeout after
// we which will suspect a problem with the underlying connection and
// close it. This is only applied to writes, where's there's generally
// an expectation that things will move along quickly.
ConnectionWriteTimeout time.Duration
// MaxStreamWindowSize is used to control the maximum
// window size that we allow for a stream.
MaxStreamWindowSize uint32
// LogOutput is used to control the log destination. Either Logger or
// LogOutput can be set, not both.
LogOutput io.Writer
// Logger is used to pass in the logger to be used. Either Logger or
// LogOutput can be set, not both.
Logger *log.Logger
}
// DefaultConfig is used to return a default configuration
func DefaultConfig() *Config {
return &Config{
AcceptBacklog: 256,
EnableKeepAlive: true,
KeepAliveInterval: 30 * time.Second,
ConnectionWriteTimeout: 10 * time.Second,
MaxStreamWindowSize: initialStreamWindow,
LogOutput: os.Stderr,
}
}
// VerifyConfig is used to verify the sanity of configuration
func VerifyConfig(config *Config) error {
if config.AcceptBacklog <= 0 {
return fmt.Errorf("backlog must be positive")
}
if config.KeepAliveInterval == 0 {
return fmt.Errorf("keep-alive interval must be positive")
}
if config.MaxStreamWindowSize < initialStreamWindow {
return fmt.Errorf("MaxStreamWindowSize must be larger than %d", initialStreamWindow)
}
if config.LogOutput != nil && config.Logger != nil {
return fmt.Errorf("both Logger and LogOutput may not be set, select one")
} else if config.LogOutput == nil && config.Logger == nil {
return fmt.Errorf("one of Logger or LogOutput must be set, select one")
}
return nil
}
// Server is used to initialize a new server-side connection.
// There must be at most one server-side connection. If a nil config is
// provided, the DefaultConfiguration will be used.
func Server(conn io.ReadWriteCloser, config *Config) (*Session, error) {
if config == nil {
config = DefaultConfig()
}
if err := VerifyConfig(config); err != nil {
return nil, err
}
return newSession(config, conn, false), nil
}
// Client is used to initialize a new client-side connection.
// There must be at most one client-side connection.
func Client(conn io.ReadWriteCloser, config *Config) (*Session, error) {
if config == nil {
config = DefaultConfig()
}
if err := VerifyConfig(config); err != nil {
return nil, err
}
return newSession(config, conn, true), nil
}
| vendor/github.com/hashicorp/yamux/mux.go | 0 | https://github.com/grafana/grafana/commit/c420af16b14e586b96d190e52f13805e0491e16a | [
0.0003150825505144894,
0.0001849605469033122,
0.0001653419021749869,
0.00017084850696846843,
0.000043471562094055116
] |
{
"id": 4,
"code_window": [
"\t\t\t\t\tSo(err, ShouldEqual, m.ErrLastTeamAdmin)\n",
"\t\t\t\t})\n",
"\n",
"\t\t\t\tConvey(\"A user should be able to remove an admin if there are other admins\", func() {\n",
"\t\t\t\t\terr = AddTeamMember(&m.AddTeamMemberCommand{OrgId: testOrgId, TeamId: group1.Result.Id, UserId: userIds[1], Permission: m.PERMISSION_ADMIN})\n",
"\t\t\t\t\terr = RemoveTeamMember(&m.RemoveTeamMemberCommand{OrgId: testOrgId, TeamId: group1.Result.Id, UserId: userIds[0], ProtectLastAdmin: true})\n",
"\t\t\t\t\tSo(err, ShouldEqual, nil)\n",
"\t\t\t\t})\n",
"\n"
],
"labels": [
"keep",
"keep",
"keep",
"keep",
"replace",
"keep",
"keep",
"keep",
"keep"
],
"after_edit": [
"\t\t\t\t\tAddTeamMember(&m.AddTeamMemberCommand{OrgId: testOrgId, TeamId: group1.Result.Id, UserId: userIds[1], Permission: m.PERMISSION_ADMIN})\n"
],
"file_path": "pkg/services/sqlstore/team_test.go",
"type": "replace",
"edit_start_line_idx": 192
} | .tabbed-view {
display: flex;
flex-direction: column;
height: 100%;
flex-grow: 1;
&.tabbed-view--new {
padding: 0 0 0 0;
height: 100%;
}
}
.tabbed-view-header {
box-shadow: $page-header-shadow;
border-bottom: 1px solid $page-header-border-color;
padding: 0 $dashboard-padding;
@include clearfix();
}
.tabbed-view-title {
float: left;
padding-top: 0.5rem;
margin: 0 $spacer * 3 0 0;
}
.tabbed-view-panel-title {
float: left;
padding-top: 9px;
margin: 0 2rem 0 0;
}
.tabbed-view-close-btn {
float: right;
margin: 0;
background-color: transparent;
border: none;
padding: $tabs-padding;
color: $text-color;
i {
font-size: 120%;
}
&:hover {
color: $text-color-strong;
}
}
.tabbed-view-body {
padding: $spacer * 2 $spacer $spacer $spacer;
display: flex;
flex-direction: column;
flex: 1;
&--small {
min-height: 0px;
padding-bottom: 0px;
}
}
.section-heading {
font-size: $font-size-md;
margin-bottom: 0.6rem;
}
| public/sass/components/_tabbed_view.scss | 0 | https://github.com/grafana/grafana/commit/c420af16b14e586b96d190e52f13805e0491e16a | [
0.00017570468480698764,
0.00017241392924916,
0.00016325160686392337,
0.00017372063302900642,
0.000003978263521275949
] |
{
"id": 5,
"code_window": [
"\t\t\t\t\terr = RemoveTeamMember(&m.RemoveTeamMemberCommand{OrgId: testOrgId, TeamId: group1.Result.Id, UserId: userIds[0], ProtectLastAdmin: true})\n",
"\t\t\t\t\tSo(err, ShouldEqual, nil)\n",
"\t\t\t\t})\n",
"\n",
"\t\t\t})\n",
"\n",
"\t\t\tConvey(\"Should be able to remove a group with users and permissions\", func() {\n",
"\t\t\t\tgroupId := group2.Result.Id\n",
"\t\t\t\terr := AddTeamMember(&m.AddTeamMemberCommand{OrgId: testOrgId, TeamId: groupId, UserId: userIds[1]})\n"
],
"labels": [
"keep",
"keep",
"keep",
"add",
"keep",
"keep",
"keep",
"keep",
"keep"
],
"after_edit": [
"\t\t\t\tConvey(\"A user should not be able to remove the admin permission for the last admin\", func() {\n",
"\t\t\t\t\terr = UpdateTeamMember(&m.UpdateTeamMemberCommand{OrgId: testOrgId, TeamId: group1.Result.Id, UserId: userIds[0], Permission: 0, ProtectLastAdmin: true})\n",
"\t\t\t\t\tSo(err, ShouldEqual, m.ErrLastTeamAdmin)\n",
"\t\t\t\t})\n",
"\n",
"\t\t\t\tConvey(\"A user should be able to remove the admin permission if there are other admins\", func() {\n",
"\t\t\t\t\tAddTeamMember(&m.AddTeamMemberCommand{OrgId: testOrgId, TeamId: group1.Result.Id, UserId: userIds[1], Permission: m.PERMISSION_ADMIN})\n",
"\t\t\t\t\terr = UpdateTeamMember(&m.UpdateTeamMemberCommand{OrgId: testOrgId, TeamId: group1.Result.Id, UserId: userIds[0], Permission: 0, ProtectLastAdmin: true})\n",
"\t\t\t\t\tSo(err, ShouldEqual, nil)\n",
"\t\t\t\t})\n"
],
"file_path": "pkg/services/sqlstore/team_test.go",
"type": "add",
"edit_start_line_idx": 197
} | package api
import (
"github.com/grafana/grafana/pkg/api/dtos"
"github.com/grafana/grafana/pkg/bus"
m "github.com/grafana/grafana/pkg/models"
"github.com/grafana/grafana/pkg/services/teamguardian"
"github.com/grafana/grafana/pkg/setting"
"github.com/grafana/grafana/pkg/util"
)
// GET /api/teams/:teamId/members
func GetTeamMembers(c *m.ReqContext) Response {
query := m.GetTeamMembersQuery{OrgId: c.OrgId, TeamId: c.ParamsInt64(":teamId")}
if err := bus.Dispatch(&query); err != nil {
return Error(500, "Failed to get Team Members", err)
}
for _, member := range query.Result {
member.AvatarUrl = dtos.GetGravatarUrl(member.Email)
member.Labels = []string{}
if setting.IsEnterprise && setting.LdapEnabled && member.External {
member.Labels = append(member.Labels, "LDAP")
}
}
return JSON(200, query.Result)
}
// POST /api/teams/:teamId/members
func AddTeamMember(c *m.ReqContext, cmd m.AddTeamMemberCommand) Response {
teamId := c.ParamsInt64(":teamId")
orgId := c.OrgId
if err := teamguardian.CanAdmin(orgId, teamId, c.SignedInUser); err != nil {
return Error(403, "Not allowed to add team member", err)
}
cmd.TeamId = teamId
cmd.OrgId = orgId
if err := bus.Dispatch(&cmd); err != nil {
if err == m.ErrTeamNotFound {
return Error(404, "Team not found", nil)
}
if err == m.ErrTeamMemberAlreadyAdded {
return Error(400, "User is already added to this team", nil)
}
return Error(500, "Failed to add Member to Team", err)
}
return JSON(200, &util.DynMap{
"message": "Member added to Team",
})
}
// PUT /:teamId/members/:userId
func UpdateTeamMember(c *m.ReqContext, cmd m.UpdateTeamMemberCommand) Response {
teamId := c.ParamsInt64(":teamId")
orgId := c.OrgId
if err := teamguardian.CanAdmin(orgId, teamId, c.SignedInUser); err != nil {
return Error(403, "Not allowed to update team member", err)
}
cmd.TeamId = teamId
cmd.UserId = c.ParamsInt64(":userId")
cmd.OrgId = orgId
if err := bus.Dispatch(&cmd); err != nil {
if err == m.ErrTeamMemberNotFound {
return Error(404, "Team member not found.", nil)
}
return Error(500, "Failed to update team member.", err)
}
return Success("Team member updated")
}
// DELETE /api/teams/:teamId/members/:userId
func (hs *HTTPServer) RemoveTeamMember(c *m.ReqContext) Response {
orgId := c.OrgId
teamId := c.ParamsInt64(":teamId")
userId := c.ParamsInt64(":userId")
if err := teamguardian.CanAdmin(orgId, teamId, c.SignedInUser); err != nil {
return Error(403, "Not allowed to remove team member", err)
}
protectLastAdmin := false
if c.OrgRole == m.ROLE_EDITOR {
protectLastAdmin = true
}
if err := bus.Dispatch(&m.RemoveTeamMemberCommand{OrgId: orgId, TeamId: teamId, UserId: userId, ProtectLastAdmin: protectLastAdmin}); err != nil {
if err == m.ErrTeamNotFound {
return Error(404, "Team not found", nil)
}
if err == m.ErrTeamMemberNotFound {
return Error(404, "Team member not found", nil)
}
return Error(500, "Failed to remove Member from Team", err)
}
return Success("Team Member removed")
}
| pkg/api/team_members.go | 1 | https://github.com/grafana/grafana/commit/c420af16b14e586b96d190e52f13805e0491e16a | [
0.004811583086848259,
0.0012304517440497875,
0.00016750556824263185,
0.00027931571821682155,
0.0015017413534224033
] |
{
"id": 5,
"code_window": [
"\t\t\t\t\terr = RemoveTeamMember(&m.RemoveTeamMemberCommand{OrgId: testOrgId, TeamId: group1.Result.Id, UserId: userIds[0], ProtectLastAdmin: true})\n",
"\t\t\t\t\tSo(err, ShouldEqual, nil)\n",
"\t\t\t\t})\n",
"\n",
"\t\t\t})\n",
"\n",
"\t\t\tConvey(\"Should be able to remove a group with users and permissions\", func() {\n",
"\t\t\t\tgroupId := group2.Result.Id\n",
"\t\t\t\terr := AddTeamMember(&m.AddTeamMemberCommand{OrgId: testOrgId, TeamId: groupId, UserId: userIds[1]})\n"
],
"labels": [
"keep",
"keep",
"keep",
"add",
"keep",
"keep",
"keep",
"keep",
"keep"
],
"after_edit": [
"\t\t\t\tConvey(\"A user should not be able to remove the admin permission for the last admin\", func() {\n",
"\t\t\t\t\terr = UpdateTeamMember(&m.UpdateTeamMemberCommand{OrgId: testOrgId, TeamId: group1.Result.Id, UserId: userIds[0], Permission: 0, ProtectLastAdmin: true})\n",
"\t\t\t\t\tSo(err, ShouldEqual, m.ErrLastTeamAdmin)\n",
"\t\t\t\t})\n",
"\n",
"\t\t\t\tConvey(\"A user should be able to remove the admin permission if there are other admins\", func() {\n",
"\t\t\t\t\tAddTeamMember(&m.AddTeamMemberCommand{OrgId: testOrgId, TeamId: group1.Result.Id, UserId: userIds[1], Permission: m.PERMISSION_ADMIN})\n",
"\t\t\t\t\terr = UpdateTeamMember(&m.UpdateTeamMemberCommand{OrgId: testOrgId, TeamId: group1.Result.Id, UserId: userIds[0], Permission: 0, ProtectLastAdmin: true})\n",
"\t\t\t\t\tSo(err, ShouldEqual, nil)\n",
"\t\t\t\t})\n"
],
"file_path": "pkg/services/sqlstore/team_test.go",
"type": "add",
"edit_start_line_idx": 197
} | +++
title = "HTTP API"
description = "Grafana HTTP API"
keywords = ["grafana", "http", "documentation", "api", "overview"]
type = "docs"
[menu.docs]
name = "HTTP API"
identifier = "http_api"
weight = 9
+++
# HTTP API Reference
The Grafana backend exposes an HTTP API, the same API is used by the frontend to do everything from saving
dashboards, creating users and updating data sources.
## Supported HTTP APIs:
* [Authentication API]({{< relref "/http_api/auth.md" >}})
* [Dashboard API]({{< relref "/http_api/dashboard.md" >}})
* [Dashboard Versions API]({{< relref "http_api/dashboard_versions.md" >}})
* [Dashboard Permissions API]({{< relref "http_api/dashboard_permissions.md" >}})
* [Folder API]({{< relref "/http_api/folder.md" >}})
* [Folder Permissions API]({{< relref "http_api/folder_permissions.md" >}})
* [Folder/dashboard search API]({{< relref "/http_api/folder_dashboard_search.md" >}})
* [Data Source API]({{< relref "http_api/data_source.md" >}})
* [Organization API]({{< relref "http_api/org.md" >}})
* [Snapshot API]({{< relref "http_api/snapshot.md" >}})
* [Annotations API]({{< relref "http_api/annotations.md" >}})
* [Alerting API]({{< relref "http_api/alerting.md" >}})
* [User API]({{< relref "http_api/user.md" >}})
* [Team API]({{< relref "http_api/team.md" >}})
* [Admin API]({{< relref "http_api/admin.md" >}})
* [Preferences API]({{< relref "http_api/preferences.md" >}})
* [Other API]({{< relref "http_api/other.md" >}})
| docs/sources/http_api/index.md | 0 | https://github.com/grafana/grafana/commit/c420af16b14e586b96d190e52f13805e0491e16a | [
0.00017611010116524994,
0.00016547797713428736,
0.00016142954700626433,
0.00016218613018281758,
0.000006166180355648976
] |
{
"id": 5,
"code_window": [
"\t\t\t\t\terr = RemoveTeamMember(&m.RemoveTeamMemberCommand{OrgId: testOrgId, TeamId: group1.Result.Id, UserId: userIds[0], ProtectLastAdmin: true})\n",
"\t\t\t\t\tSo(err, ShouldEqual, nil)\n",
"\t\t\t\t})\n",
"\n",
"\t\t\t})\n",
"\n",
"\t\t\tConvey(\"Should be able to remove a group with users and permissions\", func() {\n",
"\t\t\t\tgroupId := group2.Result.Id\n",
"\t\t\t\terr := AddTeamMember(&m.AddTeamMemberCommand{OrgId: testOrgId, TeamId: groupId, UserId: userIds[1]})\n"
],
"labels": [
"keep",
"keep",
"keep",
"add",
"keep",
"keep",
"keep",
"keep",
"keep"
],
"after_edit": [
"\t\t\t\tConvey(\"A user should not be able to remove the admin permission for the last admin\", func() {\n",
"\t\t\t\t\terr = UpdateTeamMember(&m.UpdateTeamMemberCommand{OrgId: testOrgId, TeamId: group1.Result.Id, UserId: userIds[0], Permission: 0, ProtectLastAdmin: true})\n",
"\t\t\t\t\tSo(err, ShouldEqual, m.ErrLastTeamAdmin)\n",
"\t\t\t\t})\n",
"\n",
"\t\t\t\tConvey(\"A user should be able to remove the admin permission if there are other admins\", func() {\n",
"\t\t\t\t\tAddTeamMember(&m.AddTeamMemberCommand{OrgId: testOrgId, TeamId: group1.Result.Id, UserId: userIds[1], Permission: m.PERMISSION_ADMIN})\n",
"\t\t\t\t\terr = UpdateTeamMember(&m.UpdateTeamMemberCommand{OrgId: testOrgId, TeamId: group1.Result.Id, UserId: userIds[0], Permission: 0, ProtectLastAdmin: true})\n",
"\t\t\t\t\tSo(err, ShouldEqual, nil)\n",
"\t\t\t\t})\n"
],
"file_path": "pkg/services/sqlstore/team_test.go",
"type": "add",
"edit_start_line_idx": 197
} | import { DashboardAcl, DashboardAclDTO } from 'app/types/acl';
export function processAclItems(items: DashboardAclDTO[]): DashboardAcl[] {
return items.map(processAclItem).sort((a, b) => b.sortRank - a.sortRank || a.name.localeCompare(b.name));
}
function processAclItem(dto: DashboardAclDTO): DashboardAcl {
const item = dto as DashboardAcl;
item.sortRank = 0;
if (item.userId > 0) {
item.name = item.userLogin;
item.sortRank = 10;
} else if (item.teamId > 0) {
item.name = item.team;
item.sortRank = 20;
} else if (item.role) {
item.icon = 'fa fa-fw fa-street-view';
item.name = item.role;
item.sortRank = 30;
if (item.role === 'Editor') {
item.sortRank += 1;
}
}
if (item.inherited) {
item.sortRank += 100;
}
return item;
}
| public/app/core/reducers/processsAclItems.ts | 0 | https://github.com/grafana/grafana/commit/c420af16b14e586b96d190e52f13805e0491e16a | [
0.00017729302635416389,
0.00017341091006528586,
0.00016742988373152912,
0.00017446035053581,
0.000003943491265090415
] |
{
"id": 5,
"code_window": [
"\t\t\t\t\terr = RemoveTeamMember(&m.RemoveTeamMemberCommand{OrgId: testOrgId, TeamId: group1.Result.Id, UserId: userIds[0], ProtectLastAdmin: true})\n",
"\t\t\t\t\tSo(err, ShouldEqual, nil)\n",
"\t\t\t\t})\n",
"\n",
"\t\t\t})\n",
"\n",
"\t\t\tConvey(\"Should be able to remove a group with users and permissions\", func() {\n",
"\t\t\t\tgroupId := group2.Result.Id\n",
"\t\t\t\terr := AddTeamMember(&m.AddTeamMemberCommand{OrgId: testOrgId, TeamId: groupId, UserId: userIds[1]})\n"
],
"labels": [
"keep",
"keep",
"keep",
"add",
"keep",
"keep",
"keep",
"keep",
"keep"
],
"after_edit": [
"\t\t\t\tConvey(\"A user should not be able to remove the admin permission for the last admin\", func() {\n",
"\t\t\t\t\terr = UpdateTeamMember(&m.UpdateTeamMemberCommand{OrgId: testOrgId, TeamId: group1.Result.Id, UserId: userIds[0], Permission: 0, ProtectLastAdmin: true})\n",
"\t\t\t\t\tSo(err, ShouldEqual, m.ErrLastTeamAdmin)\n",
"\t\t\t\t})\n",
"\n",
"\t\t\t\tConvey(\"A user should be able to remove the admin permission if there are other admins\", func() {\n",
"\t\t\t\t\tAddTeamMember(&m.AddTeamMemberCommand{OrgId: testOrgId, TeamId: group1.Result.Id, UserId: userIds[1], Permission: m.PERMISSION_ADMIN})\n",
"\t\t\t\t\terr = UpdateTeamMember(&m.UpdateTeamMemberCommand{OrgId: testOrgId, TeamId: group1.Result.Id, UserId: userIds[0], Permission: 0, ProtectLastAdmin: true})\n",
"\t\t\t\t\tSo(err, ShouldEqual, nil)\n",
"\t\t\t\t})\n"
],
"file_path": "pkg/services/sqlstore/team_test.go",
"type": "add",
"edit_start_line_idx": 197
} | /var/log/*.log /var/log/*/*.log {
weekly
size 50M
missingok
rotate 10
compress
delaycompress
notifempty
copytruncate
su root syslog
}
| devenv/docker/blocks/graphite1/conf/etc/logrotate.d/graphite-statsd | 0 | https://github.com/grafana/grafana/commit/c420af16b14e586b96d190e52f13805e0491e16a | [
0.00017780378402676433,
0.00017506213043816388,
0.0001723204622976482,
0.00017506213043816388,
0.000002741660864558071
] |
{
"id": 0,
"code_window": [
" \"lockfileVersion\": 2,\n",
" \"requires\": true,\n",
" \"packages\": {\n",
" \"\": {\n",
" \"dependencies\": {\n",
" \"@playwright/test\": \"1.36.0-alpha-jun-15-2023\"\n",
" }\n",
" },\n",
" \"node_modules/@playwright/test\": {\n"
],
"labels": [
"keep",
"keep",
"keep",
"keep",
"keep",
"replace",
"keep",
"keep",
"keep"
],
"after_edit": [
" \"@playwright/test\": \"1.37.0-alpha-1689796912000\"\n"
],
"file_path": "tests/playwright-test/stable-test-runner/package-lock.json",
"type": "replace",
"edit_start_line_idx": 7
} | {
"name": "stable-test-runner",
"lockfileVersion": 2,
"requires": true,
"packages": {
"": {
"dependencies": {
"@playwright/test": "1.36.0-alpha-jun-15-2023"
}
},
"node_modules/@playwright/test": {
"version": "1.36.0-alpha-jun-15-2023",
"resolved": "https://registry.npmjs.org/@playwright/test/-/test-1.36.0-alpha-jun-15-2023.tgz",
"integrity": "sha512-UiWKP1xv3Xo983JOViLvhQ4zluvc3sfUDkIbTpZouq5MbzXWwHoy6pzQqfvh7Wgipe4EZOSmxCkamvYjQGTSrA==",
"dependencies": {
"@types/node": "*",
"playwright-core": "1.36.0-alpha-jun-15-2023"
},
"bin": {
"playwright": "cli.js"
},
"engines": {
"node": ">=16"
},
"optionalDependencies": {
"fsevents": "2.3.2"
}
},
"node_modules/@types/node": {
"version": "18.0.0",
"resolved": "https://registry.npmjs.org/@types/node/-/node-18.0.0.tgz",
"integrity": "sha512-cHlGmko4gWLVI27cGJntjs/Sj8th9aYwplmZFwmmgYQQvL5NUsgVJG7OddLvNfLqYS31KFN0s3qlaD9qCaxACA=="
},
"node_modules/fsevents": {
"version": "2.3.2",
"resolved": "https://registry.npmjs.org/fsevents/-/fsevents-2.3.2.tgz",
"integrity": "sha512-xiqMQR4xAeHTuB9uWm+fFRcIOgKBMiOBP+eXiyT7jsgVCq1bkVygt00oASowB7EdtpOHaaPgKt812P9ab+DDKA==",
"hasInstallScript": true,
"optional": true,
"os": [
"darwin"
],
"engines": {
"node": "^8.16.0 || ^10.6.0 || >=11.0.0"
}
},
"node_modules/playwright-core": {
"version": "1.36.0-alpha-jun-15-2023",
"resolved": "https://registry.npmjs.org/playwright-core/-/playwright-core-1.36.0-alpha-jun-15-2023.tgz",
"integrity": "sha512-jGN89dcf3NxT+gKtW523Vq2/1QSL7Iewkq6WcootSRoAIV3hRuE901jHd/r1iEbXnnIcWwLEOPSBvuXS+UXcIQ==",
"bin": {
"playwright-core": "cli.js"
},
"engines": {
"node": ">=16"
}
}
},
"dependencies": {
"@playwright/test": {
"version": "1.36.0-alpha-jun-15-2023",
"resolved": "https://registry.npmjs.org/@playwright/test/-/test-1.36.0-alpha-jun-15-2023.tgz",
"integrity": "sha512-UiWKP1xv3Xo983JOViLvhQ4zluvc3sfUDkIbTpZouq5MbzXWwHoy6pzQqfvh7Wgipe4EZOSmxCkamvYjQGTSrA==",
"requires": {
"@types/node": "*",
"fsevents": "2.3.2",
"playwright-core": "1.36.0-alpha-jun-15-2023"
}
},
"@types/node": {
"version": "18.0.0",
"resolved": "https://registry.npmjs.org/@types/node/-/node-18.0.0.tgz",
"integrity": "sha512-cHlGmko4gWLVI27cGJntjs/Sj8th9aYwplmZFwmmgYQQvL5NUsgVJG7OddLvNfLqYS31KFN0s3qlaD9qCaxACA=="
},
"fsevents": {
"version": "2.3.2",
"resolved": "https://registry.npmjs.org/fsevents/-/fsevents-2.3.2.tgz",
"integrity": "sha512-xiqMQR4xAeHTuB9uWm+fFRcIOgKBMiOBP+eXiyT7jsgVCq1bkVygt00oASowB7EdtpOHaaPgKt812P9ab+DDKA==",
"optional": true
},
"playwright-core": {
"version": "1.36.0-alpha-jun-15-2023",
"resolved": "https://registry.npmjs.org/playwright-core/-/playwright-core-1.36.0-alpha-jun-15-2023.tgz",
"integrity": "sha512-jGN89dcf3NxT+gKtW523Vq2/1QSL7Iewkq6WcootSRoAIV3hRuE901jHd/r1iEbXnnIcWwLEOPSBvuXS+UXcIQ=="
}
}
}
| tests/playwright-test/stable-test-runner/package-lock.json | 1 | https://github.com/microsoft/playwright/commit/b2965158d3151f022cf8d76057649208cd85da0d | [
0.9909489750862122,
0.11858274042606354,
0.00016987629351206124,
0.004866412840783596,
0.3089340329170227
] |
{
"id": 0,
"code_window": [
" \"lockfileVersion\": 2,\n",
" \"requires\": true,\n",
" \"packages\": {\n",
" \"\": {\n",
" \"dependencies\": {\n",
" \"@playwright/test\": \"1.36.0-alpha-jun-15-2023\"\n",
" }\n",
" },\n",
" \"node_modules/@playwright/test\": {\n"
],
"labels": [
"keep",
"keep",
"keep",
"keep",
"keep",
"replace",
"keep",
"keep",
"keep"
],
"after_edit": [
" \"@playwright/test\": \"1.37.0-alpha-1689796912000\"\n"
],
"file_path": "tests/playwright-test/stable-test-runner/package-lock.json",
"type": "replace",
"edit_start_line_idx": 7
} | /**
* Copyright (c) Microsoft Corporation.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
import { test, expect } from './playwright-test-fixtures';
test('should consider dynamically set value', async ({ runInlineTest }) => {
const result = await runInlineTest({
'playwright.config.js': `
module.exports = { timeout: 2000 };
`,
'a.test.js': `
import { test, expect } from '@playwright/test';
test('pass', ({}, testInfo) => {
expect(testInfo.timeout).toBe(2000);
})
`
});
expect(result.exitCode).toBe(0);
expect(result.passed).toBe(1);
});
test('should allow different timeouts', async ({ runInlineTest }) => {
const result = await runInlineTest({
'playwright.config.js': `
module.exports = { projects: [
{ timeout: 2000 },
{ timeout: 4000 },
] };
`,
'a.test.js': `
import { test, expect } from '@playwright/test';
test('pass', ({}, testInfo) => {
console.log('timeout:' + testInfo.timeout);
});
`
});
expect(result.exitCode).toBe(0);
expect(result.passed).toBe(2);
expect(result.output).toContain('timeout:2000');
expect(result.output).toContain('timeout:4000');
});
test('should prioritize value set via command line', async ({ runInlineTest }) => {
const result = await runInlineTest({
'playwright.config.js': `
module.exports = { timeout: 2000 };
`,
'a.test.js': `
import { test, expect } from '@playwright/test';
test('pass', ({}, testInfo) => {
expect(testInfo.timeout).toBe(1000);
})
`
}, { timeout: 1000 });
expect(result.exitCode).toBe(0);
expect(result.passed).toBe(1);
});
| tests/playwright-test/override-timeout.spec.ts | 0 | https://github.com/microsoft/playwright/commit/b2965158d3151f022cf8d76057649208cd85da0d | [
0.00017329081310890615,
0.00016986089758574963,
0.00016677501844242215,
0.00016997675993479788,
0.000001766238142408838
] |
{
"id": 0,
"code_window": [
" \"lockfileVersion\": 2,\n",
" \"requires\": true,\n",
" \"packages\": {\n",
" \"\": {\n",
" \"dependencies\": {\n",
" \"@playwright/test\": \"1.36.0-alpha-jun-15-2023\"\n",
" }\n",
" },\n",
" \"node_modules/@playwright/test\": {\n"
],
"labels": [
"keep",
"keep",
"keep",
"keep",
"keep",
"replace",
"keep",
"keep",
"keep"
],
"after_edit": [
" \"@playwright/test\": \"1.37.0-alpha-1689796912000\"\n"
],
"file_path": "tests/playwright-test/stable-test-runner/package-lock.json",
"type": "replace",
"edit_start_line_idx": 7
} | /**
* Copyright (c) Microsoft Corporation.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
import { browserTest as it, expect } from '../config/browserTest';
it.use({
launchOptions: async ({ launchOptions }, use) => {
await use({
...launchOptions,
proxy: { server: 'per-context' }
});
}
});
it.beforeEach(({ server }) => {
server.setRoute('/target.html', async (req, res) => {
res.end('<html><title>Served by the proxy</title></html>');
});
});
it('should throw for missing global proxy on Chromium Windows', async ({ browserName, platform, browserType, server }) => {
it.skip(browserName !== 'chromium' || platform !== 'win32');
let browser;
try {
browser = await browserType.launch({
proxy: undefined,
});
const error = await browser.newContext({ proxy: { server: `localhost:${server.PORT}` } }).catch(e => e);
expect(error.toString()).toContain('Browser needs to be launched with the global proxy');
} finally {
await browser.close();
}
});
it('should work when passing the proxy only on the context level', async ({ browserName, platform, browserType, server, proxyServer }) => {
// Currently an upstream bug in the network stack of Chromium which leads that
// the wrong proxy gets used in the BrowserContext.
it.fixme(browserName === 'chromium' && platform === 'win32');
proxyServer.forwardTo(server.PORT);
let browser;
try {
browser = await browserType.launch({
proxy: undefined,
});
const context = await browser.newContext({
proxy: { server: `localhost:${proxyServer.PORT}` }
});
const page = await context.newPage();
await page.goto('http://non-existent.com/target.html');
expect(proxyServer.requestUrls).toContain('http://non-existent.com/target.html');
expect(await page.title()).toBe('Served by the proxy');
} finally {
await browser.close();
}
});
it('should throw for bad server value', async ({ contextFactory }) => {
const error = await contextFactory({
// @ts-expect-error server must be a string
proxy: { server: 123 }
}).catch(e => e);
expect(error.message).toContain('proxy.server: expected string, got number');
});
it('should use proxy', async ({ contextFactory, server, proxyServer }) => {
proxyServer.forwardTo(server.PORT);
const context = await contextFactory({
proxy: { server: `localhost:${proxyServer.PORT}` }
});
const page = await context.newPage();
await page.goto('http://non-existent.com/target.html');
expect(proxyServer.requestUrls).toContain('http://non-existent.com/target.html');
expect(await page.title()).toBe('Served by the proxy');
await context.close();
});
it('should set cookie for top-level domain', async ({ contextFactory, server, proxyServer, browserName, isLinux }) => {
it.fixme(browserName === 'webkit' && isLinux);
proxyServer.forwardTo(server.PORT);
const context = await contextFactory({
proxy: { server: `localhost:${proxyServer.PORT}` }
});
server.setRoute('/empty.html', (req, res) => {
res.setHeader('Set-Cookie', `name=val; Domain=codes; Path=/;`);
res.end();
});
await context.request.get('http://codes/empty.html');
const [cookie] = await context.cookies();
expect(cookie).toBeTruthy();
expect(cookie.name).toBe('name');
expect(cookie.value).toBe('val');
await context.close();
});
it.describe('should proxy local network requests', () => {
for (const additionalBypass of [false, true]) {
it.describe(additionalBypass ? 'with other bypasses' : 'by default', () => {
for (const params of [
{
target: 'localhost',
description: 'localhost',
},
{
target: '127.0.0.1',
description: 'loopback address',
},
{
target: '169.254.3.4',
description: 'link-local'
}
]) {
it(`${params.description}`, async ({ platform, browserName, contextFactory, server, proxyServer }) => {
it.skip(browserName === 'webkit' && platform === 'darwin' && ['localhost', '127.0.0.1'].includes(params.target), 'Mac webkit does not proxy localhost');
const path = `/target-${additionalBypass}-${params.target}.html`;
server.setRoute(path, async (req, res) => {
res.end('<html><title>Served by the proxy</title></html>');
});
const url = `http://${params.target}:${server.PORT}${path}`;
proxyServer.forwardTo(server.PORT);
const context = await contextFactory({
proxy: { server: `localhost:${proxyServer.PORT}`, bypass: additionalBypass ? '1.non.existent.domain.for.the.test' : undefined }
});
const page = await context.newPage();
await page.goto(url);
expect(proxyServer.requestUrls).toContain(url);
expect(await page.title()).toBe('Served by the proxy');
await page.goto('http://1.non.existent.domain.for.the.test/foo.html').catch(() => {});
if (additionalBypass)
expect(proxyServer.requestUrls).not.toContain('http://1.non.existent.domain.for.the.test/foo.html');
else
expect(proxyServer.requestUrls).toContain('http://1.non.existent.domain.for.the.test/foo.html');
await context.close();
});
}
});
}
});
it('should use ipv6 proxy', async ({ contextFactory, server, proxyServer, browserName }) => {
it.fail(browserName === 'firefox', 'page.goto: NS_ERROR_UNKNOWN_HOST');
it.fail(!!process.env.INSIDE_DOCKER, 'docker does not support IPv6 by default');
proxyServer.forwardTo(server.PORT);
const context = await contextFactory({
proxy: { server: `[0:0:0:0:0:0:0:1]:${proxyServer.PORT}` }
});
const page = await context.newPage();
await page.goto('http://non-existent.com/target.html');
expect(proxyServer.requestUrls).toContain('http://non-existent.com/target.html');
expect(await page.title()).toBe('Served by the proxy');
await context.close();
});
it('should use proxy twice', async ({ contextFactory, server, proxyServer }) => {
proxyServer.forwardTo(server.PORT);
const context = await contextFactory({
proxy: { server: `localhost:${proxyServer.PORT}` }
});
const page = await context.newPage();
await page.goto('http://non-existent.com/target.html');
expect(proxyServer.requestUrls).toContain('http://non-existent.com/target.html');
await page.goto('http://non-existent-2.com/target.html');
expect(proxyServer.requestUrls).toContain('http://non-existent-2.com/target.html');
expect(await page.title()).toBe('Served by the proxy');
await context.close();
});
it('should use proxy for second page', async ({ contextFactory, server, proxyServer }) => {
proxyServer.forwardTo(server.PORT);
const context = await contextFactory({
proxy: { server: `localhost:${proxyServer.PORT}` }
});
const page = await context.newPage();
await page.goto('http://non-existent.com/target.html');
expect(proxyServer.requestUrls).toContain('http://non-existent.com/target.html');
expect(await page.title()).toBe('Served by the proxy');
const page2 = await context.newPage();
proxyServer.requestUrls = [];
await page2.goto('http://non-existent.com/target.html');
expect(proxyServer.requestUrls).toContain('http://non-existent.com/target.html');
expect(await page2.title()).toBe('Served by the proxy');
await context.close();
});
it('should use proxy for https urls', async ({ contextFactory, httpsServer, proxyServer }) => {
httpsServer.setRoute('/target.html', async (req, res) => {
res.end('<html><title>Served by https server via proxy</title></html>');
});
proxyServer.forwardTo(httpsServer.PORT);
const context = await contextFactory({
ignoreHTTPSErrors: true,
proxy: { server: `localhost:${proxyServer.PORT}` }
});
const page = await context.newPage();
await page.goto('https://non-existent.com/target.html');
expect(proxyServer.connectHosts).toContain('non-existent.com:443');
expect(await page.title()).toBe('Served by https server via proxy');
await context.close();
});
it('should work with IP:PORT notion', async ({ contextFactory, server, proxyServer }) => {
proxyServer.forwardTo(server.PORT);
const context = await contextFactory({
proxy: { server: `127.0.0.1:${proxyServer.PORT}` }
});
const page = await context.newPage();
await page.goto('http://non-existent.com/target.html');
expect(proxyServer.requestUrls).toContain('http://non-existent.com/target.html');
expect(await page.title()).toBe('Served by the proxy');
await context.close();
});
it('should throw for socks5 authentication', async ({ contextFactory }) => {
const error = await contextFactory({
proxy: { server: `socks5://localhost:1234`, username: 'user', password: 'secret' }
}).catch(e => e);
expect(error.message).toContain('Browser does not support socks5 proxy authentication');
});
it('should throw for socks4 authentication', async ({ contextFactory }) => {
const error = await contextFactory({
proxy: { server: `socks4://localhost:1234`, username: 'user', password: 'secret' }
}).catch(e => e);
expect(error.message).toContain('Socks4 proxy protocol does not support authentication');
});
it('should authenticate', async ({ contextFactory, server, proxyServer }) => {
proxyServer.forwardTo(server.PORT);
let auth;
proxyServer.setAuthHandler(req => {
auth = req.headers['proxy-authorization'];
return !!auth;
});
const context = await contextFactory({
proxy: { server: `localhost:${proxyServer.PORT}`, username: 'user', password: 'secret' }
});
const page = await context.newPage();
await page.goto('http://non-existent.com/target.html');
expect(proxyServer.requestUrls).toContain('http://non-existent.com/target.html');
expect(auth).toBe('Basic ' + Buffer.from('user:secret').toString('base64'));
expect(await page.title()).toBe('Served by the proxy');
await context.close();
});
it('should authenticate with empty password', async ({ contextFactory, server, proxyServer }) => {
proxyServer.forwardTo(server.PORT);
let auth;
proxyServer.setAuthHandler(req => {
auth = req.headers['proxy-authorization'];
return !!auth;
});
const context = await contextFactory({
proxy: { server: `localhost:${proxyServer.PORT}`, username: 'user', password: '' }
});
const page = await context.newPage();
await page.goto('http://non-existent.com/target.html');
expect(auth).toBe('Basic ' + Buffer.from('user:').toString('base64'));
expect(await page.title()).toBe('Served by the proxy');
await context.close();
});
it('should isolate proxy credentials between contexts', async ({ contextFactory, server, browserName, proxyServer }) => {
proxyServer.forwardTo(server.PORT);
let auth;
proxyServer.setAuthHandler(req => {
auth = req.headers['proxy-authorization'];
return !!auth;
});
{
const context = await contextFactory({
proxy: { server: `localhost:${proxyServer.PORT}`, username: 'user1', password: 'secret1' }
});
const page = await context.newPage();
await page.goto('http://non-existent.com/target.html');
expect(auth).toBe('Basic ' + Buffer.from('user1:secret1').toString('base64'));
expect(await page.title()).toBe('Served by the proxy');
await context.close();
}
auth = undefined;
{
const context = await contextFactory({
proxy: { server: `localhost:${proxyServer.PORT}`, username: 'user2', password: 'secret2' }
});
const page = await context.newPage();
await page.goto('http://non-existent.com/target.html');
expect(await page.title()).toBe('Served by the proxy');
expect(auth).toBe('Basic ' + Buffer.from('user2:secret2').toString('base64'));
await context.close();
}
});
it('should exclude patterns', async ({ contextFactory, server, browserName, headless, proxyServer }) => {
proxyServer.forwardTo(server.PORT);
// FYI: using long and weird domain names to avoid ATT DNS hijacking
// that resolves everything to some weird search results page.
//
// @see https://gist.github.com/CollinChaffin/24f6c9652efb3d6d5ef2f5502720ef00
const context = await contextFactory({
proxy: { server: `localhost:${proxyServer.PORT}`, bypass: '1.non.existent.domain.for.the.test, 2.non.existent.domain.for.the.test, .another.test' }
});
const page = await context.newPage();
await page.goto('http://0.non.existent.domain.for.the.test/target.html');
expect(proxyServer.requestUrls).toContain('http://0.non.existent.domain.for.the.test/target.html');
expect(await page.title()).toBe('Served by the proxy');
proxyServer.requestUrls = [];
const nonFaviconUrls = () => {
return proxyServer.requestUrls.filter(u => !u.includes('favicon'));
};
{
const error = await page.goto('http://1.non.existent.domain.for.the.test/target.html').catch(e => e);
expect(nonFaviconUrls()).toEqual([]);
expect(error.message).toBeTruthy();
// Make sure error page commits.
if (browserName === 'chromium')
await page.waitForURL('chrome-error://chromewebdata/');
else if (browserName === 'firefox')
await page.waitForURL('http://1.non.existent.domain.for.the.test/target.html', { waitUntil: 'commit' });
}
{
const error = await page.goto('http://2.non.existent.domain.for.the.test/target.html').catch(e => e);
expect(nonFaviconUrls()).toEqual([]);
expect(error.message).toBeTruthy();
// Make sure error page commits.
if (browserName === 'chromium')
await page.waitForURL('chrome-error://chromewebdata/');
else if (browserName === 'firefox')
await page.waitForURL('http://2.non.existent.domain.for.the.test/target.html', { waitUntil: 'commit' });
}
{
const error = await page.goto('http://foo.is.the.another.test/target.html').catch(e => e);
expect(nonFaviconUrls()).toEqual([]);
expect(error.message).toBeTruthy();
// Make sure error page commits.
if (browserName === 'chromium')
await page.waitForURL('chrome-error://chromewebdata/');
else if (browserName === 'firefox')
await page.waitForURL('http://foo.is.the.another.test/target.html', { waitUntil: 'commit' });
}
{
await page.goto('http://3.non.existent.domain.for.the.test/target.html');
expect(nonFaviconUrls()).toContain('http://3.non.existent.domain.for.the.test/target.html');
expect(await page.title()).toBe('Served by the proxy');
}
await context.close();
});
it('should use socks proxy', async ({ contextFactory, socksPort }) => {
const context = await contextFactory({
proxy: { server: `socks5://localhost:${socksPort}` }
});
const page = await context.newPage();
await page.goto('http://non-existent.com');
expect(await page.title()).toBe('Served by the SOCKS proxy');
await context.close();
});
it('should use socks proxy in second page', async ({ contextFactory, socksPort }) => {
const context = await contextFactory({
proxy: { server: `socks5://localhost:${socksPort}` }
});
const page = await context.newPage();
await page.goto('http://non-existent.com');
expect(await page.title()).toBe('Served by the SOCKS proxy');
const page2 = await context.newPage();
await page2.goto('http://non-existent.com');
expect(await page2.title()).toBe('Served by the SOCKS proxy');
await context.close();
});
it('does launch without a port', async ({ contextFactory }) => {
const context = await contextFactory({
proxy: { server: 'http://localhost' }
});
await context.close();
});
| tests/library/browsercontext-proxy.spec.ts | 0 | https://github.com/microsoft/playwright/commit/b2965158d3151f022cf8d76057649208cd85da0d | [
0.0001761411695042625,
0.00017257360741496086,
0.00016771446098573506,
0.00017273849516641349,
0.0000018537516552896705
] |
{
"id": 0,
"code_window": [
" \"lockfileVersion\": 2,\n",
" \"requires\": true,\n",
" \"packages\": {\n",
" \"\": {\n",
" \"dependencies\": {\n",
" \"@playwright/test\": \"1.36.0-alpha-jun-15-2023\"\n",
" }\n",
" },\n",
" \"node_modules/@playwright/test\": {\n"
],
"labels": [
"keep",
"keep",
"keep",
"keep",
"keep",
"replace",
"keep",
"keep",
"keep"
],
"after_edit": [
" \"@playwright/test\": \"1.37.0-alpha-1689796912000\"\n"
],
"file_path": "tests/playwright-test/stable-test-runner/package-lock.json",
"type": "replace",
"edit_start_line_idx": 7
} | /**
* Copyright (c) Microsoft Corporation.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
import { test, expect, retries, dumpTestTree } from './ui-mode-fixtures';
test.describe.configure({ mode: 'parallel', retries });
const basicTestTree = {
'a.test.ts': `
import { test, expect } from '@playwright/test';
test('passes', () => {});
test('fails', () => {});
test.describe('suite', () => {
test('inner passes', () => {});
test('inner fails', () => {});
});
`,
'b.test.ts': `
import { test, expect } from '@playwright/test';
test('passes', () => {});
test('fails', () => {});
`,
};
test('should pick new / deleted files', async ({ runUITest, writeFiles, deleteFile }) => {
const { page } = await runUITest(basicTestTree);
await expect.poll(dumpTestTree(page)).toBe(`
▼ ◯ a.test.ts
◯ passes
◯ fails
► ◯ suite
▼ ◯ b.test.ts
◯ passes
◯ fails
`);
await writeFiles({
'c.test.ts': `
import { test, expect } from '@playwright/test';
test('passes', () => {});
test('fails', () => {});
`
});
await expect.poll(dumpTestTree(page)).toBe(`
▼ ◯ a.test.ts
◯ passes
◯ fails
► ◯ suite
▼ ◯ b.test.ts
◯ passes
◯ fails
▼ ◯ c.test.ts
◯ passes
◯ fails
`);
await deleteFile('a.test.ts');
await expect.poll(dumpTestTree(page)).toBe(`
▼ ◯ b.test.ts
◯ passes
◯ fails
▼ ◯ c.test.ts
◯ passes
◯ fails
`);
});
test('should pick new / deleted tests', async ({ runUITest, writeFiles, deleteFile }) => {
const { page } = await runUITest(basicTestTree);
await expect.poll(dumpTestTree(page)).toBe(`
▼ ◯ a.test.ts
◯ passes
◯ fails
► ◯ suite
▼ ◯ b.test.ts
◯ passes
◯ fails
`);
await writeFiles({
'a.test.ts': `
import { test, expect } from '@playwright/test';
test('passes', () => {});
test('new', () => {});
test('fails', () => {});
`
});
await expect.poll(dumpTestTree(page)).toBe(`
▼ ◯ a.test.ts
◯ passes
◯ new
◯ fails
▼ ◯ b.test.ts
◯ passes
◯ fails
`);
await deleteFile('a.test.ts');
await writeFiles({
'a.test.ts': `
import { test, expect } from '@playwright/test';
test('new', () => {});
`
});
await expect.poll(dumpTestTree(page)).toBe(`
▼ ◯ a.test.ts
◯ new
▼ ◯ b.test.ts
◯ passes
◯ fails
`);
});
test('should pick new / deleted nested tests', async ({ runUITest, writeFiles, deleteFile }) => {
const { page } = await runUITest(basicTestTree);
await expect.poll(dumpTestTree(page)).toContain(`
▼ ◯ a.test.ts
◯ passes
◯ fails
► ◯ suite
`);
await page.getByText('suite').click();
await page.keyboard.press('ArrowRight');
await expect.poll(dumpTestTree(page)).toContain(`
▼ ◯ a.test.ts
◯ passes
◯ fails
▼ ◯ suite <=
◯ inner passes
◯ inner fails
`);
await writeFiles({
'a.test.ts': `
import { test, expect } from '@playwright/test';
test('passes', () => {});
test.describe('suite', () => {
test('inner new', () => {});
test('inner fails', () => {});
});
`
});
await expect.poll(dumpTestTree(page)).toContain(`
▼ ◯ a.test.ts
◯ passes
▼ ◯ suite <=
◯ inner new
◯ inner fails
`);
});
test('should update test locations', async ({ runUITest, writeFiles, deleteFile }) => {
const { page } = await runUITest({
'a.test.ts': `
import { test, expect } from '@playwright/test';
test('passes', () => {});
`,
});
await expect.poll(dumpTestTree(page)).toContain(`
▼ ◯ a.test.ts
◯ passes
`);
const messages: any = [];
await page.exposeBinding('_overrideProtocolForTest', (_, data) => messages.push(data));
const passesItemLocator = page.getByRole('listitem').filter({ hasText: 'passes' });
await passesItemLocator.hover();
await passesItemLocator.getByTitle('Open in VS Code').click();
expect(messages).toEqual([{
method: 'open',
params: {
location: expect.stringContaining('a.test.ts:3'),
},
}]);
await writeFiles({
'a.test.ts': `
import { test, expect } from '@playwright/test';
test('new-test', () => {});
test('passes', () => {});
`
});
await expect.poll(dumpTestTree(page)).toContain(`
▼ ◯ a.test.ts
◯ new-test
◯ passes <=
`);
messages.length = 0;
await passesItemLocator.hover();
await passesItemLocator.getByTitle('Open in VS Code').click();
expect(messages).toEqual([{
method: 'open',
params: {
location: expect.stringContaining('a.test.ts:5'),
},
}]);
await expect(
page.getByTestId('source-code').locator('.source-tab-file-name')
).toHaveText('a.test.ts');
await expect(page.locator('.CodeMirror-code')).toContainText(`3 test('new-test', () => {});`);
});
| tests/playwright-test/ui-mode-test-update.spec.ts | 0 | https://github.com/microsoft/playwright/commit/b2965158d3151f022cf8d76057649208cd85da0d | [
0.00017708165978547186,
0.00017314917931798846,
0.00016450858674943447,
0.00017391005530953407,
0.0000030576163680962054
] |
{
"id": 1,
"code_window": [
" }\n",
" },\n",
" \"node_modules/@playwright/test\": {\n",
" \"version\": \"1.36.0-alpha-jun-15-2023\",\n",
" \"resolved\": \"https://registry.npmjs.org/@playwright/test/-/test-1.36.0-alpha-jun-15-2023.tgz\",\n",
" \"integrity\": \"sha512-UiWKP1xv3Xo983JOViLvhQ4zluvc3sfUDkIbTpZouq5MbzXWwHoy6pzQqfvh7Wgipe4EZOSmxCkamvYjQGTSrA==\",\n",
" \"dependencies\": {\n",
" \"@types/node\": \"*\",\n"
],
"labels": [
"keep",
"keep",
"keep",
"replace",
"replace",
"replace",
"keep",
"keep"
],
"after_edit": [
" \"version\": \"1.37.0-alpha-1689796912000\",\n",
" \"resolved\": \"https://registry.npmjs.org/@playwright/test/-/test-1.37.0-alpha-1689796912000.tgz\",\n",
" \"integrity\": \"sha512-3qqZNu3yyVUl5jblbQBA5p7S4oZDYqZ0KBCMK2YPDm/WpFdhypOiOCCqeSJR0OKbvHFiKTaHlQD/dXBKjLBIDg==\",\n"
],
"file_path": "tests/playwright-test/stable-test-runner/package-lock.json",
"type": "replace",
"edit_start_line_idx": 11
} | {
"name": "stable-test-runner",
"lockfileVersion": 2,
"requires": true,
"packages": {
"": {
"dependencies": {
"@playwright/test": "1.36.0-alpha-jun-15-2023"
}
},
"node_modules/@playwright/test": {
"version": "1.36.0-alpha-jun-15-2023",
"resolved": "https://registry.npmjs.org/@playwright/test/-/test-1.36.0-alpha-jun-15-2023.tgz",
"integrity": "sha512-UiWKP1xv3Xo983JOViLvhQ4zluvc3sfUDkIbTpZouq5MbzXWwHoy6pzQqfvh7Wgipe4EZOSmxCkamvYjQGTSrA==",
"dependencies": {
"@types/node": "*",
"playwright-core": "1.36.0-alpha-jun-15-2023"
},
"bin": {
"playwright": "cli.js"
},
"engines": {
"node": ">=16"
},
"optionalDependencies": {
"fsevents": "2.3.2"
}
},
"node_modules/@types/node": {
"version": "18.0.0",
"resolved": "https://registry.npmjs.org/@types/node/-/node-18.0.0.tgz",
"integrity": "sha512-cHlGmko4gWLVI27cGJntjs/Sj8th9aYwplmZFwmmgYQQvL5NUsgVJG7OddLvNfLqYS31KFN0s3qlaD9qCaxACA=="
},
"node_modules/fsevents": {
"version": "2.3.2",
"resolved": "https://registry.npmjs.org/fsevents/-/fsevents-2.3.2.tgz",
"integrity": "sha512-xiqMQR4xAeHTuB9uWm+fFRcIOgKBMiOBP+eXiyT7jsgVCq1bkVygt00oASowB7EdtpOHaaPgKt812P9ab+DDKA==",
"hasInstallScript": true,
"optional": true,
"os": [
"darwin"
],
"engines": {
"node": "^8.16.0 || ^10.6.0 || >=11.0.0"
}
},
"node_modules/playwright-core": {
"version": "1.36.0-alpha-jun-15-2023",
"resolved": "https://registry.npmjs.org/playwright-core/-/playwright-core-1.36.0-alpha-jun-15-2023.tgz",
"integrity": "sha512-jGN89dcf3NxT+gKtW523Vq2/1QSL7Iewkq6WcootSRoAIV3hRuE901jHd/r1iEbXnnIcWwLEOPSBvuXS+UXcIQ==",
"bin": {
"playwright-core": "cli.js"
},
"engines": {
"node": ">=16"
}
}
},
"dependencies": {
"@playwright/test": {
"version": "1.36.0-alpha-jun-15-2023",
"resolved": "https://registry.npmjs.org/@playwright/test/-/test-1.36.0-alpha-jun-15-2023.tgz",
"integrity": "sha512-UiWKP1xv3Xo983JOViLvhQ4zluvc3sfUDkIbTpZouq5MbzXWwHoy6pzQqfvh7Wgipe4EZOSmxCkamvYjQGTSrA==",
"requires": {
"@types/node": "*",
"fsevents": "2.3.2",
"playwright-core": "1.36.0-alpha-jun-15-2023"
}
},
"@types/node": {
"version": "18.0.0",
"resolved": "https://registry.npmjs.org/@types/node/-/node-18.0.0.tgz",
"integrity": "sha512-cHlGmko4gWLVI27cGJntjs/Sj8th9aYwplmZFwmmgYQQvL5NUsgVJG7OddLvNfLqYS31KFN0s3qlaD9qCaxACA=="
},
"fsevents": {
"version": "2.3.2",
"resolved": "https://registry.npmjs.org/fsevents/-/fsevents-2.3.2.tgz",
"integrity": "sha512-xiqMQR4xAeHTuB9uWm+fFRcIOgKBMiOBP+eXiyT7jsgVCq1bkVygt00oASowB7EdtpOHaaPgKt812P9ab+DDKA==",
"optional": true
},
"playwright-core": {
"version": "1.36.0-alpha-jun-15-2023",
"resolved": "https://registry.npmjs.org/playwright-core/-/playwright-core-1.36.0-alpha-jun-15-2023.tgz",
"integrity": "sha512-jGN89dcf3NxT+gKtW523Vq2/1QSL7Iewkq6WcootSRoAIV3hRuE901jHd/r1iEbXnnIcWwLEOPSBvuXS+UXcIQ=="
}
}
}
| tests/playwright-test/stable-test-runner/package-lock.json | 1 | https://github.com/microsoft/playwright/commit/b2965158d3151f022cf8d76057649208cd85da0d | [
0.9934326410293579,
0.20865561068058014,
0.00016498976037837565,
0.013738798908889294,
0.36493322253227234
] |
{
"id": 1,
"code_window": [
" }\n",
" },\n",
" \"node_modules/@playwright/test\": {\n",
" \"version\": \"1.36.0-alpha-jun-15-2023\",\n",
" \"resolved\": \"https://registry.npmjs.org/@playwright/test/-/test-1.36.0-alpha-jun-15-2023.tgz\",\n",
" \"integrity\": \"sha512-UiWKP1xv3Xo983JOViLvhQ4zluvc3sfUDkIbTpZouq5MbzXWwHoy6pzQqfvh7Wgipe4EZOSmxCkamvYjQGTSrA==\",\n",
" \"dependencies\": {\n",
" \"@types/node\": \"*\",\n"
],
"labels": [
"keep",
"keep",
"keep",
"replace",
"replace",
"replace",
"keep",
"keep"
],
"after_edit": [
" \"version\": \"1.37.0-alpha-1689796912000\",\n",
" \"resolved\": \"https://registry.npmjs.org/@playwright/test/-/test-1.37.0-alpha-1689796912000.tgz\",\n",
" \"integrity\": \"sha512-3qqZNu3yyVUl5jblbQBA5p7S4oZDYqZ0KBCMK2YPDm/WpFdhypOiOCCqeSJR0OKbvHFiKTaHlQD/dXBKjLBIDg==\",\n"
],
"file_path": "tests/playwright-test/stable-test-runner/package-lock.json",
"type": "replace",
"edit_start_line_idx": 11
} | import { fileURLToPath, URL } from 'url'
import { defineConfig } from 'vite'
import vue from '@vitejs/plugin-vue'
// https://vitejs.dev/config/
export default defineConfig({
plugins: [vue()],
resolve: {
alias: {
'@': fileURLToPath(new URL('./src', import.meta.url))
}
},
})
| tests/components/ct-vue-vite/vite.config.js | 0 | https://github.com/microsoft/playwright/commit/b2965158d3151f022cf8d76057649208cd85da0d | [
0.0001676027022767812,
0.00016758816491346806,
0.00016757362755015492,
0.00016758816491346806,
1.4537363313138485e-8
] |
{
"id": 1,
"code_window": [
" }\n",
" },\n",
" \"node_modules/@playwright/test\": {\n",
" \"version\": \"1.36.0-alpha-jun-15-2023\",\n",
" \"resolved\": \"https://registry.npmjs.org/@playwright/test/-/test-1.36.0-alpha-jun-15-2023.tgz\",\n",
" \"integrity\": \"sha512-UiWKP1xv3Xo983JOViLvhQ4zluvc3sfUDkIbTpZouq5MbzXWwHoy6pzQqfvh7Wgipe4EZOSmxCkamvYjQGTSrA==\",\n",
" \"dependencies\": {\n",
" \"@types/node\": \"*\",\n"
],
"labels": [
"keep",
"keep",
"keep",
"replace",
"replace",
"replace",
"keep",
"keep"
],
"after_edit": [
" \"version\": \"1.37.0-alpha-1689796912000\",\n",
" \"resolved\": \"https://registry.npmjs.org/@playwright/test/-/test-1.37.0-alpha-1689796912000.tgz\",\n",
" \"integrity\": \"sha512-3qqZNu3yyVUl5jblbQBA5p7S4oZDYqZ0KBCMK2YPDm/WpFdhypOiOCCqeSJR0OKbvHFiKTaHlQD/dXBKjLBIDg==\",\n"
],
"file_path": "tests/playwright-test/stable-test-runner/package-lock.json",
"type": "replace",
"edit_start_line_idx": 11
} | /**
* Copyright 2018 Google Inc. All rights reserved.
* Modifications copyright (c) Microsoft Corporation.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
import { browserTest as it, expect } from '../config/browserTest';
it('should affect accept-language header @smoke', async ({ browser, server }) => {
const context = await browser.newContext({ locale: 'fr-CH' });
const page = await context.newPage();
const [request] = await Promise.all([
server.waitForRequest('/empty.html'),
page.goto(server.EMPTY_PAGE),
]);
expect((request.headers['accept-language'] as string).substr(0, 5)).toBe('fr-CH');
await context.close();
});
it('should affect navigator.language', async ({ browser }) => {
const context = await browser.newContext({ locale: 'fr-FR' });
const page = await context.newPage();
expect(await page.evaluate(() => navigator.language)).toBe('fr-FR');
await context.close();
});
it('should format number', async ({ browser, server }) => {
{
const context = await browser.newContext({ locale: 'en-US' });
const page = await context.newPage();
await page.goto(server.EMPTY_PAGE);
expect(await page.evaluate(() => (1000000.50).toLocaleString())).toBe('1,000,000.5');
await context.close();
}
{
const context = await browser.newContext({ locale: 'fr-CH' });
const page = await context.newPage();
await page.goto(server.EMPTY_PAGE);
expect(await page.evaluate(() => (1000000.50).toLocaleString().replace(/\s/g, ' '))).toBe('1 000 000,5');
await context.close();
}
});
it('should format date', async ({ browser, server, browserName }) => {
{
const context = await browser.newContext({ locale: 'en-US', timezoneId: 'America/Los_Angeles' });
const page = await context.newPage();
await page.goto(server.EMPTY_PAGE);
const formatted = 'Sat Nov 19 2016 10:12:34 GMT-0800 (Pacific Standard Time)';
expect(await page.evaluate(() => new Date(1479579154987).toString())).toBe(formatted);
await context.close();
}
{
const context = await browser.newContext({ locale: 'de-DE', timezoneId: 'Europe/Berlin' });
const page = await context.newPage();
await page.goto(server.EMPTY_PAGE);
const formatted = 'Sat Nov 19 2016 19:12:34 GMT+0100 (Mitteleuropäische Normalzeit)';
expect(await page.evaluate(() => new Date(1479579154987).toString())).toBe(formatted);
await context.close();
}
});
it('should format number in popups', async ({ browser, server }) => {
const context = await browser.newContext({ locale: 'fr-CH' });
const page = await context.newPage();
await page.goto(server.EMPTY_PAGE);
const [popup] = await Promise.all([
page.waitForEvent('popup'),
page.evaluate(url => window.open(url), server.PREFIX + '/formatted-number.html'),
]);
await popup.waitForLoadState('domcontentloaded');
const result = await popup.evaluate('window["result"]');
expect(result).toBe('1 000 000,5');
await context.close();
});
it('should affect navigator.language in popups', async ({ browser, server }) => {
const context = await browser.newContext({ locale: 'fr-FR' });
const page = await context.newPage();
await page.goto(server.EMPTY_PAGE);
const [popup] = await Promise.all([
page.waitForEvent('popup'),
page.evaluate(url => window.open(url), server.PREFIX + '/formatted-number.html'),
]);
await popup.waitForLoadState('domcontentloaded');
const result = await popup.evaluate('window.initialNavigatorLanguage');
expect(result).toBe('fr-FR');
await context.close();
});
it('should work for multiple pages sharing same process', async ({ browser, server }) => {
const context = await browser.newContext({ locale: 'ru-RU' });
const page = await context.newPage();
await page.goto(server.EMPTY_PAGE);
let [popup] = await Promise.all([
page.waitForEvent('popup'),
page.evaluate(url => { window.open(url); }, server.EMPTY_PAGE),
]);
[popup] = await Promise.all([
popup.waitForEvent('popup'),
popup.evaluate(url => { window.open(url); }, server.EMPTY_PAGE),
]);
await context.close();
});
it('should be isolated between contexts', async ({ browser, server }) => {
const context1 = await browser.newContext({ locale: 'en-US' });
const promises = [];
// By default firefox limits number of child web processes to 8.
for (let i = 0; i < 8; i++)
promises.push(context1.newPage());
await Promise.all(promises);
const context2 = await browser.newContext({ locale: 'ru-RU' });
const page2 = await context2.newPage();
const localeNumber = () => (1000000.50).toLocaleString();
const numbers = await Promise.all(context1.pages().map(page => page.evaluate(localeNumber)));
numbers.forEach(value => expect(value).toBe('1,000,000.5'));
expect(await page2.evaluate(localeNumber)).toBe('1 000 000,5');
await Promise.all([
context1.close(),
context2.close()
]);
});
it('should not change default locale in another context', async ({ browser }) => {
async function getContextLocale(context) {
const page = await context.newPage();
return await page.evaluate(() => (new Intl.NumberFormat()).resolvedOptions().locale);
}
let defaultLocale;
{
const context = await browser.newContext();
defaultLocale = await getContextLocale(context);
await context.close();
}
const localeOverride = defaultLocale === 'es-MX' ? 'de-DE' : 'es-MX';
{
const context = await browser.newContext({ locale: localeOverride });
expect(await getContextLocale(context)).toBe(localeOverride);
await context.close();
}
{
const context = await browser.newContext();
expect(await getContextLocale(context)).toBe(defaultLocale);
await context.close();
}
});
it('should format number in workers', async ({ browser, server }) => {
const context = await browser.newContext({ locale: 'es-MX' });
const page = await context.newPage();
await page.goto(server.EMPTY_PAGE);
const [worker] = await Promise.all([
page.waitForEvent('worker'),
page.evaluate(() => new Worker(URL.createObjectURL(new Blob(['console.log(1)'], { type: 'application/javascript' })))),
]);
expect(await worker.evaluate(() => (10000.20).toLocaleString())).toBe('10,000.2');
await context.close();
});
| tests/library/browsercontext-locale.spec.ts | 0 | https://github.com/microsoft/playwright/commit/b2965158d3151f022cf8d76057649208cd85da0d | [
0.0001743633474688977,
0.00017183407908305526,
0.00016554493049625307,
0.00017262033361475915,
0.000002280847411384457
] |
{
"id": 1,
"code_window": [
" }\n",
" },\n",
" \"node_modules/@playwright/test\": {\n",
" \"version\": \"1.36.0-alpha-jun-15-2023\",\n",
" \"resolved\": \"https://registry.npmjs.org/@playwright/test/-/test-1.36.0-alpha-jun-15-2023.tgz\",\n",
" \"integrity\": \"sha512-UiWKP1xv3Xo983JOViLvhQ4zluvc3sfUDkIbTpZouq5MbzXWwHoy6pzQqfvh7Wgipe4EZOSmxCkamvYjQGTSrA==\",\n",
" \"dependencies\": {\n",
" \"@types/node\": \"*\",\n"
],
"labels": [
"keep",
"keep",
"keep",
"replace",
"replace",
"replace",
"keep",
"keep"
],
"after_edit": [
" \"version\": \"1.37.0-alpha-1689796912000\",\n",
" \"resolved\": \"https://registry.npmjs.org/@playwright/test/-/test-1.37.0-alpha-1689796912000.tgz\",\n",
" \"integrity\": \"sha512-3qqZNu3yyVUl5jblbQBA5p7S4oZDYqZ0KBCMK2YPDm/WpFdhypOiOCCqeSJR0OKbvHFiKTaHlQD/dXBKjLBIDg==\",\n"
],
"file_path": "tests/playwright-test/stable-test-runner/package-lock.json",
"type": "replace",
"edit_start_line_idx": 11
} | <template>
<div>
<header>
<slot name="header" />
</header>
<main>
<slot name="main" />
</main>
<footer>
<slot name="footer" />
</footer>
</div>
</template>
| tests/components/ct-vue2-cli/src/components/NamedSlots.vue | 0 | https://github.com/microsoft/playwright/commit/b2965158d3151f022cf8d76057649208cd85da0d | [
0.00017202629533130676,
0.00017005416157189757,
0.00016808202781248838,
0.00017005416157189757,
0.0000019721337594091892
] |
{
"id": 2,
"code_window": [
" \"dependencies\": {\n",
" \"@types/node\": \"*\",\n",
" \"playwright-core\": \"1.36.0-alpha-jun-15-2023\"\n",
" },\n",
" \"bin\": {\n",
" \"playwright\": \"cli.js\"\n",
" },\n"
],
"labels": [
"keep",
"keep",
"replace",
"keep",
"keep",
"keep",
"keep"
],
"after_edit": [
" \"playwright-core\": \"1.37.0-alpha-1689796912000\"\n"
],
"file_path": "tests/playwright-test/stable-test-runner/package-lock.json",
"type": "replace",
"edit_start_line_idx": 16
} | {
"private": true,
"dependencies": {
"@playwright/test": "1.36.0-alpha-jun-15-2023"
}
}
| tests/playwright-test/stable-test-runner/package.json | 1 | https://github.com/microsoft/playwright/commit/b2965158d3151f022cf8d76057649208cd85da0d | [
0.23476722836494446,
0.23476722836494446,
0.23476722836494446,
0.23476722836494446,
0
] |
{
"id": 2,
"code_window": [
" \"dependencies\": {\n",
" \"@types/node\": \"*\",\n",
" \"playwright-core\": \"1.36.0-alpha-jun-15-2023\"\n",
" },\n",
" \"bin\": {\n",
" \"playwright\": \"cli.js\"\n",
" },\n"
],
"labels": [
"keep",
"keep",
"replace",
"keep",
"keep",
"keep",
"keep"
],
"after_edit": [
" \"playwright-core\": \"1.37.0-alpha-1689796912000\"\n"
],
"file_path": "tests/playwright-test/stable-test-runner/package-lock.json",
"type": "replace",
"edit_start_line_idx": 16
} | /**
* Copyright 2018 Google Inc. All rights reserved.
* Modifications copyright (c) Microsoft Corporation.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
import { expect, browserTest } from '../config/browserTest';
import { PNG } from 'playwright-core/lib/utilsBundle';
import { verifyViewport } from '../config/utils';
browserTest.describe('page screenshot', () => {
browserTest.skip(({ browserName, headless }) => browserName === 'firefox' && !headless, 'Firefox headed produces a different image.');
browserTest('should run in parallel in multiple pages', async ({ server, contextFactory }) => {
const context = await contextFactory();
const N = 5;
const pages = await Promise.all(Array(N).fill(0).map(async () => {
const page = await context.newPage();
await page.goto(server.PREFIX + '/grid.html');
return page;
}));
const promises = [];
for (let i = 0; i < N; ++i)
promises.push(pages[i].screenshot({ clip: { x: 50 * (i % 2), y: 0, width: 50, height: 50 } }));
const screenshots = await Promise.all(promises);
for (let i = 0; i < N; ++i)
expect(screenshots[i]).toMatchSnapshot(`grid-cell-${i % 2}.png`);
await Promise.all(pages.map(page => page.close()));
});
browserTest('should work with a mobile viewport', async ({ browser, server, browserName }) => {
browserTest.skip(browserName === 'firefox');
const context = await browser.newContext({ viewport: { width: 320, height: 480 }, isMobile: true });
const page = await context.newPage();
await page.goto(server.PREFIX + '/overflow.html');
const screenshot = await page.screenshot();
expect(screenshot).toMatchSnapshot('screenshot-mobile.png');
await context.close();
});
browserTest('should work with a mobile viewport and clip', async ({ browser, server, browserName, channel }) => {
browserTest.skip(browserName === 'firefox');
const context = await browser.newContext({ viewport: { width: 320, height: 480 }, isMobile: true });
const page = await context.newPage();
await page.goto(server.PREFIX + '/overflow.html');
const screenshot = await page.screenshot({ clip: { x: 10, y: 10, width: 100, height: 150 } });
expect(screenshot).toMatchSnapshot('screenshot-mobile-clip.png');
await context.close();
});
browserTest('should work with a mobile viewport and fullPage', async ({ browser, server, browserName }) => {
browserTest.skip(browserName === 'firefox');
const context = await browser.newContext({ viewport: { width: 320, height: 480 }, isMobile: true });
const page = await context.newPage();
await page.goto(server.PREFIX + '/overflow-large.html');
const screenshot = await page.screenshot({ fullPage: true });
expect(screenshot).toMatchSnapshot('screenshot-mobile-fullpage.png');
await context.close();
});
browserTest('should work with device scale factor', async ({ browser, server, isMac, browserName }) => {
browserTest.fixme(isMac && browserName === 'webkit');
const context = await browser.newContext({ viewport: { width: 320, height: 480 }, deviceScaleFactor: 2 });
const page = await context.newPage();
await page.goto(server.PREFIX + '/grid.html');
const screenshot = await page.screenshot();
expect(screenshot).toMatchSnapshot('screenshot-device-scale-factor.png');
await context.close();
});
browserTest('should work with device scale factor and clip', async ({ browser, server }) => {
const context = await browser.newContext({ viewport: { width: 500, height: 500 }, deviceScaleFactor: 3 });
const page = await context.newPage();
await page.goto(server.PREFIX + '/grid.html');
const screenshot = await page.screenshot({ clip: { x: 50, y: 100, width: 150, height: 100 } });
expect(screenshot).toMatchSnapshot('screenshot-device-scale-factor-clip.png');
await context.close();
});
browserTest('should work with device scale factor and scale:css', async ({ browser, server }) => {
const context = await browser.newContext({ viewport: { width: 320, height: 480 }, deviceScaleFactor: 2 });
const page = await context.newPage();
await page.goto(server.PREFIX + '/grid.html');
const screenshot = await page.screenshot({ scale: 'css' });
expect(screenshot).toMatchSnapshot('screenshot-device-scale-factor-css-size.png');
await context.close();
});
browserTest('should work with device scale factor, clip and scale:css', async ({ browser, server }) => {
const context = await browser.newContext({ viewport: { width: 500, height: 500 }, deviceScaleFactor: 3 });
const page = await context.newPage();
await page.goto(server.PREFIX + '/grid.html');
const screenshot = await page.screenshot({ clip: { x: 50, y: 100, width: 150, height: 100 }, scale: 'css' });
expect(screenshot).toMatchSnapshot('screenshot-device-scale-factor-clip-css-size.png');
await context.close();
});
browserTest('should throw if screenshot size is too large with device scale factor', async ({ browser, browserName, isMac }) => {
browserTest.info().annotations.push({ type: 'issue', description: 'https://github.com/microsoft/playwright/issues/16727' });
const context = await browser.newContext({ viewport: { width: 500, height: 500 }, deviceScaleFactor: 2 });
const page = await context.newPage();
{
await page.setContent(`<style>body {margin: 0; padding: 0;}</style><div style='min-height: 16383px; background: red;'></div>`);
const result = await page.screenshot({ fullPage: true });
expect(result).toBeTruthy();
}
{
await page.setContent(`<style>body {margin: 0; padding: 0;}</style><div style='min-height: 16384px; background: red;'></div>`);
const exception = await page.screenshot({ fullPage: true }).catch(e => e);
if (browserName === 'firefox' || (browserName === 'webkit' && !isMac))
expect(exception.message).toContain('Cannot take screenshot larger than 32767');
const image = await page.screenshot({ fullPage: true, scale: 'css' });
expect(image).toBeTruthy();
}
await context.close();
});
browserTest('should work with large size', async ({ browserName, headless, platform, contextFactory }) => {
browserTest.fixme(browserName === 'chromium' && !headless && platform === 'linux', 'Chromium has gpu problems on linux with large screenshots');
browserTest.slow(true, 'Large screenshot is slow');
const context = await contextFactory();
const page = await context.newPage();
await page.setViewportSize({ width: 1280, height: 800 });
await page.evaluate(() => {
document.body.style.margin = '0';
document.body.style.padding = '0';
document.documentElement.style.margin = '0';
document.documentElement.style.padding = '0';
const div = document.createElement('div');
div.style.width = '1250px';
div.style.height = '8440px';
div.style.background = 'linear-gradient(red, blue)';
document.body.appendChild(div);
});
const buffer = await page.screenshot({ fullPage: true });
const decoded = PNG.sync.read(buffer);
const pixel = (x: number, y: number) => {
const dst = new PNG({ width: 1, height: 1 });
PNG.bitblt(decoded, dst, x, y, 1, 1);
const pixels = dst.data;
return { r: pixels[0], g: pixels[1], b: pixels[2], a: pixels[3] };
};
expect(pixel(0, 0).r).toBeGreaterThan(128);
expect(pixel(0, 0).b).toBeLessThan(128);
expect(pixel(0, 8339).r).toBeLessThan(128);
expect(pixel(0, 8339).b).toBeGreaterThan(128);
});
browserTest('should handle vh units ', async ({ contextFactory }) => {
const context = await contextFactory();
const page = await context.newPage();
await page.setViewportSize({ width: 800, height: 500 });
await page.evaluate(() => {
document.body.style.margin = '0';
document.body.style.padding = '0';
document.documentElement.style.margin = '0';
document.documentElement.style.padding = '0';
const div = document.createElement('div');
div.style.width = '100%';
div.style.borderTop = '100vh solid red';
div.style.borderBottom = '100vh solid blue';
document.body.appendChild(div);
});
const buffer = await page.screenshot({ fullPage: true });
const decoded = PNG.sync.read(buffer);
const pixel = (x: number, y: number) => {
const dst = new PNG({ width: 1, height: 1 });
PNG.bitblt(decoded, dst, x, y, 1, 1);
const pixels = dst.data;
return { r: pixels[0], g: pixels[1], b: pixels[2], a: pixels[3] };
};
expect(pixel(0, 0).r).toBeGreaterThan(128);
expect(pixel(0, 0).b).toBeLessThan(128);
expect(pixel(0, 999).r).toBeLessThan(128);
expect(pixel(0, 999).b).toBeGreaterThan(128);
});
});
browserTest.describe('element screenshot', () => {
browserTest.skip(({ browserName, headless }) => browserName === 'firefox' && !headless);
browserTest('element screenshot should work with a mobile viewport', async ({ browser, server, browserName }) => {
browserTest.skip(browserName === 'firefox');
const context = await browser.newContext({ viewport: { width: 320, height: 480 }, isMobile: true });
const page = await context.newPage();
await page.goto(server.PREFIX + '/grid.html');
await page.evaluate(() => window.scrollBy(50, 100));
const elementHandle = await page.$('.box:nth-of-type(3)');
const screenshot = await elementHandle.screenshot();
expect(screenshot).toMatchSnapshot('screenshot-element-mobile.png');
await context.close();
});
browserTest('element screenshot should work with device scale factor', async ({ browser, server, browserName, isMac }) => {
browserTest.skip(browserName === 'firefox');
browserTest.fixme(isMac && browserName === 'webkit');
const context = await browser.newContext({ viewport: { width: 320, height: 480 }, deviceScaleFactor: 2 });
const page = await context.newPage();
await page.goto(server.PREFIX + '/grid.html');
await page.evaluate(() => window.scrollBy(50, 100));
const elementHandle = await page.$('.box:nth-of-type(3)');
const screenshot = await elementHandle.screenshot();
expect(screenshot).toMatchSnapshot('screenshot-element-mobile-dsf.png');
await context.close();
});
browserTest('should take screenshots when default viewport is null', async ({ server, browser }) => {
const context = await browser.newContext({ viewport: null });
const page = await context.newPage();
await page.setContent(`<div style='height: 10000px; background: red'></div>`);
const windowSize = await page.evaluate(() => ({ width: window.innerWidth * window.devicePixelRatio, height: window.innerHeight * window.devicePixelRatio }));
const sizeBefore = await page.evaluate(() => ({ width: document.body.offsetWidth, height: document.body.offsetHeight }));
const screenshot = await page.screenshot();
expect(screenshot).toBeInstanceOf(Buffer);
const decoded = PNG.sync.read(screenshot);
expect(decoded.width).toBe(windowSize.width);
expect(decoded.height).toBe(windowSize.height);
const sizeAfter = await page.evaluate(() => ({ width: document.body.offsetWidth, height: document.body.offsetHeight }));
expect(sizeBefore.width).toBe(sizeAfter.width);
expect(sizeBefore.height).toBe(sizeAfter.height);
await context.close();
});
browserTest('should take fullPage screenshots when default viewport is null', async ({ server, browser }) => {
const context = await browser.newContext({ viewport: null });
const page = await context.newPage();
await page.goto(server.PREFIX + '/grid.html');
const sizeBefore = await page.evaluate(() => ({ width: document.body.offsetWidth, height: document.body.offsetHeight }));
const screenshot = await page.screenshot({
fullPage: true
});
expect(screenshot).toBeInstanceOf(Buffer);
const sizeAfter = await page.evaluate(() => ({ width: document.body.offsetWidth, height: document.body.offsetHeight }));
expect(sizeBefore.width).toBe(sizeAfter.width);
expect(sizeBefore.height).toBe(sizeAfter.height);
await context.close();
});
browserTest('should restore default viewport after fullPage screenshot', async ({ browser }) => {
const context = await browser.newContext({ viewport: { width: 456, height: 789 } });
const page = await context.newPage();
await verifyViewport(page, 456, 789);
const screenshot = await page.screenshot({ fullPage: true });
expect(screenshot).toBeInstanceOf(Buffer);
await verifyViewport(page, 456, 789);
await context.close();
});
browserTest('should restore viewport after page screenshot and exception', async ({ browser, server, mode }) => {
browserTest.skip(mode !== 'default');
const context = await browser.newContext({ viewport: { width: 350, height: 360 } });
const page = await context.newPage();
await page.goto(server.PREFIX + '/grid.html');
const __testHookBeforeScreenshot = () => { throw new Error('oh my'); };
const error = await page.screenshot({ fullPage: true, __testHookBeforeScreenshot } as any).catch(e => e);
expect(error.message).toContain('oh my');
await verifyViewport(page, 350, 360);
await context.close();
});
browserTest('should restore viewport after page screenshot and timeout', async ({ browser, server, mode }) => {
browserTest.skip(mode !== 'default');
const context = await browser.newContext({ viewport: { width: 350, height: 360 } });
const page = await context.newPage();
await page.goto(server.PREFIX + '/grid.html');
const __testHookAfterScreenshot = () => new Promise(f => setTimeout(f, 5000));
const error = await page.screenshot({ fullPage: true, __testHookAfterScreenshot, timeout: 3000 } as any).catch(e => e);
expect(error.message).toContain('page.screenshot: Timeout 3000ms exceeded');
await verifyViewport(page, 350, 360);
await page.setViewportSize({ width: 400, height: 400 });
await page.waitForTimeout(3000); // Give it some time to wrongly restore previous viewport.
await verifyViewport(page, 400, 400);
await context.close();
});
browserTest('should take element screenshot when default viewport is null and restore back', async ({ server, browser }) => {
const context = await browser.newContext({ viewport: null });
const page = await context.newPage();
await page.setContent(`
<div style="height: 14px">oooo</div>
<style>
div.to-screenshot {
border: 1px solid blue;
width: 600px;
height: 600px;
margin-left: 50px;
}
::-webkit-scrollbar{
display: none;
}
</style>
<div class="to-screenshot"></div>
<div class="to-screenshot"></div>
<div class="to-screenshot"></div>
`);
const sizeBefore = await page.evaluate(() => ({ width: document.body.offsetWidth, height: document.body.offsetHeight }));
const elementHandle = await page.$('div.to-screenshot');
const screenshot = await elementHandle.screenshot();
expect(screenshot).toBeInstanceOf(Buffer);
const sizeAfter = await page.evaluate(() => ({ width: document.body.offsetWidth, height: document.body.offsetHeight }));
expect(sizeBefore.width).toBe(sizeAfter.width);
expect(sizeBefore.height).toBe(sizeAfter.height);
await context.close();
});
browserTest('should restore viewport after element screenshot and exception', async ({ browser, mode }) => {
browserTest.skip(mode !== 'default');
const context = await browser.newContext({ viewport: { width: 350, height: 360 } });
const page = await context.newPage();
await page.setContent(`<div style="width:600px;height:600px;"></div>`);
const elementHandle = await page.$('div');
const __testHookBeforeScreenshot = () => { throw new Error('oh my'); };
const error = await elementHandle.screenshot({ __testHookBeforeScreenshot } as any).catch(e => e);
expect(error.message).toContain('oh my');
await verifyViewport(page, 350, 360);
await context.close();
});
browserTest('element screenshots should handle vh units ', async ({ contextFactory }) => {
const context = await contextFactory();
const page = await context.newPage();
await page.setViewportSize({ width: 800, height: 500 });
await page.evaluate(() => {
const div = document.createElement('div');
div.style.width = '100%';
div.style.borderTop = '100vh solid red';
div.style.borderBottom = '100vh solid blue';
document.body.appendChild(div);
});
const elementHandle = await page.$('div');
const buffer = await elementHandle.screenshot();
const decoded = PNG.sync.read(buffer);
const pixel = (x: number, y: number) => {
const dst = new PNG({ width: 1, height: 1 });
PNG.bitblt(decoded, dst, x, y, 1, 1);
const pixels = dst.data;
return { r: pixels[0], g: pixels[1], b: pixels[2], a: pixels[3] };
};
expect(pixel(0, 0).r).toBeGreaterThan(128);
expect(pixel(0, 0).b).toBeLessThan(128);
expect(pixel(0, 999).r).toBeLessThan(128);
expect(pixel(0, 999).b).toBeGreaterThan(128);
});
browserTest('should work if the main resource hangs', async ({ browser, browserName, mode, server }) => {
browserTest.skip(mode !== 'default');
browserTest.skip(browserName === 'chromium', 'https://github.com/microsoft/playwright/issues/9757');
const page = await browser.newPage();
server.setRoute('/slow', (req, res) => {
res.writeHead(200, {
'content-length': 4096,
'content-type': 'text/html',
});
});
try {
await page.goto(server.PREFIX + '/slow', { timeout: 1000 }).catch(() => {});
const screenshot = await page.screenshot();
expect(screenshot).toMatchSnapshot('hanging-main-resource.png');
} finally {
await page.close();
}
});
browserTest('should capture full element when larger than viewport with device scale factor', async ({ browser }) => {
const context = await browser.newContext({ viewport: { width: 501, height: 501 }, deviceScaleFactor: 2.5 });
const page = await context.newPage();
await page.setContent(`
<div style="height: 14px">oooo</div>
<style>
div.to-screenshot {
border: 4px solid red;
box-sizing: border-box;
width: 600px;
height: 600px;
margin-left: 50px;
background: rgb(0, 100, 200);
}
::-webkit-scrollbar{
display: none;
}
</style>
<div class="to-screenshot"></div>
`);
const screenshot = await page.locator('div.to-screenshot').screenshot();
expect(screenshot).toMatchSnapshot('element-larger-than-viewport-dsf.png');
await context.close();
});
browserTest('should capture full element when larger than viewport with device scale factor and scale:css', async ({ browser }) => {
const context = await browser.newContext({ viewport: { width: 501, height: 501 }, deviceScaleFactor: 2.5 });
const page = await context.newPage();
await page.setContent(`
<div style="height: 14px">oooo</div>
<style>
div.to-screenshot {
border: 4px solid red;
box-sizing: border-box;
width: 600px;
height: 600px;
margin-left: 50px;
background: rgb(0, 100, 200);
}
::-webkit-scrollbar{
display: none;
}
</style>
<div class="to-screenshot"></div>
`);
const screenshot = await page.locator('div.to-screenshot').screenshot({ scale: 'css' });
expect(screenshot).toMatchSnapshot('element-larger-than-viewport-dsf-css-size.png');
await context.close();
});
});
| tests/library/screenshot.spec.ts | 0 | https://github.com/microsoft/playwright/commit/b2965158d3151f022cf8d76057649208cd85da0d | [
0.00017743813805282116,
0.00017282077169511467,
0.0001671826175879687,
0.000172811807715334,
0.0000020443633275135653
] |
{
"id": 2,
"code_window": [
" \"dependencies\": {\n",
" \"@types/node\": \"*\",\n",
" \"playwright-core\": \"1.36.0-alpha-jun-15-2023\"\n",
" },\n",
" \"bin\": {\n",
" \"playwright\": \"cli.js\"\n",
" },\n"
],
"labels": [
"keep",
"keep",
"replace",
"keep",
"keep",
"keep",
"keep"
],
"after_edit": [
" \"playwright-core\": \"1.37.0-alpha-1689796912000\"\n"
],
"file_path": "tests/playwright-test/stable-test-runner/package-lock.json",
"type": "replace",
"edit_start_line_idx": 16
} | /**
* Copyright (c) Microsoft Corporation.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
import type { CallMetadata } from '../instrumentation';
import type { CallLog, CallLogStatus } from '@recorder/recorderTypes';
export function metadataToCallLog(metadata: CallMetadata, status: CallLogStatus): CallLog {
let title = metadata.apiName || metadata.method;
if (metadata.method === 'waitForEventInfo')
title += `(${metadata.params.info.event})`;
title = title.replace('object.expect', 'expect');
if (metadata.error)
status = 'error';
const params = {
url: metadata.params?.url,
selector: metadata.params?.selector,
};
let duration = metadata.endTime ? metadata.endTime - metadata.startTime : undefined;
if (typeof duration === 'number' && metadata.pauseStartTime && metadata.pauseEndTime) {
duration -= (metadata.pauseEndTime - metadata.pauseStartTime);
duration = Math.max(duration, 0);
}
const callLog: CallLog = {
id: metadata.id,
messages: metadata.log,
title,
status,
error: metadata.error?.error?.message,
params,
duration,
};
return callLog;
}
| packages/playwright-core/src/server/recorder/recorderUtils.ts | 0 | https://github.com/microsoft/playwright/commit/b2965158d3151f022cf8d76057649208cd85da0d | [
0.00017420697258785367,
0.00017231873061973602,
0.00017042839317582548,
0.00017262207984458655,
0.0000013533576748159248
] |
{
"id": 2,
"code_window": [
" \"dependencies\": {\n",
" \"@types/node\": \"*\",\n",
" \"playwright-core\": \"1.36.0-alpha-jun-15-2023\"\n",
" },\n",
" \"bin\": {\n",
" \"playwright\": \"cli.js\"\n",
" },\n"
],
"labels": [
"keep",
"keep",
"replace",
"keep",
"keep",
"keep",
"keep"
],
"after_edit": [
" \"playwright-core\": \"1.37.0-alpha-1689796912000\"\n"
],
"file_path": "tests/playwright-test/stable-test-runner/package-lock.json",
"type": "replace",
"edit_start_line_idx": 16
} | <!doctype html>
<html>
<head>
<title>Name test case 619</title>
<meta content="text/html; charset=utf-8" http-equiv="Content-Type"/>
<link rel="stylesheet" href="/wai-aria/scripts/manual.css">
<script src="/resources/testharness.js"></script>
<script src="/resources/testharnessreport.js"></script>
<script src="/wai-aria/scripts/ATTAcomm.js"></script>
<script>
setup({explicit_timeout: true, explicit_done: true });
var theTest = new ATTAcomm(
{
"steps" : [
{
"element" : "test",
"test" : {
"ATK" : [
[
"property",
"name",
"is",
"foo bar baz"
]
],
"AXAPI" : [
[
"property",
"AXDescription",
"is",
"foo bar baz"
]
],
"IAccessible2" : [
[
"property",
"accName",
"is",
"foo bar baz"
]
],
"UIA" : [
[
"property",
"Name",
"is",
"foo bar baz"
]
]
},
"title" : "step 1",
"type" : "test"
}
],
"title" : "Name test case 619"
}
) ;
</script>
</head>
<body>
<p>This test examines the ARIA properties for Name test case 619.</p>
<input type="password" id="test">
<label for="test">foo<input type="text" value="bar">baz</label>
<div id="manualMode"></div>
<div id="log"></div>
<div id="ATTAmessages"></div>
</body>
</html>
| tests/assets/wpt/accname/name_test_case_619-manual.html | 0 | https://github.com/microsoft/playwright/commit/b2965158d3151f022cf8d76057649208cd85da0d | [
0.00017506987205706537,
0.000172129221027717,
0.0001700331486063078,
0.00017192668747156858,
0.0000017049432017302024
] |
{
"id": 3,
"code_window": [
" }\n",
" },\n",
" \"node_modules/playwright-core\": {\n",
" \"version\": \"1.36.0-alpha-jun-15-2023\",\n",
" \"resolved\": \"https://registry.npmjs.org/playwright-core/-/playwright-core-1.36.0-alpha-jun-15-2023.tgz\",\n",
" \"integrity\": \"sha512-jGN89dcf3NxT+gKtW523Vq2/1QSL7Iewkq6WcootSRoAIV3hRuE901jHd/r1iEbXnnIcWwLEOPSBvuXS+UXcIQ==\",\n",
" \"bin\": {\n",
" \"playwright-core\": \"cli.js\"\n",
" },\n"
],
"labels": [
"keep",
"keep",
"keep",
"replace",
"replace",
"replace",
"keep",
"keep",
"keep"
],
"after_edit": [
" \"version\": \"1.37.0-alpha-1689796912000\",\n",
" \"resolved\": \"https://registry.npmjs.org/playwright-core/-/playwright-core-1.37.0-alpha-1689796912000.tgz\",\n",
" \"integrity\": \"sha512-lHwiTFeWgeqFWION9eZ1r7zhKfOmHB1tZyXHlHN6iVsqEPVuYlHwJWAXTJviddRDKPfd3fORoeOMKwztHO5Lvw==\",\n"
],
"file_path": "tests/playwright-test/stable-test-runner/package-lock.json",
"type": "replace",
"edit_start_line_idx": 47
} | {
"name": "stable-test-runner",
"lockfileVersion": 2,
"requires": true,
"packages": {
"": {
"dependencies": {
"@playwright/test": "1.36.0-alpha-jun-15-2023"
}
},
"node_modules/@playwright/test": {
"version": "1.36.0-alpha-jun-15-2023",
"resolved": "https://registry.npmjs.org/@playwright/test/-/test-1.36.0-alpha-jun-15-2023.tgz",
"integrity": "sha512-UiWKP1xv3Xo983JOViLvhQ4zluvc3sfUDkIbTpZouq5MbzXWwHoy6pzQqfvh7Wgipe4EZOSmxCkamvYjQGTSrA==",
"dependencies": {
"@types/node": "*",
"playwright-core": "1.36.0-alpha-jun-15-2023"
},
"bin": {
"playwright": "cli.js"
},
"engines": {
"node": ">=16"
},
"optionalDependencies": {
"fsevents": "2.3.2"
}
},
"node_modules/@types/node": {
"version": "18.0.0",
"resolved": "https://registry.npmjs.org/@types/node/-/node-18.0.0.tgz",
"integrity": "sha512-cHlGmko4gWLVI27cGJntjs/Sj8th9aYwplmZFwmmgYQQvL5NUsgVJG7OddLvNfLqYS31KFN0s3qlaD9qCaxACA=="
},
"node_modules/fsevents": {
"version": "2.3.2",
"resolved": "https://registry.npmjs.org/fsevents/-/fsevents-2.3.2.tgz",
"integrity": "sha512-xiqMQR4xAeHTuB9uWm+fFRcIOgKBMiOBP+eXiyT7jsgVCq1bkVygt00oASowB7EdtpOHaaPgKt812P9ab+DDKA==",
"hasInstallScript": true,
"optional": true,
"os": [
"darwin"
],
"engines": {
"node": "^8.16.0 || ^10.6.0 || >=11.0.0"
}
},
"node_modules/playwright-core": {
"version": "1.36.0-alpha-jun-15-2023",
"resolved": "https://registry.npmjs.org/playwright-core/-/playwright-core-1.36.0-alpha-jun-15-2023.tgz",
"integrity": "sha512-jGN89dcf3NxT+gKtW523Vq2/1QSL7Iewkq6WcootSRoAIV3hRuE901jHd/r1iEbXnnIcWwLEOPSBvuXS+UXcIQ==",
"bin": {
"playwright-core": "cli.js"
},
"engines": {
"node": ">=16"
}
}
},
"dependencies": {
"@playwright/test": {
"version": "1.36.0-alpha-jun-15-2023",
"resolved": "https://registry.npmjs.org/@playwright/test/-/test-1.36.0-alpha-jun-15-2023.tgz",
"integrity": "sha512-UiWKP1xv3Xo983JOViLvhQ4zluvc3sfUDkIbTpZouq5MbzXWwHoy6pzQqfvh7Wgipe4EZOSmxCkamvYjQGTSrA==",
"requires": {
"@types/node": "*",
"fsevents": "2.3.2",
"playwright-core": "1.36.0-alpha-jun-15-2023"
}
},
"@types/node": {
"version": "18.0.0",
"resolved": "https://registry.npmjs.org/@types/node/-/node-18.0.0.tgz",
"integrity": "sha512-cHlGmko4gWLVI27cGJntjs/Sj8th9aYwplmZFwmmgYQQvL5NUsgVJG7OddLvNfLqYS31KFN0s3qlaD9qCaxACA=="
},
"fsevents": {
"version": "2.3.2",
"resolved": "https://registry.npmjs.org/fsevents/-/fsevents-2.3.2.tgz",
"integrity": "sha512-xiqMQR4xAeHTuB9uWm+fFRcIOgKBMiOBP+eXiyT7jsgVCq1bkVygt00oASowB7EdtpOHaaPgKt812P9ab+DDKA==",
"optional": true
},
"playwright-core": {
"version": "1.36.0-alpha-jun-15-2023",
"resolved": "https://registry.npmjs.org/playwright-core/-/playwright-core-1.36.0-alpha-jun-15-2023.tgz",
"integrity": "sha512-jGN89dcf3NxT+gKtW523Vq2/1QSL7Iewkq6WcootSRoAIV3hRuE901jHd/r1iEbXnnIcWwLEOPSBvuXS+UXcIQ=="
}
}
}
| tests/playwright-test/stable-test-runner/package-lock.json | 1 | https://github.com/microsoft/playwright/commit/b2965158d3151f022cf8d76057649208cd85da0d | [
0.9937735199928284,
0.2924562990665436,
0.00017584464512765408,
0.0031694932840764523,
0.4070533514022827
] |
{
"id": 3,
"code_window": [
" }\n",
" },\n",
" \"node_modules/playwright-core\": {\n",
" \"version\": \"1.36.0-alpha-jun-15-2023\",\n",
" \"resolved\": \"https://registry.npmjs.org/playwright-core/-/playwright-core-1.36.0-alpha-jun-15-2023.tgz\",\n",
" \"integrity\": \"sha512-jGN89dcf3NxT+gKtW523Vq2/1QSL7Iewkq6WcootSRoAIV3hRuE901jHd/r1iEbXnnIcWwLEOPSBvuXS+UXcIQ==\",\n",
" \"bin\": {\n",
" \"playwright-core\": \"cli.js\"\n",
" },\n"
],
"labels": [
"keep",
"keep",
"keep",
"replace",
"replace",
"replace",
"keep",
"keep",
"keep"
],
"after_edit": [
" \"version\": \"1.37.0-alpha-1689796912000\",\n",
" \"resolved\": \"https://registry.npmjs.org/playwright-core/-/playwright-core-1.37.0-alpha-1689796912000.tgz\",\n",
" \"integrity\": \"sha512-lHwiTFeWgeqFWION9eZ1r7zhKfOmHB1tZyXHlHN6iVsqEPVuYlHwJWAXTJviddRDKPfd3fORoeOMKwztHO5Lvw==\",\n"
],
"file_path": "tests/playwright-test/stable-test-runner/package-lock.json",
"type": "replace",
"edit_start_line_idx": 47
} | /**
* Copyright 2017 Google Inc. All rights reserved.
* Modifications copyright (c) Microsoft Corporation.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
import { test as it, expect } from './pageTest';
it.skip(({ isWebView2 }) => isWebView2, 'Page.close() is not supported in WebView2');
it('should close page with active dialog', async ({ page }) => {
await page.setContent(`<button onclick="setTimeout(() => alert(1))">alert</button>`);
void page.click('button');
await page.waitForEvent('dialog');
await page.close();
});
it('should not accept after close', async ({ page }) => {
page.evaluate(() => alert()).catch(() => {});
const dialog = await page.waitForEvent('dialog');
await page.close();
const e = await dialog.dismiss().catch(e => e);
expect(e.message).toContain('Target page, context or browser has been closed');
});
| tests/page/page-close.spec.ts | 0 | https://github.com/microsoft/playwright/commit/b2965158d3151f022cf8d76057649208cd85da0d | [
0.00017318179016001523,
0.0001713276724331081,
0.0001685492170508951,
0.00017178984126076102,
0.0000017429438230465166
] |
{
"id": 3,
"code_window": [
" }\n",
" },\n",
" \"node_modules/playwright-core\": {\n",
" \"version\": \"1.36.0-alpha-jun-15-2023\",\n",
" \"resolved\": \"https://registry.npmjs.org/playwright-core/-/playwright-core-1.36.0-alpha-jun-15-2023.tgz\",\n",
" \"integrity\": \"sha512-jGN89dcf3NxT+gKtW523Vq2/1QSL7Iewkq6WcootSRoAIV3hRuE901jHd/r1iEbXnnIcWwLEOPSBvuXS+UXcIQ==\",\n",
" \"bin\": {\n",
" \"playwright-core\": \"cli.js\"\n",
" },\n"
],
"labels": [
"keep",
"keep",
"keep",
"replace",
"replace",
"replace",
"keep",
"keep",
"keep"
],
"after_edit": [
" \"version\": \"1.37.0-alpha-1689796912000\",\n",
" \"resolved\": \"https://registry.npmjs.org/playwright-core/-/playwright-core-1.37.0-alpha-1689796912000.tgz\",\n",
" \"integrity\": \"sha512-lHwiTFeWgeqFWION9eZ1r7zhKfOmHB1tZyXHlHN6iVsqEPVuYlHwJWAXTJviddRDKPfd3fORoeOMKwztHO5Lvw==\",\n"
],
"file_path": "tests/playwright-test/stable-test-runner/package-lock.json",
"type": "replace",
"edit_start_line_idx": 47
} | /**
* Copyright 2017 Google Inc. All rights reserved.
* Modifications copyright (c) Microsoft Corporation.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
import { contextTest as testBase, expect } from '../config/browserTest';
const test = testBase.extend<{ crash: () => void }, { dummy: string }>({
crash: async ({ page, toImpl, browserName }, run) => {
await run(() => {
if (browserName === 'chromium')
page.goto('chrome://crash').catch(e => {});
else if (browserName === 'webkit')
toImpl(page)._delegate._session.send('Page.crash', {}).catch(e => {});
else if (browserName === 'firefox')
toImpl(page)._delegate._session.send('Page.crash', {}).catch(e => {});
});
},
// Force a separate worker to avoid messing up with other tests.
dummy: ['', { scope: 'worker' }],
});
test('should emit crash event when page crashes', async ({ page, crash }) => {
await page.setContent(`<div>This page should crash</div>`);
crash();
const crashedPage = await new Promise(f => page.on('crash', f));
expect(crashedPage).toBe(page);
});
test('should throw on any action after page crashes', async ({ page, crash, browserName }) => {
await page.setContent(`<div>This page should crash</div>`);
crash();
await page.waitForEvent('crash');
const err = await page.evaluate(() => {}).then(() => null, e => e);
expect(err).toBeTruthy();
// In Firefox, crashed page is sometimes "closed".
if (browserName === 'firefox')
expect(err.message.includes('Target page, context or browser has been closed') || err.message.includes('Target crashed'), err.message).toBe(true);
else
expect(err.message).toContain('Target crashed');
});
test('should cancel waitForEvent when page crashes', async ({ page, crash }) => {
await page.setContent(`<div>This page should crash</div>`);
const promise = page.waitForEvent('response').catch(e => e);
crash();
const error = await promise;
expect(error.message).toContain('Page crashed');
});
test('should cancel navigation when page crashes', async ({ server, page, crash }) => {
await page.setContent(`<div>This page should crash</div>`);
server.setRoute('/one-style.css', () => {});
const promise = page.goto(server.PREFIX + '/one-style.html').catch(e => e);
await page.waitForNavigation({ waitUntil: 'domcontentloaded' });
crash();
const error = await promise;
expect(error.message).toContain('page.goto: Page crashed');
});
test('should be able to close context when page crashes', async ({ isAndroid, isElectron, isWebView2, page, crash }) => {
test.skip(isAndroid);
test.skip(isElectron);
test.skip(isWebView2, 'Page.close() is not supported in WebView2');
await page.setContent(`<div>This page should crash</div>`);
crash();
await page.waitForEvent('crash');
await page.context().close();
});
| tests/library/page-event-crash.spec.ts | 0 | https://github.com/microsoft/playwright/commit/b2965158d3151f022cf8d76057649208cd85da0d | [
0.00017456630303058773,
0.00017241848399862647,
0.0001675690000411123,
0.0001725702459225431,
0.0000020274690086807823
] |
{
"id": 3,
"code_window": [
" }\n",
" },\n",
" \"node_modules/playwright-core\": {\n",
" \"version\": \"1.36.0-alpha-jun-15-2023\",\n",
" \"resolved\": \"https://registry.npmjs.org/playwright-core/-/playwright-core-1.36.0-alpha-jun-15-2023.tgz\",\n",
" \"integrity\": \"sha512-jGN89dcf3NxT+gKtW523Vq2/1QSL7Iewkq6WcootSRoAIV3hRuE901jHd/r1iEbXnnIcWwLEOPSBvuXS+UXcIQ==\",\n",
" \"bin\": {\n",
" \"playwright-core\": \"cli.js\"\n",
" },\n"
],
"labels": [
"keep",
"keep",
"keep",
"replace",
"replace",
"replace",
"keep",
"keep",
"keep"
],
"after_edit": [
" \"version\": \"1.37.0-alpha-1689796912000\",\n",
" \"resolved\": \"https://registry.npmjs.org/playwright-core/-/playwright-core-1.37.0-alpha-1689796912000.tgz\",\n",
" \"integrity\": \"sha512-lHwiTFeWgeqFWION9eZ1r7zhKfOmHB1tZyXHlHN6iVsqEPVuYlHwJWAXTJviddRDKPfd3fORoeOMKwztHO5Lvw==\",\n"
],
"file_path": "tests/playwright-test/stable-test-runner/package-lock.json",
"type": "replace",
"edit_start_line_idx": 47
} | {
"name": "flakiness-dashboard",
"lockfileVersion": 2,
"requires": true,
"packages": {
"": {
"dependencies": {
"@azure/storage-blob": "^12.2.1"
}
},
"node_modules/@azure/abort-controller": {
"version": "1.1.0",
"resolved": "https://registry.npmjs.org/@azure/abort-controller/-/abort-controller-1.1.0.tgz",
"integrity": "sha512-TrRLIoSQVzfAJX9H1JeFjzAoDGcoK1IYX1UImfceTZpsyYfWr09Ss1aHW1y5TrrR3iq6RZLBwJ3E24uwPhwahw==",
"dependencies": {
"tslib": "^2.2.0"
},
"engines": {
"node": ">=12.0.0"
}
},
"node_modules/@azure/core-asynciterator-polyfill": {
"version": "1.0.0",
"resolved": "https://registry.npmjs.org/@azure/core-asynciterator-polyfill/-/core-asynciterator-polyfill-1.0.0.tgz",
"integrity": "sha512-kmv8CGrPfN9SwMwrkiBK9VTQYxdFQEGe0BmQk+M8io56P9KNzpAxcWE/1fxJj7uouwN4kXF0BHW8DNlgx+wtCg=="
},
"node_modules/@azure/core-auth": {
"version": "1.4.0",
"resolved": "https://registry.npmjs.org/@azure/core-auth/-/core-auth-1.4.0.tgz",
"integrity": "sha512-HFrcTgmuSuukRf/EdPmqBrc5l6Q5Uu+2TbuhaKbgaCpP2TfAeiNaQPAadxO+CYBRHGUzIDteMAjFspFLDLnKVQ==",
"dependencies": {
"@azure/abort-controller": "^1.0.0",
"tslib": "^2.2.0"
},
"engines": {
"node": ">=12.0.0"
}
},
"node_modules/@azure/core-http": {
"version": "3.0.1",
"resolved": "https://registry.npmjs.org/@azure/core-http/-/core-http-3.0.1.tgz",
"integrity": "sha512-A3x+um3cAPgQe42Lu7Iv/x8/fNjhL/nIoEfqFxfn30EyxK6zC13n+OUxzZBRC0IzQqssqIbt4INf5YG7lYYFtw==",
"dependencies": {
"@azure/abort-controller": "^1.0.0",
"@azure/core-auth": "^1.3.0",
"@azure/core-tracing": "1.0.0-preview.13",
"@azure/core-util": "^1.1.1",
"@azure/logger": "^1.0.0",
"@types/node-fetch": "^2.5.0",
"@types/tunnel": "^0.0.3",
"form-data": "^4.0.0",
"node-fetch": "^2.6.7",
"process": "^0.11.10",
"tslib": "^2.2.0",
"tunnel": "^0.0.6",
"uuid": "^8.3.0",
"xml2js": "^0.5.0"
},
"engines": {
"node": ">=14.0.0"
}
},
"node_modules/@azure/core-lro": {
"version": "2.5.2",
"resolved": "https://registry.npmjs.org/@azure/core-lro/-/core-lro-2.5.2.tgz",
"integrity": "sha512-tucUutPhBwCPu6v16KEFYML81npEL6gnT+iwewXvK5ZD55sr0/Vw2jfQETMiKVeARRrXHB2QQ3SpxxGi1zAUWg==",
"dependencies": {
"@azure/abort-controller": "^1.0.0",
"@azure/core-util": "^1.2.0",
"@azure/logger": "^1.0.0",
"tslib": "^2.2.0"
},
"engines": {
"node": ">=14.0.0"
}
},
"node_modules/@azure/core-paging": {
"version": "1.1.3",
"resolved": "https://registry.npmjs.org/@azure/core-paging/-/core-paging-1.1.3.tgz",
"integrity": "sha512-his7Ah40ThEYORSpIAwuh6B8wkGwO/zG7gqVtmSE4WAJ46e36zUDXTKReUCLBDc6HmjjApQQxxcRFy5FruG79A==",
"dependencies": {
"@azure/core-asynciterator-polyfill": "^1.0.0"
},
"engines": {
"node": ">=8.0.0"
}
},
"node_modules/@azure/core-tracing": {
"version": "1.0.0-preview.13",
"resolved": "https://registry.npmjs.org/@azure/core-tracing/-/core-tracing-1.0.0-preview.13.tgz",
"integrity": "sha512-KxDlhXyMlh2Jhj2ykX6vNEU0Vou4nHr025KoSEiz7cS3BNiHNaZcdECk/DmLkEB0as5T7b/TpRcehJ5yV6NeXQ==",
"dependencies": {
"@opentelemetry/api": "^1.0.1",
"tslib": "^2.2.0"
},
"engines": {
"node": ">=12.0.0"
}
},
"node_modules/@azure/core-util": {
"version": "1.3.0",
"resolved": "https://registry.npmjs.org/@azure/core-util/-/core-util-1.3.0.tgz",
"integrity": "sha512-ANP0Er7R2KHHHjwmKzPF9wbd0gXvOX7yRRHeYL1eNd/OaNrMLyfZH/FQasHRVAf6rMXX+EAUpvYwLMFDHDI5Gw==",
"dependencies": {
"@azure/abort-controller": "^1.0.0",
"tslib": "^2.2.0"
},
"engines": {
"node": ">=14.0.0"
}
},
"node_modules/@azure/logger": {
"version": "1.0.4",
"resolved": "https://registry.npmjs.org/@azure/logger/-/logger-1.0.4.tgz",
"integrity": "sha512-ustrPY8MryhloQj7OWGe+HrYx+aoiOxzbXTtgblbV3xwCqpzUK36phH3XNHQKj3EPonyFUuDTfR3qFhTEAuZEg==",
"dependencies": {
"tslib": "^2.2.0"
},
"engines": {
"node": ">=14.0.0"
}
},
"node_modules/@azure/storage-blob": {
"version": "12.13.0",
"resolved": "https://registry.npmjs.org/@azure/storage-blob/-/storage-blob-12.13.0.tgz",
"integrity": "sha512-t3Q2lvBMJucgTjQcP5+hvEJMAsJSk0qmAnjDLie2td017IiduZbbC9BOcFfmwzR6y6cJdZOuewLCNFmEx9IrXA==",
"dependencies": {
"@azure/abort-controller": "^1.0.0",
"@azure/core-http": "^3.0.0",
"@azure/core-lro": "^2.2.0",
"@azure/core-paging": "^1.1.1",
"@azure/core-tracing": "1.0.0-preview.13",
"@azure/logger": "^1.0.0",
"events": "^3.0.0",
"tslib": "^2.2.0"
},
"engines": {
"node": ">=14.0.0"
}
},
"node_modules/@opentelemetry/api": {
"version": "1.4.1",
"resolved": "https://registry.npmjs.org/@opentelemetry/api/-/api-1.4.1.tgz",
"integrity": "sha512-O2yRJce1GOc6PAy3QxFM4NzFiWzvScDC1/5ihYBL6BUEVdq0XMWN01sppE+H6bBXbaFYipjwFLEWLg5PaSOThA==",
"engines": {
"node": ">=8.0.0"
}
},
"node_modules/@types/node": {
"version": "18.15.11",
"resolved": "https://registry.npmjs.org/@types/node/-/node-18.15.11.tgz",
"integrity": "sha512-E5Kwq2n4SbMzQOn6wnmBjuK9ouqlURrcZDVfbo9ftDDTFt3nk7ZKK4GMOzoYgnpQJKcxwQw+lGaBvvlMo0qN/Q=="
},
"node_modules/@types/node-fetch": {
"version": "2.6.3",
"resolved": "https://registry.npmjs.org/@types/node-fetch/-/node-fetch-2.6.3.tgz",
"integrity": "sha512-ETTL1mOEdq/sxUtgtOhKjyB2Irra4cjxksvcMUR5Zr4n+PxVhsCD9WS46oPbHL3et9Zde7CNRr+WUNlcHvsX+w==",
"dependencies": {
"@types/node": "*",
"form-data": "^3.0.0"
}
},
"node_modules/@types/node-fetch/node_modules/form-data": {
"version": "3.0.1",
"resolved": "https://registry.npmjs.org/form-data/-/form-data-3.0.1.tgz",
"integrity": "sha512-RHkBKtLWUVwd7SqRIvCZMEvAMoGUp0XU+seQiZejj0COz3RI3hWP4sCv3gZWWLjJTd7rGwcsF5eKZGii0r/hbg==",
"dependencies": {
"asynckit": "^0.4.0",
"combined-stream": "^1.0.8",
"mime-types": "^2.1.12"
},
"engines": {
"node": ">= 6"
}
},
"node_modules/@types/tunnel": {
"version": "0.0.3",
"resolved": "https://registry.npmjs.org/@types/tunnel/-/tunnel-0.0.3.tgz",
"integrity": "sha512-sOUTGn6h1SfQ+gbgqC364jLFBw2lnFqkgF3q0WovEHRLMrVD1sd5aufqi/aJObLekJO+Aq5z646U4Oxy6shXMA==",
"dependencies": {
"@types/node": "*"
}
},
"node_modules/asynckit": {
"version": "0.4.0",
"resolved": "https://registry.npmjs.org/asynckit/-/asynckit-0.4.0.tgz",
"integrity": "sha512-Oei9OH4tRh0YqU3GxhX79dM/mwVgvbZJaSNaRk+bshkj0S5cfHcgYakreBjrHwatXKbz+IoIdYLxrKim2MjW0Q=="
},
"node_modules/combined-stream": {
"version": "1.0.8",
"resolved": "https://registry.npmjs.org/combined-stream/-/combined-stream-1.0.8.tgz",
"integrity": "sha512-FQN4MRfuJeHf7cBbBMJFXhKSDq+2kAArBlmRBvcvFE5BB1HZKXtSFASDhdlz9zOYwxh8lDdnvmMOe/+5cdoEdg==",
"dependencies": {
"delayed-stream": "~1.0.0"
},
"engines": {
"node": ">= 0.8"
}
},
"node_modules/delayed-stream": {
"version": "1.0.0",
"resolved": "https://registry.npmjs.org/delayed-stream/-/delayed-stream-1.0.0.tgz",
"integrity": "sha512-ZySD7Nf91aLB0RxL4KGrKHBXl7Eds1DAmEdcoVawXnLD7SDhpNgtuII2aAkg7a7QS41jxPSZ17p4VdGnMHk3MQ==",
"engines": {
"node": ">=0.4.0"
}
},
"node_modules/events": {
"version": "3.3.0",
"resolved": "https://registry.npmjs.org/events/-/events-3.3.0.tgz",
"integrity": "sha512-mQw+2fkQbALzQ7V0MY0IqdnXNOeTtP4r0lN9z7AAawCXgqea7bDii20AYrIBrFd/Hx0M2Ocz6S111CaFkUcb0Q==",
"engines": {
"node": ">=0.8.x"
}
},
"node_modules/form-data": {
"version": "4.0.0",
"resolved": "https://registry.npmjs.org/form-data/-/form-data-4.0.0.tgz",
"integrity": "sha512-ETEklSGi5t0QMZuiXoA/Q6vcnxcLQP5vdugSpuAyi6SVGi2clPPp+xgEhuMaHC+zGgn31Kd235W35f7Hykkaww==",
"dependencies": {
"asynckit": "^0.4.0",
"combined-stream": "^1.0.8",
"mime-types": "^2.1.12"
},
"engines": {
"node": ">= 6"
}
},
"node_modules/mime-db": {
"version": "1.52.0",
"resolved": "https://registry.npmjs.org/mime-db/-/mime-db-1.52.0.tgz",
"integrity": "sha512-sPU4uV7dYlvtWJxwwxHD0PuihVNiE7TyAbQ5SWxDCB9mUYvOgroQOwYQQOKPJ8CIbE+1ETVlOoK1UC2nU3gYvg==",
"engines": {
"node": ">= 0.6"
}
},
"node_modules/mime-types": {
"version": "2.1.35",
"resolved": "https://registry.npmjs.org/mime-types/-/mime-types-2.1.35.tgz",
"integrity": "sha512-ZDY+bPm5zTTF+YpCrAU9nK0UgICYPT0QtT1NZWFv4s++TNkcgVaT0g6+4R2uI4MjQjzysHB1zxuWL50hzaeXiw==",
"dependencies": {
"mime-db": "1.52.0"
},
"engines": {
"node": ">= 0.6"
}
},
"node_modules/node-fetch": {
"version": "2.6.9",
"resolved": "https://registry.npmjs.org/node-fetch/-/node-fetch-2.6.9.tgz",
"integrity": "sha512-DJm/CJkZkRjKKj4Zi4BsKVZh3ValV5IR5s7LVZnW+6YMh0W1BfNA8XSs6DLMGYlId5F3KnA70uu2qepcR08Qqg==",
"dependencies": {
"whatwg-url": "^5.0.0"
},
"engines": {
"node": "4.x || >=6.0.0"
},
"peerDependencies": {
"encoding": "^0.1.0"
},
"peerDependenciesMeta": {
"encoding": {
"optional": true
}
}
},
"node_modules/process": {
"version": "0.11.10",
"resolved": "https://registry.npmjs.org/process/-/process-0.11.10.tgz",
"integrity": "sha512-cdGef/drWFoydD1JsMzuFf8100nZl+GT+yacc2bEced5f9Rjk4z+WtFUTBu9PhOi9j/jfmBPu0mMEY4wIdAF8A==",
"engines": {
"node": ">= 0.6.0"
}
},
"node_modules/sax": {
"version": "1.2.4",
"resolved": "https://registry.npmjs.org/sax/-/sax-1.2.4.tgz",
"integrity": "sha512-NqVDv9TpANUjFm0N8uM5GxL36UgKi9/atZw+x7YFnQ8ckwFGKrl4xX4yWtrey3UJm5nP1kUbnYgLopqWNSRhWw=="
},
"node_modules/tr46": {
"version": "0.0.3",
"resolved": "https://registry.npmjs.org/tr46/-/tr46-0.0.3.tgz",
"integrity": "sha512-N3WMsuqV66lT30CrXNbEjx4GEwlow3v6rr4mCcv6prnfwhS01rkgyFdjPNBYd9br7LpXV1+Emh01fHnq2Gdgrw=="
},
"node_modules/tslib": {
"version": "2.5.0",
"resolved": "https://registry.npmjs.org/tslib/-/tslib-2.5.0.tgz",
"integrity": "sha512-336iVw3rtn2BUK7ORdIAHTyxHGRIHVReokCR3XjbckJMK7ms8FysBfhLR8IXnAgy7T0PTPNBWKiH514FOW/WSg=="
},
"node_modules/tunnel": {
"version": "0.0.6",
"resolved": "https://registry.npmjs.org/tunnel/-/tunnel-0.0.6.tgz",
"integrity": "sha512-1h/Lnq9yajKY2PEbBadPXj3VxsDDu844OnaAo52UVmIzIvwwtBPIuNvkjuzBlTWpfJyUbG3ez0KSBibQkj4ojg==",
"engines": {
"node": ">=0.6.11 <=0.7.0 || >=0.7.3"
}
},
"node_modules/uuid": {
"version": "8.3.2",
"resolved": "https://registry.npmjs.org/uuid/-/uuid-8.3.2.tgz",
"integrity": "sha512-+NYs2QeMWy+GWFOEm9xnn6HCDp0l7QBD7ml8zLUmJ+93Q5NF0NocErnwkTkXVFNiX3/fpC6afS8Dhb/gz7R7eg==",
"bin": {
"uuid": "dist/bin/uuid"
}
},
"node_modules/webidl-conversions": {
"version": "3.0.1",
"resolved": "https://registry.npmjs.org/webidl-conversions/-/webidl-conversions-3.0.1.tgz",
"integrity": "sha512-2JAn3z8AR6rjK8Sm8orRC0h/bcl/DqL7tRPdGZ4I1CjdF+EaMLmYxBHyXuKL849eucPFhvBoxMsflfOb8kxaeQ=="
},
"node_modules/whatwg-url": {
"version": "5.0.0",
"resolved": "https://registry.npmjs.org/whatwg-url/-/whatwg-url-5.0.0.tgz",
"integrity": "sha512-saE57nupxk6v3HY35+jzBwYa0rKSy0XR8JSxZPwgLr7ys0IBzhGviA1/TUGJLmSVqs8pb9AnvICXEuOHLprYTw==",
"dependencies": {
"tr46": "~0.0.3",
"webidl-conversions": "^3.0.0"
}
},
"node_modules/xml2js": {
"version": "0.5.0",
"resolved": "https://registry.npmjs.org/xml2js/-/xml2js-0.5.0.tgz",
"integrity": "sha512-drPFnkQJik/O+uPKpqSgr22mpuFHqKdbS835iAQrUC73L2F5WkboIRd63ai/2Yg6I1jzifPFKH2NTK+cfglkIA==",
"dependencies": {
"sax": ">=0.6.0",
"xmlbuilder": "~11.0.0"
},
"engines": {
"node": ">=4.0.0"
}
},
"node_modules/xmlbuilder": {
"version": "11.0.1",
"resolved": "https://registry.npmjs.org/xmlbuilder/-/xmlbuilder-11.0.1.tgz",
"integrity": "sha512-fDlsI/kFEx7gLvbecc0/ohLG50fugQp8ryHzMTuW9vSa1GJ0XYWKnhsUx7oie3G98+r56aTQIUB4kht42R3JvA==",
"engines": {
"node": ">=4.0"
}
}
},
"dependencies": {
"@azure/abort-controller": {
"version": "1.1.0",
"resolved": "https://registry.npmjs.org/@azure/abort-controller/-/abort-controller-1.1.0.tgz",
"integrity": "sha512-TrRLIoSQVzfAJX9H1JeFjzAoDGcoK1IYX1UImfceTZpsyYfWr09Ss1aHW1y5TrrR3iq6RZLBwJ3E24uwPhwahw==",
"requires": {
"tslib": "^2.2.0"
}
},
"@azure/core-asynciterator-polyfill": {
"version": "1.0.0",
"resolved": "https://registry.npmjs.org/@azure/core-asynciterator-polyfill/-/core-asynciterator-polyfill-1.0.0.tgz",
"integrity": "sha512-kmv8CGrPfN9SwMwrkiBK9VTQYxdFQEGe0BmQk+M8io56P9KNzpAxcWE/1fxJj7uouwN4kXF0BHW8DNlgx+wtCg=="
},
"@azure/core-auth": {
"version": "1.4.0",
"resolved": "https://registry.npmjs.org/@azure/core-auth/-/core-auth-1.4.0.tgz",
"integrity": "sha512-HFrcTgmuSuukRf/EdPmqBrc5l6Q5Uu+2TbuhaKbgaCpP2TfAeiNaQPAadxO+CYBRHGUzIDteMAjFspFLDLnKVQ==",
"requires": {
"@azure/abort-controller": "^1.0.0",
"tslib": "^2.2.0"
}
},
"@azure/core-http": {
"version": "3.0.1",
"resolved": "https://registry.npmjs.org/@azure/core-http/-/core-http-3.0.1.tgz",
"integrity": "sha512-A3x+um3cAPgQe42Lu7Iv/x8/fNjhL/nIoEfqFxfn30EyxK6zC13n+OUxzZBRC0IzQqssqIbt4INf5YG7lYYFtw==",
"requires": {
"@azure/abort-controller": "^1.0.0",
"@azure/core-auth": "^1.3.0",
"@azure/core-tracing": "1.0.0-preview.13",
"@azure/core-util": "^1.1.1",
"@azure/logger": "^1.0.0",
"@types/node-fetch": "^2.5.0",
"@types/tunnel": "^0.0.3",
"form-data": "^4.0.0",
"node-fetch": "^2.6.7",
"process": "^0.11.10",
"tslib": "^2.2.0",
"tunnel": "^0.0.6",
"uuid": "^8.3.0",
"xml2js": "^0.5.0"
}
},
"@azure/core-lro": {
"version": "2.5.2",
"resolved": "https://registry.npmjs.org/@azure/core-lro/-/core-lro-2.5.2.tgz",
"integrity": "sha512-tucUutPhBwCPu6v16KEFYML81npEL6gnT+iwewXvK5ZD55sr0/Vw2jfQETMiKVeARRrXHB2QQ3SpxxGi1zAUWg==",
"requires": {
"@azure/abort-controller": "^1.0.0",
"@azure/core-util": "^1.2.0",
"@azure/logger": "^1.0.0",
"tslib": "^2.2.0"
}
},
"@azure/core-paging": {
"version": "1.1.3",
"resolved": "https://registry.npmjs.org/@azure/core-paging/-/core-paging-1.1.3.tgz",
"integrity": "sha512-his7Ah40ThEYORSpIAwuh6B8wkGwO/zG7gqVtmSE4WAJ46e36zUDXTKReUCLBDc6HmjjApQQxxcRFy5FruG79A==",
"requires": {
"@azure/core-asynciterator-polyfill": "^1.0.0"
}
},
"@azure/core-tracing": {
"version": "1.0.0-preview.13",
"resolved": "https://registry.npmjs.org/@azure/core-tracing/-/core-tracing-1.0.0-preview.13.tgz",
"integrity": "sha512-KxDlhXyMlh2Jhj2ykX6vNEU0Vou4nHr025KoSEiz7cS3BNiHNaZcdECk/DmLkEB0as5T7b/TpRcehJ5yV6NeXQ==",
"requires": {
"@opentelemetry/api": "^1.0.1",
"tslib": "^2.2.0"
}
},
"@azure/core-util": {
"version": "1.3.0",
"resolved": "https://registry.npmjs.org/@azure/core-util/-/core-util-1.3.0.tgz",
"integrity": "sha512-ANP0Er7R2KHHHjwmKzPF9wbd0gXvOX7yRRHeYL1eNd/OaNrMLyfZH/FQasHRVAf6rMXX+EAUpvYwLMFDHDI5Gw==",
"requires": {
"@azure/abort-controller": "^1.0.0",
"tslib": "^2.2.0"
}
},
"@azure/logger": {
"version": "1.0.4",
"resolved": "https://registry.npmjs.org/@azure/logger/-/logger-1.0.4.tgz",
"integrity": "sha512-ustrPY8MryhloQj7OWGe+HrYx+aoiOxzbXTtgblbV3xwCqpzUK36phH3XNHQKj3EPonyFUuDTfR3qFhTEAuZEg==",
"requires": {
"tslib": "^2.2.0"
}
},
"@azure/storage-blob": {
"version": "12.13.0",
"resolved": "https://registry.npmjs.org/@azure/storage-blob/-/storage-blob-12.13.0.tgz",
"integrity": "sha512-t3Q2lvBMJucgTjQcP5+hvEJMAsJSk0qmAnjDLie2td017IiduZbbC9BOcFfmwzR6y6cJdZOuewLCNFmEx9IrXA==",
"requires": {
"@azure/abort-controller": "^1.0.0",
"@azure/core-http": "^3.0.0",
"@azure/core-lro": "^2.2.0",
"@azure/core-paging": "^1.1.1",
"@azure/core-tracing": "1.0.0-preview.13",
"@azure/logger": "^1.0.0",
"events": "^3.0.0",
"tslib": "^2.2.0"
}
},
"@opentelemetry/api": {
"version": "1.4.1",
"resolved": "https://registry.npmjs.org/@opentelemetry/api/-/api-1.4.1.tgz",
"integrity": "sha512-O2yRJce1GOc6PAy3QxFM4NzFiWzvScDC1/5ihYBL6BUEVdq0XMWN01sppE+H6bBXbaFYipjwFLEWLg5PaSOThA=="
},
"@types/node": {
"version": "18.15.11",
"resolved": "https://registry.npmjs.org/@types/node/-/node-18.15.11.tgz",
"integrity": "sha512-E5Kwq2n4SbMzQOn6wnmBjuK9ouqlURrcZDVfbo9ftDDTFt3nk7ZKK4GMOzoYgnpQJKcxwQw+lGaBvvlMo0qN/Q=="
},
"@types/node-fetch": {
"version": "2.6.3",
"resolved": "https://registry.npmjs.org/@types/node-fetch/-/node-fetch-2.6.3.tgz",
"integrity": "sha512-ETTL1mOEdq/sxUtgtOhKjyB2Irra4cjxksvcMUR5Zr4n+PxVhsCD9WS46oPbHL3et9Zde7CNRr+WUNlcHvsX+w==",
"requires": {
"@types/node": "*",
"form-data": "^3.0.0"
},
"dependencies": {
"form-data": {
"version": "3.0.1",
"resolved": "https://registry.npmjs.org/form-data/-/form-data-3.0.1.tgz",
"integrity": "sha512-RHkBKtLWUVwd7SqRIvCZMEvAMoGUp0XU+seQiZejj0COz3RI3hWP4sCv3gZWWLjJTd7rGwcsF5eKZGii0r/hbg==",
"requires": {
"asynckit": "^0.4.0",
"combined-stream": "^1.0.8",
"mime-types": "^2.1.12"
}
}
}
},
"@types/tunnel": {
"version": "0.0.3",
"resolved": "https://registry.npmjs.org/@types/tunnel/-/tunnel-0.0.3.tgz",
"integrity": "sha512-sOUTGn6h1SfQ+gbgqC364jLFBw2lnFqkgF3q0WovEHRLMrVD1sd5aufqi/aJObLekJO+Aq5z646U4Oxy6shXMA==",
"requires": {
"@types/node": "*"
}
},
"asynckit": {
"version": "0.4.0",
"resolved": "https://registry.npmjs.org/asynckit/-/asynckit-0.4.0.tgz",
"integrity": "sha512-Oei9OH4tRh0YqU3GxhX79dM/mwVgvbZJaSNaRk+bshkj0S5cfHcgYakreBjrHwatXKbz+IoIdYLxrKim2MjW0Q=="
},
"combined-stream": {
"version": "1.0.8",
"resolved": "https://registry.npmjs.org/combined-stream/-/combined-stream-1.0.8.tgz",
"integrity": "sha512-FQN4MRfuJeHf7cBbBMJFXhKSDq+2kAArBlmRBvcvFE5BB1HZKXtSFASDhdlz9zOYwxh8lDdnvmMOe/+5cdoEdg==",
"requires": {
"delayed-stream": "~1.0.0"
}
},
"delayed-stream": {
"version": "1.0.0",
"resolved": "https://registry.npmjs.org/delayed-stream/-/delayed-stream-1.0.0.tgz",
"integrity": "sha512-ZySD7Nf91aLB0RxL4KGrKHBXl7Eds1DAmEdcoVawXnLD7SDhpNgtuII2aAkg7a7QS41jxPSZ17p4VdGnMHk3MQ=="
},
"events": {
"version": "3.3.0",
"resolved": "https://registry.npmjs.org/events/-/events-3.3.0.tgz",
"integrity": "sha512-mQw+2fkQbALzQ7V0MY0IqdnXNOeTtP4r0lN9z7AAawCXgqea7bDii20AYrIBrFd/Hx0M2Ocz6S111CaFkUcb0Q=="
},
"form-data": {
"version": "4.0.0",
"resolved": "https://registry.npmjs.org/form-data/-/form-data-4.0.0.tgz",
"integrity": "sha512-ETEklSGi5t0QMZuiXoA/Q6vcnxcLQP5vdugSpuAyi6SVGi2clPPp+xgEhuMaHC+zGgn31Kd235W35f7Hykkaww==",
"requires": {
"asynckit": "^0.4.0",
"combined-stream": "^1.0.8",
"mime-types": "^2.1.12"
}
},
"mime-db": {
"version": "1.52.0",
"resolved": "https://registry.npmjs.org/mime-db/-/mime-db-1.52.0.tgz",
"integrity": "sha512-sPU4uV7dYlvtWJxwwxHD0PuihVNiE7TyAbQ5SWxDCB9mUYvOgroQOwYQQOKPJ8CIbE+1ETVlOoK1UC2nU3gYvg=="
},
"mime-types": {
"version": "2.1.35",
"resolved": "https://registry.npmjs.org/mime-types/-/mime-types-2.1.35.tgz",
"integrity": "sha512-ZDY+bPm5zTTF+YpCrAU9nK0UgICYPT0QtT1NZWFv4s++TNkcgVaT0g6+4R2uI4MjQjzysHB1zxuWL50hzaeXiw==",
"requires": {
"mime-db": "1.52.0"
}
},
"node-fetch": {
"version": "2.6.9",
"resolved": "https://registry.npmjs.org/node-fetch/-/node-fetch-2.6.9.tgz",
"integrity": "sha512-DJm/CJkZkRjKKj4Zi4BsKVZh3ValV5IR5s7LVZnW+6YMh0W1BfNA8XSs6DLMGYlId5F3KnA70uu2qepcR08Qqg==",
"requires": {
"whatwg-url": "^5.0.0"
}
},
"process": {
"version": "0.11.10",
"resolved": "https://registry.npmjs.org/process/-/process-0.11.10.tgz",
"integrity": "sha512-cdGef/drWFoydD1JsMzuFf8100nZl+GT+yacc2bEced5f9Rjk4z+WtFUTBu9PhOi9j/jfmBPu0mMEY4wIdAF8A=="
},
"sax": {
"version": "1.2.4",
"resolved": "https://registry.npmjs.org/sax/-/sax-1.2.4.tgz",
"integrity": "sha512-NqVDv9TpANUjFm0N8uM5GxL36UgKi9/atZw+x7YFnQ8ckwFGKrl4xX4yWtrey3UJm5nP1kUbnYgLopqWNSRhWw=="
},
"tr46": {
"version": "0.0.3",
"resolved": "https://registry.npmjs.org/tr46/-/tr46-0.0.3.tgz",
"integrity": "sha512-N3WMsuqV66lT30CrXNbEjx4GEwlow3v6rr4mCcv6prnfwhS01rkgyFdjPNBYd9br7LpXV1+Emh01fHnq2Gdgrw=="
},
"tslib": {
"version": "2.5.0",
"resolved": "https://registry.npmjs.org/tslib/-/tslib-2.5.0.tgz",
"integrity": "sha512-336iVw3rtn2BUK7ORdIAHTyxHGRIHVReokCR3XjbckJMK7ms8FysBfhLR8IXnAgy7T0PTPNBWKiH514FOW/WSg=="
},
"tunnel": {
"version": "0.0.6",
"resolved": "https://registry.npmjs.org/tunnel/-/tunnel-0.0.6.tgz",
"integrity": "sha512-1h/Lnq9yajKY2PEbBadPXj3VxsDDu844OnaAo52UVmIzIvwwtBPIuNvkjuzBlTWpfJyUbG3ez0KSBibQkj4ojg=="
},
"uuid": {
"version": "8.3.2",
"resolved": "https://registry.npmjs.org/uuid/-/uuid-8.3.2.tgz",
"integrity": "sha512-+NYs2QeMWy+GWFOEm9xnn6HCDp0l7QBD7ml8zLUmJ+93Q5NF0NocErnwkTkXVFNiX3/fpC6afS8Dhb/gz7R7eg=="
},
"webidl-conversions": {
"version": "3.0.1",
"resolved": "https://registry.npmjs.org/webidl-conversions/-/webidl-conversions-3.0.1.tgz",
"integrity": "sha512-2JAn3z8AR6rjK8Sm8orRC0h/bcl/DqL7tRPdGZ4I1CjdF+EaMLmYxBHyXuKL849eucPFhvBoxMsflfOb8kxaeQ=="
},
"whatwg-url": {
"version": "5.0.0",
"resolved": "https://registry.npmjs.org/whatwg-url/-/whatwg-url-5.0.0.tgz",
"integrity": "sha512-saE57nupxk6v3HY35+jzBwYa0rKSy0XR8JSxZPwgLr7ys0IBzhGviA1/TUGJLmSVqs8pb9AnvICXEuOHLprYTw==",
"requires": {
"tr46": "~0.0.3",
"webidl-conversions": "^3.0.0"
}
},
"xml2js": {
"version": "0.5.0",
"resolved": "https://registry.npmjs.org/xml2js/-/xml2js-0.5.0.tgz",
"integrity": "sha512-drPFnkQJik/O+uPKpqSgr22mpuFHqKdbS835iAQrUC73L2F5WkboIRd63ai/2Yg6I1jzifPFKH2NTK+cfglkIA==",
"requires": {
"sax": ">=0.6.0",
"xmlbuilder": "~11.0.0"
}
},
"xmlbuilder": {
"version": "11.0.1",
"resolved": "https://registry.npmjs.org/xmlbuilder/-/xmlbuilder-11.0.1.tgz",
"integrity": "sha512-fDlsI/kFEx7gLvbecc0/ohLG50fugQp8ryHzMTuW9vSa1GJ0XYWKnhsUx7oie3G98+r56aTQIUB4kht42R3JvA=="
}
}
}
| utils/flakiness-dashboard/package-lock.json | 0 | https://github.com/microsoft/playwright/commit/b2965158d3151f022cf8d76057649208cd85da0d | [
0.011570053175091743,
0.000905802589841187,
0.00016458499885629863,
0.00028576466138474643,
0.0016789728542789817
] |
{
"id": 4,
"code_window": [
" },\n",
" \"dependencies\": {\n",
" \"@playwright/test\": {\n",
" \"version\": \"1.36.0-alpha-jun-15-2023\",\n",
" \"resolved\": \"https://registry.npmjs.org/@playwright/test/-/test-1.36.0-alpha-jun-15-2023.tgz\",\n",
" \"integrity\": \"sha512-UiWKP1xv3Xo983JOViLvhQ4zluvc3sfUDkIbTpZouq5MbzXWwHoy6pzQqfvh7Wgipe4EZOSmxCkamvYjQGTSrA==\",\n",
" \"requires\": {\n",
" \"@types/node\": \"*\",\n",
" \"fsevents\": \"2.3.2\",\n"
],
"labels": [
"keep",
"keep",
"keep",
"replace",
"replace",
"replace",
"keep",
"keep",
"keep"
],
"after_edit": [
" \"version\": \"1.37.0-alpha-1689796912000\",\n",
" \"resolved\": \"https://registry.npmjs.org/@playwright/test/-/test-1.37.0-alpha-1689796912000.tgz\",\n",
" \"integrity\": \"sha512-3qqZNu3yyVUl5jblbQBA5p7S4oZDYqZ0KBCMK2YPDm/WpFdhypOiOCCqeSJR0OKbvHFiKTaHlQD/dXBKjLBIDg==\",\n"
],
"file_path": "tests/playwright-test/stable-test-runner/package-lock.json",
"type": "replace",
"edit_start_line_idx": 60
} | {
"name": "stable-test-runner",
"lockfileVersion": 2,
"requires": true,
"packages": {
"": {
"dependencies": {
"@playwright/test": "1.36.0-alpha-jun-15-2023"
}
},
"node_modules/@playwright/test": {
"version": "1.36.0-alpha-jun-15-2023",
"resolved": "https://registry.npmjs.org/@playwright/test/-/test-1.36.0-alpha-jun-15-2023.tgz",
"integrity": "sha512-UiWKP1xv3Xo983JOViLvhQ4zluvc3sfUDkIbTpZouq5MbzXWwHoy6pzQqfvh7Wgipe4EZOSmxCkamvYjQGTSrA==",
"dependencies": {
"@types/node": "*",
"playwright-core": "1.36.0-alpha-jun-15-2023"
},
"bin": {
"playwright": "cli.js"
},
"engines": {
"node": ">=16"
},
"optionalDependencies": {
"fsevents": "2.3.2"
}
},
"node_modules/@types/node": {
"version": "18.0.0",
"resolved": "https://registry.npmjs.org/@types/node/-/node-18.0.0.tgz",
"integrity": "sha512-cHlGmko4gWLVI27cGJntjs/Sj8th9aYwplmZFwmmgYQQvL5NUsgVJG7OddLvNfLqYS31KFN0s3qlaD9qCaxACA=="
},
"node_modules/fsevents": {
"version": "2.3.2",
"resolved": "https://registry.npmjs.org/fsevents/-/fsevents-2.3.2.tgz",
"integrity": "sha512-xiqMQR4xAeHTuB9uWm+fFRcIOgKBMiOBP+eXiyT7jsgVCq1bkVygt00oASowB7EdtpOHaaPgKt812P9ab+DDKA==",
"hasInstallScript": true,
"optional": true,
"os": [
"darwin"
],
"engines": {
"node": "^8.16.0 || ^10.6.0 || >=11.0.0"
}
},
"node_modules/playwright-core": {
"version": "1.36.0-alpha-jun-15-2023",
"resolved": "https://registry.npmjs.org/playwright-core/-/playwright-core-1.36.0-alpha-jun-15-2023.tgz",
"integrity": "sha512-jGN89dcf3NxT+gKtW523Vq2/1QSL7Iewkq6WcootSRoAIV3hRuE901jHd/r1iEbXnnIcWwLEOPSBvuXS+UXcIQ==",
"bin": {
"playwright-core": "cli.js"
},
"engines": {
"node": ">=16"
}
}
},
"dependencies": {
"@playwright/test": {
"version": "1.36.0-alpha-jun-15-2023",
"resolved": "https://registry.npmjs.org/@playwright/test/-/test-1.36.0-alpha-jun-15-2023.tgz",
"integrity": "sha512-UiWKP1xv3Xo983JOViLvhQ4zluvc3sfUDkIbTpZouq5MbzXWwHoy6pzQqfvh7Wgipe4EZOSmxCkamvYjQGTSrA==",
"requires": {
"@types/node": "*",
"fsevents": "2.3.2",
"playwright-core": "1.36.0-alpha-jun-15-2023"
}
},
"@types/node": {
"version": "18.0.0",
"resolved": "https://registry.npmjs.org/@types/node/-/node-18.0.0.tgz",
"integrity": "sha512-cHlGmko4gWLVI27cGJntjs/Sj8th9aYwplmZFwmmgYQQvL5NUsgVJG7OddLvNfLqYS31KFN0s3qlaD9qCaxACA=="
},
"fsevents": {
"version": "2.3.2",
"resolved": "https://registry.npmjs.org/fsevents/-/fsevents-2.3.2.tgz",
"integrity": "sha512-xiqMQR4xAeHTuB9uWm+fFRcIOgKBMiOBP+eXiyT7jsgVCq1bkVygt00oASowB7EdtpOHaaPgKt812P9ab+DDKA==",
"optional": true
},
"playwright-core": {
"version": "1.36.0-alpha-jun-15-2023",
"resolved": "https://registry.npmjs.org/playwright-core/-/playwright-core-1.36.0-alpha-jun-15-2023.tgz",
"integrity": "sha512-jGN89dcf3NxT+gKtW523Vq2/1QSL7Iewkq6WcootSRoAIV3hRuE901jHd/r1iEbXnnIcWwLEOPSBvuXS+UXcIQ=="
}
}
}
| tests/playwright-test/stable-test-runner/package-lock.json | 1 | https://github.com/microsoft/playwright/commit/b2965158d3151f022cf8d76057649208cd85da0d | [
0.985744297504425,
0.3281981945037842,
0.0001942398230312392,
0.012395360507071018,
0.45324572920799255
] |
{
"id": 4,
"code_window": [
" },\n",
" \"dependencies\": {\n",
" \"@playwright/test\": {\n",
" \"version\": \"1.36.0-alpha-jun-15-2023\",\n",
" \"resolved\": \"https://registry.npmjs.org/@playwright/test/-/test-1.36.0-alpha-jun-15-2023.tgz\",\n",
" \"integrity\": \"sha512-UiWKP1xv3Xo983JOViLvhQ4zluvc3sfUDkIbTpZouq5MbzXWwHoy6pzQqfvh7Wgipe4EZOSmxCkamvYjQGTSrA==\",\n",
" \"requires\": {\n",
" \"@types/node\": \"*\",\n",
" \"fsevents\": \"2.3.2\",\n"
],
"labels": [
"keep",
"keep",
"keep",
"replace",
"replace",
"replace",
"keep",
"keep",
"keep"
],
"after_edit": [
" \"version\": \"1.37.0-alpha-1689796912000\",\n",
" \"resolved\": \"https://registry.npmjs.org/@playwright/test/-/test-1.37.0-alpha-1689796912000.tgz\",\n",
" \"integrity\": \"sha512-3qqZNu3yyVUl5jblbQBA5p7S4oZDYqZ0KBCMK2YPDm/WpFdhypOiOCCqeSJR0OKbvHFiKTaHlQD/dXBKjLBIDg==\",\n"
],
"file_path": "tests/playwright-test/stable-test-runner/package-lock.json",
"type": "replace",
"edit_start_line_idx": 60
} | /**
* Copyright (c) Microsoft Corporation.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
import type {
TestType,
PlaywrightTestArgs,
PlaywrightTestConfig as BasePlaywrightTestConfig,
PlaywrightTestOptions,
PlaywrightWorkerArgs,
PlaywrightWorkerOptions,
Locator,
} from '@playwright/test';
import type { JsonObject } from '@playwright/experimental-ct-core/types/component';
import type { InlineConfig } from 'vite';
export type PlaywrightTestConfig<T = {}, W = {}> = Omit<BasePlaywrightTestConfig<T, W>, 'use'> & {
use?: BasePlaywrightTestConfig<T, W>['use'] & {
ctPort?: number;
ctTemplateDir?: string;
ctCacheDir?: string;
ctViteConfig?: InlineConfig | (() => Promise<InlineConfig>);
};
};
export interface MountOptions<HooksConfig extends JsonObject> {
hooksConfig?: HooksConfig;
}
interface MountResult extends Locator {
unmount(): Promise<void>;
update(component: JSX.Element): Promise<void>;
}
export interface ComponentFixtures {
mount<HooksConfig extends JsonObject>(
component: JSX.Element,
options?: MountOptions<HooksConfig>
): Promise<MountResult>;
}
export const test: TestType<
PlaywrightTestArgs & PlaywrightTestOptions & ComponentFixtures,
PlaywrightWorkerArgs & PlaywrightWorkerOptions
>;
/**
* Defines Playwright config
*/
export function defineConfig(config: PlaywrightTestConfig): PlaywrightTestConfig;
export function defineConfig<T>(config: PlaywrightTestConfig<T>): PlaywrightTestConfig<T>;
export function defineConfig<T, W>(config: PlaywrightTestConfig<T, W>): PlaywrightTestConfig<T, W>;
export { expect, devices } from '@playwright/test';
| packages/playwright-ct-react/index.d.ts | 0 | https://github.com/microsoft/playwright/commit/b2965158d3151f022cf8d76057649208cd85da0d | [
0.00019095584866590798,
0.00017275089339818805,
0.0001639654947211966,
0.00017030752496793866,
0.000008275526852230541
] |
{
"id": 4,
"code_window": [
" },\n",
" \"dependencies\": {\n",
" \"@playwright/test\": {\n",
" \"version\": \"1.36.0-alpha-jun-15-2023\",\n",
" \"resolved\": \"https://registry.npmjs.org/@playwright/test/-/test-1.36.0-alpha-jun-15-2023.tgz\",\n",
" \"integrity\": \"sha512-UiWKP1xv3Xo983JOViLvhQ4zluvc3sfUDkIbTpZouq5MbzXWwHoy6pzQqfvh7Wgipe4EZOSmxCkamvYjQGTSrA==\",\n",
" \"requires\": {\n",
" \"@types/node\": \"*\",\n",
" \"fsevents\": \"2.3.2\",\n"
],
"labels": [
"keep",
"keep",
"keep",
"replace",
"replace",
"replace",
"keep",
"keep",
"keep"
],
"after_edit": [
" \"version\": \"1.37.0-alpha-1689796912000\",\n",
" \"resolved\": \"https://registry.npmjs.org/@playwright/test/-/test-1.37.0-alpha-1689796912000.tgz\",\n",
" \"integrity\": \"sha512-3qqZNu3yyVUl5jblbQBA5p7S4oZDYqZ0KBCMK2YPDm/WpFdhypOiOCCqeSJR0OKbvHFiKTaHlQD/dXBKjLBIDg==\",\n"
],
"file_path": "tests/playwright-test/stable-test-runner/package-lock.json",
"type": "replace",
"edit_start_line_idx": 60
} | <script src='networkidle.js'></script>
| tests/assets/networkidle.html | 0 | https://github.com/microsoft/playwright/commit/b2965158d3151f022cf8d76057649208cd85da0d | [
0.00016699063417036086,
0.00016699063417036086,
0.00016699063417036086,
0.00016699063417036086,
0
] |
{
"id": 4,
"code_window": [
" },\n",
" \"dependencies\": {\n",
" \"@playwright/test\": {\n",
" \"version\": \"1.36.0-alpha-jun-15-2023\",\n",
" \"resolved\": \"https://registry.npmjs.org/@playwright/test/-/test-1.36.0-alpha-jun-15-2023.tgz\",\n",
" \"integrity\": \"sha512-UiWKP1xv3Xo983JOViLvhQ4zluvc3sfUDkIbTpZouq5MbzXWwHoy6pzQqfvh7Wgipe4EZOSmxCkamvYjQGTSrA==\",\n",
" \"requires\": {\n",
" \"@types/node\": \"*\",\n",
" \"fsevents\": \"2.3.2\",\n"
],
"labels": [
"keep",
"keep",
"keep",
"replace",
"replace",
"replace",
"keep",
"keep",
"keep"
],
"after_edit": [
" \"version\": \"1.37.0-alpha-1689796912000\",\n",
" \"resolved\": \"https://registry.npmjs.org/@playwright/test/-/test-1.37.0-alpha-1689796912000.tgz\",\n",
" \"integrity\": \"sha512-3qqZNu3yyVUl5jblbQBA5p7S4oZDYqZ0KBCMK2YPDm/WpFdhypOiOCCqeSJR0OKbvHFiKTaHlQD/dXBKjLBIDg==\",\n"
],
"file_path": "tests/playwright-test/stable-test-runner/package-lock.json",
"type": "replace",
"edit_start_line_idx": 60
} | /**
* Copyright (c) Microsoft Corporation.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
import type { HTMLReport } from './types';
import type zip from '@zip.js/zip.js';
// @ts-ignore
import * as zipImport from '@zip.js/zip.js/lib/zip-no-worker-inflate.js';
import * as React from 'react';
import * as ReactDOM from 'react-dom';
import './colors.css';
import type { LoadedReport } from './loadedReport';
import { ReportView } from './reportView';
// @ts-ignore
const zipjs = zipImport as typeof zip;
const ReportLoader: React.FC = () => {
const [report, setReport] = React.useState<LoadedReport | undefined>();
React.useEffect(() => {
if (report)
return;
const zipReport = new ZipReport();
zipReport.load().then(() => setReport(zipReport));
}, [report]);
return <ReportView report={report}></ReportView>;
};
window.onload = () => {
ReactDOM.render(<ReportLoader />, document.querySelector('#root'));
};
class ZipReport implements LoadedReport {
private _entries = new Map<string, zip.Entry>();
private _json!: HTMLReport;
async load() {
const zipReader = new zipjs.ZipReader(new zipjs.Data64URIReader((window as any).playwrightReportBase64), { useWebWorkers: false }) as zip.ZipReader;
for (const entry of await zipReader.getEntries())
this._entries.set(entry.filename, entry);
this._json = await this.entry('report.json') as HTMLReport;
}
json(): HTMLReport {
return this._json;
}
async entry(name: string): Promise<Object> {
const reportEntry = this._entries.get(name);
const writer = new zipjs.TextWriter() as zip.TextWriter;
await reportEntry!.getData!(writer);
return JSON.parse(await writer.getData());
}
}
| packages/html-reporter/src/index.tsx | 0 | https://github.com/microsoft/playwright/commit/b2965158d3151f022cf8d76057649208cd85da0d | [
0.00017469468002673239,
0.00017234192637261003,
0.0001697957923170179,
0.0001717006671242416,
0.0000015504488146689255
] |
{
"id": 5,
"code_window": [
" \"requires\": {\n",
" \"@types/node\": \"*\",\n",
" \"fsevents\": \"2.3.2\",\n",
" \"playwright-core\": \"1.36.0-alpha-jun-15-2023\"\n",
" }\n",
" },\n",
" \"@types/node\": {\n",
" \"version\": \"18.0.0\",\n",
" \"resolved\": \"https://registry.npmjs.org/@types/node/-/node-18.0.0.tgz\",\n"
],
"labels": [
"keep",
"keep",
"keep",
"replace",
"keep",
"keep",
"keep",
"keep",
"keep"
],
"after_edit": [
" \"playwright-core\": \"1.37.0-alpha-1689796912000\"\n"
],
"file_path": "tests/playwright-test/stable-test-runner/package-lock.json",
"type": "replace",
"edit_start_line_idx": 66
} | {
"name": "stable-test-runner",
"lockfileVersion": 2,
"requires": true,
"packages": {
"": {
"dependencies": {
"@playwright/test": "1.36.0-alpha-jun-15-2023"
}
},
"node_modules/@playwright/test": {
"version": "1.36.0-alpha-jun-15-2023",
"resolved": "https://registry.npmjs.org/@playwright/test/-/test-1.36.0-alpha-jun-15-2023.tgz",
"integrity": "sha512-UiWKP1xv3Xo983JOViLvhQ4zluvc3sfUDkIbTpZouq5MbzXWwHoy6pzQqfvh7Wgipe4EZOSmxCkamvYjQGTSrA==",
"dependencies": {
"@types/node": "*",
"playwright-core": "1.36.0-alpha-jun-15-2023"
},
"bin": {
"playwright": "cli.js"
},
"engines": {
"node": ">=16"
},
"optionalDependencies": {
"fsevents": "2.3.2"
}
},
"node_modules/@types/node": {
"version": "18.0.0",
"resolved": "https://registry.npmjs.org/@types/node/-/node-18.0.0.tgz",
"integrity": "sha512-cHlGmko4gWLVI27cGJntjs/Sj8th9aYwplmZFwmmgYQQvL5NUsgVJG7OddLvNfLqYS31KFN0s3qlaD9qCaxACA=="
},
"node_modules/fsevents": {
"version": "2.3.2",
"resolved": "https://registry.npmjs.org/fsevents/-/fsevents-2.3.2.tgz",
"integrity": "sha512-xiqMQR4xAeHTuB9uWm+fFRcIOgKBMiOBP+eXiyT7jsgVCq1bkVygt00oASowB7EdtpOHaaPgKt812P9ab+DDKA==",
"hasInstallScript": true,
"optional": true,
"os": [
"darwin"
],
"engines": {
"node": "^8.16.0 || ^10.6.0 || >=11.0.0"
}
},
"node_modules/playwright-core": {
"version": "1.36.0-alpha-jun-15-2023",
"resolved": "https://registry.npmjs.org/playwright-core/-/playwright-core-1.36.0-alpha-jun-15-2023.tgz",
"integrity": "sha512-jGN89dcf3NxT+gKtW523Vq2/1QSL7Iewkq6WcootSRoAIV3hRuE901jHd/r1iEbXnnIcWwLEOPSBvuXS+UXcIQ==",
"bin": {
"playwright-core": "cli.js"
},
"engines": {
"node": ">=16"
}
}
},
"dependencies": {
"@playwright/test": {
"version": "1.36.0-alpha-jun-15-2023",
"resolved": "https://registry.npmjs.org/@playwright/test/-/test-1.36.0-alpha-jun-15-2023.tgz",
"integrity": "sha512-UiWKP1xv3Xo983JOViLvhQ4zluvc3sfUDkIbTpZouq5MbzXWwHoy6pzQqfvh7Wgipe4EZOSmxCkamvYjQGTSrA==",
"requires": {
"@types/node": "*",
"fsevents": "2.3.2",
"playwright-core": "1.36.0-alpha-jun-15-2023"
}
},
"@types/node": {
"version": "18.0.0",
"resolved": "https://registry.npmjs.org/@types/node/-/node-18.0.0.tgz",
"integrity": "sha512-cHlGmko4gWLVI27cGJntjs/Sj8th9aYwplmZFwmmgYQQvL5NUsgVJG7OddLvNfLqYS31KFN0s3qlaD9qCaxACA=="
},
"fsevents": {
"version": "2.3.2",
"resolved": "https://registry.npmjs.org/fsevents/-/fsevents-2.3.2.tgz",
"integrity": "sha512-xiqMQR4xAeHTuB9uWm+fFRcIOgKBMiOBP+eXiyT7jsgVCq1bkVygt00oASowB7EdtpOHaaPgKt812P9ab+DDKA==",
"optional": true
},
"playwright-core": {
"version": "1.36.0-alpha-jun-15-2023",
"resolved": "https://registry.npmjs.org/playwright-core/-/playwright-core-1.36.0-alpha-jun-15-2023.tgz",
"integrity": "sha512-jGN89dcf3NxT+gKtW523Vq2/1QSL7Iewkq6WcootSRoAIV3hRuE901jHd/r1iEbXnnIcWwLEOPSBvuXS+UXcIQ=="
}
}
}
| tests/playwright-test/stable-test-runner/package-lock.json | 1 | https://github.com/microsoft/playwright/commit/b2965158d3151f022cf8d76057649208cd85da0d | [
0.9854335784912109,
0.1260063499212265,
0.0001718469284242019,
0.008880243636667728,
0.30455729365348816
] |
{
"id": 5,
"code_window": [
" \"requires\": {\n",
" \"@types/node\": \"*\",\n",
" \"fsevents\": \"2.3.2\",\n",
" \"playwright-core\": \"1.36.0-alpha-jun-15-2023\"\n",
" }\n",
" },\n",
" \"@types/node\": {\n",
" \"version\": \"18.0.0\",\n",
" \"resolved\": \"https://registry.npmjs.org/@types/node/-/node-18.0.0.tgz\",\n"
],
"labels": [
"keep",
"keep",
"keep",
"replace",
"keep",
"keep",
"keep",
"keep",
"keep"
],
"after_edit": [
" \"playwright-core\": \"1.37.0-alpha-1689796912000\"\n"
],
"file_path": "tests/playwright-test/stable-test-runner/package-lock.json",
"type": "replace",
"edit_start_line_idx": 66
} | <script>
window.addEventListener('DOMContentLoaded', () => {
history.pushState({}, '', '#1');
});
</script>
| tests/assets/historyapi.html | 0 | https://github.com/microsoft/playwright/commit/b2965158d3151f022cf8d76057649208cd85da0d | [
0.00016576889902353287,
0.00016576889902353287,
0.00016576889902353287,
0.00016576889902353287,
0
] |
{
"id": 5,
"code_window": [
" \"requires\": {\n",
" \"@types/node\": \"*\",\n",
" \"fsevents\": \"2.3.2\",\n",
" \"playwright-core\": \"1.36.0-alpha-jun-15-2023\"\n",
" }\n",
" },\n",
" \"@types/node\": {\n",
" \"version\": \"18.0.0\",\n",
" \"resolved\": \"https://registry.npmjs.org/@types/node/-/node-18.0.0.tgz\",\n"
],
"labels": [
"keep",
"keep",
"keep",
"replace",
"keep",
"keep",
"keep",
"keep",
"keep"
],
"after_edit": [
" \"playwright-core\": \"1.37.0-alpha-1689796912000\"\n"
],
"file_path": "tests/playwright-test/stable-test-runner/package-lock.json",
"type": "replace",
"edit_start_line_idx": 66
} | /**
* Copyright (c) Microsoft Corporation.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
import fs from 'fs';
import os from 'os';
import path from 'path';
import type { CRBrowserContext } from '../chromium/crBrowser';
import { CRBrowser } from '../chromium/crBrowser';
import type { CRSession } from '../chromium/crConnection';
import { CRConnection } from '../chromium/crConnection';
import type { CRPage } from '../chromium/crPage';
import { CRExecutionContext } from '../chromium/crExecutionContext';
import * as js from '../javascript';
import type { Page } from '../page';
import { TimeoutSettings } from '../../common/timeoutSettings';
import { wrapInASCIIBox } from '../../utils';
import { WebSocketTransport } from '../transport';
import { launchProcess, envArrayToObject } from '../../utils/processLauncher';
import { BrowserContext, validateBrowserContextOptions } from '../browserContext';
import type { BrowserWindow } from 'electron';
import type { Progress } from '../progress';
import { ProgressController } from '../progress';
import { helper } from '../helper';
import { eventsHelper } from '../../utils/eventsHelper';
import type { BrowserOptions, BrowserProcess } from '../browser';
import type { Playwright } from '../playwright';
import type * as childProcess from 'child_process';
import * as readline from 'readline';
import { RecentLogsCollector } from '../../common/debugLogger';
import { serverSideCallMetadata, SdkObject } from '../instrumentation';
import type * as channels from '@protocol/channels';
const ARTIFACTS_FOLDER = path.join(os.tmpdir(), 'playwright-artifacts-');
export class ElectronApplication extends SdkObject {
static Events = {
Close: 'close',
};
private _browserContext: CRBrowserContext;
private _nodeConnection: CRConnection;
private _nodeSession: CRSession;
private _nodeExecutionContext: js.ExecutionContext | undefined;
_nodeElectronHandlePromise: Promise<js.JSHandle<any>>;
readonly _timeoutSettings = new TimeoutSettings();
private _process: childProcess.ChildProcess;
constructor(parent: SdkObject, browser: CRBrowser, nodeConnection: CRConnection, process: childProcess.ChildProcess) {
super(parent, 'electron-app');
this._process = process;
this._browserContext = browser._defaultContext as CRBrowserContext;
this._browserContext.on(BrowserContext.Events.Close, () => {
// Emit application closed after context closed.
Promise.resolve().then(() => this.emit(ElectronApplication.Events.Close));
});
this._nodeConnection = nodeConnection;
this._nodeSession = nodeConnection.rootSession;
this._nodeElectronHandlePromise = new Promise(f => {
this._nodeSession.on('Runtime.executionContextCreated', async (event: any) => {
if (event.context.auxData && event.context.auxData.isDefault) {
this._nodeExecutionContext = new js.ExecutionContext(this, new CRExecutionContext(this._nodeSession, event.context), 'electron');
const source = `process.mainModule.require('electron')`;
f(await this._nodeExecutionContext.rawEvaluateHandle(source).then(objectId => new js.JSHandle(this._nodeExecutionContext!, 'object', 'ElectronModule', objectId)));
}
});
});
this._browserContext.setCustomCloseHandler(async () => {
const electronHandle = await this._nodeElectronHandlePromise;
await electronHandle.evaluate(({ app }) => app.quit()).catch(() => {});
});
}
async initialize() {
await this._nodeSession.send('Runtime.enable', {});
// Delay loading the app until browser is started and the browser targets are configured to auto-attach.
await this._nodeSession.send('Runtime.evaluate', { expression: '__playwright_run()' });
}
process(): childProcess.ChildProcess {
return this._process;
}
context(): BrowserContext {
return this._browserContext;
}
async close() {
const progressController = new ProgressController(serverSideCallMetadata(), this);
const closed = progressController.run(progress => helper.waitForEvent(progress, this, ElectronApplication.Events.Close).promise);
await this._browserContext.close(serverSideCallMetadata());
this._nodeConnection.close();
await closed;
}
async browserWindow(page: Page): Promise<js.JSHandle<BrowserWindow>> {
// Assume CRPage as Electron is always Chromium.
const targetId = (page._delegate as CRPage)._targetId;
const electronHandle = await this._nodeElectronHandlePromise;
return await electronHandle.evaluateHandle(({ BrowserWindow, webContents }, targetId) => {
const wc = webContents.fromDevToolsTargetId(targetId);
return BrowserWindow.fromWebContents(wc);
}, targetId);
}
}
export class Electron extends SdkObject {
constructor(playwright: Playwright) {
super(playwright, 'electron');
}
async launch(options: channels.ElectronLaunchParams): Promise<ElectronApplication> {
const {
args = [],
} = options;
const controller = new ProgressController(serverSideCallMetadata(), this);
controller.setLogName('browser');
return controller.run(async progress => {
let app: ElectronApplication | undefined = undefined;
const electronArguments = ['--inspect=0', '--remote-debugging-port=0', ...args];
if (os.platform() === 'linux') {
const runningAsRoot = process.geteuid && process.geteuid() === 0;
if (runningAsRoot && electronArguments.indexOf('--no-sandbox') === -1)
electronArguments.push('--no-sandbox');
}
const artifactsDir = await fs.promises.mkdtemp(ARTIFACTS_FOLDER);
const browserLogsCollector = new RecentLogsCollector();
const env = options.env ? envArrayToObject(options.env) : process.env;
let command: string;
if (options.executablePath) {
command = options.executablePath;
} else {
try {
// By default we fallback to the Electron App executable path.
// 'electron/index.js' resolves to the actual Electron App.
command = require('electron/index.js');
} catch (error: any) {
if ((error as NodeJS.ErrnoException)?.code === 'MODULE_NOT_FOUND') {
throw new Error('\n' + wrapInASCIIBox([
'Electron executablePath not found!',
'Please install it using `npm install -D electron` or set the executablePath to your Electron executable.',
].join('\n'), 1));
}
throw error;
}
// Only use our own loader for non-packaged apps.
// Packaged apps might have their own command line handling.
electronArguments.unshift('-r', require.resolve('./loader'));
}
// When debugging Playwright test that runs Electron, NODE_OPTIONS
// will make the debugger attach to Electron's Node. But Playwright
// also needs to attach to drive the automation. Disable external debugging.
delete env.NODE_OPTIONS;
const { launchedProcess, gracefullyClose, kill } = await launchProcess({
command,
args: electronArguments,
env,
log: (message: string) => {
progress.log(message);
browserLogsCollector.log(message);
},
stdio: 'pipe',
cwd: options.cwd,
tempDirectories: [artifactsDir],
attemptToGracefullyClose: () => app!.close(),
handleSIGINT: true,
handleSIGTERM: true,
handleSIGHUP: true,
onExit: () => {},
});
const waitForXserverError = new Promise(async (resolve, reject) => {
waitForLine(progress, launchedProcess, /Unable to open X display/).then(() => reject(new Error([
'Unable to open X display!',
`================================`,
'Most likely this is because there is no X server available.',
"Use 'xvfb-run' on Linux to launch your tests with an emulated display server.",
"For example: 'xvfb-run npm run test:e2e'",
`================================`,
progress.metadata.log
].join('\n')))).catch(() => {});
});
const nodeMatch = await waitForLine(progress, launchedProcess, /^Debugger listening on (ws:\/\/.*)$/);
const nodeTransport = await WebSocketTransport.connect(progress, nodeMatch[1]);
const nodeConnection = new CRConnection(nodeTransport, helper.debugProtocolLogger(), browserLogsCollector);
// Immediately release exiting process under debug.
waitForLine(progress, launchedProcess, /Waiting for the debugger to disconnect\.\.\./).then(() => {
nodeTransport.close();
}).catch(() => {});
const chromeMatch = await Promise.race([
waitForLine(progress, launchedProcess, /^DevTools listening on (ws:\/\/.*)$/),
waitForXserverError,
]) as RegExpMatchArray;
const chromeTransport = await WebSocketTransport.connect(progress, chromeMatch[1]);
const browserProcess: BrowserProcess = {
onclose: undefined,
process: launchedProcess,
close: gracefullyClose,
kill
};
const contextOptions: channels.BrowserNewContextParams = {
...options,
noDefaultViewport: true,
};
const browserOptions: BrowserOptions = {
name: 'electron',
isChromium: true,
headful: true,
persistent: contextOptions,
browserProcess,
protocolLogger: helper.debugProtocolLogger(),
browserLogsCollector,
artifactsDir,
downloadsPath: artifactsDir,
tracesDir: options.tracesDir || artifactsDir,
originalLaunchOptions: {},
};
validateBrowserContextOptions(contextOptions, browserOptions);
const browser = await CRBrowser.connect(this.attribution.playwright, chromeTransport, browserOptions);
app = new ElectronApplication(this, browser, nodeConnection, launchedProcess);
await app.initialize();
return app;
}, TimeoutSettings.timeout(options));
}
}
function waitForLine(progress: Progress, process: childProcess.ChildProcess, regex: RegExp): Promise<RegExpMatchArray> {
return new Promise((resolve, reject) => {
const rl = readline.createInterface({ input: process.stderr! });
const failError = new Error('Process failed to launch!');
const listeners = [
eventsHelper.addEventListener(rl, 'line', onLine),
eventsHelper.addEventListener(rl, 'close', reject.bind(null, failError)),
eventsHelper.addEventListener(process, 'exit', reject.bind(null, failError)),
// It is Ok to remove error handler because we did not create process and there is another listener.
eventsHelper.addEventListener(process, 'error', reject.bind(null, failError))
];
progress.cleanupWhenAborted(cleanup);
function onLine(line: string) {
const match = line.match(regex);
if (!match)
return;
cleanup();
resolve(match);
}
function cleanup() {
eventsHelper.removeEventListeners(listeners);
}
});
}
| packages/playwright-core/src/server/electron/electron.ts | 0 | https://github.com/microsoft/playwright/commit/b2965158d3151f022cf8d76057649208cd85da0d | [
0.0002120053832186386,
0.00017014403420034796,
0.0001640539849177003,
0.00016858306480571628,
0.000008411563612753525
] |
{
"id": 5,
"code_window": [
" \"requires\": {\n",
" \"@types/node\": \"*\",\n",
" \"fsevents\": \"2.3.2\",\n",
" \"playwright-core\": \"1.36.0-alpha-jun-15-2023\"\n",
" }\n",
" },\n",
" \"@types/node\": {\n",
" \"version\": \"18.0.0\",\n",
" \"resolved\": \"https://registry.npmjs.org/@types/node/-/node-18.0.0.tgz\",\n"
],
"labels": [
"keep",
"keep",
"keep",
"replace",
"keep",
"keep",
"keep",
"keep",
"keep"
],
"after_edit": [
" \"playwright-core\": \"1.37.0-alpha-1689796912000\"\n"
],
"file_path": "tests/playwright-test/stable-test-runner/package-lock.json",
"type": "replace",
"edit_start_line_idx": 66
} | # playwright-webkit
This package contains the [WebKit](https://www.webkit.org/) flavor of the [Playwright](http://github.com/microsoft/playwright) library. If you want to write end-to-end tests, we recommend [@playwright/test](https://playwright.dev/docs/intro).
| packages/playwright-webkit/README.md | 0 | https://github.com/microsoft/playwright/commit/b2965158d3151f022cf8d76057649208cd85da0d | [
0.0001654377847444266,
0.0001654377847444266,
0.0001654377847444266,
0.0001654377847444266,
0
] |
{
"id": 6,
"code_window": [
" \"resolved\": \"https://registry.npmjs.org/fsevents/-/fsevents-2.3.2.tgz\",\n",
" \"integrity\": \"sha512-xiqMQR4xAeHTuB9uWm+fFRcIOgKBMiOBP+eXiyT7jsgVCq1bkVygt00oASowB7EdtpOHaaPgKt812P9ab+DDKA==\",\n",
" \"optional\": true\n",
" },\n",
" \"playwright-core\": {\n",
" \"version\": \"1.36.0-alpha-jun-15-2023\",\n",
" \"resolved\": \"https://registry.npmjs.org/playwright-core/-/playwright-core-1.36.0-alpha-jun-15-2023.tgz\",\n",
" \"integrity\": \"sha512-jGN89dcf3NxT+gKtW523Vq2/1QSL7Iewkq6WcootSRoAIV3hRuE901jHd/r1iEbXnnIcWwLEOPSBvuXS+UXcIQ==\"\n",
" }\n",
" }\n",
"}"
],
"labels": [
"keep",
"keep",
"keep",
"keep",
"keep",
"replace",
"replace",
"replace",
"keep",
"keep",
"keep"
],
"after_edit": [
" \"version\": \"1.37.0-alpha-1689796912000\",\n",
" \"resolved\": \"https://registry.npmjs.org/playwright-core/-/playwright-core-1.37.0-alpha-1689796912000.tgz\",\n",
" \"integrity\": \"sha512-lHwiTFeWgeqFWION9eZ1r7zhKfOmHB1tZyXHlHN6iVsqEPVuYlHwJWAXTJviddRDKPfd3fORoeOMKwztHO5Lvw==\"\n"
],
"file_path": "tests/playwright-test/stable-test-runner/package-lock.json",
"type": "replace",
"edit_start_line_idx": 81
} | {
"name": "stable-test-runner",
"lockfileVersion": 2,
"requires": true,
"packages": {
"": {
"dependencies": {
"@playwright/test": "1.36.0-alpha-jun-15-2023"
}
},
"node_modules/@playwright/test": {
"version": "1.36.0-alpha-jun-15-2023",
"resolved": "https://registry.npmjs.org/@playwright/test/-/test-1.36.0-alpha-jun-15-2023.tgz",
"integrity": "sha512-UiWKP1xv3Xo983JOViLvhQ4zluvc3sfUDkIbTpZouq5MbzXWwHoy6pzQqfvh7Wgipe4EZOSmxCkamvYjQGTSrA==",
"dependencies": {
"@types/node": "*",
"playwright-core": "1.36.0-alpha-jun-15-2023"
},
"bin": {
"playwright": "cli.js"
},
"engines": {
"node": ">=16"
},
"optionalDependencies": {
"fsevents": "2.3.2"
}
},
"node_modules/@types/node": {
"version": "18.0.0",
"resolved": "https://registry.npmjs.org/@types/node/-/node-18.0.0.tgz",
"integrity": "sha512-cHlGmko4gWLVI27cGJntjs/Sj8th9aYwplmZFwmmgYQQvL5NUsgVJG7OddLvNfLqYS31KFN0s3qlaD9qCaxACA=="
},
"node_modules/fsevents": {
"version": "2.3.2",
"resolved": "https://registry.npmjs.org/fsevents/-/fsevents-2.3.2.tgz",
"integrity": "sha512-xiqMQR4xAeHTuB9uWm+fFRcIOgKBMiOBP+eXiyT7jsgVCq1bkVygt00oASowB7EdtpOHaaPgKt812P9ab+DDKA==",
"hasInstallScript": true,
"optional": true,
"os": [
"darwin"
],
"engines": {
"node": "^8.16.0 || ^10.6.0 || >=11.0.0"
}
},
"node_modules/playwright-core": {
"version": "1.36.0-alpha-jun-15-2023",
"resolved": "https://registry.npmjs.org/playwright-core/-/playwright-core-1.36.0-alpha-jun-15-2023.tgz",
"integrity": "sha512-jGN89dcf3NxT+gKtW523Vq2/1QSL7Iewkq6WcootSRoAIV3hRuE901jHd/r1iEbXnnIcWwLEOPSBvuXS+UXcIQ==",
"bin": {
"playwright-core": "cli.js"
},
"engines": {
"node": ">=16"
}
}
},
"dependencies": {
"@playwright/test": {
"version": "1.36.0-alpha-jun-15-2023",
"resolved": "https://registry.npmjs.org/@playwright/test/-/test-1.36.0-alpha-jun-15-2023.tgz",
"integrity": "sha512-UiWKP1xv3Xo983JOViLvhQ4zluvc3sfUDkIbTpZouq5MbzXWwHoy6pzQqfvh7Wgipe4EZOSmxCkamvYjQGTSrA==",
"requires": {
"@types/node": "*",
"fsevents": "2.3.2",
"playwright-core": "1.36.0-alpha-jun-15-2023"
}
},
"@types/node": {
"version": "18.0.0",
"resolved": "https://registry.npmjs.org/@types/node/-/node-18.0.0.tgz",
"integrity": "sha512-cHlGmko4gWLVI27cGJntjs/Sj8th9aYwplmZFwmmgYQQvL5NUsgVJG7OddLvNfLqYS31KFN0s3qlaD9qCaxACA=="
},
"fsevents": {
"version": "2.3.2",
"resolved": "https://registry.npmjs.org/fsevents/-/fsevents-2.3.2.tgz",
"integrity": "sha512-xiqMQR4xAeHTuB9uWm+fFRcIOgKBMiOBP+eXiyT7jsgVCq1bkVygt00oASowB7EdtpOHaaPgKt812P9ab+DDKA==",
"optional": true
},
"playwright-core": {
"version": "1.36.0-alpha-jun-15-2023",
"resolved": "https://registry.npmjs.org/playwright-core/-/playwright-core-1.36.0-alpha-jun-15-2023.tgz",
"integrity": "sha512-jGN89dcf3NxT+gKtW523Vq2/1QSL7Iewkq6WcootSRoAIV3hRuE901jHd/r1iEbXnnIcWwLEOPSBvuXS+UXcIQ=="
}
}
}
| tests/playwright-test/stable-test-runner/package-lock.json | 1 | https://github.com/microsoft/playwright/commit/b2965158d3151f022cf8d76057649208cd85da0d | [
0.9857490658760071,
0.24653564393520355,
0.00016300678544212133,
0.07376084476709366,
0.3198915123939514
] |
{
"id": 6,
"code_window": [
" \"resolved\": \"https://registry.npmjs.org/fsevents/-/fsevents-2.3.2.tgz\",\n",
" \"integrity\": \"sha512-xiqMQR4xAeHTuB9uWm+fFRcIOgKBMiOBP+eXiyT7jsgVCq1bkVygt00oASowB7EdtpOHaaPgKt812P9ab+DDKA==\",\n",
" \"optional\": true\n",
" },\n",
" \"playwright-core\": {\n",
" \"version\": \"1.36.0-alpha-jun-15-2023\",\n",
" \"resolved\": \"https://registry.npmjs.org/playwright-core/-/playwright-core-1.36.0-alpha-jun-15-2023.tgz\",\n",
" \"integrity\": \"sha512-jGN89dcf3NxT+gKtW523Vq2/1QSL7Iewkq6WcootSRoAIV3hRuE901jHd/r1iEbXnnIcWwLEOPSBvuXS+UXcIQ==\"\n",
" }\n",
" }\n",
"}"
],
"labels": [
"keep",
"keep",
"keep",
"keep",
"keep",
"replace",
"replace",
"replace",
"keep",
"keep",
"keep"
],
"after_edit": [
" \"version\": \"1.37.0-alpha-1689796912000\",\n",
" \"resolved\": \"https://registry.npmjs.org/playwright-core/-/playwright-core-1.37.0-alpha-1689796912000.tgz\",\n",
" \"integrity\": \"sha512-lHwiTFeWgeqFWION9eZ1r7zhKfOmHB1tZyXHlHN6iVsqEPVuYlHwJWAXTJviddRDKPfd3fORoeOMKwztHO5Lvw==\"\n"
],
"file_path": "tests/playwright-test/stable-test-runner/package-lock.json",
"type": "replace",
"edit_start_line_idx": 81
} | **/* | tests/components/ct-svelte-vite/.eslintignore | 0 | https://github.com/microsoft/playwright/commit/b2965158d3151f022cf8d76057649208cd85da0d | [
0.00017044741252902895,
0.00017044741252902895,
0.00017044741252902895,
0.00017044741252902895,
0
] |
{
"id": 6,
"code_window": [
" \"resolved\": \"https://registry.npmjs.org/fsevents/-/fsevents-2.3.2.tgz\",\n",
" \"integrity\": \"sha512-xiqMQR4xAeHTuB9uWm+fFRcIOgKBMiOBP+eXiyT7jsgVCq1bkVygt00oASowB7EdtpOHaaPgKt812P9ab+DDKA==\",\n",
" \"optional\": true\n",
" },\n",
" \"playwright-core\": {\n",
" \"version\": \"1.36.0-alpha-jun-15-2023\",\n",
" \"resolved\": \"https://registry.npmjs.org/playwright-core/-/playwright-core-1.36.0-alpha-jun-15-2023.tgz\",\n",
" \"integrity\": \"sha512-jGN89dcf3NxT+gKtW523Vq2/1QSL7Iewkq6WcootSRoAIV3hRuE901jHd/r1iEbXnnIcWwLEOPSBvuXS+UXcIQ==\"\n",
" }\n",
" }\n",
"}"
],
"labels": [
"keep",
"keep",
"keep",
"keep",
"keep",
"replace",
"replace",
"replace",
"keep",
"keep",
"keep"
],
"after_edit": [
" \"version\": \"1.37.0-alpha-1689796912000\",\n",
" \"resolved\": \"https://registry.npmjs.org/playwright-core/-/playwright-core-1.37.0-alpha-1689796912000.tgz\",\n",
" \"integrity\": \"sha512-lHwiTFeWgeqFWION9eZ1r7zhKfOmHB1tZyXHlHN6iVsqEPVuYlHwJWAXTJviddRDKPfd3fORoeOMKwztHO5Lvw==\"\n"
],
"file_path": "tests/playwright-test/stable-test-runner/package-lock.json",
"type": "replace",
"edit_start_line_idx": 81
} | import {foo} from '/slow.js';
console.log('foo is', foo);
window.results.push('module');
| tests/assets/load-event/module.js | 0 | https://github.com/microsoft/playwright/commit/b2965158d3151f022cf8d76057649208cd85da0d | [
0.00016995535406749696,
0.00016995535406749696,
0.00016995535406749696,
0.00016995535406749696,
0
] |
{
"id": 6,
"code_window": [
" \"resolved\": \"https://registry.npmjs.org/fsevents/-/fsevents-2.3.2.tgz\",\n",
" \"integrity\": \"sha512-xiqMQR4xAeHTuB9uWm+fFRcIOgKBMiOBP+eXiyT7jsgVCq1bkVygt00oASowB7EdtpOHaaPgKt812P9ab+DDKA==\",\n",
" \"optional\": true\n",
" },\n",
" \"playwright-core\": {\n",
" \"version\": \"1.36.0-alpha-jun-15-2023\",\n",
" \"resolved\": \"https://registry.npmjs.org/playwright-core/-/playwright-core-1.36.0-alpha-jun-15-2023.tgz\",\n",
" \"integrity\": \"sha512-jGN89dcf3NxT+gKtW523Vq2/1QSL7Iewkq6WcootSRoAIV3hRuE901jHd/r1iEbXnnIcWwLEOPSBvuXS+UXcIQ==\"\n",
" }\n",
" }\n",
"}"
],
"labels": [
"keep",
"keep",
"keep",
"keep",
"keep",
"replace",
"replace",
"replace",
"keep",
"keep",
"keep"
],
"after_edit": [
" \"version\": \"1.37.0-alpha-1689796912000\",\n",
" \"resolved\": \"https://registry.npmjs.org/playwright-core/-/playwright-core-1.37.0-alpha-1689796912000.tgz\",\n",
" \"integrity\": \"sha512-lHwiTFeWgeqFWION9eZ1r7zhKfOmHB1tZyXHlHN6iVsqEPVuYlHwJWAXTJviddRDKPfd3fORoeOMKwztHO5Lvw==\"\n"
],
"file_path": "tests/playwright-test/stable-test-runner/package-lock.json",
"type": "replace",
"edit_start_line_idx": 81
} | <!doctype html>
<html>
<head>
<title>Name test case 601</title>
<meta content="text/html; charset=utf-8" http-equiv="Content-Type"/>
<link rel="stylesheet" href="/wai-aria/scripts/manual.css">
<script src="/resources/testharness.js"></script>
<script src="/resources/testharnessreport.js"></script>
<script src="/wai-aria/scripts/ATTAcomm.js"></script>
<script>
setup({explicit_timeout: true, explicit_done: true });
var theTest = new ATTAcomm(
{
"steps" : [
{
"element" : "test",
"test" : {
"ATK" : [
[
"property",
"name",
"is",
"foo"
]
],
"AXAPI" : [
[
"property",
"AXDescription",
"is",
"foo"
]
],
"IAccessible2" : [
[
"property",
"accName",
"is",
"foo"
]
],
"UIA" : [
[
"property",
"Name",
"is",
"foo"
]
]
},
"title" : "step 1",
"type" : "test"
}
],
"title" : "Name test case 601"
}
) ;
</script>
</head>
<body>
<p>This test examines the ARIA properties for Name test case 601.</p>
<div id="test" role="button">foo</div>
<div id="manualMode"></div>
<div id="log"></div>
<div id="ATTAmessages"></div>
</body>
</html>
| tests/assets/wpt/accname/name_test_case_601-manual.html | 0 | https://github.com/microsoft/playwright/commit/b2965158d3151f022cf8d76057649208cd85da0d | [
0.0001741890300763771,
0.0001703062152955681,
0.0001653765793889761,
0.00017079948156606406,
0.000002493227384547936
] |
{
"id": 7,
"code_window": [
"{\n",
" \"private\": true,\n",
" \"dependencies\": {\n",
" \"@playwright/test\": \"1.36.0-alpha-jun-15-2023\"\n",
" }\n",
"}"
],
"labels": [
"keep",
"keep",
"keep",
"replace",
"keep",
"keep"
],
"after_edit": [
" \"@playwright/test\": \"1.37.0-alpha-1689796912000\"\n"
],
"file_path": "tests/playwright-test/stable-test-runner/package.json",
"type": "replace",
"edit_start_line_idx": 3
} | {
"name": "stable-test-runner",
"lockfileVersion": 2,
"requires": true,
"packages": {
"": {
"dependencies": {
"@playwright/test": "1.36.0-alpha-jun-15-2023"
}
},
"node_modules/@playwright/test": {
"version": "1.36.0-alpha-jun-15-2023",
"resolved": "https://registry.npmjs.org/@playwright/test/-/test-1.36.0-alpha-jun-15-2023.tgz",
"integrity": "sha512-UiWKP1xv3Xo983JOViLvhQ4zluvc3sfUDkIbTpZouq5MbzXWwHoy6pzQqfvh7Wgipe4EZOSmxCkamvYjQGTSrA==",
"dependencies": {
"@types/node": "*",
"playwright-core": "1.36.0-alpha-jun-15-2023"
},
"bin": {
"playwright": "cli.js"
},
"engines": {
"node": ">=16"
},
"optionalDependencies": {
"fsevents": "2.3.2"
}
},
"node_modules/@types/node": {
"version": "18.0.0",
"resolved": "https://registry.npmjs.org/@types/node/-/node-18.0.0.tgz",
"integrity": "sha512-cHlGmko4gWLVI27cGJntjs/Sj8th9aYwplmZFwmmgYQQvL5NUsgVJG7OddLvNfLqYS31KFN0s3qlaD9qCaxACA=="
},
"node_modules/fsevents": {
"version": "2.3.2",
"resolved": "https://registry.npmjs.org/fsevents/-/fsevents-2.3.2.tgz",
"integrity": "sha512-xiqMQR4xAeHTuB9uWm+fFRcIOgKBMiOBP+eXiyT7jsgVCq1bkVygt00oASowB7EdtpOHaaPgKt812P9ab+DDKA==",
"hasInstallScript": true,
"optional": true,
"os": [
"darwin"
],
"engines": {
"node": "^8.16.0 || ^10.6.0 || >=11.0.0"
}
},
"node_modules/playwright-core": {
"version": "1.36.0-alpha-jun-15-2023",
"resolved": "https://registry.npmjs.org/playwright-core/-/playwright-core-1.36.0-alpha-jun-15-2023.tgz",
"integrity": "sha512-jGN89dcf3NxT+gKtW523Vq2/1QSL7Iewkq6WcootSRoAIV3hRuE901jHd/r1iEbXnnIcWwLEOPSBvuXS+UXcIQ==",
"bin": {
"playwright-core": "cli.js"
},
"engines": {
"node": ">=16"
}
}
},
"dependencies": {
"@playwright/test": {
"version": "1.36.0-alpha-jun-15-2023",
"resolved": "https://registry.npmjs.org/@playwright/test/-/test-1.36.0-alpha-jun-15-2023.tgz",
"integrity": "sha512-UiWKP1xv3Xo983JOViLvhQ4zluvc3sfUDkIbTpZouq5MbzXWwHoy6pzQqfvh7Wgipe4EZOSmxCkamvYjQGTSrA==",
"requires": {
"@types/node": "*",
"fsevents": "2.3.2",
"playwright-core": "1.36.0-alpha-jun-15-2023"
}
},
"@types/node": {
"version": "18.0.0",
"resolved": "https://registry.npmjs.org/@types/node/-/node-18.0.0.tgz",
"integrity": "sha512-cHlGmko4gWLVI27cGJntjs/Sj8th9aYwplmZFwmmgYQQvL5NUsgVJG7OddLvNfLqYS31KFN0s3qlaD9qCaxACA=="
},
"fsevents": {
"version": "2.3.2",
"resolved": "https://registry.npmjs.org/fsevents/-/fsevents-2.3.2.tgz",
"integrity": "sha512-xiqMQR4xAeHTuB9uWm+fFRcIOgKBMiOBP+eXiyT7jsgVCq1bkVygt00oASowB7EdtpOHaaPgKt812P9ab+DDKA==",
"optional": true
},
"playwright-core": {
"version": "1.36.0-alpha-jun-15-2023",
"resolved": "https://registry.npmjs.org/playwright-core/-/playwright-core-1.36.0-alpha-jun-15-2023.tgz",
"integrity": "sha512-jGN89dcf3NxT+gKtW523Vq2/1QSL7Iewkq6WcootSRoAIV3hRuE901jHd/r1iEbXnnIcWwLEOPSBvuXS+UXcIQ=="
}
}
}
| tests/playwright-test/stable-test-runner/package-lock.json | 1 | https://github.com/microsoft/playwright/commit/b2965158d3151f022cf8d76057649208cd85da0d | [
0.13929560780525208,
0.01772649958729744,
0.0001671019708737731,
0.0030224176589399576,
0.04305756464600563
] |
{
"id": 7,
"code_window": [
"{\n",
" \"private\": true,\n",
" \"dependencies\": {\n",
" \"@playwright/test\": \"1.36.0-alpha-jun-15-2023\"\n",
" }\n",
"}"
],
"labels": [
"keep",
"keep",
"keep",
"replace",
"keep",
"keep"
],
"after_edit": [
" \"@playwright/test\": \"1.37.0-alpha-1689796912000\"\n"
],
"file_path": "tests/playwright-test/stable-test-runner/package.json",
"type": "replace",
"edit_start_line_idx": 3
} | <script src='modernizr.js'></script>
<body></body>
<script>
const report = {};
for (const name in Modernizr) {
if (name.startsWith('_'))
continue;
if (['on', 'testAllProps', 'testProp', 'addTest', 'prefixed'].includes(name))
continue;
let value = Modernizr[name];
report[name] = value;
}
report['devicemotion2'] = 'ondevicemotion' in window;
report['deviceorientation2'] = 'orientation' in window;
report['deviceorientation3'] = 'ondeviceorientation' in window;
document.body.style.whiteSpace = 'pre';
document.body.textContent = JSON.stringify(report, undefined, 4);
window.report = JSON.parse(document.body.textContent);
</script>
| tests/assets/modernizr.html | 0 | https://github.com/microsoft/playwright/commit/b2965158d3151f022cf8d76057649208cd85da0d | [
0.0001708447525743395,
0.00016930047422647476,
0.00016828280058689415,
0.00016877388407010585,
0.0000011102181360911345
] |
{
"id": 7,
"code_window": [
"{\n",
" \"private\": true,\n",
" \"dependencies\": {\n",
" \"@playwright/test\": \"1.36.0-alpha-jun-15-2023\"\n",
" }\n",
"}"
],
"labels": [
"keep",
"keep",
"keep",
"replace",
"keep",
"keep"
],
"after_edit": [
" \"@playwright/test\": \"1.37.0-alpha-1689796912000\"\n"
],
"file_path": "tests/playwright-test/stable-test-runner/package.json",
"type": "replace",
"edit_start_line_idx": 3
} | <!doctype html>
<html>
<head>
<title>Name test case 608</title>
<meta content="text/html; charset=utf-8" http-equiv="Content-Type"/>
<link rel="stylesheet" href="/wai-aria/scripts/manual.css">
<script src="/resources/testharness.js"></script>
<script src="/resources/testharnessreport.js"></script>
<script src="/wai-aria/scripts/ATTAcomm.js"></script>
<script>
setup({explicit_timeout: true, explicit_done: true });
var theTest = new ATTAcomm(
{
"steps" : [
{
"element" : "test",
"test" : {
"ATK" : [
[
"property",
"name",
"is",
"Tag"
]
],
"AXAPI" : [
[
"property",
"AXDescription",
"is",
"Tag"
]
],
"IAccessible2" : [
[
"property",
"accName",
"is",
"Tag"
]
],
"UIA" : [
[
"property",
"Name",
"is",
"Tag"
]
]
},
"title" : "step 1",
"type" : "test"
}
],
"title" : "Name test case 608"
}
) ;
</script>
</head>
<body>
<p>This test examines the ARIA properties for Name test case 608.</p>
<a href="test.html" id="test" title="Tag"></a>
<div id="manualMode"></div>
<div id="log"></div>
<div id="ATTAmessages"></div>
</body>
</html>
| tests/assets/wpt/accname/name_test_case_608-manual.html | 0 | https://github.com/microsoft/playwright/commit/b2965158d3151f022cf8d76057649208cd85da0d | [
0.00017699117597658187,
0.00017085063154809177,
0.00016760078142397106,
0.00017031648894771934,
0.0000030124615477689076
] |
{
"id": 7,
"code_window": [
"{\n",
" \"private\": true,\n",
" \"dependencies\": {\n",
" \"@playwright/test\": \"1.36.0-alpha-jun-15-2023\"\n",
" }\n",
"}"
],
"labels": [
"keep",
"keep",
"keep",
"replace",
"keep",
"keep"
],
"after_edit": [
" \"@playwright/test\": \"1.37.0-alpha-1689796912000\"\n"
],
"file_path": "tests/playwright-test/stable-test-runner/package.json",
"type": "replace",
"edit_start_line_idx": 3
} | /**
* Copyright (c) Microsoft Corporation.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
import { test, expect } from './playwright-test-fixtures';
test('should fall back to launchOptions', async ({ runInlineTest }) => {
const result = await runInlineTest({
'playwright.config.ts': `
module.exports = {
use: {
launchOptions: {
headless: false,
channel: 'chrome',
}
}
};
`,
'a.test.ts': `
import { test, expect } from '@playwright/test';
test('pass', async ({ headless, channel }) => {
expect.soft(headless).toBe(false);
expect.soft(channel).toBe('chrome');
});
`,
}, { workers: 1 });
expect(result.exitCode).toBe(0);
expect(result.passed).toBe(1);
});
test('should override launchOptions', async ({ runInlineTest }) => {
const result = await runInlineTest({
'playwright.config.ts': `
module.exports = {
use: {
headless: false,
channel: 'chrome',
launchOptions: {
headless: true,
channel: 'msedge',
}
}
};
`,
'a.test.ts': `
import { test, expect } from '@playwright/test';
test('pass', async ({ headless, channel }) => {
expect.soft(headless).toBe(false);
expect.soft(channel).toBe('chrome');
});
`,
}, { workers: 1 });
expect(result.exitCode).toBe(0);
expect(result.passed).toBe(1);
});
test('should respect contextOptions', async ({ runInlineTest }) => {
const result = await runInlineTest({
'playwright.config.ts': `
module.exports = {
use: {
contextOptions: {
acceptDownloads: false,
bypassCSP: true,
colorScheme: 'dark',
deviceScaleFactor: 2,
extraHTTPHeaders: {'foo': 'bar'},
hasTouch: true,
ignoreHTTPSErrors: true,
isMobile: true,
javaScriptEnabled: true,
locale: 'fr-FR',
offline: true,
permissions: ['geolocation'],
timezoneId: 'TIMEZONE',
userAgent: 'UA',
viewport: null
}
}
};
`,
'a.test.ts': `
import { test, expect } from '@playwright/test';
test('pass', async ({ acceptDownloads, bypassCSP, colorScheme, deviceScaleFactor, extraHTTPHeaders, hasTouch, ignoreHTTPSErrors, isMobile, javaScriptEnabled, locale, offline, permissions, timezoneId, userAgent, viewport }) => {
expect.soft(acceptDownloads).toBe(false);
expect.soft(bypassCSP).toBe(true);
expect.soft(colorScheme).toBe('dark');
expect.soft(deviceScaleFactor).toBe(2);
expect.soft(extraHTTPHeaders).toEqual({'foo': 'bar'});
expect.soft(hasTouch).toBe(true);
expect.soft(ignoreHTTPSErrors).toBe(true);
expect.soft(isMobile).toBe(true);
expect.soft(javaScriptEnabled).toBe(true);
expect.soft(locale).toBe('fr-FR');
expect.soft(offline).toBe(true);
expect.soft(permissions).toEqual(['geolocation']);
expect.soft(timezoneId).toBe('TIMEZONE');
expect.soft(userAgent).toBe('UA');
expect.soft(viewport).toBe(null);
});
`,
}, { workers: 1 });
expect(result.exitCode).toBe(0);
expect(result.passed).toBe(1);
});
test('should override contextOptions', async ({ runInlineTest }) => {
const result = await runInlineTest({
'playwright.config.ts': `
module.exports = {
use: {
acceptDownloads: false,
bypassCSP: true,
colorScheme: 'dark',
deviceScaleFactor: 2,
extraHTTPHeaders: {'foo': 'bar'},
hasTouch: true,
ignoreHTTPSErrors: true,
isMobile: true,
javaScriptEnabled: true,
locale: 'fr-FR',
offline: true,
permissions: ['geolocation'],
timezoneId: 'TIMEZONE',
userAgent: 'UA',
viewport: null,
contextOptions: {
acceptDownloads: true,
bypassCSP: false,
colorScheme: 'light',
deviceScaleFactor: 1,
extraHTTPHeaders: {'foo': 'bar2'},
hasTouch: false,
ignoreHTTPSErrors: false,
isMobile: false,
javaScriptEnabled: false,
locale: 'en-US',
offline: false,
permissions: [],
timezoneId: 'TIMEZONE 2',
userAgent: 'UA 2',
viewport: { width: 500, height: 500 }
}
}
};
`,
'a.test.ts': `
import { test, expect } from '@playwright/test';
test('pass', async ({ acceptDownloads, bypassCSP, colorScheme, deviceScaleFactor, extraHTTPHeaders, hasTouch, ignoreHTTPSErrors, isMobile, javaScriptEnabled, locale, offline, permissions, timezoneId, userAgent, viewport }) => {
expect.soft(acceptDownloads).toBe(false);
expect.soft(bypassCSP).toBe(true);
expect.soft(colorScheme).toBe('dark');
expect.soft(deviceScaleFactor).toBe(2);
expect.soft(extraHTTPHeaders).toEqual({'foo': 'bar'});
expect.soft(hasTouch).toBe(true);
expect.soft(ignoreHTTPSErrors).toBe(true);
expect.soft(isMobile).toBe(true);
expect.soft(javaScriptEnabled).toBe(true);
expect.soft(locale).toBe('fr-FR');
expect.soft(offline).toBe(true);
expect.soft(permissions).toEqual(['geolocation']);
expect.soft(timezoneId).toBe('TIMEZONE');
expect.soft(userAgent).toBe('UA');
expect.soft(viewport).toBe(null);
});
`,
}, { workers: 1 });
expect(result.exitCode).toBe(0);
expect(result.passed).toBe(1);
});
test('should respect testIdAttribute', async ({ runInlineTest }) => {
const result = await runInlineTest({
'playwright.config.ts': `
module.exports = {
use: {
testIdAttribute: 'data-pw',
}
};
`,
'a.test.ts': `
import { test, expect } from '@playwright/test';
test('pass', async ({ page }) => {
await page.setContent('<div data-pw="myid">Hi</div>');
await expect(page.getByTestId('myid')).toHaveCount(1);
});
`,
}, { workers: 1 });
expect(result.exitCode).toBe(0);
expect(result.passed).toBe(1);
});
| tests/playwright-test/playwright.config.spec.ts | 0 | https://github.com/microsoft/playwright/commit/b2965158d3151f022cf8d76057649208cd85da0d | [
0.00036577461287379265,
0.0001910466089611873,
0.00016455110744573176,
0.00017414867761544883,
0.000043945205106865615
] |
{
"id": 0,
"code_window": [
"\t\t} else {\n",
"\t\t\tparams.Add(\"metricnamespace\", azJSONModel.MetricNamespace)\n",
"\t\t}\n",
"\n",
"\t\tazureURL := BuildSubscriptionMetricsURL(queryJSONModel.Subscription)\n",
"\t\tif azJSONModel.Region != \"\" {\n",
"\t\t\tparams.Add(\"region\", azJSONModel.Region)\n",
"\t\t} else {\n",
"\t\t\t// Deprecated, if no region is specified, only one resource group and name is supported\n",
"\t\t\tub := urlBuilder{\n"
],
"labels": [
"keep",
"keep",
"keep",
"keep",
"add",
"keep",
"keep",
"keep",
"keep",
"keep"
],
"after_edit": [
"\t\tfilterInBody := true\n"
],
"file_path": "pkg/tsdb/azuremonitor/metrics/azuremonitor-datasource.go",
"type": "add",
"edit_start_line_idx": 109
} | package metrics
import (
"context"
"encoding/json"
"fmt"
"io"
"net/http"
"net/url"
"path"
"regexp"
"sort"
"strings"
"time"
"github.com/grafana/grafana-plugin-sdk-go/backend"
"github.com/grafana/grafana-plugin-sdk-go/data"
"go.opentelemetry.io/otel/attribute"
"github.com/grafana/grafana/pkg/infra/log"
"github.com/grafana/grafana/pkg/infra/tracing"
"github.com/grafana/grafana/pkg/setting"
"github.com/grafana/grafana/pkg/tsdb/azuremonitor/resourcegraph"
azTime "github.com/grafana/grafana/pkg/tsdb/azuremonitor/time"
"github.com/grafana/grafana/pkg/tsdb/azuremonitor/types"
)
// AzureMonitorDatasource calls the Azure Monitor API - one of the four API's supported
type AzureMonitorDatasource struct {
Proxy types.ServiceProxy
}
var (
// Used to convert the aggregation value to the Azure enum for deep linking
aggregationTypeMap = map[string]int{"None": 0, "Total": 1, "Minimum": 2, "Maximum": 3, "Average": 4, "Count": 7}
resourceNameLandmark = regexp.MustCompile(`(?i)(/(?P<resourceName>[\w-\.]+)/providers/Microsoft\.Insights/metrics)`)
)
const AzureMonitorAPIVersion = "2021-05-01"
func (e *AzureMonitorDatasource) ResourceRequest(rw http.ResponseWriter, req *http.Request, cli *http.Client) {
e.Proxy.Do(rw, req, cli)
}
// executeTimeSeriesQuery does the following:
// 1. build the AzureMonitor url and querystring for each query
// 2. executes each query by calling the Azure Monitor API
// 3. parses the responses for each query into data frames
func (e *AzureMonitorDatasource) ExecuteTimeSeriesQuery(ctx context.Context, logger log.Logger, originalQueries []backend.DataQuery, dsInfo types.DatasourceInfo, client *http.Client, url string, tracer tracing.Tracer) (*backend.QueryDataResponse, error) {
result := backend.NewQueryDataResponse()
ctxLogger := logger.FromContext(ctx)
queries, err := e.buildQueries(ctxLogger, originalQueries, dsInfo)
if err != nil {
return nil, err
}
for _, query := range queries {
result.Responses[query.RefID] = e.executeQuery(ctx, ctxLogger, query, dsInfo, client, url, tracer)
}
return result, nil
}
func (e *AzureMonitorDatasource) buildQueries(logger log.Logger, queries []backend.DataQuery, dsInfo types.DatasourceInfo) ([]*types.AzureMonitorQuery, error) {
azureMonitorQueries := []*types.AzureMonitorQuery{}
for _, query := range queries {
var target string
queryJSONModel := types.AzureMonitorJSONQuery{}
err := json.Unmarshal(query.JSON, &queryJSONModel)
if err != nil {
return nil, fmt.Errorf("failed to decode the Azure Monitor query object from JSON: %w", err)
}
azJSONModel := queryJSONModel.AzureMonitor
// Legacy: If only MetricDefinition is set, use it as namespace
if azJSONModel.MetricDefinition != "" && azJSONModel.MetricNamespace == "" {
azJSONModel.MetricNamespace = azJSONModel.MetricDefinition
}
azJSONModel.DimensionFilters = MigrateDimensionFilters(azJSONModel.DimensionFilters)
alias := azJSONModel.Alias
timeGrain := azJSONModel.TimeGrain
timeGrains := azJSONModel.AllowedTimeGrainsMs
if timeGrain == "auto" {
timeGrain, err = azTime.SetAutoTimeGrain(query.Interval.Milliseconds(), timeGrains)
if err != nil {
return nil, err
}
}
params := url.Values{}
params.Add("api-version", AzureMonitorAPIVersion)
params.Add("timespan", fmt.Sprintf("%v/%v", query.TimeRange.From.UTC().Format(time.RFC3339), query.TimeRange.To.UTC().Format(time.RFC3339)))
params.Add("interval", timeGrain)
params.Add("aggregation", azJSONModel.Aggregation)
params.Add("metricnames", azJSONModel.MetricName)
if azJSONModel.CustomNamespace != "" {
params.Add("metricnamespace", azJSONModel.CustomNamespace)
} else {
params.Add("metricnamespace", azJSONModel.MetricNamespace)
}
azureURL := BuildSubscriptionMetricsURL(queryJSONModel.Subscription)
if azJSONModel.Region != "" {
params.Add("region", azJSONModel.Region)
} else {
// Deprecated, if no region is specified, only one resource group and name is supported
ub := urlBuilder{
ResourceURI: azJSONModel.ResourceURI,
// Alternative, used to reconstruct resource URI if it's not present
DefaultSubscription: dsInfo.Settings.SubscriptionId,
Subscription: queryJSONModel.Subscription,
ResourceGroup: azJSONModel.ResourceGroup,
MetricNamespace: azJSONModel.MetricNamespace,
ResourceName: azJSONModel.ResourceName,
}
azureURL = ub.BuildMetricsURL()
}
// old model
dimension := strings.TrimSpace(azJSONModel.Dimension)
dimensionFilter := strings.TrimSpace(azJSONModel.DimensionFilter)
dimSB := strings.Builder{}
if dimension != "" && dimensionFilter != "" && dimension != "None" && len(azJSONModel.DimensionFilters) == 0 {
dimSB.WriteString(fmt.Sprintf("%s eq '%s'", dimension, dimensionFilter))
} else {
for i, filter := range azJSONModel.DimensionFilters {
if len(filter.Filters) == 0 {
dimSB.WriteString(fmt.Sprintf("%s eq '*'", filter.Dimension))
} else {
dimSB.WriteString(filter.ConstructFiltersString())
}
if i != len(azJSONModel.DimensionFilters)-1 {
dimSB.WriteString(" and ")
}
}
}
resourceIDs := []string{}
for _, r := range azJSONModel.Resources {
ub := urlBuilder{
DefaultSubscription: dsInfo.Settings.SubscriptionId,
Subscription: queryJSONModel.Subscription,
ResourceGroup: r.ResourceGroup,
MetricNamespace: azJSONModel.MetricNamespace,
ResourceName: r.ResourceName,
}
resourceIDs = append(resourceIDs, fmt.Sprintf("Microsoft.ResourceId eq '%s'", ub.buildResourceURI()))
}
filterString := strings.Join(resourceIDs, " or ")
if dimSB.String() != "" {
if filterString != "" {
filterString = fmt.Sprintf("(%s) and (%s)", filterString, dimSB.String())
} else {
filterString = dimSB.String()
}
if azJSONModel.Top != "" {
params.Add("top", azJSONModel.Top)
}
}
target = params.Encode()
if setting.Env == setting.Dev {
logger.Debug("Azuremonitor request", "params", params)
}
azureMonitorQueries = append(azureMonitorQueries, &types.AzureMonitorQuery{
URL: azureURL,
Target: target,
Params: params,
RefID: query.RefID,
Alias: alias,
TimeRange: query.TimeRange,
Filter: filterString,
})
}
return azureMonitorQueries, nil
}
func (e *AzureMonitorDatasource) executeQuery(ctx context.Context, logger log.Logger, query *types.AzureMonitorQuery, dsInfo types.DatasourceInfo, cli *http.Client,
url string, tracer tracing.Tracer) backend.DataResponse {
dataResponse := backend.DataResponse{}
req, err := e.createRequest(ctx, logger, url)
if err != nil {
dataResponse.Error = err
return dataResponse
}
req.URL.Path = path.Join(req.URL.Path, query.URL)
req.URL.RawQuery = query.Params.Encode()
if query.Filter != "" {
req.Method = http.MethodPost
req.Body = io.NopCloser(strings.NewReader(fmt.Sprintf(`{"filter": "%s"}`, query.Filter)))
}
ctx, span := tracer.Start(ctx, "azuremonitor query")
span.SetAttributes("target", query.Target, attribute.Key("target").String(query.Target))
span.SetAttributes("from", query.TimeRange.From.UnixNano()/int64(time.Millisecond), attribute.Key("from").Int64(query.TimeRange.From.UnixNano()/int64(time.Millisecond)))
span.SetAttributes("until", query.TimeRange.To.UnixNano()/int64(time.Millisecond), attribute.Key("until").Int64(query.TimeRange.To.UnixNano()/int64(time.Millisecond)))
span.SetAttributes("datasource_id", dsInfo.DatasourceID, attribute.Key("datasource_id").Int64(dsInfo.DatasourceID))
span.SetAttributes("org_id", dsInfo.OrgID, attribute.Key("org_id").Int64(dsInfo.OrgID))
defer span.End()
tracer.Inject(ctx, req.Header, span)
logger.Debug("AzureMonitor", "Request ApiURL", req.URL.String())
logger.Debug("AzureMonitor", "Target", query.Target)
res, err := cli.Do(req)
if err != nil {
dataResponse.Error = err
return dataResponse
}
defer func() {
if err := res.Body.Close(); err != nil {
logger.Warn("Failed to close response body", "err", err)
}
}()
data, err := e.unmarshalResponse(logger, res)
if err != nil {
dataResponse.Error = err
return dataResponse
}
azurePortalUrl, err := resourcegraph.GetAzurePortalUrl(dsInfo.Cloud)
if err != nil {
dataResponse.Error = err
return dataResponse
}
dataResponse.Frames, err = e.parseResponse(data, query, azurePortalUrl)
if err != nil {
dataResponse.Error = err
return dataResponse
}
return dataResponse
}
func (e *AzureMonitorDatasource) createRequest(ctx context.Context, logger log.Logger, url string) (*http.Request, error) {
req, err := http.NewRequestWithContext(ctx, http.MethodGet, url, nil)
if err != nil {
logger.Debug("Failed to create request", "error", err)
return nil, fmt.Errorf("%v: %w", "Failed to create request", err)
}
req.Header.Set("Content-Type", "application/json")
return req, nil
}
func (e *AzureMonitorDatasource) unmarshalResponse(logger log.Logger, res *http.Response) (types.AzureMonitorResponse, error) {
body, err := io.ReadAll(res.Body)
if err != nil {
return types.AzureMonitorResponse{}, err
}
if res.StatusCode/100 != 2 {
logger.Debug("Request failed", "status", res.Status, "body", string(body))
return types.AzureMonitorResponse{}, fmt.Errorf("request failed, status: %s", res.Status)
}
var data types.AzureMonitorResponse
err = json.Unmarshal(body, &data)
if err != nil {
logger.Debug("Failed to unmarshal AzureMonitor response", "error", err, "status", res.Status, "body", string(body))
return types.AzureMonitorResponse{}, err
}
return data, nil
}
func (e *AzureMonitorDatasource) parseResponse(amr types.AzureMonitorResponse, query *types.AzureMonitorQuery, azurePortalUrl string) (data.Frames, error) {
if len(amr.Value) == 0 {
return nil, nil
}
frames := data.Frames{}
for _, series := range amr.Value[0].Timeseries {
labels := data.Labels{}
for _, md := range series.Metadatavalues {
labels[md.Name.LocalizedValue] = md.Value
}
frame := data.NewFrameOfFieldTypes("", len(series.Data), data.FieldTypeTime, data.FieldTypeNullableFloat64)
frame.RefID = query.RefID
timeField := frame.Fields[0]
timeField.Name = data.TimeSeriesTimeFieldName
dataField := frame.Fields[1]
dataField.Name = amr.Value[0].Name.LocalizedValue
dataField.Labels = labels
if amr.Value[0].Unit != "Unspecified" {
dataField.SetConfig(&data.FieldConfig{
Unit: toGrafanaUnit(amr.Value[0].Unit),
})
}
resourceID := labels["microsoft.resourceid"]
resourceIDSlice := strings.Split(resourceID, "/")
resourceName := ""
if len(resourceIDSlice) > 1 {
resourceName = resourceIDSlice[len(resourceIDSlice)-1]
} else {
// Deprecated: This is for backward compatibility, the URL should contain
// the resource ID
resourceName = extractResourceNameFromMetricsURL(query.URL)
resourceID = extractResourceIDFromMetricsURL(query.URL)
}
if query.Alias != "" {
displayName := formatAzureMonitorLegendKey(query.Alias, resourceName,
amr.Value[0].Name.LocalizedValue, "", "", amr.Namespace, amr.Value[0].ID, labels)
if dataField.Config != nil {
dataField.Config.DisplayName = displayName
} else {
dataField.SetConfig(&data.FieldConfig{
DisplayName: displayName,
})
}
}
requestedAgg := query.Params.Get("aggregation")
for i, point := range series.Data {
var value *float64
switch requestedAgg {
case "Average":
value = point.Average
case "Total":
value = point.Total
case "Maximum":
value = point.Maximum
case "Minimum":
value = point.Minimum
case "Count":
value = point.Count
default:
value = point.Count
}
frame.SetRow(i, point.TimeStamp, value)
}
queryUrl, err := getQueryUrl(query, azurePortalUrl, resourceID, resourceName)
if err != nil {
return nil, err
}
frameWithLink := resourcegraph.AddConfigLinks(*frame, queryUrl)
frames = append(frames, &frameWithLink)
}
return frames, nil
}
// Gets the deep link for the given query
func getQueryUrl(query *types.AzureMonitorQuery, azurePortalUrl, resourceID, resourceName string) (string, error) {
aggregationType := aggregationTypeMap["Average"]
aggregation := query.Params.Get("aggregation")
if aggregation != "" {
if aggType, ok := aggregationTypeMap[aggregation]; ok {
aggregationType = aggType
}
}
timespan, err := json.Marshal(map[string]interface{}{
"absolute": struct {
Start string `json:"startTime"`
End string `json:"endTime"`
}{
Start: query.TimeRange.From.UTC().Format(time.RFC3339Nano),
End: query.TimeRange.To.UTC().Format(time.RFC3339Nano),
},
})
if err != nil {
return "", err
}
escapedTime := url.QueryEscape(string(timespan))
chartDef, err := json.Marshal(map[string]interface{}{
"v2charts": []interface{}{
map[string]interface{}{
"metrics": []types.MetricChartDefinition{
{
ResourceMetadata: map[string]string{
"id": resourceID,
},
Name: query.Params.Get("metricnames"),
AggregationType: aggregationType,
Namespace: query.Params.Get("metricnamespace"),
MetricVisualization: types.MetricVisualization{
DisplayName: query.Params.Get("metricnames"),
ResourceDisplayName: resourceName,
},
},
},
},
},
})
if err != nil {
return "", err
}
escapedChart := url.QueryEscape(string(chartDef))
// Azure Portal will timeout if the chart definition includes a space character encoded as '+'.
// url.QueryEscape encodes spaces as '+'.
// Note: this will not encode '+' literals as those are already encoded as '%2B' by url.QueryEscape
escapedChart = strings.ReplaceAll(escapedChart, "+", "%20")
return fmt.Sprintf("%s/#blade/Microsoft_Azure_MonitoringMetrics/Metrics.ReactView/Referer/MetricsExplorer/TimeContext/%s/ChartDefinition/%s", azurePortalUrl, escapedTime, escapedChart), nil
}
// formatAzureMonitorLegendKey builds the legend key or timeseries name
// Alias patterns like {{resourcename}} are replaced with the appropriate data values.
func formatAzureMonitorLegendKey(alias string, resourceName string, metricName string, metadataName string,
metadataValue string, namespace string, seriesID string, labels data.Labels) string {
startIndex := strings.Index(seriesID, "/resourceGroups/") + 16
endIndex := strings.Index(seriesID, "/providers")
resourceGroup := seriesID[startIndex:endIndex]
// Could be a collision problem if there were two keys that varied only in case, but I don't think that would happen in azure.
lowerLabels := data.Labels{}
for k, v := range labels {
lowerLabels[strings.ToLower(k)] = v
}
keys := make([]string, 0, len(labels))
for k := range lowerLabels {
keys = append(keys, k)
}
sort.Strings(keys)
result := types.LegendKeyFormat.ReplaceAllFunc([]byte(alias), func(in []byte) []byte {
metaPartName := strings.Replace(string(in), "{{", "", 1)
metaPartName = strings.Replace(metaPartName, "}}", "", 1)
metaPartName = strings.ToLower(strings.TrimSpace(metaPartName))
if metaPartName == "resourcegroup" {
return []byte(resourceGroup)
}
if metaPartName == "namespace" {
return []byte(namespace)
}
if metaPartName == "resourcename" {
return []byte(resourceName)
}
if metaPartName == "metric" {
return []byte(metricName)
}
if metaPartName == "dimensionname" {
if len(keys) == 0 {
return []byte{}
}
return []byte(keys[0])
}
if metaPartName == "dimensionvalue" {
if len(keys) == 0 {
return []byte{}
}
return []byte(lowerLabels[keys[0]])
}
if v, ok := lowerLabels[metaPartName]; ok {
return []byte(v)
}
return in
})
return string(result)
}
// Map values from:
//
// https://docs.microsoft.com/en-us/rest/api/monitor/metrics/list#unit
//
// to
//
// https://github.com/grafana/grafana/blob/main/packages/grafana-data/src/valueFormats/categories.ts#L24
func toGrafanaUnit(unit string) string {
switch unit {
case "BitsPerSecond":
return "bps"
case "Bytes":
return "decbytes" // or ICE
case "BytesPerSecond":
return "Bps"
case "Count":
return "short" // this is used for integers
case "CountPerSecond":
return "cps"
case "Percent":
return "percent"
case "MilliSeconds":
return "ms"
case "Seconds":
return "s"
}
return unit // this will become a suffix in the display
// "ByteSeconds", "Cores", "MilliCores", and "NanoCores" all both:
// 1. Do not have a corresponding unit in Grafana's current list.
// 2. Do not have the unit listed in any of Azure Monitor's supported metrics anyways.
}
func extractResourceNameFromMetricsURL(url string) string {
matches := resourceNameLandmark.FindStringSubmatch(url)
resourceName := ""
if matches == nil {
return resourceName
}
for i, name := range resourceNameLandmark.SubexpNames() {
if name == "resourceName" {
resourceName = matches[i]
}
}
return resourceName
}
func extractResourceIDFromMetricsURL(url string) string {
return strings.Split(url, "/providers/microsoft.insights/metrics")[0]
}
| pkg/tsdb/azuremonitor/metrics/azuremonitor-datasource.go | 1 | https://github.com/grafana/grafana/commit/a7d4bbf0248d0c6e5c6fd9f9ca55ffd399781c12 | [
0.9988810420036316,
0.10216439515352249,
0.00016445496294181794,
0.0001770273083820939,
0.29369795322418213
] |
{
"id": 0,
"code_window": [
"\t\t} else {\n",
"\t\t\tparams.Add(\"metricnamespace\", azJSONModel.MetricNamespace)\n",
"\t\t}\n",
"\n",
"\t\tazureURL := BuildSubscriptionMetricsURL(queryJSONModel.Subscription)\n",
"\t\tif azJSONModel.Region != \"\" {\n",
"\t\t\tparams.Add(\"region\", azJSONModel.Region)\n",
"\t\t} else {\n",
"\t\t\t// Deprecated, if no region is specified, only one resource group and name is supported\n",
"\t\t\tub := urlBuilder{\n"
],
"labels": [
"keep",
"keep",
"keep",
"keep",
"add",
"keep",
"keep",
"keep",
"keep",
"keep"
],
"after_edit": [
"\t\tfilterInBody := true\n"
],
"file_path": "pkg/tsdb/azuremonitor/metrics/azuremonitor-datasource.go",
"type": "add",
"edit_start_line_idx": 109
} | import { valid, gte } from 'semver';
import {
isMetricAggregationWithField,
MetricAggregation,
MetricAggregationWithInlineScript,
} from './components/QueryEditor/MetricAggregationsEditor/aggregations';
import { metricAggregationConfig } from './components/QueryEditor/MetricAggregationsEditor/utils';
export const describeMetric = (metric: MetricAggregation) => {
if (!isMetricAggregationWithField(metric)) {
return metricAggregationConfig[metric.type].label;
}
// TODO: field might be undefined
return `${metricAggregationConfig[metric.type].label} ${metric.field}`;
};
/**
* Utility function to clean up aggregations settings objects.
* It removes nullish values and empty strings, array and objects
* recursing over nested objects (not arrays).
* @param obj
*/
export const removeEmpty = <T extends {}>(obj: T): Partial<T> =>
Object.entries(obj).reduce((acc, [key, value]) => {
// Removing nullish values (null & undefined)
if (value == null) {
return { ...acc };
}
// Removing empty arrays (This won't recurse the array)
if (Array.isArray(value) && value.length === 0) {
return { ...acc };
}
// Removing empty strings
if (typeof value === 'string' && value.length === 0) {
return { ...acc };
}
// Recursing over nested objects
if (!Array.isArray(value) && typeof value === 'object') {
const cleanObj = removeEmpty(value);
if (Object.keys(cleanObj).length === 0) {
return { ...acc };
}
return { ...acc, [key]: cleanObj };
}
return {
...acc,
[key]: value,
};
}, {});
/**
* This function converts an order by string to the correct metric id For example,
* if the user uses the standard deviation extended stat for the order by,
* the value would be "1[std_deviation]" and this would return "1"
*/
export const convertOrderByToMetricId = (orderBy: string): string | undefined => {
const metricIdMatches = orderBy.match(/^(\d+)/);
return metricIdMatches ? metricIdMatches[1] : void 0;
};
/** Gets the actual script value for metrics that support inline scripts.
*
* This is needed because the `script` is a bit polymorphic.
* when creating a query with Grafana < 7.4 it was stored as:
* ```json
* {
* "settings": {
* "script": {
* "inline": "value"
* }
* }
* }
* ```
*
* while from 7.4 it's stored as
* ```json
* {
* "settings": {
* "script": "value"
* }
* }
* ```
*
* This allows us to access both formats and support both queries created before 7.4 and after.
*/
export const getScriptValue = (metric: MetricAggregationWithInlineScript) =>
(typeof metric.settings?.script === 'object' ? metric.settings?.script?.inline : metric.settings?.script) || '';
/**
* Coerces the version to a valid semver string.
* It takes care of also converting from the legacy format (numeric) to the new one.
* @param version
*/
export const coerceESVersion = (version: string | number | undefined): string => {
if (typeof version === 'string') {
return valid(version) || '8.0.0';
}
switch (version) {
case 2:
return '2.0.0';
case 5:
return '5.0.0';
case 56:
return '5.6.0';
case 60:
return '6.0.0';
case 70:
return '7.0.0';
default:
return '8.0.0';
}
};
export const isSupportedVersion = (version: string): boolean => {
if (gte(version, '7.10.0')) {
return true;
}
return false;
};
| public/app/plugins/datasource/elasticsearch/utils.ts | 0 | https://github.com/grafana/grafana/commit/a7d4bbf0248d0c6e5c6fd9f9ca55ffd399781c12 | [
0.0001767247449606657,
0.0001701150176813826,
0.00016288932238239795,
0.00017265748465433717,
0.00000473952604806982
] |
{
"id": 0,
"code_window": [
"\t\t} else {\n",
"\t\t\tparams.Add(\"metricnamespace\", azJSONModel.MetricNamespace)\n",
"\t\t}\n",
"\n",
"\t\tazureURL := BuildSubscriptionMetricsURL(queryJSONModel.Subscription)\n",
"\t\tif azJSONModel.Region != \"\" {\n",
"\t\t\tparams.Add(\"region\", azJSONModel.Region)\n",
"\t\t} else {\n",
"\t\t\t// Deprecated, if no region is specified, only one resource group and name is supported\n",
"\t\t\tub := urlBuilder{\n"
],
"labels": [
"keep",
"keep",
"keep",
"keep",
"add",
"keep",
"keep",
"keep",
"keep",
"keep"
],
"after_edit": [
"\t\tfilterInBody := true\n"
],
"file_path": "pkg/tsdb/azuremonitor/metrics/azuremonitor-datasource.go",
"type": "add",
"edit_start_line_idx": 109
} | import { groupBy } from 'lodash';
import React, { useMemo } from 'react';
import { MetadataInspectorProps } from '@grafana/data';
import { CloudWatchDatasource } from '../datasource';
import { CloudWatchQuery, CloudWatchJsonData } from '../types';
export type Props = MetadataInspectorProps<CloudWatchDatasource, CloudWatchQuery, CloudWatchJsonData>;
export function MetaInspector({ data = [] }: Props) {
const rows = useMemo(() => groupBy(data, 'refId'), [data]);
return (
<>
<table className="filter-table form-inline">
<thead>
<tr>
<th>RefId</th>
<th>Metric Data Query ID</th>
<th>Metric Data Query Expression</th>
<th>Period</th>
<th />
</tr>
</thead>
{Object.entries(rows).map(([refId, frames], idx) => {
if (!frames.length) {
return null;
}
const frame = frames[0];
const custom = frame.meta?.custom;
if (!custom) {
return null;
}
return (
<tbody key={idx}>
<tr>
<td>{refId}</td>
<td>{custom.id}</td>
<td>{frame.meta?.executedQueryString}</td>
<td>{custom.period}</td>
</tr>
</tbody>
);
})}
</table>
</>
);
}
| public/app/plugins/datasource/cloudwatch/components/MetaInspector.tsx | 0 | https://github.com/grafana/grafana/commit/a7d4bbf0248d0c6e5c6fd9f9ca55ffd399781c12 | [
0.00017880156519822776,
0.00017261151515413076,
0.000165010045748204,
0.00017467362340539694,
0.000005238439371169079
] |
{
"id": 0,
"code_window": [
"\t\t} else {\n",
"\t\t\tparams.Add(\"metricnamespace\", azJSONModel.MetricNamespace)\n",
"\t\t}\n",
"\n",
"\t\tazureURL := BuildSubscriptionMetricsURL(queryJSONModel.Subscription)\n",
"\t\tif azJSONModel.Region != \"\" {\n",
"\t\t\tparams.Add(\"region\", azJSONModel.Region)\n",
"\t\t} else {\n",
"\t\t\t// Deprecated, if no region is specified, only one resource group and name is supported\n",
"\t\t\tub := urlBuilder{\n"
],
"labels": [
"keep",
"keep",
"keep",
"keep",
"add",
"keep",
"keep",
"keep",
"keep",
"keep"
],
"after_edit": [
"\t\tfilterInBody := true\n"
],
"file_path": "pkg/tsdb/azuremonitor/metrics/azuremonitor-datasource.go",
"type": "add",
"edit_start_line_idx": 109
} | <svg xmlns="http://www.w3.org/2000/svg" viewBox="0 0 24 24"><path d="M17,9.17a1,1,0,0,0-1.41,0L12,12.71,8.46,9.17a1,1,0,0,0-1.41,0,1,1,0,0,0,0,1.42l4.24,4.24a1,1,0,0,0,1.42,0L17,10.59A1,1,0,0,0,17,9.17Z"/></svg> | public/img/icons/unicons/angle-down.svg | 0 | https://github.com/grafana/grafana/commit/a7d4bbf0248d0c6e5c6fd9f9ca55ffd399781c12 | [
0.00017411945736967027,
0.00017411945736967027,
0.00017411945736967027,
0.00017411945736967027,
0
] |
{
"id": 1,
"code_window": [
"\t\t\t\tResourceName: azJSONModel.ResourceName,\n",
"\t\t\t}\n",
"\t\t\tazureURL = ub.BuildMetricsURL()\n",
"\t\t}\n",
"\n",
"\t\t// old model\n",
"\t\tdimension := strings.TrimSpace(azJSONModel.Dimension)\n"
],
"labels": [
"keep",
"keep",
"add",
"keep",
"keep",
"keep",
"keep"
],
"after_edit": [
"\t\t\t// POST requests are only supported at the subscription level\n",
"\t\t\tfilterInBody = false\n"
],
"file_path": "pkg/tsdb/azuremonitor/metrics/azuremonitor-datasource.go",
"type": "add",
"edit_start_line_idx": 123
} | package metrics
import (
"context"
"encoding/json"
"fmt"
"io"
"net/http"
"net/url"
"path"
"regexp"
"sort"
"strings"
"time"
"github.com/grafana/grafana-plugin-sdk-go/backend"
"github.com/grafana/grafana-plugin-sdk-go/data"
"go.opentelemetry.io/otel/attribute"
"github.com/grafana/grafana/pkg/infra/log"
"github.com/grafana/grafana/pkg/infra/tracing"
"github.com/grafana/grafana/pkg/setting"
"github.com/grafana/grafana/pkg/tsdb/azuremonitor/resourcegraph"
azTime "github.com/grafana/grafana/pkg/tsdb/azuremonitor/time"
"github.com/grafana/grafana/pkg/tsdb/azuremonitor/types"
)
// AzureMonitorDatasource calls the Azure Monitor API - one of the four API's supported
type AzureMonitorDatasource struct {
Proxy types.ServiceProxy
}
var (
// Used to convert the aggregation value to the Azure enum for deep linking
aggregationTypeMap = map[string]int{"None": 0, "Total": 1, "Minimum": 2, "Maximum": 3, "Average": 4, "Count": 7}
resourceNameLandmark = regexp.MustCompile(`(?i)(/(?P<resourceName>[\w-\.]+)/providers/Microsoft\.Insights/metrics)`)
)
const AzureMonitorAPIVersion = "2021-05-01"
func (e *AzureMonitorDatasource) ResourceRequest(rw http.ResponseWriter, req *http.Request, cli *http.Client) {
e.Proxy.Do(rw, req, cli)
}
// executeTimeSeriesQuery does the following:
// 1. build the AzureMonitor url and querystring for each query
// 2. executes each query by calling the Azure Monitor API
// 3. parses the responses for each query into data frames
func (e *AzureMonitorDatasource) ExecuteTimeSeriesQuery(ctx context.Context, logger log.Logger, originalQueries []backend.DataQuery, dsInfo types.DatasourceInfo, client *http.Client, url string, tracer tracing.Tracer) (*backend.QueryDataResponse, error) {
result := backend.NewQueryDataResponse()
ctxLogger := logger.FromContext(ctx)
queries, err := e.buildQueries(ctxLogger, originalQueries, dsInfo)
if err != nil {
return nil, err
}
for _, query := range queries {
result.Responses[query.RefID] = e.executeQuery(ctx, ctxLogger, query, dsInfo, client, url, tracer)
}
return result, nil
}
func (e *AzureMonitorDatasource) buildQueries(logger log.Logger, queries []backend.DataQuery, dsInfo types.DatasourceInfo) ([]*types.AzureMonitorQuery, error) {
azureMonitorQueries := []*types.AzureMonitorQuery{}
for _, query := range queries {
var target string
queryJSONModel := types.AzureMonitorJSONQuery{}
err := json.Unmarshal(query.JSON, &queryJSONModel)
if err != nil {
return nil, fmt.Errorf("failed to decode the Azure Monitor query object from JSON: %w", err)
}
azJSONModel := queryJSONModel.AzureMonitor
// Legacy: If only MetricDefinition is set, use it as namespace
if azJSONModel.MetricDefinition != "" && azJSONModel.MetricNamespace == "" {
azJSONModel.MetricNamespace = azJSONModel.MetricDefinition
}
azJSONModel.DimensionFilters = MigrateDimensionFilters(azJSONModel.DimensionFilters)
alias := azJSONModel.Alias
timeGrain := azJSONModel.TimeGrain
timeGrains := azJSONModel.AllowedTimeGrainsMs
if timeGrain == "auto" {
timeGrain, err = azTime.SetAutoTimeGrain(query.Interval.Milliseconds(), timeGrains)
if err != nil {
return nil, err
}
}
params := url.Values{}
params.Add("api-version", AzureMonitorAPIVersion)
params.Add("timespan", fmt.Sprintf("%v/%v", query.TimeRange.From.UTC().Format(time.RFC3339), query.TimeRange.To.UTC().Format(time.RFC3339)))
params.Add("interval", timeGrain)
params.Add("aggregation", azJSONModel.Aggregation)
params.Add("metricnames", azJSONModel.MetricName)
if azJSONModel.CustomNamespace != "" {
params.Add("metricnamespace", azJSONModel.CustomNamespace)
} else {
params.Add("metricnamespace", azJSONModel.MetricNamespace)
}
azureURL := BuildSubscriptionMetricsURL(queryJSONModel.Subscription)
if azJSONModel.Region != "" {
params.Add("region", azJSONModel.Region)
} else {
// Deprecated, if no region is specified, only one resource group and name is supported
ub := urlBuilder{
ResourceURI: azJSONModel.ResourceURI,
// Alternative, used to reconstruct resource URI if it's not present
DefaultSubscription: dsInfo.Settings.SubscriptionId,
Subscription: queryJSONModel.Subscription,
ResourceGroup: azJSONModel.ResourceGroup,
MetricNamespace: azJSONModel.MetricNamespace,
ResourceName: azJSONModel.ResourceName,
}
azureURL = ub.BuildMetricsURL()
}
// old model
dimension := strings.TrimSpace(azJSONModel.Dimension)
dimensionFilter := strings.TrimSpace(azJSONModel.DimensionFilter)
dimSB := strings.Builder{}
if dimension != "" && dimensionFilter != "" && dimension != "None" && len(azJSONModel.DimensionFilters) == 0 {
dimSB.WriteString(fmt.Sprintf("%s eq '%s'", dimension, dimensionFilter))
} else {
for i, filter := range azJSONModel.DimensionFilters {
if len(filter.Filters) == 0 {
dimSB.WriteString(fmt.Sprintf("%s eq '*'", filter.Dimension))
} else {
dimSB.WriteString(filter.ConstructFiltersString())
}
if i != len(azJSONModel.DimensionFilters)-1 {
dimSB.WriteString(" and ")
}
}
}
resourceIDs := []string{}
for _, r := range azJSONModel.Resources {
ub := urlBuilder{
DefaultSubscription: dsInfo.Settings.SubscriptionId,
Subscription: queryJSONModel.Subscription,
ResourceGroup: r.ResourceGroup,
MetricNamespace: azJSONModel.MetricNamespace,
ResourceName: r.ResourceName,
}
resourceIDs = append(resourceIDs, fmt.Sprintf("Microsoft.ResourceId eq '%s'", ub.buildResourceURI()))
}
filterString := strings.Join(resourceIDs, " or ")
if dimSB.String() != "" {
if filterString != "" {
filterString = fmt.Sprintf("(%s) and (%s)", filterString, dimSB.String())
} else {
filterString = dimSB.String()
}
if azJSONModel.Top != "" {
params.Add("top", azJSONModel.Top)
}
}
target = params.Encode()
if setting.Env == setting.Dev {
logger.Debug("Azuremonitor request", "params", params)
}
azureMonitorQueries = append(azureMonitorQueries, &types.AzureMonitorQuery{
URL: azureURL,
Target: target,
Params: params,
RefID: query.RefID,
Alias: alias,
TimeRange: query.TimeRange,
Filter: filterString,
})
}
return azureMonitorQueries, nil
}
func (e *AzureMonitorDatasource) executeQuery(ctx context.Context, logger log.Logger, query *types.AzureMonitorQuery, dsInfo types.DatasourceInfo, cli *http.Client,
url string, tracer tracing.Tracer) backend.DataResponse {
dataResponse := backend.DataResponse{}
req, err := e.createRequest(ctx, logger, url)
if err != nil {
dataResponse.Error = err
return dataResponse
}
req.URL.Path = path.Join(req.URL.Path, query.URL)
req.URL.RawQuery = query.Params.Encode()
if query.Filter != "" {
req.Method = http.MethodPost
req.Body = io.NopCloser(strings.NewReader(fmt.Sprintf(`{"filter": "%s"}`, query.Filter)))
}
ctx, span := tracer.Start(ctx, "azuremonitor query")
span.SetAttributes("target", query.Target, attribute.Key("target").String(query.Target))
span.SetAttributes("from", query.TimeRange.From.UnixNano()/int64(time.Millisecond), attribute.Key("from").Int64(query.TimeRange.From.UnixNano()/int64(time.Millisecond)))
span.SetAttributes("until", query.TimeRange.To.UnixNano()/int64(time.Millisecond), attribute.Key("until").Int64(query.TimeRange.To.UnixNano()/int64(time.Millisecond)))
span.SetAttributes("datasource_id", dsInfo.DatasourceID, attribute.Key("datasource_id").Int64(dsInfo.DatasourceID))
span.SetAttributes("org_id", dsInfo.OrgID, attribute.Key("org_id").Int64(dsInfo.OrgID))
defer span.End()
tracer.Inject(ctx, req.Header, span)
logger.Debug("AzureMonitor", "Request ApiURL", req.URL.String())
logger.Debug("AzureMonitor", "Target", query.Target)
res, err := cli.Do(req)
if err != nil {
dataResponse.Error = err
return dataResponse
}
defer func() {
if err := res.Body.Close(); err != nil {
logger.Warn("Failed to close response body", "err", err)
}
}()
data, err := e.unmarshalResponse(logger, res)
if err != nil {
dataResponse.Error = err
return dataResponse
}
azurePortalUrl, err := resourcegraph.GetAzurePortalUrl(dsInfo.Cloud)
if err != nil {
dataResponse.Error = err
return dataResponse
}
dataResponse.Frames, err = e.parseResponse(data, query, azurePortalUrl)
if err != nil {
dataResponse.Error = err
return dataResponse
}
return dataResponse
}
func (e *AzureMonitorDatasource) createRequest(ctx context.Context, logger log.Logger, url string) (*http.Request, error) {
req, err := http.NewRequestWithContext(ctx, http.MethodGet, url, nil)
if err != nil {
logger.Debug("Failed to create request", "error", err)
return nil, fmt.Errorf("%v: %w", "Failed to create request", err)
}
req.Header.Set("Content-Type", "application/json")
return req, nil
}
func (e *AzureMonitorDatasource) unmarshalResponse(logger log.Logger, res *http.Response) (types.AzureMonitorResponse, error) {
body, err := io.ReadAll(res.Body)
if err != nil {
return types.AzureMonitorResponse{}, err
}
if res.StatusCode/100 != 2 {
logger.Debug("Request failed", "status", res.Status, "body", string(body))
return types.AzureMonitorResponse{}, fmt.Errorf("request failed, status: %s", res.Status)
}
var data types.AzureMonitorResponse
err = json.Unmarshal(body, &data)
if err != nil {
logger.Debug("Failed to unmarshal AzureMonitor response", "error", err, "status", res.Status, "body", string(body))
return types.AzureMonitorResponse{}, err
}
return data, nil
}
func (e *AzureMonitorDatasource) parseResponse(amr types.AzureMonitorResponse, query *types.AzureMonitorQuery, azurePortalUrl string) (data.Frames, error) {
if len(amr.Value) == 0 {
return nil, nil
}
frames := data.Frames{}
for _, series := range amr.Value[0].Timeseries {
labels := data.Labels{}
for _, md := range series.Metadatavalues {
labels[md.Name.LocalizedValue] = md.Value
}
frame := data.NewFrameOfFieldTypes("", len(series.Data), data.FieldTypeTime, data.FieldTypeNullableFloat64)
frame.RefID = query.RefID
timeField := frame.Fields[0]
timeField.Name = data.TimeSeriesTimeFieldName
dataField := frame.Fields[1]
dataField.Name = amr.Value[0].Name.LocalizedValue
dataField.Labels = labels
if amr.Value[0].Unit != "Unspecified" {
dataField.SetConfig(&data.FieldConfig{
Unit: toGrafanaUnit(amr.Value[0].Unit),
})
}
resourceID := labels["microsoft.resourceid"]
resourceIDSlice := strings.Split(resourceID, "/")
resourceName := ""
if len(resourceIDSlice) > 1 {
resourceName = resourceIDSlice[len(resourceIDSlice)-1]
} else {
// Deprecated: This is for backward compatibility, the URL should contain
// the resource ID
resourceName = extractResourceNameFromMetricsURL(query.URL)
resourceID = extractResourceIDFromMetricsURL(query.URL)
}
if query.Alias != "" {
displayName := formatAzureMonitorLegendKey(query.Alias, resourceName,
amr.Value[0].Name.LocalizedValue, "", "", amr.Namespace, amr.Value[0].ID, labels)
if dataField.Config != nil {
dataField.Config.DisplayName = displayName
} else {
dataField.SetConfig(&data.FieldConfig{
DisplayName: displayName,
})
}
}
requestedAgg := query.Params.Get("aggregation")
for i, point := range series.Data {
var value *float64
switch requestedAgg {
case "Average":
value = point.Average
case "Total":
value = point.Total
case "Maximum":
value = point.Maximum
case "Minimum":
value = point.Minimum
case "Count":
value = point.Count
default:
value = point.Count
}
frame.SetRow(i, point.TimeStamp, value)
}
queryUrl, err := getQueryUrl(query, azurePortalUrl, resourceID, resourceName)
if err != nil {
return nil, err
}
frameWithLink := resourcegraph.AddConfigLinks(*frame, queryUrl)
frames = append(frames, &frameWithLink)
}
return frames, nil
}
// Gets the deep link for the given query
func getQueryUrl(query *types.AzureMonitorQuery, azurePortalUrl, resourceID, resourceName string) (string, error) {
aggregationType := aggregationTypeMap["Average"]
aggregation := query.Params.Get("aggregation")
if aggregation != "" {
if aggType, ok := aggregationTypeMap[aggregation]; ok {
aggregationType = aggType
}
}
timespan, err := json.Marshal(map[string]interface{}{
"absolute": struct {
Start string `json:"startTime"`
End string `json:"endTime"`
}{
Start: query.TimeRange.From.UTC().Format(time.RFC3339Nano),
End: query.TimeRange.To.UTC().Format(time.RFC3339Nano),
},
})
if err != nil {
return "", err
}
escapedTime := url.QueryEscape(string(timespan))
chartDef, err := json.Marshal(map[string]interface{}{
"v2charts": []interface{}{
map[string]interface{}{
"metrics": []types.MetricChartDefinition{
{
ResourceMetadata: map[string]string{
"id": resourceID,
},
Name: query.Params.Get("metricnames"),
AggregationType: aggregationType,
Namespace: query.Params.Get("metricnamespace"),
MetricVisualization: types.MetricVisualization{
DisplayName: query.Params.Get("metricnames"),
ResourceDisplayName: resourceName,
},
},
},
},
},
})
if err != nil {
return "", err
}
escapedChart := url.QueryEscape(string(chartDef))
// Azure Portal will timeout if the chart definition includes a space character encoded as '+'.
// url.QueryEscape encodes spaces as '+'.
// Note: this will not encode '+' literals as those are already encoded as '%2B' by url.QueryEscape
escapedChart = strings.ReplaceAll(escapedChart, "+", "%20")
return fmt.Sprintf("%s/#blade/Microsoft_Azure_MonitoringMetrics/Metrics.ReactView/Referer/MetricsExplorer/TimeContext/%s/ChartDefinition/%s", azurePortalUrl, escapedTime, escapedChart), nil
}
// formatAzureMonitorLegendKey builds the legend key or timeseries name
// Alias patterns like {{resourcename}} are replaced with the appropriate data values.
func formatAzureMonitorLegendKey(alias string, resourceName string, metricName string, metadataName string,
metadataValue string, namespace string, seriesID string, labels data.Labels) string {
startIndex := strings.Index(seriesID, "/resourceGroups/") + 16
endIndex := strings.Index(seriesID, "/providers")
resourceGroup := seriesID[startIndex:endIndex]
// Could be a collision problem if there were two keys that varied only in case, but I don't think that would happen in azure.
lowerLabels := data.Labels{}
for k, v := range labels {
lowerLabels[strings.ToLower(k)] = v
}
keys := make([]string, 0, len(labels))
for k := range lowerLabels {
keys = append(keys, k)
}
sort.Strings(keys)
result := types.LegendKeyFormat.ReplaceAllFunc([]byte(alias), func(in []byte) []byte {
metaPartName := strings.Replace(string(in), "{{", "", 1)
metaPartName = strings.Replace(metaPartName, "}}", "", 1)
metaPartName = strings.ToLower(strings.TrimSpace(metaPartName))
if metaPartName == "resourcegroup" {
return []byte(resourceGroup)
}
if metaPartName == "namespace" {
return []byte(namespace)
}
if metaPartName == "resourcename" {
return []byte(resourceName)
}
if metaPartName == "metric" {
return []byte(metricName)
}
if metaPartName == "dimensionname" {
if len(keys) == 0 {
return []byte{}
}
return []byte(keys[0])
}
if metaPartName == "dimensionvalue" {
if len(keys) == 0 {
return []byte{}
}
return []byte(lowerLabels[keys[0]])
}
if v, ok := lowerLabels[metaPartName]; ok {
return []byte(v)
}
return in
})
return string(result)
}
// Map values from:
//
// https://docs.microsoft.com/en-us/rest/api/monitor/metrics/list#unit
//
// to
//
// https://github.com/grafana/grafana/blob/main/packages/grafana-data/src/valueFormats/categories.ts#L24
func toGrafanaUnit(unit string) string {
switch unit {
case "BitsPerSecond":
return "bps"
case "Bytes":
return "decbytes" // or ICE
case "BytesPerSecond":
return "Bps"
case "Count":
return "short" // this is used for integers
case "CountPerSecond":
return "cps"
case "Percent":
return "percent"
case "MilliSeconds":
return "ms"
case "Seconds":
return "s"
}
return unit // this will become a suffix in the display
// "ByteSeconds", "Cores", "MilliCores", and "NanoCores" all both:
// 1. Do not have a corresponding unit in Grafana's current list.
// 2. Do not have the unit listed in any of Azure Monitor's supported metrics anyways.
}
func extractResourceNameFromMetricsURL(url string) string {
matches := resourceNameLandmark.FindStringSubmatch(url)
resourceName := ""
if matches == nil {
return resourceName
}
for i, name := range resourceNameLandmark.SubexpNames() {
if name == "resourceName" {
resourceName = matches[i]
}
}
return resourceName
}
func extractResourceIDFromMetricsURL(url string) string {
return strings.Split(url, "/providers/microsoft.insights/metrics")[0]
}
| pkg/tsdb/azuremonitor/metrics/azuremonitor-datasource.go | 1 | https://github.com/grafana/grafana/commit/a7d4bbf0248d0c6e5c6fd9f9ca55ffd399781c12 | [
0.997957706451416,
0.05789027735590935,
0.00016214424977079034,
0.0001757587888278067,
0.22794127464294434
] |
{
"id": 1,
"code_window": [
"\t\t\t\tResourceName: azJSONModel.ResourceName,\n",
"\t\t\t}\n",
"\t\t\tazureURL = ub.BuildMetricsURL()\n",
"\t\t}\n",
"\n",
"\t\t// old model\n",
"\t\tdimension := strings.TrimSpace(azJSONModel.Dimension)\n"
],
"labels": [
"keep",
"keep",
"add",
"keep",
"keep",
"keep",
"keep"
],
"after_edit": [
"\t\t\t// POST requests are only supported at the subscription level\n",
"\t\t\tfilterInBody = false\n"
],
"file_path": "pkg/tsdb/azuremonitor/metrics/azuremonitor-datasource.go",
"type": "add",
"edit_start_line_idx": 123
} | export * from './Button';
export { ButtonGroup } from './ButtonGroup';
| packages/grafana-ui/src/components/Button/index.ts | 0 | https://github.com/grafana/grafana/commit/a7d4bbf0248d0c6e5c6fd9f9ca55ffd399781c12 | [
0.0001723339519230649,
0.0001723339519230649,
0.0001723339519230649,
0.0001723339519230649,
0
] |
{
"id": 1,
"code_window": [
"\t\t\t\tResourceName: azJSONModel.ResourceName,\n",
"\t\t\t}\n",
"\t\t\tazureURL = ub.BuildMetricsURL()\n",
"\t\t}\n",
"\n",
"\t\t// old model\n",
"\t\tdimension := strings.TrimSpace(azJSONModel.Dimension)\n"
],
"labels": [
"keep",
"keep",
"add",
"keep",
"keep",
"keep",
"keep"
],
"after_edit": [
"\t\t\t// POST requests are only supported at the subscription level\n",
"\t\t\tfilterInBody = false\n"
],
"file_path": "pkg/tsdb/azuremonitor/metrics/azuremonitor-datasource.go",
"type": "add",
"edit_start_line_idx": 123
} | import { FieldMatcherID, fieldMatchers, FieldType, MutableDataFrame } from '@grafana/data';
import { BarAlignment, GraphDrawStyle, GraphTransform, LineInterpolation, StackingMode } from '@grafana/schema';
import { preparePlotFrame } from '../GraphNG/utils';
import { getStackingGroups, preparePlotData2, timeFormatToTemplate } from './utils';
describe('timeFormatToTemplate', () => {
it.each`
format | expected
${'HH:mm:ss'} | ${'{HH}:{mm}:{ss}'}
${'HH:mm'} | ${'{HH}:{mm}'}
${'MM/DD HH:mm'} | ${'{MM}/{DD} {HH}:{mm}'}
${'MM/DD'} | ${'{MM}/{DD}'}
${'YYYY-MM'} | ${'{YYYY}-{MM}'}
${'YYYY'} | ${'{YYYY}'}
`('should convert $format to $expected', ({ format, expected }) => {
expect(timeFormatToTemplate(format)).toEqual(expected);
});
});
describe('preparePlotData2', () => {
const df = new MutableDataFrame({
fields: [
{ name: 'time', type: FieldType.time, values: [9997, 9998, 9999] },
{ name: 'a', values: [-10, 20, 10] },
{ name: 'b', values: [10, 10, 10] },
{ name: 'c', values: [20, 20, 20] },
],
});
it('creates array from DataFrame', () => {
expect(preparePlotData2(df, getStackingGroups(df))).toMatchInlineSnapshot(`
[
[
9997,
9998,
9999,
],
[
-10,
20,
10,
],
[
10,
10,
10,
],
[
20,
20,
20,
],
]
`);
});
describe('transforms', () => {
it('negative-y transform', () => {
const df = new MutableDataFrame({
fields: [
{ name: 'time', type: FieldType.time, values: [9997, 9998, 9999] },
{ name: 'a', values: [-10, 20, 10] },
{ name: 'b', values: [10, 10, 10] },
{ name: 'c', values: [20, 20, 20], config: { custom: { transform: GraphTransform.NegativeY } } },
],
});
expect(preparePlotData2(df, getStackingGroups(df))).toMatchInlineSnapshot(`
[
[
9997,
9998,
9999,
],
[
-10,
20,
10,
],
[
10,
10,
10,
],
[
-20,
-20,
-20,
],
]
`);
});
it('negative-y transform with null/undefined values', () => {
const df = new MutableDataFrame({
fields: [
{ name: 'time', type: FieldType.time, values: [9997, 9998, 9999] },
{ name: 'a', values: [-10, 20, 10, 30] },
{ name: 'b', values: [10, 10, 10, null] },
{ name: 'c', values: [null, 20, 20, 20], config: { custom: { transform: GraphTransform.NegativeY } } },
{ name: 'd', values: [20, 20, 20, null], config: { custom: { transform: GraphTransform.NegativeY } } },
{ name: 'e', values: [20, null, 20, 20], config: { custom: { transform: GraphTransform.NegativeY } } },
{ name: 'f', values: [10, 10, 10, undefined] },
{ name: 'g', values: [undefined, 20, 20, 20], config: { custom: { transform: GraphTransform.NegativeY } } },
{ name: 'h', values: [20, 20, 20, undefined], config: { custom: { transform: GraphTransform.NegativeY } } },
{ name: 'i', values: [20, undefined, 20, 20], config: { custom: { transform: GraphTransform.NegativeY } } },
],
});
expect(preparePlotData2(df, getStackingGroups(df))).toMatchInlineSnapshot(`
[
[
9997,
9998,
9999,
undefined,
],
[
-10,
20,
10,
30,
],
[
10,
10,
10,
null,
],
[
null,
-20,
-20,
-20,
],
[
-20,
-20,
-20,
null,
],
[
-20,
null,
-20,
-20,
],
[
10,
10,
10,
undefined,
],
[
undefined,
-20,
-20,
-20,
],
[
-20,
-20,
-20,
undefined,
],
[
-20,
undefined,
-20,
-20,
],
]
`);
});
it('constant transform', () => {
const df = new MutableDataFrame({
fields: [
{ name: 'time', type: FieldType.time, values: [9997, 9998, 9999] },
{ name: 'a', values: [-10, 20, 10], config: { custom: { transform: GraphTransform.Constant } } },
{ name: 'b', values: [10, 10, 10] },
{ name: 'c', values: [20, 20, 20] },
],
});
expect(preparePlotData2(df, getStackingGroups(df))).toMatchInlineSnapshot(`
[
[
9997,
9998,
9999,
],
[
-10,
undefined,
undefined,
],
[
10,
10,
10,
],
[
20,
20,
20,
],
]
`);
});
});
describe('stacking', () => {
it('none', () => {
const df = new MutableDataFrame({
fields: [
{ name: 'time', type: FieldType.time, values: [9997, 9998, 9999] },
{
name: 'a',
values: [-10, 20, 10],
config: { custom: { stacking: { mode: StackingMode.None } } },
},
{
name: 'b',
values: [10, 10, 10],
config: { custom: { stacking: { mode: StackingMode.None } } },
},
{
name: 'c',
values: [20, 20, 20],
config: { custom: { stacking: { mode: StackingMode.None } } },
},
],
});
expect(preparePlotData2(df, getStackingGroups(df))).toMatchInlineSnapshot(`
[
[
9997,
9998,
9999,
],
[
-10,
20,
10,
],
[
10,
10,
10,
],
[
20,
20,
20,
],
]
`);
});
it('standard', () => {
const df = new MutableDataFrame({
fields: [
{ name: 'time', type: FieldType.time, values: [9997, 9998, 9999] },
{
name: 'a',
values: [-10, 20, 10],
config: { custom: { stacking: { mode: StackingMode.Normal, group: 'stackA' } } },
},
{
name: 'b',
values: [10, 10, 10],
config: { custom: { stacking: { mode: StackingMode.Normal, group: 'stackA' } } },
},
{
name: 'c',
values: [20, 20, 20],
config: { custom: { stacking: { mode: StackingMode.Normal, group: 'stackA' } } },
},
],
});
expect(preparePlotData2(df, getStackingGroups(df))).toMatchInlineSnapshot(`
[
[
9997,
9998,
9999,
],
[
-10,
20,
10,
],
[
10,
10,
10,
],
[
30,
30,
30,
],
]
`);
});
it('standard with negative y transform', () => {
const df = new MutableDataFrame({
fields: [
{ name: 'time', type: FieldType.time, values: [9997, 9998, 9999] },
{
name: 'a',
values: [-10, 20, 10],
config: { custom: { stacking: { mode: StackingMode.Normal, group: 'stackA' } } },
},
{
name: 'b',
values: [10, 10, 10],
config: { custom: { stacking: { mode: StackingMode.Normal, group: 'stackA' } } },
},
{
name: 'c',
values: [20, 20, 20],
config: {
custom: { stacking: { mode: StackingMode.Normal, group: 'stackA' }, transform: GraphTransform.NegativeY },
},
},
{
name: 'd',
values: [10, 10, 10],
config: {
custom: { stacking: { mode: StackingMode.Normal, group: 'stackA' }, transform: GraphTransform.NegativeY },
},
},
],
});
expect(preparePlotData2(df, getStackingGroups(df))).toMatchInlineSnapshot(`
[
[
9997,
9998,
9999,
],
[
-10,
20,
10,
],
[
10,
10,
10,
],
[
-30,
0,
-10,
],
[
-40,
-10,
-20,
],
]
`);
});
it('standard with multiple groups', () => {
const df = new MutableDataFrame({
fields: [
{ name: 'time', type: FieldType.time, values: [9997, 9998, 9999] },
{
name: 'a',
values: [-10, 20, 10],
config: { custom: { stacking: { mode: StackingMode.Normal, group: 'stackA' } } },
},
{
name: 'b',
values: [10, 10, 10],
config: { custom: { stacking: { mode: StackingMode.Normal, group: 'stackA' } } },
},
{
name: 'c',
values: [20, 20, 20],
config: { custom: { stacking: { mode: StackingMode.Normal, group: 'stackA' } } },
},
{
name: 'd',
values: [1, 2, 3],
config: { custom: { stacking: { mode: StackingMode.Normal, group: 'stackB' } } },
},
{
name: 'e',
values: [1, 2, 3],
config: { custom: { stacking: { mode: StackingMode.Normal, group: 'stackB' } } },
},
{
name: 'f',
values: [1, 2, 3],
config: { custom: { stacking: { mode: StackingMode.Normal, group: 'stackB' } } },
},
],
});
expect(preparePlotData2(df, getStackingGroups(df))).toMatchInlineSnapshot(`
[
[
9997,
9998,
9999,
],
[
-10,
20,
10,
],
[
10,
10,
10,
],
[
30,
30,
30,
],
[
1,
2,
3,
],
[
2,
4,
6,
],
[
3,
6,
9,
],
]
`);
});
it('standard with multiple groups and hidden fields', () => {
const df = new MutableDataFrame({
fields: [
{ name: 'time', type: FieldType.time, values: [9997, 9998, 9999] },
{
name: 'a',
values: [-10, 20, 10],
config: { custom: { stacking: { mode: StackingMode.Normal, group: 'stackA' }, hideFrom: { viz: true } } },
},
{
// Will ignore a series as stacking base as it's hidden from viz
name: 'b',
values: [10, 10, 10],
config: { custom: { stacking: { mode: StackingMode.Normal, group: 'stackA' } } },
},
{
name: 'd',
values: [1, 2, 3],
config: { custom: { stacking: { mode: StackingMode.Normal, group: 'stackB' } } },
},
{
name: 'e',
values: [1, 2, 3],
config: { custom: { stacking: { mode: StackingMode.Normal, group: 'stackB' }, hideFrom: { viz: true } } },
},
{
// Will ignore e series as stacking base as it's hidden from viz
name: 'f',
values: [1, 2, 3],
config: { custom: { stacking: { mode: StackingMode.Normal, group: 'stackB' } } },
},
],
});
expect(preparePlotData2(df, getStackingGroups(df))).toMatchInlineSnapshot(`
[
[
9997,
9998,
9999,
],
[
-10,
20,
10,
],
[
10,
10,
10,
],
[
1,
2,
3,
],
[
1,
2,
3,
],
[
2,
4,
6,
],
]
`);
});
});
it('accumulates stacks only at indices where stacking group has at least 1 value', () => {
// extracted data from plot in panel-graph/graph-ng-stacking2.json
const frameData = [
[[1639976945832], [1000]],
[
[1639803285888, 1639976945832, 1640150605776, 1641192565440],
[2500, 600, 350, 500],
],
[
[1639803285888, 1639976945832, 1640150605776, 1640324265720],
[28000, 3100, 36000, 2800],
],
[
[1639976945832, 1640324265720, 1640497925664],
[255, 651, 50],
],
[
[1639803285888, 1639976945832],
[5000, 1231],
],
[
[
1639455966000, 1639629625944, 1639803285888, 1639976945832, 1640150605776, 1640324265720, 1640497925664,
1640671585608, 1640845245552, 1641018905496,
],
[122, 123, 12345, 23456, 34567, 12345, 8000, 3000, 1000, 21],
],
[[1641539885328], [20]],
[
[1641192565440, 1641539885328],
[210, 321],
],
[
[1640671585608, 1641539885328],
[210, 210],
],
[
[1639803285888, 1639976945832, 1640150605776, 1640497925664, 1640845245552],
[250, 852, 1234, 321, 432],
],
[
[
1640324265720, 1640497925664, 1640671585608, 1640845245552, 1641018905496, 1641192565440, 1641366225384,
1641539885328, 1641713545272, 1641887205216, 1642060865160, 1642234525104, 1642408185048,
],
[543, 18000, 17000, 12000, 8500, 8000, 5000, 3000, 2500, 2200, 3000, 1520, 665.35],
],
[[1641887205216], [800]],
[
[
1640150605776, 1640324265720, 1640497925664, 1640671585608, 1640845245552, 1641018905496, 1641192565440,
1641366225384, 1641539885328, 1641713545272, 1641887205216, 1642060865160, 1642234525104,
],
[14173, 14805, 5600, 5950, 775, 725, 1450, 3175, 1850, 1025, 2700, 4825, 3600],
],
[[1642234525104], [1675]],
[[1640150605776], [433.16]],
[
[
1640324265720, 1640497925664, 1640671585608, 1640845245552, 1641018905496, 1641192565440, 1641366225384,
1641539885328, 1641713545272, 1641887205216, 1642060865160, 1642234525104, 1642408185048,
],
[
41250, 45150, 45870.16, 38728.17, 39931.77, 39831.8, 38252.06, 44332.92, 51359.74, 56155.84, 55676.92,
55323.84, 13830.96,
],
],
[
[1640845245552, 1641018905496],
[52.89, 569.57],
],
[
[
1641018905496, 1641192565440, 1641366225384, 1641539885328, 1641713545272, 1641887205216, 1642060865160,
1642234525104, 1642408185048,
],
[2140.34, 4074.92, 1557.85, 1097.74, 692.06, 758.67, 957.56, 1470.49, 198.18],
],
];
const names = 'abcdefghijklmnopqrstuvwxyz'.split('').reverse();
const dfs = frameData.map(([xs, ys]) => {
const df = new MutableDataFrame({
fields: [
{ name: 'time', type: FieldType.time, values: xs },
{
name: names.pop()!,
values: ys,
config: { custom: { stacking: { mode: StackingMode.Normal, group: 'A' } } },
},
],
});
return df;
});
const df = preparePlotFrame(dfs, {
x: fieldMatchers.get(FieldMatcherID.firstTimeField).get({}),
y: fieldMatchers.get(FieldMatcherID.numeric).get({}),
})!;
expect(preparePlotData2(df, getStackingGroups(df))).toMatchInlineSnapshot(`
[
[
1639455966000,
1639629625944,
1639803285888,
1639976945832,
1640150605776,
1640324265720,
1640497925664,
1640671585608,
1640845245552,
1641018905496,
1641192565440,
1641366225384,
1641539885328,
1641713545272,
1641887205216,
1642060865160,
1642234525104,
1642408185048,
],
[
0,
0,
0,
1000,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
],
[
0,
0,
2500,
1600,
350,
0,
0,
0,
0,
0,
500,
0,
0,
0,
0,
0,
0,
0,
],
[
0,
0,
30500,
4700,
36350,
2800,
0,
0,
0,
0,
500,
0,
0,
0,
0,
0,
0,
0,
],
[
0,
0,
30500,
4955,
36350,
3451,
50,
0,
0,
0,
500,
0,
0,
0,
0,
0,
0,
0,
],
[
0,
0,
35500,
6186,
36350,
3451,
50,
0,
0,
0,
500,
0,
0,
0,
0,
0,
0,
0,
],
[
122,
123,
47845,
29642,
70917,
15796,
8050,
3000,
1000,
21,
500,
0,
0,
0,
0,
0,
0,
0,
],
[
122,
123,
47845,
29642,
70917,
15796,
8050,
3000,
1000,
21,
500,
0,
20,
0,
0,
0,
0,
0,
],
[
122,
123,
47845,
29642,
70917,
15796,
8050,
3000,
1000,
21,
710,
0,
341,
0,
0,
0,
0,
0,
],
[
122,
123,
47845,
29642,
70917,
15796,
8050,
3210,
1000,
21,
710,
0,
551,
0,
0,
0,
0,
0,
],
[
122,
123,
48095,
30494,
72151,
15796,
8371,
3210,
1432,
21,
710,
0,
551,
0,
0,
0,
0,
0,
],
[
122,
123,
48095,
30494,
72151,
16339,
26371,
20210,
13432,
8521,
8710,
5000,
3551,
2500,
2200,
3000,
1520,
665.35,
],
[
122,
123,
48095,
30494,
72151,
16339,
26371,
20210,
13432,
8521,
8710,
5000,
3551,
2500,
3000,
3000,
1520,
665.35,
],
[
122,
123,
48095,
30494,
86324,
31144,
31971,
26160,
14207,
9246,
10160,
8175,
5401,
3525,
5700,
7825,
5120,
665.35,
],
[
122,
123,
48095,
30494,
86324,
31144,
31971,
26160,
14207,
9246,
10160,
8175,
5401,
3525,
5700,
7825,
6795,
665.35,
],
[
122,
123,
48095,
30494,
86757.16,
31144,
31971,
26160,
14207,
9246,
10160,
8175,
5401,
3525,
5700,
7825,
6795,
665.35,
],
[
122,
123,
48095,
30494,
86757.16,
72394,
77121,
72030.16,
52935.17,
49177.77,
49991.8,
46427.06,
49733.92,
54884.74,
61855.84,
63501.92,
62118.84,
14496.31,
],
[
122,
123,
48095,
30494,
86757.16,
72394,
77121,
72030.16,
52988.06,
49747.34,
49991.8,
46427.06,
49733.92,
54884.74,
61855.84,
63501.92,
62118.84,
14496.31,
],
[
122,
123,
48095,
30494,
86757.16,
72394,
77121,
72030.16,
52988.06,
51887.67999999999,
54066.72,
47984.909999999996,
50831.659999999996,
55576.799999999996,
62614.509999999995,
64459.479999999996,
63589.329999999994,
14694.49,
],
]
`);
});
});
describe('auto stacking groups', () => {
test('split on stacking mode', () => {
const df = new MutableDataFrame({
fields: [
{ name: 'time', type: FieldType.time, values: [0, 1, 2] },
{
name: 'b',
values: [1, 2, 3],
config: { custom: { stacking: { mode: StackingMode.Percent } } },
},
{
name: 'c',
values: [4, 5, 6],
config: { custom: { stacking: { mode: StackingMode.Normal } } },
},
],
});
expect(getStackingGroups(df)).toMatchInlineSnapshot(`
[
{
"dir": 1,
"series": [
1,
],
},
{
"dir": 1,
"series": [
2,
],
},
]
`);
});
test('split pos/neg', () => {
// since we expect most series to be Pos, we try to bail early when scanning all values
// as soon as we find a value >= 0, it's assumed Pos, else Neg
const df = new MutableDataFrame({
fields: [
{ name: 'time', type: FieldType.time, values: [0, 1, 2] },
{
name: 'a',
values: [-1, null, -3],
config: { custom: { stacking: { mode: StackingMode.Normal } } },
},
{
name: 'b',
values: [1, 2, 3],
config: { custom: { stacking: { mode: StackingMode.Normal } } },
},
{
name: 'c',
values: [0, 0, 0],
config: { custom: { stacking: { mode: StackingMode.Normal } } },
},
{
name: 'd',
values: [null, -0, null],
config: { custom: { stacking: { mode: StackingMode.Normal } } },
},
],
});
expect(getStackingGroups(df)).toMatchInlineSnapshot(`
[
{
"dir": -1,
"series": [
1,
4,
],
},
{
"dir": 1,
"series": [
2,
3,
],
},
]
`);
});
test('split pos/neg with NegY', () => {
const df = new MutableDataFrame({
fields: [
{ name: 'time', type: FieldType.time, values: [0, 1, 2] },
{
name: 'a',
values: [-1, null, -3],
config: { custom: { stacking: { mode: StackingMode.Normal }, transform: GraphTransform.NegativeY } },
},
{
name: 'b',
values: [1, 2, 3],
config: { custom: { stacking: { mode: StackingMode.Normal } } },
},
{
name: 'c',
values: [0, 0, 0],
config: { custom: { stacking: { mode: StackingMode.Normal } } },
},
{
name: 'd',
values: [-0, null, 3],
config: { custom: { stacking: { mode: StackingMode.Normal }, transform: GraphTransform.NegativeY } },
},
],
});
expect(getStackingGroups(df)).toMatchInlineSnapshot(`
[
{
"dir": 1,
"series": [
1,
2,
3,
4,
],
},
]
`);
});
test('split on drawStyle, lineInterpolation, barAlignment', () => {
const df = new MutableDataFrame({
fields: [
{ name: 'time', type: FieldType.time, values: [0, 1, 2] },
{
name: 'a',
values: [1, 2, 3],
config: {
custom: {
drawStyle: GraphDrawStyle.Bars,
barAlignment: BarAlignment.After,
stacking: { mode: StackingMode.Normal },
},
},
},
{
name: 'b',
values: [1, 2, 3],
config: {
custom: {
drawStyle: GraphDrawStyle.Bars,
barAlignment: BarAlignment.Before,
stacking: { mode: StackingMode.Normal },
},
},
},
{
name: 'c',
values: [1, 2, 3],
config: {
custom: {
drawStyle: GraphDrawStyle.Line,
lineInterpolation: LineInterpolation.Linear,
stacking: { mode: StackingMode.Normal },
},
},
},
{
name: 'd',
values: [1, 2, 3],
config: {
custom: {
drawStyle: GraphDrawStyle.Line,
lineInterpolation: LineInterpolation.Smooth,
stacking: { mode: StackingMode.Normal },
},
},
},
{
name: 'e',
values: [1, 2, 3],
config: { custom: { drawStyle: GraphDrawStyle.Points, stacking: { mode: StackingMode.Normal } } },
},
],
});
expect(getStackingGroups(df)).toMatchInlineSnapshot(`
[
{
"dir": 1,
"series": [
1,
],
},
{
"dir": 1,
"series": [
2,
],
},
{
"dir": 1,
"series": [
3,
],
},
{
"dir": 1,
"series": [
4,
],
},
{
"dir": 1,
"series": [
5,
],
},
]
`);
});
test('split on axis & units (scaleKey)', () => {
const df = new MutableDataFrame({
fields: [
{ name: 'time', type: FieldType.time, values: [0, 1, 2] },
{
name: 'a',
values: [1, 2, 3],
config: { custom: { stacking: { mode: StackingMode.Normal } }, unit: 'ft' },
},
{
name: 'b',
values: [1, 2, 3],
config: { custom: { stacking: { mode: StackingMode.Normal } }, unit: 'degrees' },
},
],
});
expect(getStackingGroups(df)).toMatchInlineSnapshot(`
[
{
"dir": 1,
"series": [
1,
],
},
{
"dir": 1,
"series": [
2,
],
},
]
`);
});
test('split on explicit stacking group & mode & pos/neg w/NegY', () => {
const df = new MutableDataFrame({
fields: [
{ name: 'time', type: FieldType.time, values: [0, 1, 2] },
{
name: 'a',
values: [1, 2, 3],
config: { custom: { stacking: { mode: StackingMode.Normal, group: 'A' } } },
},
{
name: 'b',
values: [1, 2, 3],
config: { custom: { stacking: { mode: StackingMode.Normal, group: 'A' } } },
},
{
name: 'c',
values: [1, 2, 3],
config: { custom: { stacking: { mode: StackingMode.Percent, group: 'A' } } },
},
{
name: 'd',
values: [1, 2, 3],
config: { custom: { stacking: { mode: StackingMode.Normal, group: 'B' } } },
},
{
name: 'e',
values: [1, 2, 3],
config: { custom: { stacking: { mode: StackingMode.Percent, group: 'B' } } },
},
{
name: 'e',
values: [1, 2, 3],
config: {
custom: { stacking: { mode: StackingMode.Percent, group: 'B' }, transform: GraphTransform.NegativeY },
},
},
],
});
expect(getStackingGroups(df)).toMatchInlineSnapshot(`
[
{
"dir": 1,
"series": [
1,
2,
],
},
{
"dir": 1,
"series": [
3,
],
},
{
"dir": 1,
"series": [
4,
],
},
{
"dir": 1,
"series": [
5,
],
},
{
"dir": -1,
"series": [
6,
],
},
]
`);
});
});
| packages/grafana-ui/src/components/uPlot/utils.test.ts | 0 | https://github.com/grafana/grafana/commit/a7d4bbf0248d0c6e5c6fd9f9ca55ffd399781c12 | [
0.0001791417016647756,
0.00017425997066311538,
0.00016360160952899605,
0.00017469277372583747,
0.0000026910608994512586
] |
{
"id": 1,
"code_window": [
"\t\t\t\tResourceName: azJSONModel.ResourceName,\n",
"\t\t\t}\n",
"\t\t\tazureURL = ub.BuildMetricsURL()\n",
"\t\t}\n",
"\n",
"\t\t// old model\n",
"\t\tdimension := strings.TrimSpace(azJSONModel.Dimension)\n"
],
"labels": [
"keep",
"keep",
"add",
"keep",
"keep",
"keep",
"keep"
],
"after_edit": [
"\t\t\t// POST requests are only supported at the subscription level\n",
"\t\t\tfilterInBody = false\n"
],
"file_path": "pkg/tsdb/azuremonitor/metrics/azuremonitor-datasource.go",
"type": "add",
"edit_start_line_idx": 123
} | import { ThemeVisualizationColors } from '../themes';
export enum GrafanaThemeType {
Light = 'light',
Dark = 'dark',
}
export interface GrafanaThemeCommons {
name: string;
// TODO: not sure if should be a part of theme
breakpoints: {
xs: string;
sm: string;
md: string;
lg: string;
xl: string;
xxl: string;
};
typography: {
fontFamily: {
sansSerif: string;
monospace: string;
};
size: {
base: string;
xs: string;
sm: string;
md: string;
lg: string;
};
weight: {
light: number;
regular: number;
semibold: number;
bold: number;
};
lineHeight: {
xs: number; //1
sm: number; //1.1
md: number; // 4/3
lg: number; // 1.5
};
// TODO: Refactor to use size instead of custom defs
heading: {
h1: string;
h2: string;
h3: string;
h4: string;
h5: string;
h6: string;
};
link: {
decoration: string;
hoverDecoration: string;
};
};
spacing: {
base: number;
insetSquishMd: string;
d: string;
xxs: string;
xs: string;
sm: string;
md: string;
lg: string;
xl: string;
gutter: string;
// Next-gen forms spacing variables
// TODO: Move variables definition to respective components when implementing
formSpacingBase: number;
formMargin: string;
formFieldsetMargin: string;
formInputHeight: number;
formButtonHeight: number;
formInputPaddingHorizontal: string;
// Used for icons do define spacing between icon and input field
// Applied on the right(prefix) or left(suffix)
formInputAffixPaddingHorizontal: string;
formInputMargin: string;
formLabelPadding: string;
formLabelMargin: string;
formValidationMessagePadding: string;
formValidationMessageMargin: string;
inlineFormMargin: string;
};
border: {
radius: {
sm: string;
md: string;
lg: string;
};
width: {
sm: string;
};
};
height: {
sm: number;
md: number;
lg: number;
};
panelPadding: number;
panelHeaderHeight: number;
zIndex: {
dropdown: number;
navbarFixed: number;
sidemenu: number;
tooltip: number;
modalBackdrop: number;
modal: number;
portal: number;
typeahead: number;
};
}
export interface GrafanaTheme extends GrafanaThemeCommons {
type: GrafanaThemeType;
isDark: boolean;
isLight: boolean;
palette: {
black: string;
white: string;
dark1: string;
dark2: string;
dark3: string;
dark4: string;
dark5: string;
dark6: string;
dark7: string;
dark8: string;
dark9: string;
dark10: string;
gray1: string;
gray2: string;
gray3: string;
gray4: string;
gray5: string;
gray6: string;
gray7: string;
// New greys palette used by next-gen form elements
gray98: string;
gray97: string;
gray95: string;
gray90: string;
gray85: string;
gray70: string;
gray60: string;
gray33: string;
gray25: string;
gray15: string;
gray10: string;
gray05: string;
// New blues palette used by next-gen form elements
blue95: string;
blue85: string;
blue80: string;
blue77: string;
// New reds palette used by next-gen form elements
red88: string;
// Accent colors
redBase: string;
redShade: string;
greenBase: string;
greenShade: string;
red: string;
yellow: string;
purple: string;
orange: string;
orangeDark: string;
queryRed: string;
queryGreen: string;
queryPurple: string;
queryOrange: string;
brandPrimary: string;
brandSuccess: string;
brandWarning: string;
brandDanger: string;
// Status colors
online: string;
warn: string;
critical: string;
};
colors: {
bg1: string;
bg2: string;
bg3: string;
border1: string;
border2: string;
border3: string;
bgBlue1: string;
bgBlue2: string;
dashboardBg: string;
bodyBg: string;
panelBg: string;
panelBorder: string;
pageHeaderBg: string;
pageHeaderBorder: string;
dropdownBg: string;
dropdownShadow: string;
dropdownOptionHoverBg: string;
// Link colors
link: string;
linkDisabled: string;
linkHover: string;
linkExternal: string;
// Text colors
textStrong: string;
textHeading: string;
text: string;
textSemiWeak: string;
textWeak: string;
textFaint: string;
textBlue: string;
// Next-gen forms functional colors
formLabel: string;
formDescription: string;
formInputBg: string;
formInputBgDisabled: string;
formInputBorder: string;
formInputBorderHover: string;
formInputBorderActive: string;
formInputBorderInvalid: string;
formFocusOutline: string;
formInputText: string;
formInputDisabledText: string;
formInputPlaceholderText: string;
formValidationMessageText: string;
formValidationMessageBg: string;
};
shadows: {
listItem: string;
};
visualization: ThemeVisualizationColors;
}
| packages/grafana-data/src/types/theme.ts | 0 | https://github.com/grafana/grafana/commit/a7d4bbf0248d0c6e5c6fd9f9ca55ffd399781c12 | [
0.000180696981260553,
0.0001748878275975585,
0.00016610401507932693,
0.00017651978123467416,
0.000004155260285187978
] |
{
"id": 2,
"code_window": [
"\t\tif setting.Env == setting.Dev {\n",
"\t\t\tlogger.Debug(\"Azuremonitor request\", \"params\", params)\n",
"\t\t}\n",
"\n",
"\t\tazureMonitorQueries = append(azureMonitorQueries, &types.AzureMonitorQuery{\n",
"\t\t\tURL: azureURL,\n",
"\t\t\tTarget: target,\n",
"\t\t\tParams: params,\n",
"\t\t\tRefID: query.RefID,\n"
],
"labels": [
"keep",
"keep",
"keep",
"keep",
"replace",
"keep",
"keep",
"keep",
"keep"
],
"after_edit": [
"\t\tquery := &types.AzureMonitorQuery{\n"
],
"file_path": "pkg/tsdb/azuremonitor/metrics/azuremonitor-datasource.go",
"type": "replace",
"edit_start_line_idx": 176
} | package metrics
import (
"context"
"encoding/json"
"fmt"
"net/http"
"net/url"
"os"
"path/filepath"
"testing"
"time"
"github.com/google/go-cmp/cmp"
"github.com/google/go-cmp/cmp/cmpopts"
"github.com/grafana/grafana-plugin-sdk-go/backend"
"github.com/grafana/grafana-plugin-sdk-go/data"
"github.com/stretchr/testify/require"
ptr "github.com/xorcare/pointer"
"github.com/grafana/grafana/pkg/components/simplejson"
"github.com/grafana/grafana/pkg/infra/log"
azTime "github.com/grafana/grafana/pkg/tsdb/azuremonitor/time"
"github.com/grafana/grafana/pkg/tsdb/azuremonitor/types"
)
func TestAzureMonitorBuildQueries(t *testing.T) {
datasource := &AzureMonitorDatasource{}
dsInfo := types.DatasourceInfo{
Settings: types.AzureMonitorSettings{
SubscriptionId: "default-subscription",
},
}
fromStart := time.Date(2018, 3, 15, 13, 0, 0, 0, time.UTC).In(time.Local)
duration, _ := time.ParseDuration("400s")
wildcardFilter := "*"
testFilter := "test"
tests := []struct {
name string
azureMonitorVariedProperties map[string]interface{}
azureMonitorQueryTarget string
expectedInterval string
queryInterval time.Duration
expectedURL string
expectedFilter string
}{
{
name: "Parse queries from frontend and build AzureMonitor API queries",
azureMonitorVariedProperties: map[string]interface{}{
"resourceURI": "/subscriptions/12345678-aaaa-bbbb-cccc-123456789abc/resourceGroups/grafanastaging/providers/Microsoft.Compute/virtualMachines/grafana",
"timeGrain": "PT1M",
"top": "10",
},
expectedInterval: "PT1M",
azureMonitorQueryTarget: "aggregation=Average&api-version=2021-05-01&interval=PT1M&metricnames=Percentage+CPU&metricnamespace=Microsoft.Compute%2FvirtualMachines×pan=2018-03-15T13%3A00%3A00Z%2F2018-03-15T13%3A34%3A00Z",
},
{
name: "legacy query without resourceURI and time grain set to auto",
azureMonitorVariedProperties: map[string]interface{}{
"timeGrain": "auto",
"top": "10",
},
queryInterval: duration,
expectedInterval: "PT15M",
azureMonitorQueryTarget: "aggregation=Average&api-version=2021-05-01&interval=PT15M&metricnames=Percentage+CPU&metricnamespace=Microsoft.Compute%2FvirtualMachines×pan=2018-03-15T13%3A00%3A00Z%2F2018-03-15T13%3A34%3A00Z",
},
{
name: "legacy query without resourceURI and time grain set to auto",
azureMonitorVariedProperties: map[string]interface{}{
"timeGrain": "auto",
"allowedTimeGrainsMs": []int64{60000, 300000},
"top": "10",
},
queryInterval: duration,
expectedInterval: "PT5M",
azureMonitorQueryTarget: "aggregation=Average&api-version=2021-05-01&interval=PT5M&metricnames=Percentage+CPU&metricnamespace=Microsoft.Compute%2FvirtualMachines×pan=2018-03-15T13%3A00%3A00Z%2F2018-03-15T13%3A34%3A00Z",
},
{
name: "legacy query without resourceURI and has a dimension filter",
azureMonitorVariedProperties: map[string]interface{}{
"timeGrain": "PT1M",
"dimension": "blob",
"dimensionFilter": "*",
"top": "30",
},
queryInterval: duration,
expectedInterval: "PT1M",
azureMonitorQueryTarget: "aggregation=Average&api-version=2021-05-01&interval=PT1M&metricnames=Percentage+CPU&metricnamespace=Microsoft.Compute%2FvirtualMachines×pan=2018-03-15T13%3A00%3A00Z%2F2018-03-15T13%3A34%3A00Z&top=30",
expectedFilter: "blob eq '*'",
},
{
name: "legacy query without resourceURI and has a dimension filter and none Dimension",
azureMonitorVariedProperties: map[string]interface{}{
"timeGrain": "PT1M",
"dimension": "None",
"dimensionFilter": "*",
"top": "10",
},
queryInterval: duration,
expectedInterval: "PT1M",
azureMonitorQueryTarget: "aggregation=Average&api-version=2021-05-01&interval=PT1M&metricnames=Percentage+CPU&metricnamespace=Microsoft.Compute%2FvirtualMachines×pan=2018-03-15T13%3A00%3A00Z%2F2018-03-15T13%3A34%3A00Z",
},
{
name: "legacy query without resourceURI and has dimensionFilter*s* property with one dimension",
azureMonitorVariedProperties: map[string]interface{}{
"timeGrain": "PT1M",
"dimensionFilters": []types.AzureMonitorDimensionFilter{{Dimension: "blob", Operator: "eq", Filter: &wildcardFilter}},
"top": "30",
},
queryInterval: duration,
expectedInterval: "PT1M",
azureMonitorQueryTarget: "aggregation=Average&api-version=2021-05-01&interval=PT1M&metricnames=Percentage+CPU&metricnamespace=Microsoft.Compute%2FvirtualMachines×pan=2018-03-15T13%3A00%3A00Z%2F2018-03-15T13%3A34%3A00Z&top=30",
expectedFilter: "blob eq '*'",
},
{
name: "legacy query without resourceURI and has dimensionFilter*s* property with two dimensions",
azureMonitorVariedProperties: map[string]interface{}{
"timeGrain": "PT1M",
"dimensionFilters": []types.AzureMonitorDimensionFilter{{Dimension: "blob", Operator: "eq", Filter: &wildcardFilter}, {Dimension: "tier", Operator: "eq", Filter: &wildcardFilter}},
"top": "30",
},
queryInterval: duration,
expectedInterval: "PT1M",
azureMonitorQueryTarget: "aggregation=Average&api-version=2021-05-01&interval=PT1M&metricnames=Percentage+CPU&metricnamespace=Microsoft.Compute%2FvirtualMachines×pan=2018-03-15T13%3A00%3A00Z%2F2018-03-15T13%3A34%3A00Z&top=30",
expectedFilter: "blob eq '*' and tier eq '*'",
},
{
name: "legacy query without resourceURI and has a dimension filter without specifying a top",
azureMonitorVariedProperties: map[string]interface{}{
"timeGrain": "PT1M",
"dimension": "blob",
"dimensionFilter": "*",
},
queryInterval: duration,
expectedInterval: "PT1M",
azureMonitorQueryTarget: "aggregation=Average&api-version=2021-05-01&interval=PT1M&metricnames=Percentage+CPU&metricnamespace=Microsoft.Compute%2FvirtualMachines×pan=2018-03-15T13%3A00%3A00Z%2F2018-03-15T13%3A34%3A00Z",
expectedFilter: "blob eq '*'",
},
{
name: "has dimensionFilter*s* property with not equals operator",
azureMonitorVariedProperties: map[string]interface{}{
"timeGrain": "PT1M",
"dimensionFilters": []types.AzureMonitorDimensionFilter{{Dimension: "blob", Operator: "ne", Filter: &wildcardFilter, Filters: []string{"test"}}},
"top": "30",
},
queryInterval: duration,
expectedInterval: "PT1M",
azureMonitorQueryTarget: "aggregation=Average&api-version=2021-05-01&interval=PT1M&metricnames=Percentage+CPU&metricnamespace=Microsoft.Compute%2FvirtualMachines×pan=2018-03-15T13%3A00%3A00Z%2F2018-03-15T13%3A34%3A00Z&top=30",
expectedFilter: "blob ne 'test'",
},
{
name: "has dimensionFilter*s* property with startsWith operator",
azureMonitorVariedProperties: map[string]interface{}{
"timeGrain": "PT1M",
"dimensionFilters": []types.AzureMonitorDimensionFilter{{Dimension: "blob", Operator: "sw", Filter: &testFilter}},
"top": "30",
},
queryInterval: duration,
expectedInterval: "PT1M",
azureMonitorQueryTarget: "aggregation=Average&api-version=2021-05-01&interval=PT1M&metricnames=Percentage+CPU&metricnamespace=Microsoft.Compute%2FvirtualMachines×pan=2018-03-15T13%3A00%3A00Z%2F2018-03-15T13%3A34%3A00Z&top=30",
expectedFilter: "blob sw 'test'",
},
{
name: "correctly sets dimension operator to eq (irrespective of operator) when filter value is '*'",
azureMonitorVariedProperties: map[string]interface{}{
"timeGrain": "PT1M",
"dimensionFilters": []types.AzureMonitorDimensionFilter{{Dimension: "blob", Operator: "sw", Filter: &wildcardFilter}, {Dimension: "tier", Operator: "ne", Filter: &wildcardFilter}},
"top": "30",
},
queryInterval: duration,
expectedInterval: "PT1M",
azureMonitorQueryTarget: "aggregation=Average&api-version=2021-05-01&interval=PT1M&metricnames=Percentage+CPU&metricnamespace=Microsoft.Compute%2FvirtualMachines×pan=2018-03-15T13%3A00%3A00Z%2F2018-03-15T13%3A34%3A00Z&top=30",
expectedFilter: "blob eq '*' and tier eq '*'",
},
{
name: "correctly constructs target when multiple filter values are provided for the 'eq' operator",
azureMonitorVariedProperties: map[string]interface{}{
"timeGrain": "PT1M",
"dimensionFilters": []types.AzureMonitorDimensionFilter{{Dimension: "blob", Operator: "eq", Filter: &wildcardFilter, Filters: []string{"test", "test2"}}},
"top": "30",
},
queryInterval: duration,
expectedInterval: "PT1M",
azureMonitorQueryTarget: "aggregation=Average&api-version=2021-05-01&interval=PT1M&metricnames=Percentage+CPU&metricnamespace=Microsoft.Compute%2FvirtualMachines×pan=2018-03-15T13%3A00%3A00Z%2F2018-03-15T13%3A34%3A00Z&top=30",
expectedFilter: "blob eq 'test' or blob eq 'test2'",
},
{
name: "correctly constructs target when multiple filter values are provided for ne 'eq' operator",
azureMonitorVariedProperties: map[string]interface{}{
"timeGrain": "PT1M",
"dimensionFilters": []types.AzureMonitorDimensionFilter{{Dimension: "blob", Operator: "ne", Filter: &wildcardFilter, Filters: []string{"test", "test2"}}},
"top": "30",
},
queryInterval: duration,
expectedInterval: "PT1M",
azureMonitorQueryTarget: "aggregation=Average&api-version=2021-05-01&interval=PT1M&metricnames=Percentage+CPU&metricnamespace=Microsoft.Compute%2FvirtualMachines×pan=2018-03-15T13%3A00%3A00Z%2F2018-03-15T13%3A34%3A00Z&top=30",
expectedFilter: "blob ne 'test' and blob ne 'test2'",
},
{
name: "Includes a region",
azureMonitorVariedProperties: map[string]interface{}{
"timeGrain": "PT1M",
"top": "10",
"region": "westus",
},
expectedInterval: "PT1M",
azureMonitorQueryTarget: "aggregation=Average&api-version=2021-05-01&interval=PT1M&metricnames=Percentage+CPU&metricnamespace=Microsoft.Compute%2FvirtualMachines®ion=westus×pan=2018-03-15T13%3A00%3A00Z%2F2018-03-15T13%3A34%3A00Z",
expectedURL: "/subscriptions/12345678-aaaa-bbbb-cccc-123456789abc/providers/microsoft.insights/metrics",
},
{
name: "includes a resource as a filter",
azureMonitorVariedProperties: map[string]interface{}{
"timeGrain": "PT1M",
"resources": []types.AzureMonitorResource{{ResourceGroup: "rg", ResourceName: "vm"}},
},
queryInterval: duration,
expectedInterval: "PT1M",
azureMonitorQueryTarget: "aggregation=Average&api-version=2021-05-01&interval=PT1M&metricnames=Percentage+CPU&metricnamespace=Microsoft.Compute%2FvirtualMachines×pan=2018-03-15T13%3A00%3A00Z%2F2018-03-15T13%3A34%3A00Z",
expectedFilter: "Microsoft.ResourceId eq '/subscriptions/12345678-aaaa-bbbb-cccc-123456789abc/resourceGroups/rg/providers/Microsoft.Compute/virtualMachines/vm'",
},
{
name: "includes a resource and a dimesion as filters",
azureMonitorVariedProperties: map[string]interface{}{
"timeGrain": "PT1M",
"resources": []types.AzureMonitorResource{{ResourceGroup: "rg", ResourceName: "vm"}},
"dimensionFilters": []types.AzureMonitorDimensionFilter{{Dimension: "blob", Operator: "ne", Filter: &wildcardFilter, Filters: []string{"test", "test2"}}},
"top": "30",
},
queryInterval: duration,
expectedInterval: "PT1M",
azureMonitorQueryTarget: "aggregation=Average&api-version=2021-05-01&interval=PT1M&metricnames=Percentage+CPU&metricnamespace=Microsoft.Compute%2FvirtualMachines×pan=2018-03-15T13%3A00%3A00Z%2F2018-03-15T13%3A34%3A00Z&top=30",
expectedFilter: "(Microsoft.ResourceId eq '/subscriptions/12345678-aaaa-bbbb-cccc-123456789abc/resourceGroups/rg/providers/Microsoft.Compute/virtualMachines/vm') and (blob ne 'test' and blob ne 'test2')",
},
}
commonAzureModelProps := map[string]interface{}{
"aggregation": "Average",
"resourceGroup": "grafanastaging",
"resourceName": "grafana",
"metricNamespace": "Microsoft.Compute/virtualMachines",
"metricName": "Percentage CPU",
"alias": "testalias",
"queryType": "Azure Monitor",
}
for _, tt := range tests {
t.Run(tt.name, func(t *testing.T) {
for k, v := range commonAzureModelProps {
tt.azureMonitorVariedProperties[k] = v
}
azureMonitorJSON, _ := json.Marshal(tt.azureMonitorVariedProperties)
tsdbQuery := []backend.DataQuery{
{
JSON: []byte(fmt.Sprintf(`{
"subscription": "12345678-aaaa-bbbb-cccc-123456789abc",
"azureMonitor": %s
}`, string(azureMonitorJSON))),
RefID: "A",
Interval: tt.queryInterval,
TimeRange: backend.TimeRange{
From: fromStart,
To: fromStart.Add(34 * time.Minute),
},
},
}
azureMonitorQuery := &types.AzureMonitorQuery{
URL: tt.expectedURL,
Target: tt.azureMonitorQueryTarget,
RefID: "A",
Alias: "testalias",
TimeRange: backend.TimeRange{
From: fromStart,
To: fromStart.Add(34 * time.Minute),
},
Filter: tt.expectedFilter,
}
if azureMonitorQuery.URL == "" {
azureMonitorQuery.URL = "/subscriptions/12345678-aaaa-bbbb-cccc-123456789abc/resourceGroups/grafanastaging/providers/Microsoft.Compute/virtualMachines/grafana/providers/microsoft.insights/metrics"
}
queries, err := datasource.buildQueries(log.New("test"), tsdbQuery, dsInfo)
require.NoError(t, err)
if diff := cmp.Diff(azureMonitorQuery, queries[0], cmpopts.IgnoreUnexported(simplejson.Json{}), cmpopts.IgnoreFields(types.AzureMonitorQuery{}, "Params")); diff != "" {
t.Errorf("Result mismatch (-want +got):\n%s", diff)
}
expected := `http://ds/#blade/Microsoft_Azure_MonitoringMetrics/Metrics.ReactView/Referer/MetricsExplorer/` +
`TimeContext/%7B%22absolute%22%3A%7B%22startTime%22%3A%222018-03-15T13%3A00%3A00Z%22%2C%22endTime%22%3A%222018-03-15T13%3A34%3A00Z%22%7D%7D/` +
`ChartDefinition/%7B%22v2charts%22%3A%5B%7B%22metrics%22%3A%5B%7B%22resourceMetadata%22%3A%7B%22id%22%3A%22%2Fsubscriptions%2F12345678-aaaa-bbbb-cccc-123456789abc%2FresourceGroups%2Fgrafanastaging%2Fproviders%2FMicrosoft.Compute%2FvirtualMachines%2Fgrafana%22%7D%2C` +
`%22name%22%3A%22Percentage%20CPU%22%2C%22aggregationType%22%3A4%2C%22namespace%22%3A%22Microsoft.Compute%2FvirtualMachines%22%2C%22metricVisualization%22%3A%7B%22displayName%22%3A%22Percentage%20CPU%22%2C%22resourceDisplayName%22%3A%22grafana%22%7D%7D%5D%7D%5D%7D`
actual, err := getQueryUrl(queries[0], "http://ds", "/subscriptions/12345678-aaaa-bbbb-cccc-123456789abc/resourceGroups/grafanastaging/providers/Microsoft.Compute/virtualMachines/grafana", "grafana")
require.NoError(t, err)
require.Equal(t, expected, actual)
})
}
}
func TestCustomNamespace(t *testing.T) {
datasource := &AzureMonitorDatasource{}
t.Run("it should set the metricNamespace to a customNamespace value if customNamespace is present as a parameter", func(t *testing.T) {
q := []backend.DataQuery{
{
JSON: []byte(`{
"azureMonitor": {
"customNamespace": "custom/namespace"
}
}`),
},
}
result, err := datasource.buildQueries(log.New("test"), q, types.DatasourceInfo{})
require.NoError(t, err)
expected := "custom/namespace"
require.Equal(t, expected, result[0].Params.Get("metricnamespace"))
})
}
func makeDates(startDate time.Time, count int, interval time.Duration) (times []time.Time) {
for i := 0; i < count; i++ {
times = append(times, startDate.Add(interval*time.Duration(i)))
}
return
}
func makeTestDataLink(url string) data.DataLink {
return data.DataLink{
Title: "View in Azure Portal",
TargetBlank: true,
URL: url,
}
}
// TestAzureMonitorParseResponse is a table-driven test of
// AzureMonitorDatasource.parseResponse: each case loads a canned Azure Monitor
// JSON response from testdata and asserts the exact data.Frames produced,
// including per-field units, display names (alias expansion), dimension labels
// and the portal deep link attached to every field.
func TestAzureMonitorParseResponse(t *testing.T) {
	// datalinks for the test frames
	// One link per aggregation type; they differ only in the percent-encoded
	// "aggregationType" value embedded in the ChartDefinition payload
	// (4=Average, 1=Total, 3=Maximum, 2=Minimum, 7=Count).
	averageLink := makeTestDataLink(`http://ds/#blade/Microsoft_Azure_MonitoringMetrics/Metrics.ReactView/Referer/MetricsExplorer/TimeContext/%7B%22absolute%22%3A%7B%22startTime%22%3A%220001-01-01T00%3A00%3A00Z%22%2C%22endTime%22%3A%220001-01-01T00%3A00%3A00Z%22%7D%7D/` +
		`ChartDefinition/%7B%22v2charts%22%3A%5B%7B%22metrics%22%3A%5B%7B%22resourceMetadata%22%3A%7B%22id%22%3A%22%2Fsubscriptions%2F12345678-aaaa-bbbb-cccc-123456789abc%2FresourceGroups%2Fgrafanastaging%2Fproviders%2FMicrosoft.Compute%2FvirtualMachines%2Fgrafana%22%7D%2C%22name%22%3A%22%22%2C%22aggregationType%22%3A4%2C%22namespace%22%3A%22%22%2C` +
		`%22metricVisualization%22%3A%7B%22displayName%22%3A%22%22%2C%22resourceDisplayName%22%3A%22grafana%22%7D%7D%5D%7D%5D%7D`)
	totalLink := makeTestDataLink(`http://ds/#blade/Microsoft_Azure_MonitoringMetrics/Metrics.ReactView/Referer/MetricsExplorer/TimeContext/%7B%22absolute%22%3A%7B%22startTime%22%3A%220001-01-01T00%3A00%3A00Z%22%2C%22endTime%22%3A%220001-01-01T00%3A00%3A00Z%22%7D%7D/` +
		`ChartDefinition/%7B%22v2charts%22%3A%5B%7B%22metrics%22%3A%5B%7B%22resourceMetadata%22%3A%7B%22id%22%3A%22%2Fsubscriptions%2F12345678-aaaa-bbbb-cccc-123456789abc%2FresourceGroups%2Fgrafanastaging%2Fproviders%2FMicrosoft.Compute%2FvirtualMachines%2Fgrafana%22%7D%2C%22name%22%3A%22%22%2C%22aggregationType%22%3A1%2C%22namespace%22%3A%22%22%2C` +
		`%22metricVisualization%22%3A%7B%22displayName%22%3A%22%22%2C%22resourceDisplayName%22%3A%22grafana%22%7D%7D%5D%7D%5D%7D`)
	maxLink := makeTestDataLink(`http://ds/#blade/Microsoft_Azure_MonitoringMetrics/Metrics.ReactView/Referer/MetricsExplorer/TimeContext/%7B%22absolute%22%3A%7B%22startTime%22%3A%220001-01-01T00%3A00%3A00Z%22%2C%22endTime%22%3A%220001-01-01T00%3A00%3A00Z%22%7D%7D/` +
		`ChartDefinition/%7B%22v2charts%22%3A%5B%7B%22metrics%22%3A%5B%7B%22resourceMetadata%22%3A%7B%22id%22%3A%22%2Fsubscriptions%2F12345678-aaaa-bbbb-cccc-123456789abc%2FresourceGroups%2Fgrafanastaging%2Fproviders%2FMicrosoft.Compute%2FvirtualMachines%2Fgrafana%22%7D%2C%22name%22%3A%22%22%2C%22aggregationType%22%3A3%2C%22namespace%22%3A%22%22%2C` +
		`%22metricVisualization%22%3A%7B%22displayName%22%3A%22%22%2C%22resourceDisplayName%22%3A%22grafana%22%7D%7D%5D%7D%5D%7D`)
	minLink := makeTestDataLink(`http://ds/#blade/Microsoft_Azure_MonitoringMetrics/Metrics.ReactView/Referer/MetricsExplorer/TimeContext/%7B%22absolute%22%3A%7B%22startTime%22%3A%220001-01-01T00%3A00%3A00Z%22%2C%22endTime%22%3A%220001-01-01T00%3A00%3A00Z%22%7D%7D/` +
		`ChartDefinition/%7B%22v2charts%22%3A%5B%7B%22metrics%22%3A%5B%7B%22resourceMetadata%22%3A%7B%22id%22%3A%22%2Fsubscriptions%2F12345678-aaaa-bbbb-cccc-123456789abc%2FresourceGroups%2Fgrafanastaging%2Fproviders%2FMicrosoft.Compute%2FvirtualMachines%2Fgrafana%22%7D%2C%22name%22%3A%22%22%2C%22aggregationType%22%3A2%2C%22namespace%22%3A%22%22%2C` +
		`%22metricVisualization%22%3A%7B%22displayName%22%3A%22%22%2C%22resourceDisplayName%22%3A%22grafana%22%7D%7D%5D%7D%5D%7D`)
	countLink := makeTestDataLink(`http://ds/#blade/Microsoft_Azure_MonitoringMetrics/Metrics.ReactView/Referer/MetricsExplorer/TimeContext/%7B%22absolute%22%3A%7B%22startTime%22%3A%220001-01-01T00%3A00%3A00Z%22%2C%22endTime%22%3A%220001-01-01T00%3A00%3A00Z%22%7D%7D/` +
		`ChartDefinition/%7B%22v2charts%22%3A%5B%7B%22metrics%22%3A%5B%7B%22resourceMetadata%22%3A%7B%22id%22%3A%22%2Fsubscriptions%2F12345678-aaaa-bbbb-cccc-123456789abc%2FresourceGroups%2Fgrafanastaging%2Fproviders%2FMicrosoft.Compute%2FvirtualMachines%2Fgrafana%22%7D%2C%22name%22%3A%22%22%2C%22aggregationType%22%3A7%2C%22namespace%22%3A%22%22%2C` +
		`%22metricVisualization%22%3A%7B%22displayName%22%3A%22%22%2C%22resourceDisplayName%22%3A%22grafana%22%7D%7D%5D%7D%5D%7D`)
	tests := []struct {
		name            string                   // subtest name
		responseFile    string                   // fixture under ../testdata/azuremonitor
		mockQuery       *types.AzureMonitorQuery // query fed to parseResponse
		expectedFrames  data.Frames              // exact frames expected back
		queryIntervalMS int64                    // NOTE(review): never set or read below — presumably vestigial; confirm before removing
	}{
		{
			name:         "average aggregate time series response",
			responseFile: "1-azure-monitor-response-avg.json",
			mockQuery: &types.AzureMonitorQuery{
				URL: "/subscriptions/12345678-aaaa-bbbb-cccc-123456789abc/resourceGroups/grafanastaging/providers/Microsoft.Compute/virtualMachines/grafana/providers/microsoft.insights/metrics",
				Params: url.Values{
					"aggregation": {"Average"},
				},
			},
			expectedFrames: data.Frames{
				data.NewFrame("",
					data.NewField("Time", nil,
						makeDates(time.Date(2019, 2, 8, 10, 13, 0, 0, time.UTC), 5, time.Minute),
					).SetConfig(&data.FieldConfig{Links: []data.DataLink{averageLink}}),
					data.NewField("Percentage CPU", nil, []*float64{
						ptr.Float64(2.0875), ptr.Float64(2.1525), ptr.Float64(2.155), ptr.Float64(3.6925), ptr.Float64(2.44),
					}).SetConfig(&data.FieldConfig{Unit: "percent", Links: []data.DataLink{averageLink}})),
			},
		},
		{
			name:         "total aggregate time series response",
			responseFile: "2-azure-monitor-response-total.json",
			mockQuery: &types.AzureMonitorQuery{
				URL: "/subscriptions/12345678-aaaa-bbbb-cccc-123456789abc/resourceGroups/grafanastaging/providers/Microsoft.Compute/virtualMachines/grafana/providers/microsoft.insights/metrics",
				Params: url.Values{
					"aggregation": {"Total"},
				},
			},
			expectedFrames: data.Frames{
				data.NewFrame("",
					data.NewField("Time", nil,
						makeDates(time.Date(2019, 2, 9, 13, 29, 0, 0, time.UTC), 5, time.Minute),
					).SetConfig(&data.FieldConfig{Links: []data.DataLink{totalLink}}),
					data.NewField("Percentage CPU", nil, []*float64{
						ptr.Float64(8.26), ptr.Float64(8.7), ptr.Float64(14.82), ptr.Float64(10.07), ptr.Float64(8.52),
					}).SetConfig(&data.FieldConfig{Unit: "percent", Links: []data.DataLink{totalLink}})),
			},
		},
		{
			name:         "maximum aggregate time series response",
			responseFile: "3-azure-monitor-response-maximum.json",
			mockQuery: &types.AzureMonitorQuery{
				URL: "/subscriptions/12345678-aaaa-bbbb-cccc-123456789abc/resourceGroups/grafanastaging/providers/Microsoft.Compute/virtualMachines/grafana/providers/microsoft.insights/metrics",
				Params: url.Values{
					"aggregation": {"Maximum"},
				},
			},
			expectedFrames: data.Frames{
				data.NewFrame("",
					data.NewField("Time", nil,
						makeDates(time.Date(2019, 2, 9, 14, 26, 0, 0, time.UTC), 5, time.Minute),
					).SetConfig(&data.FieldConfig{Links: []data.DataLink{maxLink}}),
					data.NewField("Percentage CPU", nil, []*float64{
						ptr.Float64(3.07), ptr.Float64(2.92), ptr.Float64(2.87), ptr.Float64(2.27), ptr.Float64(2.52),
					}).SetConfig(&data.FieldConfig{Unit: "percent", Links: []data.DataLink{maxLink}})),
			},
		},
		{
			name:         "minimum aggregate time series response",
			responseFile: "4-azure-monitor-response-minimum.json",
			mockQuery: &types.AzureMonitorQuery{
				URL: "/subscriptions/12345678-aaaa-bbbb-cccc-123456789abc/resourceGroups/grafanastaging/providers/Microsoft.Compute/virtualMachines/grafana/providers/microsoft.insights/metrics",
				Params: url.Values{
					"aggregation": {"Minimum"},
				},
			},
			expectedFrames: data.Frames{
				data.NewFrame("",
					data.NewField("Time", nil,
						makeDates(time.Date(2019, 2, 9, 14, 43, 0, 0, time.UTC), 5, time.Minute),
					).SetConfig(&data.FieldConfig{Links: []data.DataLink{minLink}}),
					data.NewField("Percentage CPU", nil, []*float64{
						ptr.Float64(1.51), ptr.Float64(2.38), ptr.Float64(1.69), ptr.Float64(2.27), ptr.Float64(1.96),
					}).SetConfig(&data.FieldConfig{Unit: "percent", Links: []data.DataLink{minLink}})),
			},
		},
		{
			name:         "count aggregate time series response",
			responseFile: "5-azure-monitor-response-count.json",
			mockQuery: &types.AzureMonitorQuery{
				URL: "/subscriptions/12345678-aaaa-bbbb-cccc-123456789abc/resourceGroups/grafanastaging/providers/Microsoft.Compute/virtualMachines/grafana/providers/microsoft.insights/metrics",
				Params: url.Values{
					"aggregation": {"Count"},
				},
			},
			expectedFrames: data.Frames{
				data.NewFrame("",
					data.NewField("Time", nil,
						makeDates(time.Date(2019, 2, 9, 14, 44, 0, 0, time.UTC), 5, time.Minute),
					).SetConfig(&data.FieldConfig{Links: []data.DataLink{countLink}}),
					data.NewField("Percentage CPU", nil, []*float64{
						ptr.Float64(4), ptr.Float64(4), ptr.Float64(4), ptr.Float64(4), ptr.Float64(4),
					}).SetConfig(&data.FieldConfig{Unit: "percent", Links: []data.DataLink{countLink}})),
			},
		},
		{
			// A dimensioned response must fan out into one frame per dimension
			// value, each carrying the dimension as a field label.
			name:         "single dimension time series response",
			responseFile: "6-azure-monitor-response-single-dimension.json",
			mockQuery: &types.AzureMonitorQuery{
				URL: "/subscriptions/12345678-aaaa-bbbb-cccc-123456789abc/resourceGroups/grafanastaging/providers/Microsoft.Compute/virtualMachines/grafana/providers/microsoft.insights/metrics",
				Params: url.Values{
					"aggregation": {"Average"},
				},
			},
			expectedFrames: data.Frames{
				data.NewFrame("",
					data.NewField("Time", nil,
						makeDates(time.Date(2019, 2, 9, 15, 21, 0, 0, time.UTC), 6, time.Hour),
					).SetConfig(&data.FieldConfig{Links: []data.DataLink{averageLink}}),
					data.NewField("Blob Count", data.Labels{"blobtype": "PageBlob"},
						[]*float64{ptr.Float64(3), ptr.Float64(3), ptr.Float64(3), ptr.Float64(3), ptr.Float64(3), nil}).SetConfig(&data.FieldConfig{Unit: "short", Links: []data.DataLink{averageLink}})),
				data.NewFrame("",
					data.NewField("Time", nil,
						makeDates(time.Date(2019, 2, 9, 15, 21, 0, 0, time.UTC), 6, time.Hour),
					).SetConfig(&data.FieldConfig{Links: []data.DataLink{averageLink}}),
					data.NewField("Blob Count", data.Labels{"blobtype": "BlockBlob"},
						[]*float64{ptr.Float64(1), ptr.Float64(1), ptr.Float64(1), ptr.Float64(1), ptr.Float64(1), nil}).SetConfig(&data.FieldConfig{Unit: "short", Links: []data.DataLink{averageLink}})),
				data.NewFrame("",
					data.NewField("Time", nil,
						makeDates(time.Date(2019, 2, 9, 15, 21, 0, 0, time.UTC), 6, time.Hour),
					).SetConfig(&data.FieldConfig{Links: []data.DataLink{averageLink}}),
					data.NewField("Blob Count", data.Labels{"blobtype": "Azure Data Lake Storage"},
						[]*float64{ptr.Float64(0), ptr.Float64(0), ptr.Float64(0), ptr.Float64(0), ptr.Float64(0), nil}).SetConfig(&data.FieldConfig{Unit: "short", Links: []data.DataLink{averageLink}})),
			},
		},
		{
			// Alias templates ({{resourcegroup}}, {{metric}}, ...) should be
			// expanded into the field's DisplayName.
			name:         "with alias patterns in the query",
			responseFile: "2-azure-monitor-response-total.json",
			mockQuery: &types.AzureMonitorQuery{
				URL:   "/subscriptions/12345678-aaaa-bbbb-cccc-123456789abc/resourceGroups/grafanastaging/providers/Microsoft.Compute/virtualMachines/grafana/providers/microsoft.insights/metrics",
				Alias: "custom {{resourcegroup}} {{namespace}} {{resourceName}} {{metric}}",
				Params: url.Values{
					"aggregation": {"Total"},
				},
			},
			expectedFrames: data.Frames{
				data.NewFrame("",
					data.NewField("Time", nil,
						makeDates(time.Date(2019, 2, 9, 13, 29, 0, 0, time.UTC), 5, time.Minute),
					).SetConfig(&data.FieldConfig{Links: []data.DataLink{totalLink}}),
					data.NewField("Percentage CPU", nil, []*float64{
						ptr.Float64(8.26), ptr.Float64(8.7), ptr.Float64(14.82), ptr.Float64(10.07), ptr.Float64(8.52),
					}).SetConfig(&data.FieldConfig{Unit: "percent", DisplayName: "custom grafanastaging Microsoft.Compute/virtualMachines grafana Percentage CPU", Links: []data.DataLink{totalLink}})),
			},
		},
		{
			// Dimension aliases are case-insensitive: {{dimensionname}} and
			// {{DimensionValue}} both resolve.
			name:         "single dimension with alias",
			responseFile: "6-azure-monitor-response-single-dimension.json",
			mockQuery: &types.AzureMonitorQuery{
				URL:   "/subscriptions/12345678-aaaa-bbbb-cccc-123456789abc/resourceGroups/grafanastaging/providers/Microsoft.Compute/virtualMachines/grafana/providers/microsoft.insights/metrics",
				Alias: "{{dimensionname}}={{DimensionValue}}",
				Params: url.Values{
					"aggregation": {"Average"},
				},
			},
			expectedFrames: data.Frames{
				data.NewFrame("",
					data.NewField("Time", nil,
						makeDates(time.Date(2019, 2, 9, 15, 21, 0, 0, time.UTC), 6, time.Hour),
					).SetConfig(&data.FieldConfig{Links: []data.DataLink{averageLink}}),
					data.NewField("Blob Count", data.Labels{"blobtype": "PageBlob"},
						[]*float64{ptr.Float64(3), ptr.Float64(3), ptr.Float64(3), ptr.Float64(3), ptr.Float64(3), nil}).SetConfig(&data.FieldConfig{Unit: "short", DisplayName: "blobtype=PageBlob", Links: []data.DataLink{averageLink}})),
				data.NewFrame("",
					data.NewField("Time", nil,
						makeDates(time.Date(2019, 2, 9, 15, 21, 0, 0, time.UTC), 6, time.Hour),
					).SetConfig(&data.FieldConfig{Links: []data.DataLink{averageLink}}),
					data.NewField("Blob Count", data.Labels{"blobtype": "BlockBlob"}, []*float64{
						ptr.Float64(1), ptr.Float64(1), ptr.Float64(1), ptr.Float64(1), ptr.Float64(1), nil,
					}).SetConfig(&data.FieldConfig{Unit: "short", DisplayName: "blobtype=BlockBlob", Links: []data.DataLink{averageLink}})),
				data.NewFrame("",
					data.NewField("Time", nil,
						makeDates(time.Date(2019, 2, 9, 15, 21, 0, 0, time.UTC), 6, time.Hour),
					).SetConfig(&data.FieldConfig{Links: []data.DataLink{averageLink}}),
					data.NewField("Blob Count", data.Labels{"blobtype": "Azure Data Lake Storage"}, []*float64{
						ptr.Float64(0), ptr.Float64(0), ptr.Float64(0), ptr.Float64(0), ptr.Float64(0), nil,
					}).SetConfig(&data.FieldConfig{Unit: "short", DisplayName: "blobtype=Azure Data Lake Storage", Links: []data.DataLink{averageLink}})),
			},
		},
		{
			name:         "multiple dimension time series response with label alias",
			responseFile: "7-azure-monitor-response-multi-dimension.json",
			mockQuery: &types.AzureMonitorQuery{
				URL:   "/subscriptions/12345678-aaaa-bbbb-cccc-123456789abc/resourceGroups/grafanastaging/providers/Microsoft.Compute/virtualMachines/grafana/providers/microsoft.insights/metrics",
				Alias: "{{resourcegroup}} {Blob Type={{blobtype}}, Tier={{Tier}}}",
				Params: url.Values{
					"aggregation": {"Average"},
				},
			},
			expectedFrames: data.Frames{
				data.NewFrame("",
					data.NewField("Time", nil,
						makeDates(time.Date(2020, 06, 30, 9, 58, 0, 0, time.UTC), 3, time.Hour),
					).SetConfig(&data.FieldConfig{Links: []data.DataLink{averageLink}}),
					data.NewField("Blob Capacity", data.Labels{"blobtype": "PageBlob", "tier": "Standard"},
						[]*float64{ptr.Float64(675530), ptr.Float64(675530), ptr.Float64(675530)}).SetConfig(
						&data.FieldConfig{Unit: "decbytes", DisplayName: "danieltest {Blob Type=PageBlob, Tier=Standard}", Links: []data.DataLink{averageLink}})),
				data.NewFrame("",
					data.NewField("Time", nil,
						makeDates(time.Date(2020, 06, 30, 9, 58, 0, 0, time.UTC), 3, time.Hour),
					).SetConfig(&data.FieldConfig{Links: []data.DataLink{averageLink}}),
					data.NewField("Blob Capacity", data.Labels{"blobtype": "BlockBlob", "tier": "Hot"},
						[]*float64{ptr.Float64(0), ptr.Float64(0), ptr.Float64(0)}).SetConfig(
						&data.FieldConfig{Unit: "decbytes", DisplayName: "danieltest {Blob Type=BlockBlob, Tier=Hot}", Links: []data.DataLink{averageLink}})),
				data.NewFrame("",
					data.NewField("Time", nil,
						makeDates(time.Date(2020, 06, 30, 9, 58, 0, 0, time.UTC), 3, time.Hour),
					).SetConfig(&data.FieldConfig{Links: []data.DataLink{averageLink}}),
					data.NewField("Blob Capacity", data.Labels{"blobtype": "Azure Data Lake Storage", "tier": "Cool"},
						[]*float64{ptr.Float64(0), ptr.Float64(0), ptr.Float64(0)}).SetConfig(
						&data.FieldConfig{Unit: "decbytes", DisplayName: "danieltest {Blob Type=Azure Data Lake Storage, Tier=Cool}", Links: []data.DataLink{averageLink}})),
			},
		},
		{
			// Regression guard: a response with an "Unspecified" unit used to
			// panic when combined with an alias.
			name:         "unspecified unit with alias should not panic",
			responseFile: "8-azure-monitor-response-unspecified-unit.json",
			mockQuery: &types.AzureMonitorQuery{
				URL:   "/subscriptions/12345678-aaaa-bbbb-cccc-123456789abc/resourceGroups/grafanastaging/providers/Microsoft.Compute/virtualMachines/grafana/providers/microsoft.insights/metrics",
				Alias: "custom",
				Params: url.Values{
					"aggregation": {"Average"},
				},
			},
			expectedFrames: data.Frames{
				data.NewFrame("",
					data.NewField("Time", nil,
						[]time.Time{time.Date(2019, 2, 8, 10, 13, 0, 0, time.UTC)},
					).SetConfig(&data.FieldConfig{Links: []data.DataLink{averageLink}}),
					data.NewField("Percentage CPU", nil, []*float64{
						ptr.Float64(2.0875),
					}).SetConfig(&data.FieldConfig{DisplayName: "custom", Links: []data.DataLink{averageLink}})),
			},
		},
		{
			// NOTE(review): this case and the next are currently identical
			// except for the name — presumably the resource-uri variant was
			// meant to differ; confirm against the query builder.
			name:         "with legacy azure monitor query properties and without a resource uri",
			responseFile: "2-azure-monitor-response-total.json",
			mockQuery: &types.AzureMonitorQuery{
				URL:   "/subscriptions/12345678-aaaa-bbbb-cccc-123456789abc/resourceGroups/grafanastaging/providers/Microsoft.Compute/virtualMachines/grafana/providers/microsoft.insights/metrics",
				Alias: "custom {{resourcegroup}} {{namespace}} {{resourceName}} {{metric}}",
				Params: url.Values{
					"aggregation": {"Total"},
				},
			},
			expectedFrames: data.Frames{
				data.NewFrame("",
					data.NewField("Time", nil,
						makeDates(time.Date(2019, 2, 9, 13, 29, 0, 0, time.UTC), 5, time.Minute),
					).SetConfig(&data.FieldConfig{Links: []data.DataLink{totalLink}}),
					data.NewField("Percentage CPU", nil, []*float64{
						ptr.Float64(8.26), ptr.Float64(8.7), ptr.Float64(14.82), ptr.Float64(10.07), ptr.Float64(8.52),
					}).SetConfig(&data.FieldConfig{Unit: "percent", DisplayName: "custom grafanastaging Microsoft.Compute/virtualMachines grafana Percentage CPU", Links: []data.DataLink{totalLink}})),
			},
		},
		{
			name:         "with legacy azure monitor query properties and with a resource uri it should use the resource uri",
			responseFile: "2-azure-monitor-response-total.json",
			mockQuery: &types.AzureMonitorQuery{
				URL:   "/subscriptions/12345678-aaaa-bbbb-cccc-123456789abc/resourceGroups/grafanastaging/providers/Microsoft.Compute/virtualMachines/grafana/providers/microsoft.insights/metrics",
				Alias: "custom {{resourcegroup}} {{namespace}} {{resourceName}} {{metric}}",
				Params: url.Values{
					"aggregation": {"Total"},
				},
			},
			expectedFrames: data.Frames{
				data.NewFrame("",
					data.NewField("Time", nil,
						makeDates(time.Date(2019, 2, 9, 13, 29, 0, 0, time.UTC), 5, time.Minute),
					).SetConfig(&data.FieldConfig{Links: []data.DataLink{totalLink}}),
					data.NewField("Percentage CPU", nil, []*float64{
						ptr.Float64(8.26), ptr.Float64(8.7), ptr.Float64(14.82), ptr.Float64(10.07), ptr.Float64(8.52),
					}).SetConfig(&data.FieldConfig{Unit: "percent", DisplayName: "custom grafanastaging Microsoft.Compute/virtualMachines grafana Percentage CPU", Links: []data.DataLink{totalLink}})),
			},
		},
		{
			// Subscription-scoped query: the per-resource id comes back as the
			// "microsoft.resourceid" label on the value field.
			name:         "multiple time series response",
			responseFile: "9-azure-monitor-response-multi.json",
			mockQuery: &types.AzureMonitorQuery{
				URL: "/subscriptions/12345678-aaaa-bbbb-cccc-123456789abc/providers/microsoft.insights/metrics",
				Params: url.Values{
					"aggregation": {"Average"},
				},
			},
			expectedFrames: data.Frames{
				data.NewFrame("",
					data.NewField("Time", nil,
						makeDates(time.Date(2019, 2, 8, 10, 13, 0, 0, time.UTC), 5, time.Minute),
					).SetConfig(&data.FieldConfig{Links: []data.DataLink{averageLink}}),
					data.NewField("Percentage CPU", data.Labels{"microsoft.resourceid": "/subscriptions/12345678-aaaa-bbbb-cccc-123456789abc/resourceGroups/grafanastaging/providers/Microsoft.Compute/virtualMachines/grafana"}, []*float64{
						ptr.Float64(2.0875), ptr.Float64(2.1525), ptr.Float64(2.155), ptr.Float64(3.6925), ptr.Float64(2.44),
					}).SetConfig(&data.FieldConfig{Unit: "percent", Links: []data.DataLink{averageLink}}),
				),
			},
		},
	}
	datasource := &AzureMonitorDatasource{}
	for _, tt := range tests {
		t.Run(tt.name, func(t *testing.T) {
			azData := loadTestFile(t, "azuremonitor/"+tt.responseFile)
			dframes, err := datasource.parseResponse(azData, tt.mockQuery, "http://ds")
			require.NoError(t, err)
			require.NotNil(t, dframes)
			// Compare with the frame-aware options so metadata (configs,
			// labels, links) participates in the diff.
			if diff := cmp.Diff(tt.expectedFrames, dframes, data.FrameTestCompareOptions()...); diff != "" {
				t.Errorf("Result mismatch (-want +got):\n%s", diff)
			}
		})
	}
}
// TestFindClosestAllowIntervalMS verifies that FindClosestAllowedIntervalMS
// snaps a requested interval (in milliseconds) to the nearest allowed Azure
// Monitor time grain, using the package defaults when the allowed list is
// empty.
func TestFindClosestAllowIntervalMS(t *testing.T) {
	// Human-readable interval names mapped to milliseconds, to keep the
	// table cases below legible.
	humanIntervalToMS := map[string]int64{
		"3m":  180000,
		"5m":  300000,
		"10m": 600000,
		"15m": 900000,
		"1d":  86400000,
		"2d":  172800000,
	}
	tests := []struct {
		name              string
		allowedTimeGrains []int64 // Note: Uses defaults when empty list
		inputInterval     int64
		expectedInterval  int64
	}{
		{
			name:              "closest to 3m is 5m",
			allowedTimeGrains: []int64{},
			inputInterval:     humanIntervalToMS["3m"],
			expectedInterval:  humanIntervalToMS["5m"],
		},
		{
			name:              "closest to 10m is 15m",
			allowedTimeGrains: []int64{},
			inputInterval:     humanIntervalToMS["10m"],
			expectedInterval:  humanIntervalToMS["15m"],
		},
		{
			name:              "closest to 2d is 1d",
			allowedTimeGrains: []int64{},
			inputInterval:     humanIntervalToMS["2d"],
			expectedInterval:  humanIntervalToMS["1d"],
		},
		{
			// Fix: the input previously used "2d", contradicting the case
			// name. Use "3m" so this case actually exercises snapping a small
			// interval up to the single allowed grain.
			name:              "closest to 3m is 1d when 1d is only allowed interval",
			allowedTimeGrains: []int64{humanIntervalToMS["1d"]},
			inputInterval:     humanIntervalToMS["3m"],
			expectedInterval:  humanIntervalToMS["1d"],
		},
	}
	for _, tt := range tests {
		t.Run(tt.name, func(t *testing.T) {
			interval := azTime.FindClosestAllowedIntervalMS(tt.inputInterval, tt.allowedTimeGrains)
			require.Equal(t, tt.expectedInterval, interval)
		})
	}
}
// loadTestFile reads a canned Azure Monitor JSON response from ../testdata and
// unmarshals it, failing the calling test immediately on any I/O or decode
// error.
func loadTestFile(t *testing.T, name string) types.AzureMonitorResponse {
	t.Helper()
	// Ignore gosec warning G304 since it's a test
	// nolint:gosec
	raw, err := os.ReadFile(filepath.Join("../testdata", name))
	require.NoError(t, err)
	var response types.AzureMonitorResponse
	require.NoError(t, json.Unmarshal(raw, &response))
	return response
}
// TestAzureMonitorCreateRequest checks that createRequest targets the given
// datasource URL and sets the JSON Content-Type header.
func TestAzureMonitorCreateRequest(t *testing.T) {
	ctx := context.Background()
	// NOTE(review): this local shadows the imported net/url package within the
	// function; harmless here since the package is not referenced below.
	url := "http://ds/"
	tests := []struct {
		name            string
		expectedURL     string
		expectedHeaders http.Header
		Err             require.ErrorAssertionFunc // how the returned error is asserted
	}{
		{
			name:        "creates a request",
			expectedURL: "http://ds/",
			expectedHeaders: http.Header{
				"Content-Type": []string{"application/json"},
			},
			Err: require.NoError,
		},
	}
	for _, tt := range tests {
		t.Run(tt.name, func(t *testing.T) {
			ds := AzureMonitorDatasource{}
			req, err := ds.createRequest(ctx, log.New("test"), url)
			tt.Err(t, err)
			if req.URL.String() != tt.expectedURL {
				t.Errorf("Expecting %s, got %s", tt.expectedURL, req.URL.String())
			}
			if !cmp.Equal(req.Header, tt.expectedHeaders) {
				t.Errorf("Unexpected HTTP headers: %v", cmp.Diff(req.Header, tt.expectedHeaders))
			}
		})
	}
}
// TestExtractResourceNameFromMetricsURL covers resource-name extraction from
// Azure metrics URLs: the happy path, case-insensitive matching of the
// providers/microsoft.insights/metrics suffix, and the no-match case.
func TestExtractResourceNameFromMetricsURL(t *testing.T) {
	t.Run("it should extract the resourceName from a well-formed Metrics URL", func(t *testing.T) {
		metricsURL := "/subscriptions/12345678-aaaa-bbbb-cccc-123456789abc/resourceGroups/grafanastaging/providers/Microsoft.Compute/virtualMachines/Grafana-Test.VM/providers/microsoft.insights/metrics"
		require.Equal(t, "Grafana-Test.VM", extractResourceNameFromMetricsURL(metricsURL))
	})
	t.Run("it should extract the resourceName from a well-formed Metrics URL in a case insensitive manner", func(t *testing.T) {
		metricsURL := "/subscriptions/12345678-aaaa-bbbb-cccc-123456789abc/resourceGroups/grafanastaging/providers/Microsoft.Compute/virtualMachines/Grafana-Test.VM/pRoViDeRs/MiCrOsOfT.iNsIgHtS/mEtRiCs"
		require.Equal(t, "Grafana-Test.VM", extractResourceNameFromMetricsURL(metricsURL))
	})
	t.Run("it should return an empty string if no match is found", func(t *testing.T) {
		metricsURL := "/subscriptions/12345678-aaaa-bbbb-cccc-123456789abc/resourceGroups/grafanastaging/providers/Microsoft.Compute/virtualMachines/Grafana-Test.VM/providers/microsoft.insights/nope-this-part-does-not-match"
		require.Equal(t, "", extractResourceNameFromMetricsURL(metricsURL))
	})
}
| pkg/tsdb/azuremonitor/metrics/azuremonitor-datasource_test.go | 1 | https://github.com/grafana/grafana/commit/a7d4bbf0248d0c6e5c6fd9f9ca55ffd399781c12 | [
0.9985484480857849,
0.1836998015642166,
0.000162752068717964,
0.003088580910116434,
0.35600563883781433
] |
{
"id": 2,
"code_window": [
"\t\tif setting.Env == setting.Dev {\n",
"\t\t\tlogger.Debug(\"Azuremonitor request\", \"params\", params)\n",
"\t\t}\n",
"\n",
"\t\tazureMonitorQueries = append(azureMonitorQueries, &types.AzureMonitorQuery{\n",
"\t\t\tURL: azureURL,\n",
"\t\t\tTarget: target,\n",
"\t\t\tParams: params,\n",
"\t\t\tRefID: query.RefID,\n"
],
"labels": [
"keep",
"keep",
"keep",
"keep",
"replace",
"keep",
"keep",
"keep",
"keep"
],
"after_edit": [
"\t\tquery := &types.AzureMonitorQuery{\n"
],
"file_path": "pkg/tsdb/azuremonitor/metrics/azuremonitor-datasource.go",
"type": "replace",
"edit_start_line_idx": 176
} | <svg xmlns="http://www.w3.org/2000/svg" viewBox="0 0 24 24"><path d="M20,8.94a1.31,1.31,0,0,0-.06-.27l0-.09a1.07,1.07,0,0,0-.19-.28h0l-6-6h0a1.07,1.07,0,0,0-.28-.19l-.09,0A.88.88,0,0,0,13.05,2H7A3,3,0,0,0,4,5V19a3,3,0,0,0,3,3H17a3,3,0,0,0,3-3V9S20,9,20,8.94ZM14,5.41,16.59,8H15a1,1,0,0,1-1-1ZM18,19a1,1,0,0,1-1,1H7a1,1,0,0,1-1-1V5A1,1,0,0,1,7,4h5V7a3,3,0,0,0,3,3h3Z"/></svg> | public/img/icons/unicons/file-blank.svg | 0 | https://github.com/grafana/grafana/commit/a7d4bbf0248d0c6e5c6fd9f9ca55ffd399781c12 | [
0.00017218873836100101,
0.00017218873836100101,
0.00017218873836100101,
0.00017218873836100101,
0
] |
{
"id": 2,
"code_window": [
"\t\tif setting.Env == setting.Dev {\n",
"\t\t\tlogger.Debug(\"Azuremonitor request\", \"params\", params)\n",
"\t\t}\n",
"\n",
"\t\tazureMonitorQueries = append(azureMonitorQueries, &types.AzureMonitorQuery{\n",
"\t\t\tURL: azureURL,\n",
"\t\t\tTarget: target,\n",
"\t\t\tParams: params,\n",
"\t\t\tRefID: query.RefID,\n"
],
"labels": [
"keep",
"keep",
"keep",
"keep",
"replace",
"keep",
"keep",
"keep",
"keep"
],
"after_edit": [
"\t\tquery := &types.AzureMonitorQuery{\n"
],
"file_path": "pkg/tsdb/azuremonitor/metrics/azuremonitor-datasource.go",
"type": "replace",
"edit_start_line_idx": 176
} | import {
FieldOverrideContext,
FieldType,
getFieldDisplayName,
PanelPlugin,
ReducerID,
standardEditorsRegistry,
} from '@grafana/data';
import { TableFieldOptions } from '@grafana/schema';
import { TableCellDisplayMode } from '@grafana/ui';
import { PaginationEditor } from './PaginationEditor';
import { TablePanel } from './TablePanel';
import { tableMigrationHandler, tablePanelChangedHandler } from './migrations';
import { PanelOptions, defaultPanelOptions, defaultPanelFieldConfig } from './models.gen';
import { TableSuggestionsSupplier } from './suggestions';
const footerCategory = 'Table footer';
export const plugin = new PanelPlugin<PanelOptions, TableFieldOptions>(TablePanel)
.setPanelChangeHandler(tablePanelChangedHandler)
.setMigrationHandler(tableMigrationHandler)
.setNoPadding()
.useFieldConfig({
useCustomConfig: (builder) => {
builder
.addNumberInput({
path: 'minWidth',
name: 'Minimum column width',
description: 'The minimum width for column auto resizing',
settings: {
placeholder: '150',
min: 50,
max: 500,
},
shouldApply: () => true,
defaultValue: defaultPanelFieldConfig.minWidth,
})
.addNumberInput({
path: 'width',
name: 'Column width',
settings: {
placeholder: 'auto',
min: 20,
max: 300,
},
shouldApply: () => true,
defaultValue: defaultPanelFieldConfig.width,
})
.addRadio({
path: 'align',
name: 'Column alignment',
settings: {
options: [
{ label: 'auto', value: 'auto' },
{ label: 'left', value: 'left' },
{ label: 'center', value: 'center' },
{ label: 'right', value: 'right' },
],
},
defaultValue: defaultPanelFieldConfig.align,
})
.addSelect({
path: 'displayMode',
name: 'Cell display mode',
description: 'Color text, background, show as gauge, etc',
settings: {
options: [
{ value: TableCellDisplayMode.Auto, label: 'Auto' },
{ value: TableCellDisplayMode.ColorText, label: 'Color text' },
{ value: TableCellDisplayMode.ColorBackground, label: 'Color background (gradient)' },
{ value: TableCellDisplayMode.ColorBackgroundSolid, label: 'Color background (solid)' },
{ value: TableCellDisplayMode.GradientGauge, label: 'Gradient gauge' },
{ value: TableCellDisplayMode.LcdGauge, label: 'LCD gauge' },
{ value: TableCellDisplayMode.BasicGauge, label: 'Basic gauge' },
{ value: TableCellDisplayMode.JSONView, label: 'JSON View' },
{ value: TableCellDisplayMode.Image, label: 'Image' },
],
},
defaultValue: defaultPanelFieldConfig.displayMode,
})
.addBooleanSwitch({
path: 'inspect',
name: 'Cell value inspect',
description: 'Enable cell value inspection in a modal window',
defaultValue: false,
showIf: (cfg) => {
return (
cfg.displayMode === TableCellDisplayMode.Auto ||
cfg.displayMode === TableCellDisplayMode.JSONView ||
cfg.displayMode === TableCellDisplayMode.ColorText ||
cfg.displayMode === TableCellDisplayMode.ColorBackground ||
cfg.displayMode === TableCellDisplayMode.ColorBackgroundSolid
);
},
})
.addBooleanSwitch({
path: 'filterable',
name: 'Column filter',
description: 'Enables/disables field filters in table',
defaultValue: defaultPanelFieldConfig.filterable,
})
.addBooleanSwitch({
path: 'hidden',
name: 'Hide in table',
defaultValue: undefined,
hideFromDefaults: true,
});
},
})
.setPanelOptions((builder) => {
builder
.addBooleanSwitch({
path: 'showHeader',
name: 'Show table header',
defaultValue: defaultPanelOptions.showHeader,
})
.addBooleanSwitch({
path: 'footer.show',
category: [footerCategory],
name: 'Show table footer',
defaultValue: defaultPanelOptions.footer?.show,
})
.addCustomEditor({
id: 'footer.reducer',
category: [footerCategory],
path: 'footer.reducer',
name: 'Calculation',
description: 'Choose a reducer function / calculation',
editor: standardEditorsRegistry.get('stats-picker').editor as any,
defaultValue: [ReducerID.sum],
showIf: (cfg) => cfg.footer?.show,
})
.addBooleanSwitch({
path: 'footer.countRows',
category: [footerCategory],
name: 'Count rows',
description: 'Display a single count for all data rows',
defaultValue: defaultPanelOptions.footer?.countRows,
showIf: (cfg) => cfg.footer?.reducer?.length === 1 && cfg.footer?.reducer[0] === ReducerID.count,
})
.addMultiSelect({
path: 'footer.fields',
category: [footerCategory],
name: 'Fields',
description: 'Select the fields that should be calculated',
settings: {
allowCustomValue: false,
options: [],
placeholder: 'All Numeric Fields',
getOptions: async (context: FieldOverrideContext) => {
const options = [];
if (context && context.data && context.data.length > 0) {
const frame = context.data[0];
for (const field of frame.fields) {
if (field.type === FieldType.number) {
const name = getFieldDisplayName(field, frame, context.data);
const value = field.name;
options.push({ value, label: name } as any);
}
}
}
return options;
},
},
defaultValue: '',
showIf: (cfg) =>
(cfg.footer?.show && !cfg.footer?.countRows) ||
(cfg.footer?.reducer?.length === 1 && cfg.footer?.reducer[0] !== ReducerID.count),
})
.addCustomEditor({
id: 'footer.enablePagination',
path: 'footer.enablePagination',
name: 'Enable pagination',
editor: PaginationEditor,
});
})
.setSuggestionsSupplier(new TableSuggestionsSupplier());
| public/app/plugins/panel/table/module.tsx | 0 | https://github.com/grafana/grafana/commit/a7d4bbf0248d0c6e5c6fd9f9ca55ffd399781c12 | [
0.00024444222799502313,
0.00017675379058346152,
0.00016731226060073823,
0.00017348276742268354,
0.000016624162526568398
] |
{
"id": 2,
"code_window": [
"\t\tif setting.Env == setting.Dev {\n",
"\t\t\tlogger.Debug(\"Azuremonitor request\", \"params\", params)\n",
"\t\t}\n",
"\n",
"\t\tazureMonitorQueries = append(azureMonitorQueries, &types.AzureMonitorQuery{\n",
"\t\t\tURL: azureURL,\n",
"\t\t\tTarget: target,\n",
"\t\t\tParams: params,\n",
"\t\t\tRefID: query.RefID,\n"
],
"labels": [
"keep",
"keep",
"keep",
"keep",
"replace",
"keep",
"keep",
"keep",
"keep"
],
"after_edit": [
"\t\tquery := &types.AzureMonitorQuery{\n"
],
"file_path": "pkg/tsdb/azuremonitor/metrics/azuremonitor-datasource.go",
"type": "replace",
"edit_start_line_idx": 176
} | package setting
type OrgQuota struct {
User int64 `target:"org_user"`
DataSource int64 `target:"data_source"`
Dashboard int64 `target:"dashboard"`
ApiKey int64 `target:"api_key"`
AlertRule int64 `target:"alert_rule"`
}
type UserQuota struct {
Org int64 `target:"org_user"`
}
type GlobalQuota struct {
Org int64 `target:"org"`
User int64 `target:"user"`
DataSource int64 `target:"data_source"`
Dashboard int64 `target:"dashboard"`
ApiKey int64 `target:"api_key"`
Session int64 `target:"-"`
AlertRule int64 `target:"alert_rule"`
File int64 `target:"file"`
}
type QuotaSettings struct {
Enabled bool
Org OrgQuota
User UserQuota
Global GlobalQuota
}
func (cfg *Cfg) readQuotaSettings() {
// set global defaults.
quota := cfg.Raw.Section("quota")
cfg.Quota.Enabled = quota.Key("enabled").MustBool(false)
var alertOrgQuota int64
var alertGlobalQuota int64
if cfg.UnifiedAlerting.IsEnabled() {
alertOrgQuota = quota.Key("org_alert_rule").MustInt64(100)
alertGlobalQuota = quota.Key("global_alert_rule").MustInt64(-1)
}
// per ORG Limits
cfg.Quota.Org = OrgQuota{
User: quota.Key("org_user").MustInt64(10),
DataSource: quota.Key("org_data_source").MustInt64(10),
Dashboard: quota.Key("org_dashboard").MustInt64(10),
ApiKey: quota.Key("org_api_key").MustInt64(10),
AlertRule: alertOrgQuota,
}
// per User limits
cfg.Quota.User = UserQuota{
Org: quota.Key("user_org").MustInt64(10),
}
// Global Limits
cfg.Quota.Global = GlobalQuota{
User: quota.Key("global_user").MustInt64(-1),
Org: quota.Key("global_org").MustInt64(-1),
DataSource: quota.Key("global_data_source").MustInt64(-1),
Dashboard: quota.Key("global_dashboard").MustInt64(-1),
ApiKey: quota.Key("global_api_key").MustInt64(-1),
Session: quota.Key("global_session").MustInt64(-1),
File: quota.Key("global_file").MustInt64(-1),
AlertRule: alertGlobalQuota,
}
}
| pkg/setting/setting_quota.go | 0 | https://github.com/grafana/grafana/commit/a7d4bbf0248d0c6e5c6fd9f9ca55ffd399781c12 | [
0.0006581101333722472,
0.0002550048811826855,
0.00016532407607883215,
0.00017264281632378697,
0.000168347978615202
] |
{
"id": 3,
"code_window": [
"\t\t\tParams: params,\n",
"\t\t\tRefID: query.RefID,\n",
"\t\t\tAlias: alias,\n",
"\t\t\tTimeRange: query.TimeRange,\n",
"\t\t\tFilter: filterString,\n",
"\t\t})\n",
"\t}\n",
"\n",
"\treturn azureMonitorQueries, nil\n",
"}\n",
"\n"
],
"labels": [
"keep",
"keep",
"keep",
"keep",
"replace",
"replace",
"keep",
"keep",
"keep",
"keep",
"keep"
],
"after_edit": [
"\t\t}\n",
"\t\tif filterString != \"\" {\n",
"\t\t\tif filterInBody {\n",
"\t\t\t\tquery.BodyFilter = filterString\n",
"\t\t\t} else {\n",
"\t\t\t\tquery.Params.Add(\"$filter\", filterString)\n",
"\t\t\t}\n",
"\t\t}\n",
"\t\tazureMonitorQueries = append(azureMonitorQueries, query)\n"
],
"file_path": "pkg/tsdb/azuremonitor/metrics/azuremonitor-datasource.go",
"type": "replace",
"edit_start_line_idx": 183
} | package metrics
import (
"context"
"encoding/json"
"fmt"
"net/http"
"net/url"
"os"
"path/filepath"
"testing"
"time"
"github.com/google/go-cmp/cmp"
"github.com/google/go-cmp/cmp/cmpopts"
"github.com/grafana/grafana-plugin-sdk-go/backend"
"github.com/grafana/grafana-plugin-sdk-go/data"
"github.com/stretchr/testify/require"
ptr "github.com/xorcare/pointer"
"github.com/grafana/grafana/pkg/components/simplejson"
"github.com/grafana/grafana/pkg/infra/log"
azTime "github.com/grafana/grafana/pkg/tsdb/azuremonitor/time"
"github.com/grafana/grafana/pkg/tsdb/azuremonitor/types"
)
func TestAzureMonitorBuildQueries(t *testing.T) {
datasource := &AzureMonitorDatasource{}
dsInfo := types.DatasourceInfo{
Settings: types.AzureMonitorSettings{
SubscriptionId: "default-subscription",
},
}
fromStart := time.Date(2018, 3, 15, 13, 0, 0, 0, time.UTC).In(time.Local)
duration, _ := time.ParseDuration("400s")
wildcardFilter := "*"
testFilter := "test"
tests := []struct {
name string
azureMonitorVariedProperties map[string]interface{}
azureMonitorQueryTarget string
expectedInterval string
queryInterval time.Duration
expectedURL string
expectedFilter string
}{
{
name: "Parse queries from frontend and build AzureMonitor API queries",
azureMonitorVariedProperties: map[string]interface{}{
"resourceURI": "/subscriptions/12345678-aaaa-bbbb-cccc-123456789abc/resourceGroups/grafanastaging/providers/Microsoft.Compute/virtualMachines/grafana",
"timeGrain": "PT1M",
"top": "10",
},
expectedInterval: "PT1M",
azureMonitorQueryTarget: "aggregation=Average&api-version=2021-05-01&interval=PT1M&metricnames=Percentage+CPU&metricnamespace=Microsoft.Compute%2FvirtualMachines×pan=2018-03-15T13%3A00%3A00Z%2F2018-03-15T13%3A34%3A00Z",
},
{
name: "legacy query without resourceURI and time grain set to auto",
azureMonitorVariedProperties: map[string]interface{}{
"timeGrain": "auto",
"top": "10",
},
queryInterval: duration,
expectedInterval: "PT15M",
azureMonitorQueryTarget: "aggregation=Average&api-version=2021-05-01&interval=PT15M&metricnames=Percentage+CPU&metricnamespace=Microsoft.Compute%2FvirtualMachines×pan=2018-03-15T13%3A00%3A00Z%2F2018-03-15T13%3A34%3A00Z",
},
{
name: "legacy query without resourceURI and time grain set to auto",
azureMonitorVariedProperties: map[string]interface{}{
"timeGrain": "auto",
"allowedTimeGrainsMs": []int64{60000, 300000},
"top": "10",
},
queryInterval: duration,
expectedInterval: "PT5M",
azureMonitorQueryTarget: "aggregation=Average&api-version=2021-05-01&interval=PT5M&metricnames=Percentage+CPU&metricnamespace=Microsoft.Compute%2FvirtualMachines×pan=2018-03-15T13%3A00%3A00Z%2F2018-03-15T13%3A34%3A00Z",
},
{
name: "legacy query without resourceURI and has a dimension filter",
azureMonitorVariedProperties: map[string]interface{}{
"timeGrain": "PT1M",
"dimension": "blob",
"dimensionFilter": "*",
"top": "30",
},
queryInterval: duration,
expectedInterval: "PT1M",
azureMonitorQueryTarget: "aggregation=Average&api-version=2021-05-01&interval=PT1M&metricnames=Percentage+CPU&metricnamespace=Microsoft.Compute%2FvirtualMachines×pan=2018-03-15T13%3A00%3A00Z%2F2018-03-15T13%3A34%3A00Z&top=30",
expectedFilter: "blob eq '*'",
},
{
name: "legacy query without resourceURI and has a dimension filter and none Dimension",
azureMonitorVariedProperties: map[string]interface{}{
"timeGrain": "PT1M",
"dimension": "None",
"dimensionFilter": "*",
"top": "10",
},
queryInterval: duration,
expectedInterval: "PT1M",
azureMonitorQueryTarget: "aggregation=Average&api-version=2021-05-01&interval=PT1M&metricnames=Percentage+CPU&metricnamespace=Microsoft.Compute%2FvirtualMachines×pan=2018-03-15T13%3A00%3A00Z%2F2018-03-15T13%3A34%3A00Z",
},
{
name: "legacy query without resourceURI and has dimensionFilter*s* property with one dimension",
azureMonitorVariedProperties: map[string]interface{}{
"timeGrain": "PT1M",
"dimensionFilters": []types.AzureMonitorDimensionFilter{{Dimension: "blob", Operator: "eq", Filter: &wildcardFilter}},
"top": "30",
},
queryInterval: duration,
expectedInterval: "PT1M",
azureMonitorQueryTarget: "aggregation=Average&api-version=2021-05-01&interval=PT1M&metricnames=Percentage+CPU&metricnamespace=Microsoft.Compute%2FvirtualMachines×pan=2018-03-15T13%3A00%3A00Z%2F2018-03-15T13%3A34%3A00Z&top=30",
expectedFilter: "blob eq '*'",
},
{
name: "legacy query without resourceURI and has dimensionFilter*s* property with two dimensions",
azureMonitorVariedProperties: map[string]interface{}{
"timeGrain": "PT1M",
"dimensionFilters": []types.AzureMonitorDimensionFilter{{Dimension: "blob", Operator: "eq", Filter: &wildcardFilter}, {Dimension: "tier", Operator: "eq", Filter: &wildcardFilter}},
"top": "30",
},
queryInterval: duration,
expectedInterval: "PT1M",
azureMonitorQueryTarget: "aggregation=Average&api-version=2021-05-01&interval=PT1M&metricnames=Percentage+CPU&metricnamespace=Microsoft.Compute%2FvirtualMachines×pan=2018-03-15T13%3A00%3A00Z%2F2018-03-15T13%3A34%3A00Z&top=30",
expectedFilter: "blob eq '*' and tier eq '*'",
},
{
name: "legacy query without resourceURI and has a dimension filter without specifying a top",
azureMonitorVariedProperties: map[string]interface{}{
"timeGrain": "PT1M",
"dimension": "blob",
"dimensionFilter": "*",
},
queryInterval: duration,
expectedInterval: "PT1M",
azureMonitorQueryTarget: "aggregation=Average&api-version=2021-05-01&interval=PT1M&metricnames=Percentage+CPU&metricnamespace=Microsoft.Compute%2FvirtualMachines×pan=2018-03-15T13%3A00%3A00Z%2F2018-03-15T13%3A34%3A00Z",
expectedFilter: "blob eq '*'",
},
{
name: "has dimensionFilter*s* property with not equals operator",
azureMonitorVariedProperties: map[string]interface{}{
"timeGrain": "PT1M",
"dimensionFilters": []types.AzureMonitorDimensionFilter{{Dimension: "blob", Operator: "ne", Filter: &wildcardFilter, Filters: []string{"test"}}},
"top": "30",
},
queryInterval: duration,
expectedInterval: "PT1M",
azureMonitorQueryTarget: "aggregation=Average&api-version=2021-05-01&interval=PT1M&metricnames=Percentage+CPU&metricnamespace=Microsoft.Compute%2FvirtualMachines×pan=2018-03-15T13%3A00%3A00Z%2F2018-03-15T13%3A34%3A00Z&top=30",
expectedFilter: "blob ne 'test'",
},
{
name: "has dimensionFilter*s* property with startsWith operator",
azureMonitorVariedProperties: map[string]interface{}{
"timeGrain": "PT1M",
"dimensionFilters": []types.AzureMonitorDimensionFilter{{Dimension: "blob", Operator: "sw", Filter: &testFilter}},
"top": "30",
},
queryInterval: duration,
expectedInterval: "PT1M",
azureMonitorQueryTarget: "aggregation=Average&api-version=2021-05-01&interval=PT1M&metricnames=Percentage+CPU&metricnamespace=Microsoft.Compute%2FvirtualMachines×pan=2018-03-15T13%3A00%3A00Z%2F2018-03-15T13%3A34%3A00Z&top=30",
expectedFilter: "blob sw 'test'",
},
{
name: "correctly sets dimension operator to eq (irrespective of operator) when filter value is '*'",
azureMonitorVariedProperties: map[string]interface{}{
"timeGrain": "PT1M",
"dimensionFilters": []types.AzureMonitorDimensionFilter{{Dimension: "blob", Operator: "sw", Filter: &wildcardFilter}, {Dimension: "tier", Operator: "ne", Filter: &wildcardFilter}},
"top": "30",
},
queryInterval: duration,
expectedInterval: "PT1M",
azureMonitorQueryTarget: "aggregation=Average&api-version=2021-05-01&interval=PT1M&metricnames=Percentage+CPU&metricnamespace=Microsoft.Compute%2FvirtualMachines×pan=2018-03-15T13%3A00%3A00Z%2F2018-03-15T13%3A34%3A00Z&top=30",
expectedFilter: "blob eq '*' and tier eq '*'",
},
{
name: "correctly constructs target when multiple filter values are provided for the 'eq' operator",
azureMonitorVariedProperties: map[string]interface{}{
"timeGrain": "PT1M",
"dimensionFilters": []types.AzureMonitorDimensionFilter{{Dimension: "blob", Operator: "eq", Filter: &wildcardFilter, Filters: []string{"test", "test2"}}},
"top": "30",
},
queryInterval: duration,
expectedInterval: "PT1M",
azureMonitorQueryTarget: "aggregation=Average&api-version=2021-05-01&interval=PT1M&metricnames=Percentage+CPU&metricnamespace=Microsoft.Compute%2FvirtualMachines×pan=2018-03-15T13%3A00%3A00Z%2F2018-03-15T13%3A34%3A00Z&top=30",
expectedFilter: "blob eq 'test' or blob eq 'test2'",
},
{
name: "correctly constructs target when multiple filter values are provided for ne 'eq' operator",
azureMonitorVariedProperties: map[string]interface{}{
"timeGrain": "PT1M",
"dimensionFilters": []types.AzureMonitorDimensionFilter{{Dimension: "blob", Operator: "ne", Filter: &wildcardFilter, Filters: []string{"test", "test2"}}},
"top": "30",
},
queryInterval: duration,
expectedInterval: "PT1M",
azureMonitorQueryTarget: "aggregation=Average&api-version=2021-05-01&interval=PT1M&metricnames=Percentage+CPU&metricnamespace=Microsoft.Compute%2FvirtualMachines×pan=2018-03-15T13%3A00%3A00Z%2F2018-03-15T13%3A34%3A00Z&top=30",
expectedFilter: "blob ne 'test' and blob ne 'test2'",
},
{
name: "Includes a region",
azureMonitorVariedProperties: map[string]interface{}{
"timeGrain": "PT1M",
"top": "10",
"region": "westus",
},
expectedInterval: "PT1M",
azureMonitorQueryTarget: "aggregation=Average&api-version=2021-05-01&interval=PT1M&metricnames=Percentage+CPU&metricnamespace=Microsoft.Compute%2FvirtualMachines®ion=westus×pan=2018-03-15T13%3A00%3A00Z%2F2018-03-15T13%3A34%3A00Z",
expectedURL: "/subscriptions/12345678-aaaa-bbbb-cccc-123456789abc/providers/microsoft.insights/metrics",
},
{
name: "includes a resource as a filter",
azureMonitorVariedProperties: map[string]interface{}{
"timeGrain": "PT1M",
"resources": []types.AzureMonitorResource{{ResourceGroup: "rg", ResourceName: "vm"}},
},
queryInterval: duration,
expectedInterval: "PT1M",
azureMonitorQueryTarget: "aggregation=Average&api-version=2021-05-01&interval=PT1M&metricnames=Percentage+CPU&metricnamespace=Microsoft.Compute%2FvirtualMachines×pan=2018-03-15T13%3A00%3A00Z%2F2018-03-15T13%3A34%3A00Z",
expectedFilter: "Microsoft.ResourceId eq '/subscriptions/12345678-aaaa-bbbb-cccc-123456789abc/resourceGroups/rg/providers/Microsoft.Compute/virtualMachines/vm'",
},
{
name: "includes a resource and a dimesion as filters",
azureMonitorVariedProperties: map[string]interface{}{
"timeGrain": "PT1M",
"resources": []types.AzureMonitorResource{{ResourceGroup: "rg", ResourceName: "vm"}},
"dimensionFilters": []types.AzureMonitorDimensionFilter{{Dimension: "blob", Operator: "ne", Filter: &wildcardFilter, Filters: []string{"test", "test2"}}},
"top": "30",
},
queryInterval: duration,
expectedInterval: "PT1M",
azureMonitorQueryTarget: "aggregation=Average&api-version=2021-05-01&interval=PT1M&metricnames=Percentage+CPU&metricnamespace=Microsoft.Compute%2FvirtualMachines×pan=2018-03-15T13%3A00%3A00Z%2F2018-03-15T13%3A34%3A00Z&top=30",
expectedFilter: "(Microsoft.ResourceId eq '/subscriptions/12345678-aaaa-bbbb-cccc-123456789abc/resourceGroups/rg/providers/Microsoft.Compute/virtualMachines/vm') and (blob ne 'test' and blob ne 'test2')",
},
}
commonAzureModelProps := map[string]interface{}{
"aggregation": "Average",
"resourceGroup": "grafanastaging",
"resourceName": "grafana",
"metricNamespace": "Microsoft.Compute/virtualMachines",
"metricName": "Percentage CPU",
"alias": "testalias",
"queryType": "Azure Monitor",
}
for _, tt := range tests {
t.Run(tt.name, func(t *testing.T) {
for k, v := range commonAzureModelProps {
tt.azureMonitorVariedProperties[k] = v
}
azureMonitorJSON, _ := json.Marshal(tt.azureMonitorVariedProperties)
tsdbQuery := []backend.DataQuery{
{
JSON: []byte(fmt.Sprintf(`{
"subscription": "12345678-aaaa-bbbb-cccc-123456789abc",
"azureMonitor": %s
}`, string(azureMonitorJSON))),
RefID: "A",
Interval: tt.queryInterval,
TimeRange: backend.TimeRange{
From: fromStart,
To: fromStart.Add(34 * time.Minute),
},
},
}
azureMonitorQuery := &types.AzureMonitorQuery{
URL: tt.expectedURL,
Target: tt.azureMonitorQueryTarget,
RefID: "A",
Alias: "testalias",
TimeRange: backend.TimeRange{
From: fromStart,
To: fromStart.Add(34 * time.Minute),
},
Filter: tt.expectedFilter,
}
if azureMonitorQuery.URL == "" {
azureMonitorQuery.URL = "/subscriptions/12345678-aaaa-bbbb-cccc-123456789abc/resourceGroups/grafanastaging/providers/Microsoft.Compute/virtualMachines/grafana/providers/microsoft.insights/metrics"
}
queries, err := datasource.buildQueries(log.New("test"), tsdbQuery, dsInfo)
require.NoError(t, err)
if diff := cmp.Diff(azureMonitorQuery, queries[0], cmpopts.IgnoreUnexported(simplejson.Json{}), cmpopts.IgnoreFields(types.AzureMonitorQuery{}, "Params")); diff != "" {
t.Errorf("Result mismatch (-want +got):\n%s", diff)
}
expected := `http://ds/#blade/Microsoft_Azure_MonitoringMetrics/Metrics.ReactView/Referer/MetricsExplorer/` +
`TimeContext/%7B%22absolute%22%3A%7B%22startTime%22%3A%222018-03-15T13%3A00%3A00Z%22%2C%22endTime%22%3A%222018-03-15T13%3A34%3A00Z%22%7D%7D/` +
`ChartDefinition/%7B%22v2charts%22%3A%5B%7B%22metrics%22%3A%5B%7B%22resourceMetadata%22%3A%7B%22id%22%3A%22%2Fsubscriptions%2F12345678-aaaa-bbbb-cccc-123456789abc%2FresourceGroups%2Fgrafanastaging%2Fproviders%2FMicrosoft.Compute%2FvirtualMachines%2Fgrafana%22%7D%2C` +
`%22name%22%3A%22Percentage%20CPU%22%2C%22aggregationType%22%3A4%2C%22namespace%22%3A%22Microsoft.Compute%2FvirtualMachines%22%2C%22metricVisualization%22%3A%7B%22displayName%22%3A%22Percentage%20CPU%22%2C%22resourceDisplayName%22%3A%22grafana%22%7D%7D%5D%7D%5D%7D`
actual, err := getQueryUrl(queries[0], "http://ds", "/subscriptions/12345678-aaaa-bbbb-cccc-123456789abc/resourceGroups/grafanastaging/providers/Microsoft.Compute/virtualMachines/grafana", "grafana")
require.NoError(t, err)
require.Equal(t, expected, actual)
})
}
}
func TestCustomNamespace(t *testing.T) {
datasource := &AzureMonitorDatasource{}
t.Run("it should set the metricNamespace to a customNamespace value if customNamespace is present as a parameter", func(t *testing.T) {
q := []backend.DataQuery{
{
JSON: []byte(`{
"azureMonitor": {
"customNamespace": "custom/namespace"
}
}`),
},
}
result, err := datasource.buildQueries(log.New("test"), q, types.DatasourceInfo{})
require.NoError(t, err)
expected := "custom/namespace"
require.Equal(t, expected, result[0].Params.Get("metricnamespace"))
})
}
func makeDates(startDate time.Time, count int, interval time.Duration) (times []time.Time) {
for i := 0; i < count; i++ {
times = append(times, startDate.Add(interval*time.Duration(i)))
}
return
}
func makeTestDataLink(url string) data.DataLink {
return data.DataLink{
Title: "View in Azure Portal",
TargetBlank: true,
URL: url,
}
}
func TestAzureMonitorParseResponse(t *testing.T) {
// datalinks for the test frames
averageLink := makeTestDataLink(`http://ds/#blade/Microsoft_Azure_MonitoringMetrics/Metrics.ReactView/Referer/MetricsExplorer/TimeContext/%7B%22absolute%22%3A%7B%22startTime%22%3A%220001-01-01T00%3A00%3A00Z%22%2C%22endTime%22%3A%220001-01-01T00%3A00%3A00Z%22%7D%7D/` +
`ChartDefinition/%7B%22v2charts%22%3A%5B%7B%22metrics%22%3A%5B%7B%22resourceMetadata%22%3A%7B%22id%22%3A%22%2Fsubscriptions%2F12345678-aaaa-bbbb-cccc-123456789abc%2FresourceGroups%2Fgrafanastaging%2Fproviders%2FMicrosoft.Compute%2FvirtualMachines%2Fgrafana%22%7D%2C%22name%22%3A%22%22%2C%22aggregationType%22%3A4%2C%22namespace%22%3A%22%22%2C` +
`%22metricVisualization%22%3A%7B%22displayName%22%3A%22%22%2C%22resourceDisplayName%22%3A%22grafana%22%7D%7D%5D%7D%5D%7D`)
totalLink := makeTestDataLink(`http://ds/#blade/Microsoft_Azure_MonitoringMetrics/Metrics.ReactView/Referer/MetricsExplorer/TimeContext/%7B%22absolute%22%3A%7B%22startTime%22%3A%220001-01-01T00%3A00%3A00Z%22%2C%22endTime%22%3A%220001-01-01T00%3A00%3A00Z%22%7D%7D/` +
`ChartDefinition/%7B%22v2charts%22%3A%5B%7B%22metrics%22%3A%5B%7B%22resourceMetadata%22%3A%7B%22id%22%3A%22%2Fsubscriptions%2F12345678-aaaa-bbbb-cccc-123456789abc%2FresourceGroups%2Fgrafanastaging%2Fproviders%2FMicrosoft.Compute%2FvirtualMachines%2Fgrafana%22%7D%2C%22name%22%3A%22%22%2C%22aggregationType%22%3A1%2C%22namespace%22%3A%22%22%2C` +
`%22metricVisualization%22%3A%7B%22displayName%22%3A%22%22%2C%22resourceDisplayName%22%3A%22grafana%22%7D%7D%5D%7D%5D%7D`)
maxLink := makeTestDataLink(`http://ds/#blade/Microsoft_Azure_MonitoringMetrics/Metrics.ReactView/Referer/MetricsExplorer/TimeContext/%7B%22absolute%22%3A%7B%22startTime%22%3A%220001-01-01T00%3A00%3A00Z%22%2C%22endTime%22%3A%220001-01-01T00%3A00%3A00Z%22%7D%7D/` +
`ChartDefinition/%7B%22v2charts%22%3A%5B%7B%22metrics%22%3A%5B%7B%22resourceMetadata%22%3A%7B%22id%22%3A%22%2Fsubscriptions%2F12345678-aaaa-bbbb-cccc-123456789abc%2FresourceGroups%2Fgrafanastaging%2Fproviders%2FMicrosoft.Compute%2FvirtualMachines%2Fgrafana%22%7D%2C%22name%22%3A%22%22%2C%22aggregationType%22%3A3%2C%22namespace%22%3A%22%22%2C` +
`%22metricVisualization%22%3A%7B%22displayName%22%3A%22%22%2C%22resourceDisplayName%22%3A%22grafana%22%7D%7D%5D%7D%5D%7D`)
minLink := makeTestDataLink(`http://ds/#blade/Microsoft_Azure_MonitoringMetrics/Metrics.ReactView/Referer/MetricsExplorer/TimeContext/%7B%22absolute%22%3A%7B%22startTime%22%3A%220001-01-01T00%3A00%3A00Z%22%2C%22endTime%22%3A%220001-01-01T00%3A00%3A00Z%22%7D%7D/` +
`ChartDefinition/%7B%22v2charts%22%3A%5B%7B%22metrics%22%3A%5B%7B%22resourceMetadata%22%3A%7B%22id%22%3A%22%2Fsubscriptions%2F12345678-aaaa-bbbb-cccc-123456789abc%2FresourceGroups%2Fgrafanastaging%2Fproviders%2FMicrosoft.Compute%2FvirtualMachines%2Fgrafana%22%7D%2C%22name%22%3A%22%22%2C%22aggregationType%22%3A2%2C%22namespace%22%3A%22%22%2C` +
`%22metricVisualization%22%3A%7B%22displayName%22%3A%22%22%2C%22resourceDisplayName%22%3A%22grafana%22%7D%7D%5D%7D%5D%7D`)
countLink := makeTestDataLink(`http://ds/#blade/Microsoft_Azure_MonitoringMetrics/Metrics.ReactView/Referer/MetricsExplorer/TimeContext/%7B%22absolute%22%3A%7B%22startTime%22%3A%220001-01-01T00%3A00%3A00Z%22%2C%22endTime%22%3A%220001-01-01T00%3A00%3A00Z%22%7D%7D/` +
`ChartDefinition/%7B%22v2charts%22%3A%5B%7B%22metrics%22%3A%5B%7B%22resourceMetadata%22%3A%7B%22id%22%3A%22%2Fsubscriptions%2F12345678-aaaa-bbbb-cccc-123456789abc%2FresourceGroups%2Fgrafanastaging%2Fproviders%2FMicrosoft.Compute%2FvirtualMachines%2Fgrafana%22%7D%2C%22name%22%3A%22%22%2C%22aggregationType%22%3A7%2C%22namespace%22%3A%22%22%2C` +
`%22metricVisualization%22%3A%7B%22displayName%22%3A%22%22%2C%22resourceDisplayName%22%3A%22grafana%22%7D%7D%5D%7D%5D%7D`)
tests := []struct {
name string
responseFile string
mockQuery *types.AzureMonitorQuery
expectedFrames data.Frames
queryIntervalMS int64
}{
{
name: "average aggregate time series response",
responseFile: "1-azure-monitor-response-avg.json",
mockQuery: &types.AzureMonitorQuery{
URL: "/subscriptions/12345678-aaaa-bbbb-cccc-123456789abc/resourceGroups/grafanastaging/providers/Microsoft.Compute/virtualMachines/grafana/providers/microsoft.insights/metrics",
Params: url.Values{
"aggregation": {"Average"},
},
},
expectedFrames: data.Frames{
data.NewFrame("",
data.NewField("Time", nil,
makeDates(time.Date(2019, 2, 8, 10, 13, 0, 0, time.UTC), 5, time.Minute),
).SetConfig(&data.FieldConfig{Links: []data.DataLink{averageLink}}),
data.NewField("Percentage CPU", nil, []*float64{
ptr.Float64(2.0875), ptr.Float64(2.1525), ptr.Float64(2.155), ptr.Float64(3.6925), ptr.Float64(2.44),
}).SetConfig(&data.FieldConfig{Unit: "percent", Links: []data.DataLink{averageLink}})),
},
},
{
name: "total aggregate time series response",
responseFile: "2-azure-monitor-response-total.json",
mockQuery: &types.AzureMonitorQuery{
URL: "/subscriptions/12345678-aaaa-bbbb-cccc-123456789abc/resourceGroups/grafanastaging/providers/Microsoft.Compute/virtualMachines/grafana/providers/microsoft.insights/metrics",
Params: url.Values{
"aggregation": {"Total"},
},
},
expectedFrames: data.Frames{
data.NewFrame("",
data.NewField("Time", nil,
makeDates(time.Date(2019, 2, 9, 13, 29, 0, 0, time.UTC), 5, time.Minute),
).SetConfig(&data.FieldConfig{Links: []data.DataLink{totalLink}}),
data.NewField("Percentage CPU", nil, []*float64{
ptr.Float64(8.26), ptr.Float64(8.7), ptr.Float64(14.82), ptr.Float64(10.07), ptr.Float64(8.52),
}).SetConfig(&data.FieldConfig{Unit: "percent", Links: []data.DataLink{totalLink}})),
},
},
{
name: "maximum aggregate time series response",
responseFile: "3-azure-monitor-response-maximum.json",
mockQuery: &types.AzureMonitorQuery{
URL: "/subscriptions/12345678-aaaa-bbbb-cccc-123456789abc/resourceGroups/grafanastaging/providers/Microsoft.Compute/virtualMachines/grafana/providers/microsoft.insights/metrics",
Params: url.Values{
"aggregation": {"Maximum"},
},
},
expectedFrames: data.Frames{
data.NewFrame("",
data.NewField("Time", nil,
makeDates(time.Date(2019, 2, 9, 14, 26, 0, 0, time.UTC), 5, time.Minute),
).SetConfig(&data.FieldConfig{Links: []data.DataLink{maxLink}}),
data.NewField("Percentage CPU", nil, []*float64{
ptr.Float64(3.07), ptr.Float64(2.92), ptr.Float64(2.87), ptr.Float64(2.27), ptr.Float64(2.52),
}).SetConfig(&data.FieldConfig{Unit: "percent", Links: []data.DataLink{maxLink}})),
},
},
{
name: "minimum aggregate time series response",
responseFile: "4-azure-monitor-response-minimum.json",
mockQuery: &types.AzureMonitorQuery{
URL: "/subscriptions/12345678-aaaa-bbbb-cccc-123456789abc/resourceGroups/grafanastaging/providers/Microsoft.Compute/virtualMachines/grafana/providers/microsoft.insights/metrics",
Params: url.Values{
"aggregation": {"Minimum"},
},
},
expectedFrames: data.Frames{
data.NewFrame("",
data.NewField("Time", nil,
makeDates(time.Date(2019, 2, 9, 14, 43, 0, 0, time.UTC), 5, time.Minute),
).SetConfig(&data.FieldConfig{Links: []data.DataLink{minLink}}),
data.NewField("Percentage CPU", nil, []*float64{
ptr.Float64(1.51), ptr.Float64(2.38), ptr.Float64(1.69), ptr.Float64(2.27), ptr.Float64(1.96),
}).SetConfig(&data.FieldConfig{Unit: "percent", Links: []data.DataLink{minLink}})),
},
},
{
name: "count aggregate time series response",
responseFile: "5-azure-monitor-response-count.json",
mockQuery: &types.AzureMonitorQuery{
URL: "/subscriptions/12345678-aaaa-bbbb-cccc-123456789abc/resourceGroups/grafanastaging/providers/Microsoft.Compute/virtualMachines/grafana/providers/microsoft.insights/metrics",
Params: url.Values{
"aggregation": {"Count"},
},
},
expectedFrames: data.Frames{
data.NewFrame("",
data.NewField("Time", nil,
makeDates(time.Date(2019, 2, 9, 14, 44, 0, 0, time.UTC), 5, time.Minute),
).SetConfig(&data.FieldConfig{Links: []data.DataLink{countLink}}),
data.NewField("Percentage CPU", nil, []*float64{
ptr.Float64(4), ptr.Float64(4), ptr.Float64(4), ptr.Float64(4), ptr.Float64(4),
}).SetConfig(&data.FieldConfig{Unit: "percent", Links: []data.DataLink{countLink}})),
},
},
{
name: "single dimension time series response",
responseFile: "6-azure-monitor-response-single-dimension.json",
mockQuery: &types.AzureMonitorQuery{
URL: "/subscriptions/12345678-aaaa-bbbb-cccc-123456789abc/resourceGroups/grafanastaging/providers/Microsoft.Compute/virtualMachines/grafana/providers/microsoft.insights/metrics",
Params: url.Values{
"aggregation": {"Average"},
},
},
expectedFrames: data.Frames{
data.NewFrame("",
data.NewField("Time", nil,
makeDates(time.Date(2019, 2, 9, 15, 21, 0, 0, time.UTC), 6, time.Hour),
).SetConfig(&data.FieldConfig{Links: []data.DataLink{averageLink}}),
data.NewField("Blob Count", data.Labels{"blobtype": "PageBlob"},
[]*float64{ptr.Float64(3), ptr.Float64(3), ptr.Float64(3), ptr.Float64(3), ptr.Float64(3), nil}).SetConfig(&data.FieldConfig{Unit: "short", Links: []data.DataLink{averageLink}})),
data.NewFrame("",
data.NewField("Time", nil,
makeDates(time.Date(2019, 2, 9, 15, 21, 0, 0, time.UTC), 6, time.Hour),
).SetConfig(&data.FieldConfig{Links: []data.DataLink{averageLink}}),
data.NewField("Blob Count", data.Labels{"blobtype": "BlockBlob"},
[]*float64{ptr.Float64(1), ptr.Float64(1), ptr.Float64(1), ptr.Float64(1), ptr.Float64(1), nil}).SetConfig(&data.FieldConfig{Unit: "short", Links: []data.DataLink{averageLink}})),
data.NewFrame("",
data.NewField("Time", nil,
makeDates(time.Date(2019, 2, 9, 15, 21, 0, 0, time.UTC), 6, time.Hour),
).SetConfig(&data.FieldConfig{Links: []data.DataLink{averageLink}}),
data.NewField("Blob Count", data.Labels{"blobtype": "Azure Data Lake Storage"},
[]*float64{ptr.Float64(0), ptr.Float64(0), ptr.Float64(0), ptr.Float64(0), ptr.Float64(0), nil}).SetConfig(&data.FieldConfig{Unit: "short", Links: []data.DataLink{averageLink}})),
},
},
{
name: "with alias patterns in the query",
responseFile: "2-azure-monitor-response-total.json",
mockQuery: &types.AzureMonitorQuery{
URL: "/subscriptions/12345678-aaaa-bbbb-cccc-123456789abc/resourceGroups/grafanastaging/providers/Microsoft.Compute/virtualMachines/grafana/providers/microsoft.insights/metrics",
Alias: "custom {{resourcegroup}} {{namespace}} {{resourceName}} {{metric}}",
Params: url.Values{
"aggregation": {"Total"},
},
},
expectedFrames: data.Frames{
data.NewFrame("",
data.NewField("Time", nil,
makeDates(time.Date(2019, 2, 9, 13, 29, 0, 0, time.UTC), 5, time.Minute),
).SetConfig(&data.FieldConfig{Links: []data.DataLink{totalLink}}),
data.NewField("Percentage CPU", nil, []*float64{
ptr.Float64(8.26), ptr.Float64(8.7), ptr.Float64(14.82), ptr.Float64(10.07), ptr.Float64(8.52),
}).SetConfig(&data.FieldConfig{Unit: "percent", DisplayName: "custom grafanastaging Microsoft.Compute/virtualMachines grafana Percentage CPU", Links: []data.DataLink{totalLink}})),
},
},
{
name: "single dimension with alias",
responseFile: "6-azure-monitor-response-single-dimension.json",
mockQuery: &types.AzureMonitorQuery{
URL: "/subscriptions/12345678-aaaa-bbbb-cccc-123456789abc/resourceGroups/grafanastaging/providers/Microsoft.Compute/virtualMachines/grafana/providers/microsoft.insights/metrics",
Alias: "{{dimensionname}}={{DimensionValue}}",
Params: url.Values{
"aggregation": {"Average"},
},
},
expectedFrames: data.Frames{
data.NewFrame("",
data.NewField("Time", nil,
makeDates(time.Date(2019, 2, 9, 15, 21, 0, 0, time.UTC), 6, time.Hour),
).SetConfig(&data.FieldConfig{Links: []data.DataLink{averageLink}}),
data.NewField("Blob Count", data.Labels{"blobtype": "PageBlob"},
[]*float64{ptr.Float64(3), ptr.Float64(3), ptr.Float64(3), ptr.Float64(3), ptr.Float64(3), nil}).SetConfig(&data.FieldConfig{Unit: "short", DisplayName: "blobtype=PageBlob", Links: []data.DataLink{averageLink}})),
data.NewFrame("",
data.NewField("Time", nil,
makeDates(time.Date(2019, 2, 9, 15, 21, 0, 0, time.UTC), 6, time.Hour),
).SetConfig(&data.FieldConfig{Links: []data.DataLink{averageLink}}),
data.NewField("Blob Count", data.Labels{"blobtype": "BlockBlob"}, []*float64{
ptr.Float64(1), ptr.Float64(1), ptr.Float64(1), ptr.Float64(1), ptr.Float64(1), nil,
}).SetConfig(&data.FieldConfig{Unit: "short", DisplayName: "blobtype=BlockBlob", Links: []data.DataLink{averageLink}})),
data.NewFrame("",
data.NewField("Time", nil,
makeDates(time.Date(2019, 2, 9, 15, 21, 0, 0, time.UTC), 6, time.Hour),
).SetConfig(&data.FieldConfig{Links: []data.DataLink{averageLink}}),
data.NewField("Blob Count", data.Labels{"blobtype": "Azure Data Lake Storage"}, []*float64{
ptr.Float64(0), ptr.Float64(0), ptr.Float64(0), ptr.Float64(0), ptr.Float64(0), nil,
}).SetConfig(&data.FieldConfig{Unit: "short", DisplayName: "blobtype=Azure Data Lake Storage", Links: []data.DataLink{averageLink}})),
},
},
{
name: "multiple dimension time series response with label alias",
responseFile: "7-azure-monitor-response-multi-dimension.json",
mockQuery: &types.AzureMonitorQuery{
URL: "/subscriptions/12345678-aaaa-bbbb-cccc-123456789abc/resourceGroups/grafanastaging/providers/Microsoft.Compute/virtualMachines/grafana/providers/microsoft.insights/metrics",
Alias: "{{resourcegroup}} {Blob Type={{blobtype}}, Tier={{Tier}}}",
Params: url.Values{
"aggregation": {"Average"},
},
},
expectedFrames: data.Frames{
data.NewFrame("",
data.NewField("Time", nil,
makeDates(time.Date(2020, 06, 30, 9, 58, 0, 0, time.UTC), 3, time.Hour),
).SetConfig(&data.FieldConfig{Links: []data.DataLink{averageLink}}),
data.NewField("Blob Capacity", data.Labels{"blobtype": "PageBlob", "tier": "Standard"},
[]*float64{ptr.Float64(675530), ptr.Float64(675530), ptr.Float64(675530)}).SetConfig(
&data.FieldConfig{Unit: "decbytes", DisplayName: "danieltest {Blob Type=PageBlob, Tier=Standard}", Links: []data.DataLink{averageLink}})),
data.NewFrame("",
data.NewField("Time", nil,
makeDates(time.Date(2020, 06, 30, 9, 58, 0, 0, time.UTC), 3, time.Hour),
).SetConfig(&data.FieldConfig{Links: []data.DataLink{averageLink}}),
data.NewField("Blob Capacity", data.Labels{"blobtype": "BlockBlob", "tier": "Hot"},
[]*float64{ptr.Float64(0), ptr.Float64(0), ptr.Float64(0)}).SetConfig(
&data.FieldConfig{Unit: "decbytes", DisplayName: "danieltest {Blob Type=BlockBlob, Tier=Hot}", Links: []data.DataLink{averageLink}})),
data.NewFrame("",
data.NewField("Time", nil,
makeDates(time.Date(2020, 06, 30, 9, 58, 0, 0, time.UTC), 3, time.Hour),
).SetConfig(&data.FieldConfig{Links: []data.DataLink{averageLink}}),
data.NewField("Blob Capacity", data.Labels{"blobtype": "Azure Data Lake Storage", "tier": "Cool"},
[]*float64{ptr.Float64(0), ptr.Float64(0), ptr.Float64(0)}).SetConfig(
&data.FieldConfig{Unit: "decbytes", DisplayName: "danieltest {Blob Type=Azure Data Lake Storage, Tier=Cool}", Links: []data.DataLink{averageLink}})),
},
},
{
name: "unspecified unit with alias should not panic",
responseFile: "8-azure-monitor-response-unspecified-unit.json",
mockQuery: &types.AzureMonitorQuery{
URL: "/subscriptions/12345678-aaaa-bbbb-cccc-123456789abc/resourceGroups/grafanastaging/providers/Microsoft.Compute/virtualMachines/grafana/providers/microsoft.insights/metrics",
Alias: "custom",
Params: url.Values{
"aggregation": {"Average"},
},
},
expectedFrames: data.Frames{
data.NewFrame("",
data.NewField("Time", nil,
[]time.Time{time.Date(2019, 2, 8, 10, 13, 0, 0, time.UTC)},
).SetConfig(&data.FieldConfig{Links: []data.DataLink{averageLink}}),
data.NewField("Percentage CPU", nil, []*float64{
ptr.Float64(2.0875),
}).SetConfig(&data.FieldConfig{DisplayName: "custom", Links: []data.DataLink{averageLink}})),
},
},
{
name: "with legacy azure monitor query properties and without a resource uri",
responseFile: "2-azure-monitor-response-total.json",
mockQuery: &types.AzureMonitorQuery{
URL: "/subscriptions/12345678-aaaa-bbbb-cccc-123456789abc/resourceGroups/grafanastaging/providers/Microsoft.Compute/virtualMachines/grafana/providers/microsoft.insights/metrics",
Alias: "custom {{resourcegroup}} {{namespace}} {{resourceName}} {{metric}}",
Params: url.Values{
"aggregation": {"Total"},
},
},
expectedFrames: data.Frames{
data.NewFrame("",
data.NewField("Time", nil,
makeDates(time.Date(2019, 2, 9, 13, 29, 0, 0, time.UTC), 5, time.Minute),
).SetConfig(&data.FieldConfig{Links: []data.DataLink{totalLink}}),
data.NewField("Percentage CPU", nil, []*float64{
ptr.Float64(8.26), ptr.Float64(8.7), ptr.Float64(14.82), ptr.Float64(10.07), ptr.Float64(8.52),
}).SetConfig(&data.FieldConfig{Unit: "percent", DisplayName: "custom grafanastaging Microsoft.Compute/virtualMachines grafana Percentage CPU", Links: []data.DataLink{totalLink}})),
},
},
{
name: "with legacy azure monitor query properties and with a resource uri it should use the resource uri",
responseFile: "2-azure-monitor-response-total.json",
mockQuery: &types.AzureMonitorQuery{
URL: "/subscriptions/12345678-aaaa-bbbb-cccc-123456789abc/resourceGroups/grafanastaging/providers/Microsoft.Compute/virtualMachines/grafana/providers/microsoft.insights/metrics",
Alias: "custom {{resourcegroup}} {{namespace}} {{resourceName}} {{metric}}",
Params: url.Values{
"aggregation": {"Total"},
},
},
expectedFrames: data.Frames{
data.NewFrame("",
data.NewField("Time", nil,
makeDates(time.Date(2019, 2, 9, 13, 29, 0, 0, time.UTC), 5, time.Minute),
).SetConfig(&data.FieldConfig{Links: []data.DataLink{totalLink}}),
data.NewField("Percentage CPU", nil, []*float64{
ptr.Float64(8.26), ptr.Float64(8.7), ptr.Float64(14.82), ptr.Float64(10.07), ptr.Float64(8.52),
}).SetConfig(&data.FieldConfig{Unit: "percent", DisplayName: "custom grafanastaging Microsoft.Compute/virtualMachines grafana Percentage CPU", Links: []data.DataLink{totalLink}})),
},
},
{
name: "multiple time series response",
responseFile: "9-azure-monitor-response-multi.json",
mockQuery: &types.AzureMonitorQuery{
URL: "/subscriptions/12345678-aaaa-bbbb-cccc-123456789abc/providers/microsoft.insights/metrics",
Params: url.Values{
"aggregation": {"Average"},
},
},
expectedFrames: data.Frames{
data.NewFrame("",
data.NewField("Time", nil,
makeDates(time.Date(2019, 2, 8, 10, 13, 0, 0, time.UTC), 5, time.Minute),
).SetConfig(&data.FieldConfig{Links: []data.DataLink{averageLink}}),
data.NewField("Percentage CPU", data.Labels{"microsoft.resourceid": "/subscriptions/12345678-aaaa-bbbb-cccc-123456789abc/resourceGroups/grafanastaging/providers/Microsoft.Compute/virtualMachines/grafana"}, []*float64{
ptr.Float64(2.0875), ptr.Float64(2.1525), ptr.Float64(2.155), ptr.Float64(3.6925), ptr.Float64(2.44),
}).SetConfig(&data.FieldConfig{Unit: "percent", Links: []data.DataLink{averageLink}}),
),
},
},
}
datasource := &AzureMonitorDatasource{}
for _, tt := range tests {
t.Run(tt.name, func(t *testing.T) {
azData := loadTestFile(t, "azuremonitor/"+tt.responseFile)
dframes, err := datasource.parseResponse(azData, tt.mockQuery, "http://ds")
require.NoError(t, err)
require.NotNil(t, dframes)
if diff := cmp.Diff(tt.expectedFrames, dframes, data.FrameTestCompareOptions()...); diff != "" {
t.Errorf("Result mismatch (-want +got):\n%s", diff)
}
})
}
}
func TestFindClosestAllowIntervalMS(t *testing.T) {
humanIntervalToMS := map[string]int64{
"3m": 180000,
"5m": 300000,
"10m": 600000,
"15m": 900000,
"1d": 86400000,
"2d": 172800000,
}
tests := []struct {
name string
allowedTimeGrains []int64 // Note: Uses defaults when empty list
inputInterval int64
expectedInterval int64
}{
{
name: "closest to 3m is 5m",
allowedTimeGrains: []int64{},
inputInterval: humanIntervalToMS["3m"],
expectedInterval: humanIntervalToMS["5m"],
},
{
name: "closest to 10m is 15m",
allowedTimeGrains: []int64{},
inputInterval: humanIntervalToMS["10m"],
expectedInterval: humanIntervalToMS["15m"],
},
{
name: "closest to 2d is 1d",
allowedTimeGrains: []int64{},
inputInterval: humanIntervalToMS["2d"],
expectedInterval: humanIntervalToMS["1d"],
},
{
name: "closest to 3m is 1d when 1d is only allowed interval",
allowedTimeGrains: []int64{humanIntervalToMS["1d"]},
inputInterval: humanIntervalToMS["2d"],
expectedInterval: humanIntervalToMS["1d"],
},
}
for _, tt := range tests {
t.Run(tt.name, func(t *testing.T) {
interval := azTime.FindClosestAllowedIntervalMS(tt.inputInterval, tt.allowedTimeGrains)
require.Equal(t, tt.expectedInterval, interval)
})
}
}
func loadTestFile(t *testing.T, name string) types.AzureMonitorResponse {
t.Helper()
path := filepath.Join("../testdata", name)
// Ignore gosec warning G304 since it's a test
// nolint:gosec
jsonBody, err := os.ReadFile(path)
require.NoError(t, err)
var azData types.AzureMonitorResponse
err = json.Unmarshal(jsonBody, &azData)
require.NoError(t, err)
return azData
}
func TestAzureMonitorCreateRequest(t *testing.T) {
ctx := context.Background()
url := "http://ds/"
tests := []struct {
name string
expectedURL string
expectedHeaders http.Header
Err require.ErrorAssertionFunc
}{
{
name: "creates a request",
expectedURL: "http://ds/",
expectedHeaders: http.Header{
"Content-Type": []string{"application/json"},
},
Err: require.NoError,
},
}
for _, tt := range tests {
t.Run(tt.name, func(t *testing.T) {
ds := AzureMonitorDatasource{}
req, err := ds.createRequest(ctx, log.New("test"), url)
tt.Err(t, err)
if req.URL.String() != tt.expectedURL {
t.Errorf("Expecting %s, got %s", tt.expectedURL, req.URL.String())
}
if !cmp.Equal(req.Header, tt.expectedHeaders) {
t.Errorf("Unexpected HTTP headers: %v", cmp.Diff(req.Header, tt.expectedHeaders))
}
})
}
}
func TestExtractResourceNameFromMetricsURL(t *testing.T) {
t.Run("it should extract the resourceName from a well-formed Metrics URL", func(t *testing.T) {
url := "/subscriptions/12345678-aaaa-bbbb-cccc-123456789abc/resourceGroups/grafanastaging/providers/Microsoft.Compute/virtualMachines/Grafana-Test.VM/providers/microsoft.insights/metrics"
expected := "Grafana-Test.VM"
require.Equal(t, expected, extractResourceNameFromMetricsURL((url)))
})
t.Run("it should extract the resourceName from a well-formed Metrics URL in a case insensitive manner", func(t *testing.T) {
url := "/subscriptions/12345678-aaaa-bbbb-cccc-123456789abc/resourceGroups/grafanastaging/providers/Microsoft.Compute/virtualMachines/Grafana-Test.VM/pRoViDeRs/MiCrOsOfT.iNsIgHtS/mEtRiCs"
expected := "Grafana-Test.VM"
require.Equal(t, expected, extractResourceNameFromMetricsURL((url)))
})
t.Run("it should return an empty string if no match is found", func(t *testing.T) {
url := "/subscriptions/12345678-aaaa-bbbb-cccc-123456789abc/resourceGroups/grafanastaging/providers/Microsoft.Compute/virtualMachines/Grafana-Test.VM/providers/microsoft.insights/nope-this-part-does-not-match"
expected := ""
require.Equal(t, expected, extractResourceNameFromMetricsURL((url)))
})
}
| pkg/tsdb/azuremonitor/metrics/azuremonitor-datasource_test.go | 1 | https://github.com/grafana/grafana/commit/a7d4bbf0248d0c6e5c6fd9f9ca55ffd399781c12 | [
0.09343036264181137,
0.0031707691960036755,
0.0001664312439970672,
0.00039269053377211094,
0.010888876393437386
] |
{
"id": 3,
"code_window": [
"\t\t\tParams: params,\n",
"\t\t\tRefID: query.RefID,\n",
"\t\t\tAlias: alias,\n",
"\t\t\tTimeRange: query.TimeRange,\n",
"\t\t\tFilter: filterString,\n",
"\t\t})\n",
"\t}\n",
"\n",
"\treturn azureMonitorQueries, nil\n",
"}\n",
"\n"
],
"labels": [
"keep",
"keep",
"keep",
"keep",
"replace",
"replace",
"keep",
"keep",
"keep",
"keep",
"keep"
],
"after_edit": [
"\t\t}\n",
"\t\tif filterString != \"\" {\n",
"\t\t\tif filterInBody {\n",
"\t\t\t\tquery.BodyFilter = filterString\n",
"\t\t\t} else {\n",
"\t\t\t\tquery.Params.Add(\"$filter\", filterString)\n",
"\t\t\t}\n",
"\t\t}\n",
"\t\tazureMonitorQueries = append(azureMonitorQueries, query)\n"
],
"file_path": "pkg/tsdb/azuremonitor/metrics/azuremonitor-datasource.go",
"type": "replace",
"edit_start_line_idx": 183
} | #datatype,string,long,dateTime:RFC3339,dateTime:RFC3339,dateTime:RFC3339,double,string,string,string,string
#group,false,false,true,true,false,false,true,true,true,true
#default,_result,,,,,,,,,
,result,table,_start,_stop,_time,_value,_field,_measurement,a,b
,,0,2020-02-17T22:19:49.747562847Z,2020-02-18T22:19:49.747562847Z,2020-02-18T10:34:08.135814545Z,1.4,f,test,1,adsfasdf
,,0,2020-02-17T22:19:49.747562847Z,2020-02-18T22:19:49.747562847Z,2020-02-18T22:08:44.850214724Z,6.6,f,test,1,adsfasdf
| pkg/tsdb/influxdb/flux/testdata/simple.csv | 0 | https://github.com/grafana/grafana/commit/a7d4bbf0248d0c6e5c6fd9f9ca55ffd399781c12 | [
0.0001723138993838802,
0.0001723138993838802,
0.0001723138993838802,
0.0001723138993838802,
0
] |
{
"id": 3,
"code_window": [
"\t\t\tParams: params,\n",
"\t\t\tRefID: query.RefID,\n",
"\t\t\tAlias: alias,\n",
"\t\t\tTimeRange: query.TimeRange,\n",
"\t\t\tFilter: filterString,\n",
"\t\t})\n",
"\t}\n",
"\n",
"\treturn azureMonitorQueries, nil\n",
"}\n",
"\n"
],
"labels": [
"keep",
"keep",
"keep",
"keep",
"replace",
"replace",
"keep",
"keep",
"keep",
"keep",
"keep"
],
"after_edit": [
"\t\t}\n",
"\t\tif filterString != \"\" {\n",
"\t\t\tif filterInBody {\n",
"\t\t\t\tquery.BodyFilter = filterString\n",
"\t\t\t} else {\n",
"\t\t\t\tquery.Params.Add(\"$filter\", filterString)\n",
"\t\t\t}\n",
"\t\t}\n",
"\t\tazureMonitorQueries = append(azureMonitorQueries, query)\n"
],
"file_path": "pkg/tsdb/azuremonitor/metrics/azuremonitor-datasource.go",
"type": "replace",
"edit_start_line_idx": 183
} | // 🌟 This was machine generated. Do not edit. 🌟
//
// Frame[0] {
// "type": "search-results",
// "custom": {
// "count": 106,
// "locationInfo": {
// "yboVMzb7z": {
// "kind": "folder",
// "name": "gdev dashboards",
// "url": "/dashboards/f/yboVMzb7z/gdev-dashboards"
// }
// },
// "sortBy": "name_sort"
// }
// }
// Name: Query results
// Dimensions: 9 Fields by 4 Rows
// +----------------+----------------+-------------------------+------------------+--------------------------------------+--------------------------+---------------------------+----------------+------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------+
// | Name: kind | Name: uid | Name: name | Name: panel_type | Name: url | Name: tags | Name: ds_uid | Name: location | Name: allowed_actions |
// | Labels: | Labels: | Labels: | Labels: | Labels: | Labels: | Labels: | Labels: | Labels: |
// | Type: []string | Type: []string | Type: []string | Type: []string | Type: []string | Type: []*json.RawMessage | Type: []json.RawMessage | Type: []string | Type: []json.RawMessage |
// +----------------+----------------+-------------------------+------------------+--------------------------------------+--------------------------+---------------------------+----------------+------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------+
// | folder | ujaM1h6nz | abc2 | | /dashboards/f/ujaM1h6nz/abc2 | null | [] | | [{"kind":"folder","uid":"ujaM1h6nz","actions":["folders.permissions:read","folders.permissions:write","folders:create","folders:delete","folders:read","folders:write"]}] |
// | dashboard | 7MeksYbmk | Alerting with TestData | | /d/7MeksYbmk/alerting-with-testdata | [ | [ | yboVMzb7z | [{"kind":"dashboard","uid":"7MeksYbmk","actions":["dashboards.permissions:read","dashboards.permissions:write","dashboards:create","dashboards:delete","dashboards:read","dashboards:write"]},{"kind":"ds","uid":"datasource-1","actions":["datasources.id:read","datasources.permissions:read","datasources.permissions:write","datasources:delete","datasources:explore","datasources:query","datasources:read","datasources:write"]}] |
// | | | | | | "gdev", | "datasource-1" | | |
// | | | | | | "alerting" | ] | | |
// | | | | | | ] | | | |
// | dashboard | vmie2cmWz | Bar Gauge Demo | | /d/vmie2cmWz/bar-gauge-demo | [ | [ | yboVMzb7z | [{"kind":"dashboard","uid":"vmie2cmWz","actions":["dashboards.permissions:read","dashboards.permissions:write","dashboards:create","dashboards:delete","dashboards:read","dashboards:write"]},{"kind":"ds","uid":"datasource-2","actions":["datasources.id:read","datasources.permissions:read","datasources.permissions:write","datasources:delete","datasources:explore","datasources:query","datasources:read","datasources:write"]},{"kind":"ds","uid":"datasource-3","actions":["datasources.id:read","datasources.permissions:read","datasources.permissions:write","datasources:delete","datasources:explore","datasources:query","datasources:read","datasources:write"]},{"kind":"ds","uid":"datasource-4","actions":["datasources.id:read","datasources.permissions:read","datasources.permissions:write","datasources:delete","datasources:explore","datasources:query","datasources:read","datasources:write"]}] |
// | | | | | | "gdev", | "datasource-2", | | |
// | | | | | | "demo" | "datasource-3", | | |
// | | | | | | ] | "datasource-4" | | |
// | | | | | | | ] | | |
// | dashboard | xMsQdBfWz | Bar Gauge Demo Unfilled | | /d/xMsQdBfWz/bar-gauge-demo-unfilled | [ | [] | yboVMzb7z | [{"kind":"dashboard","uid":"xMsQdBfWz","actions":["dashboards.permissions:read","dashboards.permissions:write","dashboards:create","dashboards:delete","dashboards:read","dashboards:write"]}] |
// | | | | | | "gdev", | | | |
// | | | | | | "demo" | | | |
// | | | | | | ] | | | |
// +----------------+----------------+-------------------------+------------------+--------------------------------------+--------------------------+---------------------------+----------------+------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------+
//
//
// 🌟 This was machine generated. Do not edit. 🌟
{
"status": 200,
"frames": [
{
"schema": {
"name": "Query results",
"refId": "Search",
"meta": {
"type": "search-results",
"custom": {
"count": 106,
"locationInfo": {
"yboVMzb7z": {
"kind": "folder",
"name": "gdev dashboards",
"url": "/dashboards/f/yboVMzb7z/gdev-dashboards"
}
},
"sortBy": "name_sort"
}
},
"fields": [
{
"name": "kind",
"type": "string",
"typeInfo": {
"frame": "string"
}
},
{
"name": "uid",
"type": "string",
"typeInfo": {
"frame": "string"
}
},
{
"name": "name",
"type": "string",
"typeInfo": {
"frame": "string"
}
},
{
"name": "panel_type",
"type": "string",
"typeInfo": {
"frame": "string"
}
},
{
"name": "url",
"type": "string",
"typeInfo": {
"frame": "string"
},
"config": {
"links": [
{
"title": "link",
"url": "${__value.text}"
}
]
}
},
{
"name": "tags",
"type": "other",
"typeInfo": {
"frame": "json.RawMessage",
"nullable": true
}
},
{
"name": "ds_uid",
"type": "other",
"typeInfo": {
"frame": "json.RawMessage"
}
},
{
"name": "location",
"type": "string",
"typeInfo": {
"frame": "string"
}
},
{
"name": "allowed_actions",
"type": "other",
"typeInfo": {
"frame": "json.RawMessage"
}
}
]
},
"data": {
"values": [
[
"folder",
"dashboard",
"dashboard",
"dashboard"
],
[
"ujaM1h6nz",
"7MeksYbmk",
"vmie2cmWz",
"xMsQdBfWz"
],
[
"abc2",
"Alerting with TestData",
"Bar Gauge Demo",
"Bar Gauge Demo Unfilled"
],
[
"",
"",
"",
""
],
[
"/dashboards/f/ujaM1h6nz/abc2",
"/d/7MeksYbmk/alerting-with-testdata",
"/d/vmie2cmWz/bar-gauge-demo",
"/d/xMsQdBfWz/bar-gauge-demo-unfilled"
],
[
null,
[
"gdev",
"alerting"
],
[
"gdev",
"demo"
],
[
"gdev",
"demo"
]
],
[
[],
[
"datasource-1"
],
[
"datasource-2",
"datasource-3",
"datasource-4"
],
[]
],
[
"",
"yboVMzb7z",
"yboVMzb7z",
"yboVMzb7z"
],
[
[
{
"kind": "folder",
"uid": "ujaM1h6nz",
"actions": [
"folders.permissions:read",
"folders.permissions:write",
"folders:create",
"folders:delete",
"folders:read",
"folders:write"
]
}
],
[
{
"kind": "dashboard",
"uid": "7MeksYbmk",
"actions": [
"dashboards.permissions:read",
"dashboards.permissions:write",
"dashboards:create",
"dashboards:delete",
"dashboards:read",
"dashboards:write"
]
},
{
"kind": "ds",
"uid": "datasource-1",
"actions": [
"datasources.id:read",
"datasources.permissions:read",
"datasources.permissions:write",
"datasources:delete",
"datasources:explore",
"datasources:query",
"datasources:read",
"datasources:write"
]
}
],
[
{
"kind": "dashboard",
"uid": "vmie2cmWz",
"actions": [
"dashboards.permissions:read",
"dashboards.permissions:write",
"dashboards:create",
"dashboards:delete",
"dashboards:read",
"dashboards:write"
]
},
{
"kind": "ds",
"uid": "datasource-2",
"actions": [
"datasources.id:read",
"datasources.permissions:read",
"datasources.permissions:write",
"datasources:delete",
"datasources:explore",
"datasources:query",
"datasources:read",
"datasources:write"
]
},
{
"kind": "ds",
"uid": "datasource-3",
"actions": [
"datasources.id:read",
"datasources.permissions:read",
"datasources.permissions:write",
"datasources:delete",
"datasources:explore",
"datasources:query",
"datasources:read",
"datasources:write"
]
},
{
"kind": "ds",
"uid": "datasource-4",
"actions": [
"datasources.id:read",
"datasources.permissions:read",
"datasources.permissions:write",
"datasources:delete",
"datasources:explore",
"datasources:query",
"datasources:read",
"datasources:write"
]
}
],
[
{
"kind": "dashboard",
"uid": "xMsQdBfWz",
"actions": [
"dashboards.permissions:read",
"dashboards.permissions:write",
"dashboards:create",
"dashboards:delete",
"dashboards:read",
"dashboards:write"
]
}
]
]
]
}
}
]
} | pkg/services/searchV2/testdata/allowed_actions_scope_all.golden.jsonc | 0 | https://github.com/grafana/grafana/commit/a7d4bbf0248d0c6e5c6fd9f9ca55ffd399781c12 | [
0.00017808492702897638,
0.00017216138076037169,
0.00016576053167227656,
0.00017259755986742675,
0.0000029356149298109813
] |
{
"id": 3,
"code_window": [
"\t\t\tParams: params,\n",
"\t\t\tRefID: query.RefID,\n",
"\t\t\tAlias: alias,\n",
"\t\t\tTimeRange: query.TimeRange,\n",
"\t\t\tFilter: filterString,\n",
"\t\t})\n",
"\t}\n",
"\n",
"\treturn azureMonitorQueries, nil\n",
"}\n",
"\n"
],
"labels": [
"keep",
"keep",
"keep",
"keep",
"replace",
"replace",
"keep",
"keep",
"keep",
"keep",
"keep"
],
"after_edit": [
"\t\t}\n",
"\t\tif filterString != \"\" {\n",
"\t\t\tif filterInBody {\n",
"\t\t\t\tquery.BodyFilter = filterString\n",
"\t\t\t} else {\n",
"\t\t\t\tquery.Params.Add(\"$filter\", filterString)\n",
"\t\t\t}\n",
"\t\t}\n",
"\t\tazureMonitorQueries = append(azureMonitorQueries, query)\n"
],
"file_path": "pkg/tsdb/azuremonitor/metrics/azuremonitor-datasource.go",
"type": "replace",
"edit_start_line_idx": 183
} | import { render, screen } from '@testing-library/react';
import React from 'react';
import { DataSourceHttpSettings } from '@grafana/ui';
import { HttpSettingsProps } from './types';
const setup = (propOverrides?: object) => {
const onChange = jest.fn();
const props: HttpSettingsProps = {
dataSourceConfig: {
id: 4,
uid: 'x',
orgId: 1,
name: 'gdev-influxdb',
type: 'influxdb',
typeName: 'Influxdb',
typeLogoUrl: '',
access: 'direct',
url: 'http://localhost:8086',
user: 'grafana',
database: 'site',
basicAuth: false,
basicAuthUser: '',
withCredentials: false,
isDefault: false,
jsonData: {
timeInterval: '15s',
httpMode: 'GET',
keepCookies: ['cookie1', 'cookie2'],
},
secureJsonData: {
password: true,
},
secureJsonFields: {},
readOnly: true,
},
onChange,
...propOverrides,
defaultUrl: '',
};
render(<DataSourceHttpSettings {...props} />);
return { onChange };
};
const SIGV4TestEditor = (props: { renderText: string }) => {
return <>{props.renderText}</>;
};
describe('DataSourceHttpSettings', () => {
it('should render SIGV4 label if SIGV4 is enabled', () => {
setup({ sigV4AuthToggleEnabled: true });
expect(screen.getByLabelText('SigV4 auth')).toBeInTheDocument();
});
it('should not render SIGV4 label if SIGV4 is not enabled', () => {
setup({ sigV4AuthToggleEnabled: false });
expect(screen.queryByText('SigV4 auth')).toBeNull();
});
it('should render SIGV4 editor if provided and SIGV4 is enabled', () => {
const expectedText = 'sigv4-test-editor';
setup({
sigV4AuthToggleEnabled: true,
renderSigV4Editor: <SIGV4TestEditor renderText={expectedText}></SIGV4TestEditor>,
dataSourceConfig: {
jsonData: {
sigV4Auth: true,
},
},
});
expect(screen.getByText(expectedText)).toBeInTheDocument();
});
});
| packages/grafana-ui/src/components/DataSourceSettings/DataSourceHttpSettings.test.tsx | 0 | https://github.com/grafana/grafana/commit/a7d4bbf0248d0c6e5c6fd9f9ca55ffd399781c12 | [
0.0001757169229676947,
0.0001717395498417318,
0.0001656229142099619,
0.00017251464305445552,
0.0000032866532819753047
] |
{
"id": 4,
"code_window": [
"\t}\n",
"\n",
"\treq.URL.Path = path.Join(req.URL.Path, query.URL)\n",
"\treq.URL.RawQuery = query.Params.Encode()\n",
"\tif query.Filter != \"\" {\n",
"\t\treq.Method = http.MethodPost\n"
],
"labels": [
"keep",
"keep",
"keep",
"keep",
"replace",
"keep"
],
"after_edit": [
"\tif query.BodyFilter != \"\" {\n"
],
"file_path": "pkg/tsdb/azuremonitor/metrics/azuremonitor-datasource.go",
"type": "replace",
"edit_start_line_idx": 202
} | package metrics
import (
"context"
"encoding/json"
"fmt"
"io"
"net/http"
"net/url"
"path"
"regexp"
"sort"
"strings"
"time"
"github.com/grafana/grafana-plugin-sdk-go/backend"
"github.com/grafana/grafana-plugin-sdk-go/data"
"go.opentelemetry.io/otel/attribute"
"github.com/grafana/grafana/pkg/infra/log"
"github.com/grafana/grafana/pkg/infra/tracing"
"github.com/grafana/grafana/pkg/setting"
"github.com/grafana/grafana/pkg/tsdb/azuremonitor/resourcegraph"
azTime "github.com/grafana/grafana/pkg/tsdb/azuremonitor/time"
"github.com/grafana/grafana/pkg/tsdb/azuremonitor/types"
)
// AzureMonitorDatasource calls the Azure Monitor API - one of the four API's supported
type AzureMonitorDatasource struct {
Proxy types.ServiceProxy
}
var (
// Used to convert the aggregation value to the Azure enum for deep linking
aggregationTypeMap = map[string]int{"None": 0, "Total": 1, "Minimum": 2, "Maximum": 3, "Average": 4, "Count": 7}
resourceNameLandmark = regexp.MustCompile(`(?i)(/(?P<resourceName>[\w-\.]+)/providers/Microsoft\.Insights/metrics)`)
)
const AzureMonitorAPIVersion = "2021-05-01"
func (e *AzureMonitorDatasource) ResourceRequest(rw http.ResponseWriter, req *http.Request, cli *http.Client) {
e.Proxy.Do(rw, req, cli)
}
// executeTimeSeriesQuery does the following:
// 1. build the AzureMonitor url and querystring for each query
// 2. executes each query by calling the Azure Monitor API
// 3. parses the responses for each query into data frames
func (e *AzureMonitorDatasource) ExecuteTimeSeriesQuery(ctx context.Context, logger log.Logger, originalQueries []backend.DataQuery, dsInfo types.DatasourceInfo, client *http.Client, url string, tracer tracing.Tracer) (*backend.QueryDataResponse, error) {
result := backend.NewQueryDataResponse()
ctxLogger := logger.FromContext(ctx)
queries, err := e.buildQueries(ctxLogger, originalQueries, dsInfo)
if err != nil {
return nil, err
}
for _, query := range queries {
result.Responses[query.RefID] = e.executeQuery(ctx, ctxLogger, query, dsInfo, client, url, tracer)
}
return result, nil
}
func (e *AzureMonitorDatasource) buildQueries(logger log.Logger, queries []backend.DataQuery, dsInfo types.DatasourceInfo) ([]*types.AzureMonitorQuery, error) {
azureMonitorQueries := []*types.AzureMonitorQuery{}
for _, query := range queries {
var target string
queryJSONModel := types.AzureMonitorJSONQuery{}
err := json.Unmarshal(query.JSON, &queryJSONModel)
if err != nil {
return nil, fmt.Errorf("failed to decode the Azure Monitor query object from JSON: %w", err)
}
azJSONModel := queryJSONModel.AzureMonitor
// Legacy: If only MetricDefinition is set, use it as namespace
if azJSONModel.MetricDefinition != "" && azJSONModel.MetricNamespace == "" {
azJSONModel.MetricNamespace = azJSONModel.MetricDefinition
}
azJSONModel.DimensionFilters = MigrateDimensionFilters(azJSONModel.DimensionFilters)
alias := azJSONModel.Alias
timeGrain := azJSONModel.TimeGrain
timeGrains := azJSONModel.AllowedTimeGrainsMs
if timeGrain == "auto" {
timeGrain, err = azTime.SetAutoTimeGrain(query.Interval.Milliseconds(), timeGrains)
if err != nil {
return nil, err
}
}
params := url.Values{}
params.Add("api-version", AzureMonitorAPIVersion)
params.Add("timespan", fmt.Sprintf("%v/%v", query.TimeRange.From.UTC().Format(time.RFC3339), query.TimeRange.To.UTC().Format(time.RFC3339)))
params.Add("interval", timeGrain)
params.Add("aggregation", azJSONModel.Aggregation)
params.Add("metricnames", azJSONModel.MetricName)
if azJSONModel.CustomNamespace != "" {
params.Add("metricnamespace", azJSONModel.CustomNamespace)
} else {
params.Add("metricnamespace", azJSONModel.MetricNamespace)
}
azureURL := BuildSubscriptionMetricsURL(queryJSONModel.Subscription)
if azJSONModel.Region != "" {
params.Add("region", azJSONModel.Region)
} else {
// Deprecated, if no region is specified, only one resource group and name is supported
ub := urlBuilder{
ResourceURI: azJSONModel.ResourceURI,
// Alternative, used to reconstruct resource URI if it's not present
DefaultSubscription: dsInfo.Settings.SubscriptionId,
Subscription: queryJSONModel.Subscription,
ResourceGroup: azJSONModel.ResourceGroup,
MetricNamespace: azJSONModel.MetricNamespace,
ResourceName: azJSONModel.ResourceName,
}
azureURL = ub.BuildMetricsURL()
}
// old model
dimension := strings.TrimSpace(azJSONModel.Dimension)
dimensionFilter := strings.TrimSpace(azJSONModel.DimensionFilter)
dimSB := strings.Builder{}
if dimension != "" && dimensionFilter != "" && dimension != "None" && len(azJSONModel.DimensionFilters) == 0 {
dimSB.WriteString(fmt.Sprintf("%s eq '%s'", dimension, dimensionFilter))
} else {
for i, filter := range azJSONModel.DimensionFilters {
if len(filter.Filters) == 0 {
dimSB.WriteString(fmt.Sprintf("%s eq '*'", filter.Dimension))
} else {
dimSB.WriteString(filter.ConstructFiltersString())
}
if i != len(azJSONModel.DimensionFilters)-1 {
dimSB.WriteString(" and ")
}
}
}
resourceIDs := []string{}
for _, r := range azJSONModel.Resources {
ub := urlBuilder{
DefaultSubscription: dsInfo.Settings.SubscriptionId,
Subscription: queryJSONModel.Subscription,
ResourceGroup: r.ResourceGroup,
MetricNamespace: azJSONModel.MetricNamespace,
ResourceName: r.ResourceName,
}
resourceIDs = append(resourceIDs, fmt.Sprintf("Microsoft.ResourceId eq '%s'", ub.buildResourceURI()))
}
filterString := strings.Join(resourceIDs, " or ")
if dimSB.String() != "" {
if filterString != "" {
filterString = fmt.Sprintf("(%s) and (%s)", filterString, dimSB.String())
} else {
filterString = dimSB.String()
}
if azJSONModel.Top != "" {
params.Add("top", azJSONModel.Top)
}
}
target = params.Encode()
if setting.Env == setting.Dev {
logger.Debug("Azuremonitor request", "params", params)
}
azureMonitorQueries = append(azureMonitorQueries, &types.AzureMonitorQuery{
URL: azureURL,
Target: target,
Params: params,
RefID: query.RefID,
Alias: alias,
TimeRange: query.TimeRange,
Filter: filterString,
})
}
return azureMonitorQueries, nil
}
func (e *AzureMonitorDatasource) executeQuery(ctx context.Context, logger log.Logger, query *types.AzureMonitorQuery, dsInfo types.DatasourceInfo, cli *http.Client,
url string, tracer tracing.Tracer) backend.DataResponse {
dataResponse := backend.DataResponse{}
req, err := e.createRequest(ctx, logger, url)
if err != nil {
dataResponse.Error = err
return dataResponse
}
req.URL.Path = path.Join(req.URL.Path, query.URL)
req.URL.RawQuery = query.Params.Encode()
if query.Filter != "" {
req.Method = http.MethodPost
req.Body = io.NopCloser(strings.NewReader(fmt.Sprintf(`{"filter": "%s"}`, query.Filter)))
}
ctx, span := tracer.Start(ctx, "azuremonitor query")
span.SetAttributes("target", query.Target, attribute.Key("target").String(query.Target))
span.SetAttributes("from", query.TimeRange.From.UnixNano()/int64(time.Millisecond), attribute.Key("from").Int64(query.TimeRange.From.UnixNano()/int64(time.Millisecond)))
span.SetAttributes("until", query.TimeRange.To.UnixNano()/int64(time.Millisecond), attribute.Key("until").Int64(query.TimeRange.To.UnixNano()/int64(time.Millisecond)))
span.SetAttributes("datasource_id", dsInfo.DatasourceID, attribute.Key("datasource_id").Int64(dsInfo.DatasourceID))
span.SetAttributes("org_id", dsInfo.OrgID, attribute.Key("org_id").Int64(dsInfo.OrgID))
defer span.End()
tracer.Inject(ctx, req.Header, span)
logger.Debug("AzureMonitor", "Request ApiURL", req.URL.String())
logger.Debug("AzureMonitor", "Target", query.Target)
res, err := cli.Do(req)
if err != nil {
dataResponse.Error = err
return dataResponse
}
defer func() {
if err := res.Body.Close(); err != nil {
logger.Warn("Failed to close response body", "err", err)
}
}()
data, err := e.unmarshalResponse(logger, res)
if err != nil {
dataResponse.Error = err
return dataResponse
}
azurePortalUrl, err := resourcegraph.GetAzurePortalUrl(dsInfo.Cloud)
if err != nil {
dataResponse.Error = err
return dataResponse
}
dataResponse.Frames, err = e.parseResponse(data, query, azurePortalUrl)
if err != nil {
dataResponse.Error = err
return dataResponse
}
return dataResponse
}
func (e *AzureMonitorDatasource) createRequest(ctx context.Context, logger log.Logger, url string) (*http.Request, error) {
req, err := http.NewRequestWithContext(ctx, http.MethodGet, url, nil)
if err != nil {
logger.Debug("Failed to create request", "error", err)
return nil, fmt.Errorf("%v: %w", "Failed to create request", err)
}
req.Header.Set("Content-Type", "application/json")
return req, nil
}
func (e *AzureMonitorDatasource) unmarshalResponse(logger log.Logger, res *http.Response) (types.AzureMonitorResponse, error) {
body, err := io.ReadAll(res.Body)
if err != nil {
return types.AzureMonitorResponse{}, err
}
if res.StatusCode/100 != 2 {
logger.Debug("Request failed", "status", res.Status, "body", string(body))
return types.AzureMonitorResponse{}, fmt.Errorf("request failed, status: %s", res.Status)
}
var data types.AzureMonitorResponse
err = json.Unmarshal(body, &data)
if err != nil {
logger.Debug("Failed to unmarshal AzureMonitor response", "error", err, "status", res.Status, "body", string(body))
return types.AzureMonitorResponse{}, err
}
return data, nil
}
func (e *AzureMonitorDatasource) parseResponse(amr types.AzureMonitorResponse, query *types.AzureMonitorQuery, azurePortalUrl string) (data.Frames, error) {
if len(amr.Value) == 0 {
return nil, nil
}
frames := data.Frames{}
for _, series := range amr.Value[0].Timeseries {
labels := data.Labels{}
for _, md := range series.Metadatavalues {
labels[md.Name.LocalizedValue] = md.Value
}
frame := data.NewFrameOfFieldTypes("", len(series.Data), data.FieldTypeTime, data.FieldTypeNullableFloat64)
frame.RefID = query.RefID
timeField := frame.Fields[0]
timeField.Name = data.TimeSeriesTimeFieldName
dataField := frame.Fields[1]
dataField.Name = amr.Value[0].Name.LocalizedValue
dataField.Labels = labels
if amr.Value[0].Unit != "Unspecified" {
dataField.SetConfig(&data.FieldConfig{
Unit: toGrafanaUnit(amr.Value[0].Unit),
})
}
resourceID := labels["microsoft.resourceid"]
resourceIDSlice := strings.Split(resourceID, "/")
resourceName := ""
if len(resourceIDSlice) > 1 {
resourceName = resourceIDSlice[len(resourceIDSlice)-1]
} else {
// Deprecated: This is for backward compatibility, the URL should contain
// the resource ID
resourceName = extractResourceNameFromMetricsURL(query.URL)
resourceID = extractResourceIDFromMetricsURL(query.URL)
}
if query.Alias != "" {
displayName := formatAzureMonitorLegendKey(query.Alias, resourceName,
amr.Value[0].Name.LocalizedValue, "", "", amr.Namespace, amr.Value[0].ID, labels)
if dataField.Config != nil {
dataField.Config.DisplayName = displayName
} else {
dataField.SetConfig(&data.FieldConfig{
DisplayName: displayName,
})
}
}
requestedAgg := query.Params.Get("aggregation")
for i, point := range series.Data {
var value *float64
switch requestedAgg {
case "Average":
value = point.Average
case "Total":
value = point.Total
case "Maximum":
value = point.Maximum
case "Minimum":
value = point.Minimum
case "Count":
value = point.Count
default:
value = point.Count
}
frame.SetRow(i, point.TimeStamp, value)
}
queryUrl, err := getQueryUrl(query, azurePortalUrl, resourceID, resourceName)
if err != nil {
return nil, err
}
frameWithLink := resourcegraph.AddConfigLinks(*frame, queryUrl)
frames = append(frames, &frameWithLink)
}
return frames, nil
}
// Gets the deep link for the given query
func getQueryUrl(query *types.AzureMonitorQuery, azurePortalUrl, resourceID, resourceName string) (string, error) {
aggregationType := aggregationTypeMap["Average"]
aggregation := query.Params.Get("aggregation")
if aggregation != "" {
if aggType, ok := aggregationTypeMap[aggregation]; ok {
aggregationType = aggType
}
}
timespan, err := json.Marshal(map[string]interface{}{
"absolute": struct {
Start string `json:"startTime"`
End string `json:"endTime"`
}{
Start: query.TimeRange.From.UTC().Format(time.RFC3339Nano),
End: query.TimeRange.To.UTC().Format(time.RFC3339Nano),
},
})
if err != nil {
return "", err
}
escapedTime := url.QueryEscape(string(timespan))
chartDef, err := json.Marshal(map[string]interface{}{
"v2charts": []interface{}{
map[string]interface{}{
"metrics": []types.MetricChartDefinition{
{
ResourceMetadata: map[string]string{
"id": resourceID,
},
Name: query.Params.Get("metricnames"),
AggregationType: aggregationType,
Namespace: query.Params.Get("metricnamespace"),
MetricVisualization: types.MetricVisualization{
DisplayName: query.Params.Get("metricnames"),
ResourceDisplayName: resourceName,
},
},
},
},
},
})
if err != nil {
return "", err
}
escapedChart := url.QueryEscape(string(chartDef))
// Azure Portal will timeout if the chart definition includes a space character encoded as '+'.
// url.QueryEscape encodes spaces as '+'.
// Note: this will not encode '+' literals as those are already encoded as '%2B' by url.QueryEscape
escapedChart = strings.ReplaceAll(escapedChart, "+", "%20")
return fmt.Sprintf("%s/#blade/Microsoft_Azure_MonitoringMetrics/Metrics.ReactView/Referer/MetricsExplorer/TimeContext/%s/ChartDefinition/%s", azurePortalUrl, escapedTime, escapedChart), nil
}
// formatAzureMonitorLegendKey builds the legend key or timeseries name
// Alias patterns like {{resourcename}} are replaced with the appropriate data values.
func formatAzureMonitorLegendKey(alias string, resourceName string, metricName string, metadataName string,
metadataValue string, namespace string, seriesID string, labels data.Labels) string {
startIndex := strings.Index(seriesID, "/resourceGroups/") + 16
endIndex := strings.Index(seriesID, "/providers")
resourceGroup := seriesID[startIndex:endIndex]
// Could be a collision problem if there were two keys that varied only in case, but I don't think that would happen in azure.
lowerLabels := data.Labels{}
for k, v := range labels {
lowerLabels[strings.ToLower(k)] = v
}
keys := make([]string, 0, len(labels))
for k := range lowerLabels {
keys = append(keys, k)
}
sort.Strings(keys)
result := types.LegendKeyFormat.ReplaceAllFunc([]byte(alias), func(in []byte) []byte {
metaPartName := strings.Replace(string(in), "{{", "", 1)
metaPartName = strings.Replace(metaPartName, "}}", "", 1)
metaPartName = strings.ToLower(strings.TrimSpace(metaPartName))
if metaPartName == "resourcegroup" {
return []byte(resourceGroup)
}
if metaPartName == "namespace" {
return []byte(namespace)
}
if metaPartName == "resourcename" {
return []byte(resourceName)
}
if metaPartName == "metric" {
return []byte(metricName)
}
if metaPartName == "dimensionname" {
if len(keys) == 0 {
return []byte{}
}
return []byte(keys[0])
}
if metaPartName == "dimensionvalue" {
if len(keys) == 0 {
return []byte{}
}
return []byte(lowerLabels[keys[0]])
}
if v, ok := lowerLabels[metaPartName]; ok {
return []byte(v)
}
return in
})
return string(result)
}
// Map values from:
//
// https://docs.microsoft.com/en-us/rest/api/monitor/metrics/list#unit
//
// to
//
// https://github.com/grafana/grafana/blob/main/packages/grafana-data/src/valueFormats/categories.ts#L24
func toGrafanaUnit(unit string) string {
switch unit {
case "BitsPerSecond":
return "bps"
case "Bytes":
return "decbytes" // or ICE
case "BytesPerSecond":
return "Bps"
case "Count":
return "short" // this is used for integers
case "CountPerSecond":
return "cps"
case "Percent":
return "percent"
case "MilliSeconds":
return "ms"
case "Seconds":
return "s"
}
return unit // this will become a suffix in the display
// "ByteSeconds", "Cores", "MilliCores", and "NanoCores" all both:
// 1. Do not have a corresponding unit in Grafana's current list.
// 2. Do not have the unit listed in any of Azure Monitor's supported metrics anyways.
}
func extractResourceNameFromMetricsURL(url string) string {
matches := resourceNameLandmark.FindStringSubmatch(url)
resourceName := ""
if matches == nil {
return resourceName
}
for i, name := range resourceNameLandmark.SubexpNames() {
if name == "resourceName" {
resourceName = matches[i]
}
}
return resourceName
}
func extractResourceIDFromMetricsURL(url string) string {
return strings.Split(url, "/providers/microsoft.insights/metrics")[0]
}
| pkg/tsdb/azuremonitor/metrics/azuremonitor-datasource.go | 1 | https://github.com/grafana/grafana/commit/a7d4bbf0248d0c6e5c6fd9f9ca55ffd399781c12 | [
0.9965035915374756,
0.019164392724633217,
0.0001642339921090752,
0.0001755257835611701,
0.13425926864147186
] |
{
"id": 4,
"code_window": [
"\t}\n",
"\n",
"\treq.URL.Path = path.Join(req.URL.Path, query.URL)\n",
"\treq.URL.RawQuery = query.Params.Encode()\n",
"\tif query.Filter != \"\" {\n",
"\t\treq.Method = http.MethodPost\n"
],
"labels": [
"keep",
"keep",
"keep",
"keep",
"replace",
"keep"
],
"after_edit": [
"\tif query.BodyFilter != \"\" {\n"
],
"file_path": "pkg/tsdb/azuremonitor/metrics/azuremonitor-datasource.go",
"type": "replace",
"edit_start_line_idx": 202
} | import React from 'react';
import { HorizontalGroup, PluginSignatureBadge } from '@grafana/ui';
import { CatalogPlugin } from '../types';
import { PluginEnterpriseBadge, PluginDisabledBadge, PluginInstalledBadge, PluginUpdateAvailableBadge } from './Badges';
type PluginBadgeType = {
plugin: CatalogPlugin;
};
export function PluginListItemBadges({ plugin }: PluginBadgeType) {
if (plugin.isEnterprise) {
return (
<HorizontalGroup height="auto" wrap>
<PluginEnterpriseBadge plugin={plugin} />
{plugin.isDisabled && <PluginDisabledBadge error={plugin.error} />}
<PluginUpdateAvailableBadge plugin={plugin} />
</HorizontalGroup>
);
}
return (
<HorizontalGroup height="auto" wrap>
<PluginSignatureBadge status={plugin.signature} />
{plugin.isDisabled && <PluginDisabledBadge error={plugin.error} />}
{plugin.isInstalled && <PluginInstalledBadge />}
<PluginUpdateAvailableBadge plugin={plugin} />
</HorizontalGroup>
);
}
| public/app/features/plugins/admin/components/PluginListItemBadges.tsx | 0 | https://github.com/grafana/grafana/commit/a7d4bbf0248d0c6e5c6fd9f9ca55ffd399781c12 | [
0.0001775217242538929,
0.0001736427948344499,
0.0001707794435787946,
0.0001731349912006408,
0.000002437279135847348
] |
{
"id": 4,
"code_window": [
"\t}\n",
"\n",
"\treq.URL.Path = path.Join(req.URL.Path, query.URL)\n",
"\treq.URL.RawQuery = query.Params.Encode()\n",
"\tif query.Filter != \"\" {\n",
"\t\treq.Method = http.MethodPost\n"
],
"labels": [
"keep",
"keep",
"keep",
"keep",
"replace",
"keep"
],
"after_edit": [
"\tif query.BodyFilter != \"\" {\n"
],
"file_path": "pkg/tsdb/azuremonitor/metrics/azuremonitor-datasource.go",
"type": "replace",
"edit_start_line_idx": 202
} | #!/bin/bash
set -eo pipefail
cd grafana-mixin
go install github.com/monitoring-mixins/mixtool/cmd/mixtool
go install github.com/google/go-jsonnet/cmd/jsonnetfmt
make lint build
| scripts/mixin-check.sh | 0 | https://github.com/grafana/grafana/commit/a7d4bbf0248d0c6e5c6fd9f9ca55ffd399781c12 | [
0.00016939136548899114,
0.00016939136548899114,
0.00016939136548899114,
0.00016939136548899114,
0
] |
{
"id": 4,
"code_window": [
"\t}\n",
"\n",
"\treq.URL.Path = path.Join(req.URL.Path, query.URL)\n",
"\treq.URL.RawQuery = query.Params.Encode()\n",
"\tif query.Filter != \"\" {\n",
"\t\treq.Method = http.MethodPost\n"
],
"labels": [
"keep",
"keep",
"keep",
"keep",
"replace",
"keep"
],
"after_edit": [
"\tif query.BodyFilter != \"\" {\n"
],
"file_path": "pkg/tsdb/azuremonitor/metrics/azuremonitor-datasource.go",
"type": "replace",
"edit_start_line_idx": 202
} | ---
aliases:
- /docs/grafana/latest/developers/plugins/metadata/
- /docs/grafana/latest/plugins/developing/plugin.json/
keywords:
- grafana
- plugins
- documentation
title: plugin.json
---
# plugin.json
The plugin.json file is required for all plugins. When Grafana starts, it scans the plugin folders and mounts every folder that contains a plugin.json file unless the folder contains a subfolder named dist. In that case, Grafana mounts the dist folder instead.
## Properties
| Property | Type | Required | Description |
| -------------------- | ----------------------------- | -------- | ------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- |
| `dependencies` | [object](#dependencies) | **Yes** | Dependencies needed by the plugin. |
| `id` | string | **Yes** | Unique name of the plugin. If the plugin is published on grafana.com, then the plugin id has to follow the naming conventions. |
| `info` | [object](#info) | **Yes** | Metadata for the plugin. Some fields are used on the plugins page in Grafana and others on grafana.com if the plugin is published. |
| `name` | string | **Yes** | Human-readable name of the plugin that is shown to the user in the UI. |
| `type` | string | **Yes** | Plugin type. Possible values are: `app`, `datasource`, `panel`. |
| `$schema` | string | No | Schema definition for the plugin.json file. |
| `alerting` | boolean | No | For data source plugins, if the plugin supports alerting. |
| `annotations` | boolean | No | For data source plugins, if the plugin supports annotation queries. |
| `autoEnabled` | boolean | No | Set to true for app plugins that should be enabled by default in all orgs |
| `backend` | boolean | No | If the plugin has a backend component. |
| `category` | string | No | Plugin category used on the Add data source page. Possible values are: `tsdb`, `logging`, `cloud`, `tracing`, `sql`, `enterprise`, `other`. |
| `enterpriseFeatures` | [object](#enterprisefeatures) | No | Grafana Enerprise specific features. |
| `executable` | string | No | The first part of the file name of the backend component executable. There can be multiple executables built for different operating system and architecture. Grafana will check for executables named `<executable>_<$GOOS>_<lower case $GOARCH><.exe for Windows>`, e.g. `plugin_linux_amd64`. Combination of $GOOS and $GOARCH can be found here: https://golang.org/doc/install/source#environment. |
| `hiddenQueries` | boolean | No | For data source plugins, include hidden queries in the data request. |
| `includes` | [object](#includes)[] | No | Resources to include in plugin. |
| `logs` | boolean | No | For data source plugins, if the plugin supports logs. |
| `metrics` | boolean | No | For data source plugins, if the plugin supports metric queries. Used in Explore. |
| `preload` | boolean | No | Initialize plugin on startup. By default, the plugin initializes on first use. |
| `queryOptions` | [object](#queryoptions) | No | For data source plugins. There is a query options section in the plugin's query editor and these options can be turned on if needed. |
| `routes` | [object](#routes)[] | No | For data source plugins. Proxy routes used for plugin authentication and adding headers to HTTP requests made by the plugin. For more information, refer to [Authentication for data source plugins](https://grafana.com/docs/grafana/latest/developers/plugins/authentication/). |
| `skipDataQuery` | boolean | No | For panel plugins. Hides the query editor. |
| `state` | string | No | Marks a plugin as a pre-release. Possible values are: `alpha`, `beta`. |
| `streaming` | boolean | No | For data source plugins, if the plugin supports streaming. |
| `tables` | boolean | No | This is an undocumented feature. |
| `tracing` | boolean | No | For data source plugins, if the plugin supports tracing. |
## dependencies
Dependencies needed by the plugin.
### Properties
| Property | Type | Required | Description |
| ------------------- | -------------------- | -------- | ----------------------------------------------------------------------------------------------------------------------------- |
| `grafanaDependency` | string | **Yes** | Required Grafana version for this plugin. Validated using https://github.com/npm/node-semver. |
| `grafanaVersion` | string | No | (Deprecated) Required Grafana version for this plugin, e.g. `6.x.x 7.x.x` to denote plugin requires Grafana v6.x.x or v7.x.x. |
| `plugins` | [object](#plugins)[] | No | An array of required plugins on which this plugin depends. |
### plugins
Plugin dependency. Used to display information about plugin dependencies in the Grafana UI.
#### Properties
| Property | Type | Required | Description |
| --------- | ------ | -------- | -------------------------------------------------- |
| `id` | string | **Yes** | |
| `name` | string | **Yes** | |
| `type` | string | **Yes** | Possible values are: `app`, `datasource`, `panel`. |
| `version` | string | **Yes** | |
## enterpriseFeatures
Grafana Enerprise specific features.
### Properties
| Property | Type | Required | Description |
| ------------------------- | ------- | -------- | ------------------------------------------------------------------- |
| `healthDiagnosticsErrors` | boolean | No | Enable/Disable health diagnostics errors. Requires Grafana >=7.5.5. |
## includes
### Properties
| Property | Type | Required | Description |
| ------------ | ------- | -------- | ----------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- |
| `addToNav` | boolean | No | Add the include to the side menu. |
| `component` | string | No | (Legacy) The Angular component to use for a page. |
| `defaultNav` | boolean | No | Page or dashboard when user clicks the icon in the side menu. |
| `icon` | string | No | Icon to use in the side menu. For information on available icon, refer to [Icons Overview](https://developers.grafana.com/ui/latest/index.html?path=/story/docs-overview-icon--icons-overview). |
| `name` | string | No | |
| `path` | string | No | Used for app plugins. |
| `role` | string | No | Possible values are: `Admin`, `Editor`, `Viewer`. |
| `type` | string | No | Possible values are: `dashboard`, `page`, `panel`, `datasource`. |
| `uid` | string | No | Unique identifier of the included resource |
## info
Metadata for the plugin. Some fields are used on the plugins page in Grafana and others on grafana.com if the plugin is published.
### Properties
| Property | Type | Required | Description |
| ------------- | ------------------------ | -------- | ----------------------------------------------------------------------------------------------------------------------------- |
| `keywords` | string[] | **Yes** | Array of plugin keywords. Used for search on grafana.com. |
| `logos` | [object](#logos) | **Yes** | SVG images that are used as plugin icons. |
| `updated` | string | **Yes** | Date when this plugin was built. |
| `version` | string | **Yes** | Project version of this commit, e.g. `6.7.x`. |
| `author` | [object](#author) | No | Information about the plugin author. |
| `build` | [object](#build) | No | Build information |
| `description` | string | No | Description of plugin. Used on the plugins page in Grafana and for search on grafana.com. |
| `links` | [object](#links)[] | No | An array of link objects to be displayed on this plugin's project page in the form `{name: 'foo', url: 'http://example.com'}` |
| `screenshots` | [object](#screenshots)[] | No | An array of screenshot objects in the form `{name: 'bar', path: 'img/screenshot.png'}` |
### author
Information about the plugin author.
#### Properties
| Property | Type | Required | Description |
| -------- | ------ | -------- | ------------------------- |
| `email` | string | No | Author's name. |
| `name` | string | No | Author's name. |
| `url` | string | No | Link to author's website. |
### build
Build information
#### Properties
| Property | Type | Required | Description |
| -------- | ------ | -------- | ---------------------------------------------------- |
| `branch` | string | No | Git branch the plugin was built from. |
| `hash` | string | No | Git hash of the commit the plugin was built from |
| `number` | number | No | |
| `pr` | number | No | GitHub pull request the plugin was built from |
| `repo` | string | No | |
| `time` | number | No | Time when the plugin was built, as a Unix timestamp. |
### links
#### Properties
| Property | Type | Required | Description |
| -------- | ------ | -------- | ----------- |
| `name` | string | No | |
| `url` | string | No | |
### logos
SVG images that are used as plugin icons.
#### Properties
| Property | Type | Required | Description |
| -------- | ------ | -------- | ---------------------------------------------------------------------------------------------------------------------------- |
| `large` | string | **Yes** | Link to the "large" version of the plugin logo, which must be an SVG image. "Large" and "small" logos can be the same image. |
| `small` | string | **Yes** | Link to the "small" version of the plugin logo, which must be an SVG image. "Large" and "small" logos can be the same image. |
### screenshots
#### Properties
| Property | Type | Required | Description |
| -------- | ------ | -------- | ----------- |
| `name` | string | No | |
| `path` | string | No | |
## queryOptions
For data source plugins. There is a query options section in the plugin's query editor and these options can be turned on if needed.
### Properties
| Property | Type | Required | Description |
| --------------- | ------- | -------- | -------------------------------------------------------------------------------------------------------------------------- |
| `cacheTimeout` | boolean | No | For data source plugins. If the `cache timeout` option should be shown in the query options section in the query editor. |
| `maxDataPoints` | boolean | No | For data source plugins. If the `max data points` option should be shown in the query options section in the query editor. |
| `minInterval` | boolean | No | For data source plugins. If the `min interval` option should be shown in the query options section in the query editor. |
## routes
For data source plugins. Proxy routes used for plugin authentication and adding headers to HTTP requests made by the plugin. For more information, refer to [Authentication for data source plugins](https://grafana.com/docs/grafana/latest/developers/plugins/authentication/).
### Properties
| Property | Type | Required | Description |
| -------------- | ----------------------- | -------- | ----------------------------------------------------------------------------------------------------------------------------------------- |
| `body` | [object](#body) | No | For data source plugins. Route headers set the body content and length to the proxied request. |
| `headers` | array | No | For data source plugins. Route headers adds HTTP headers to the proxied request. |
| `jwtTokenAuth` | [object](#jwttokenauth) | No | For data source plugins. Token authentication section used with an JWT OAuth API. |
| `method` | string | No | For data source plugins. Route method matches the HTTP verb like GET or POST. Multiple methods can be provided as a comma-separated list. |
| `path` | string | No | For data source plugins. The route path that is replaced by the route URL field when proxying the call. |
| `reqRole` | string | No | |
| `reqSignedIn` | boolean | No | |
| `tokenAuth` | [object](#tokenauth) | No | For data source plugins. Token authentication section used with an OAuth API. |
| `url` | string | No | For data source plugins. Route URL is where the request is proxied to. |
### body
For data source plugins. Route headers set the body content and length to the proxied request.
| Property | Type | Required | Description |
| -------- | ---- | -------- | ----------- |
### jwtTokenAuth
For data source plugins. Token authentication section used with an JWT OAuth API.
#### Properties
| Property | Type | Required | Description |
| -------- | ----------------- | -------- | --------------------------------------------------------------------- |
| `params` | [object](#params) | No | Parameters for the JWT token authentication request. |
| `scopes` | string[] | No | The list of scopes that your application should be granted access to. |
| `url` | string | No | URL to fetch the JWT token. |
#### params
Parameters for the JWT token authentication request.
##### Properties
| Property | Type | Required | Description |
| -------------- | ------ | -------- | ----------- |
| `client_email` | string | No | |
| `private_key` | string | No | |
| `token_uri` | string | No | |
### tokenAuth
For data source plugins. Token authentication section used with an OAuth API.
#### Properties
| Property | Type | Required | Description |
| -------- | ----------------- | -------- | --------------------------------------------------------------------- |
| `params` | [object](#params) | No | Parameters for the token authentication request. |
| `scopes` | string[] | No | The list of scopes that your application should be granted access to. |
| `url` | string | No | URL to fetch the authentication token. |
#### params
Parameters for the token authentication request.
##### Properties
| Property | Type | Required | Description |
| --------------- | ------ | -------- | ----------------------------------------------------------------------------------------- |
| `client_id` | string | No | OAuth client ID |
| `client_secret` | string | No | OAuth client secret. Usually populated by decrypting the secret from the SecureJson blob. |
| `grant_type` | string | No | OAuth grant type |
| `resource` | string | No | OAuth resource |
| docs/sources/developers/plugins/metadata.md | 0 | https://github.com/grafana/grafana/commit/a7d4bbf0248d0c6e5c6fd9f9ca55ffd399781c12 | [
0.0007944642566144466,
0.00019538108608685434,
0.00016179136582650244,
0.00016792496899142861,
0.00012074974802089855
] |
{
"id": 5,
"code_window": [
"\t\treq.Method = http.MethodPost\n",
"\t\treq.Body = io.NopCloser(strings.NewReader(fmt.Sprintf(`{\"filter\": \"%s\"}`, query.Filter)))\n",
"\t}\n",
"\n",
"\tctx, span := tracer.Start(ctx, \"azuremonitor query\")\n",
"\tspan.SetAttributes(\"target\", query.Target, attribute.Key(\"target\").String(query.Target))\n",
"\tspan.SetAttributes(\"from\", query.TimeRange.From.UnixNano()/int64(time.Millisecond), attribute.Key(\"from\").Int64(query.TimeRange.From.UnixNano()/int64(time.Millisecond)))\n"
],
"labels": [
"keep",
"replace",
"keep",
"keep",
"keep",
"keep",
"keep"
],
"after_edit": [
"\t\treq.Body = io.NopCloser(strings.NewReader(fmt.Sprintf(`{\"filter\": \"%s\"}`, query.BodyFilter)))\n"
],
"file_path": "pkg/tsdb/azuremonitor/metrics/azuremonitor-datasource.go",
"type": "replace",
"edit_start_line_idx": 204
} | package metrics
import (
"context"
"encoding/json"
"fmt"
"io"
"net/http"
"net/url"
"path"
"regexp"
"sort"
"strings"
"time"
"github.com/grafana/grafana-plugin-sdk-go/backend"
"github.com/grafana/grafana-plugin-sdk-go/data"
"go.opentelemetry.io/otel/attribute"
"github.com/grafana/grafana/pkg/infra/log"
"github.com/grafana/grafana/pkg/infra/tracing"
"github.com/grafana/grafana/pkg/setting"
"github.com/grafana/grafana/pkg/tsdb/azuremonitor/resourcegraph"
azTime "github.com/grafana/grafana/pkg/tsdb/azuremonitor/time"
"github.com/grafana/grafana/pkg/tsdb/azuremonitor/types"
)
// AzureMonitorDatasource calls the Azure Monitor API - one of the four API's supported
type AzureMonitorDatasource struct {
Proxy types.ServiceProxy
}
var (
// Used to convert the aggregation value to the Azure enum for deep linking
aggregationTypeMap = map[string]int{"None": 0, "Total": 1, "Minimum": 2, "Maximum": 3, "Average": 4, "Count": 7}
resourceNameLandmark = regexp.MustCompile(`(?i)(/(?P<resourceName>[\w-\.]+)/providers/Microsoft\.Insights/metrics)`)
)
const AzureMonitorAPIVersion = "2021-05-01"
func (e *AzureMonitorDatasource) ResourceRequest(rw http.ResponseWriter, req *http.Request, cli *http.Client) {
e.Proxy.Do(rw, req, cli)
}
// executeTimeSeriesQuery does the following:
// 1. build the AzureMonitor url and querystring for each query
// 2. executes each query by calling the Azure Monitor API
// 3. parses the responses for each query into data frames
func (e *AzureMonitorDatasource) ExecuteTimeSeriesQuery(ctx context.Context, logger log.Logger, originalQueries []backend.DataQuery, dsInfo types.DatasourceInfo, client *http.Client, url string, tracer tracing.Tracer) (*backend.QueryDataResponse, error) {
result := backend.NewQueryDataResponse()
ctxLogger := logger.FromContext(ctx)
queries, err := e.buildQueries(ctxLogger, originalQueries, dsInfo)
if err != nil {
return nil, err
}
for _, query := range queries {
result.Responses[query.RefID] = e.executeQuery(ctx, ctxLogger, query, dsInfo, client, url, tracer)
}
return result, nil
}
func (e *AzureMonitorDatasource) buildQueries(logger log.Logger, queries []backend.DataQuery, dsInfo types.DatasourceInfo) ([]*types.AzureMonitorQuery, error) {
azureMonitorQueries := []*types.AzureMonitorQuery{}
for _, query := range queries {
var target string
queryJSONModel := types.AzureMonitorJSONQuery{}
err := json.Unmarshal(query.JSON, &queryJSONModel)
if err != nil {
return nil, fmt.Errorf("failed to decode the Azure Monitor query object from JSON: %w", err)
}
azJSONModel := queryJSONModel.AzureMonitor
// Legacy: If only MetricDefinition is set, use it as namespace
if azJSONModel.MetricDefinition != "" && azJSONModel.MetricNamespace == "" {
azJSONModel.MetricNamespace = azJSONModel.MetricDefinition
}
azJSONModel.DimensionFilters = MigrateDimensionFilters(azJSONModel.DimensionFilters)
alias := azJSONModel.Alias
timeGrain := azJSONModel.TimeGrain
timeGrains := azJSONModel.AllowedTimeGrainsMs
if timeGrain == "auto" {
timeGrain, err = azTime.SetAutoTimeGrain(query.Interval.Milliseconds(), timeGrains)
if err != nil {
return nil, err
}
}
params := url.Values{}
params.Add("api-version", AzureMonitorAPIVersion)
params.Add("timespan", fmt.Sprintf("%v/%v", query.TimeRange.From.UTC().Format(time.RFC3339), query.TimeRange.To.UTC().Format(time.RFC3339)))
params.Add("interval", timeGrain)
params.Add("aggregation", azJSONModel.Aggregation)
params.Add("metricnames", azJSONModel.MetricName)
if azJSONModel.CustomNamespace != "" {
params.Add("metricnamespace", azJSONModel.CustomNamespace)
} else {
params.Add("metricnamespace", azJSONModel.MetricNamespace)
}
azureURL := BuildSubscriptionMetricsURL(queryJSONModel.Subscription)
if azJSONModel.Region != "" {
params.Add("region", azJSONModel.Region)
} else {
// Deprecated, if no region is specified, only one resource group and name is supported
ub := urlBuilder{
ResourceURI: azJSONModel.ResourceURI,
// Alternative, used to reconstruct resource URI if it's not present
DefaultSubscription: dsInfo.Settings.SubscriptionId,
Subscription: queryJSONModel.Subscription,
ResourceGroup: azJSONModel.ResourceGroup,
MetricNamespace: azJSONModel.MetricNamespace,
ResourceName: azJSONModel.ResourceName,
}
azureURL = ub.BuildMetricsURL()
}
// old model
dimension := strings.TrimSpace(azJSONModel.Dimension)
dimensionFilter := strings.TrimSpace(azJSONModel.DimensionFilter)
dimSB := strings.Builder{}
if dimension != "" && dimensionFilter != "" && dimension != "None" && len(azJSONModel.DimensionFilters) == 0 {
dimSB.WriteString(fmt.Sprintf("%s eq '%s'", dimension, dimensionFilter))
} else {
for i, filter := range azJSONModel.DimensionFilters {
if len(filter.Filters) == 0 {
dimSB.WriteString(fmt.Sprintf("%s eq '*'", filter.Dimension))
} else {
dimSB.WriteString(filter.ConstructFiltersString())
}
if i != len(azJSONModel.DimensionFilters)-1 {
dimSB.WriteString(" and ")
}
}
}
resourceIDs := []string{}
for _, r := range azJSONModel.Resources {
ub := urlBuilder{
DefaultSubscription: dsInfo.Settings.SubscriptionId,
Subscription: queryJSONModel.Subscription,
ResourceGroup: r.ResourceGroup,
MetricNamespace: azJSONModel.MetricNamespace,
ResourceName: r.ResourceName,
}
resourceIDs = append(resourceIDs, fmt.Sprintf("Microsoft.ResourceId eq '%s'", ub.buildResourceURI()))
}
filterString := strings.Join(resourceIDs, " or ")
if dimSB.String() != "" {
if filterString != "" {
filterString = fmt.Sprintf("(%s) and (%s)", filterString, dimSB.String())
} else {
filterString = dimSB.String()
}
if azJSONModel.Top != "" {
params.Add("top", azJSONModel.Top)
}
}
target = params.Encode()
if setting.Env == setting.Dev {
logger.Debug("Azuremonitor request", "params", params)
}
azureMonitorQueries = append(azureMonitorQueries, &types.AzureMonitorQuery{
URL: azureURL,
Target: target,
Params: params,
RefID: query.RefID,
Alias: alias,
TimeRange: query.TimeRange,
Filter: filterString,
})
}
return azureMonitorQueries, nil
}
func (e *AzureMonitorDatasource) executeQuery(ctx context.Context, logger log.Logger, query *types.AzureMonitorQuery, dsInfo types.DatasourceInfo, cli *http.Client,
url string, tracer tracing.Tracer) backend.DataResponse {
dataResponse := backend.DataResponse{}
req, err := e.createRequest(ctx, logger, url)
if err != nil {
dataResponse.Error = err
return dataResponse
}
req.URL.Path = path.Join(req.URL.Path, query.URL)
req.URL.RawQuery = query.Params.Encode()
if query.Filter != "" {
req.Method = http.MethodPost
req.Body = io.NopCloser(strings.NewReader(fmt.Sprintf(`{"filter": "%s"}`, query.Filter)))
}
ctx, span := tracer.Start(ctx, "azuremonitor query")
span.SetAttributes("target", query.Target, attribute.Key("target").String(query.Target))
span.SetAttributes("from", query.TimeRange.From.UnixNano()/int64(time.Millisecond), attribute.Key("from").Int64(query.TimeRange.From.UnixNano()/int64(time.Millisecond)))
span.SetAttributes("until", query.TimeRange.To.UnixNano()/int64(time.Millisecond), attribute.Key("until").Int64(query.TimeRange.To.UnixNano()/int64(time.Millisecond)))
span.SetAttributes("datasource_id", dsInfo.DatasourceID, attribute.Key("datasource_id").Int64(dsInfo.DatasourceID))
span.SetAttributes("org_id", dsInfo.OrgID, attribute.Key("org_id").Int64(dsInfo.OrgID))
defer span.End()
tracer.Inject(ctx, req.Header, span)
logger.Debug("AzureMonitor", "Request ApiURL", req.URL.String())
logger.Debug("AzureMonitor", "Target", query.Target)
res, err := cli.Do(req)
if err != nil {
dataResponse.Error = err
return dataResponse
}
defer func() {
if err := res.Body.Close(); err != nil {
logger.Warn("Failed to close response body", "err", err)
}
}()
data, err := e.unmarshalResponse(logger, res)
if err != nil {
dataResponse.Error = err
return dataResponse
}
azurePortalUrl, err := resourcegraph.GetAzurePortalUrl(dsInfo.Cloud)
if err != nil {
dataResponse.Error = err
return dataResponse
}
dataResponse.Frames, err = e.parseResponse(data, query, azurePortalUrl)
if err != nil {
dataResponse.Error = err
return dataResponse
}
return dataResponse
}
func (e *AzureMonitorDatasource) createRequest(ctx context.Context, logger log.Logger, url string) (*http.Request, error) {
req, err := http.NewRequestWithContext(ctx, http.MethodGet, url, nil)
if err != nil {
logger.Debug("Failed to create request", "error", err)
return nil, fmt.Errorf("%v: %w", "Failed to create request", err)
}
req.Header.Set("Content-Type", "application/json")
return req, nil
}
func (e *AzureMonitorDatasource) unmarshalResponse(logger log.Logger, res *http.Response) (types.AzureMonitorResponse, error) {
body, err := io.ReadAll(res.Body)
if err != nil {
return types.AzureMonitorResponse{}, err
}
if res.StatusCode/100 != 2 {
logger.Debug("Request failed", "status", res.Status, "body", string(body))
return types.AzureMonitorResponse{}, fmt.Errorf("request failed, status: %s", res.Status)
}
var data types.AzureMonitorResponse
err = json.Unmarshal(body, &data)
if err != nil {
logger.Debug("Failed to unmarshal AzureMonitor response", "error", err, "status", res.Status, "body", string(body))
return types.AzureMonitorResponse{}, err
}
return data, nil
}
// parseResponse converts an Azure Monitor metrics API response into data
// frames, one frame per returned time series. It returns (nil, nil) when the
// response carries no metric values. azurePortalUrl is used to attach an
// Azure Portal deep link to every frame.
// NOTE(review): only amr.Value[0] is read below — presumably a metrics query
// always yields exactly one value entry; confirm against the API contract.
func (e *AzureMonitorDatasource) parseResponse(amr types.AzureMonitorResponse, query *types.AzureMonitorQuery, azurePortalUrl string) (data.Frames, error) {
	if len(amr.Value) == 0 {
		return nil, nil
	}
	frames := data.Frames{}
	for _, series := range amr.Value[0].Timeseries {
		// Dimension metadata values become the value field's labels.
		labels := data.Labels{}
		for _, md := range series.Metadatavalues {
			labels[md.Name.LocalizedValue] = md.Value
		}
		// One time field plus one nullable float value field, pre-sized to the
		// number of data points so SetRow can be used below.
		frame := data.NewFrameOfFieldTypes("", len(series.Data), data.FieldTypeTime, data.FieldTypeNullableFloat64)
		frame.RefID = query.RefID
		timeField := frame.Fields[0]
		timeField.Name = data.TimeSeriesTimeFieldName
		dataField := frame.Fields[1]
		dataField.Name = amr.Value[0].Name.LocalizedValue
		dataField.Labels = labels
		// Only attach a unit when Azure reports a concrete one.
		if amr.Value[0].Unit != "Unspecified" {
			dataField.SetConfig(&data.FieldConfig{
				Unit: toGrafanaUnit(amr.Value[0].Unit),
			})
		}
		// Prefer the resource ID reported in the series metadata; fall back to
		// parsing it out of the request URL for legacy queries.
		resourceID := labels["microsoft.resourceid"]
		resourceIDSlice := strings.Split(resourceID, "/")
		resourceName := ""
		if len(resourceIDSlice) > 1 {
			resourceName = resourceIDSlice[len(resourceIDSlice)-1]
		} else {
			// Deprecated: This is for backward compatibility, the URL should contain
			// the resource ID
			resourceName = extractResourceNameFromMetricsURL(query.URL)
			resourceID = extractResourceIDFromMetricsURL(query.URL)
		}
		if query.Alias != "" {
			displayName := formatAzureMonitorLegendKey(query.Alias, resourceName,
				amr.Value[0].Name.LocalizedValue, "", "", amr.Namespace, amr.Value[0].ID, labels)
			// SetConfig would replace the config (and drop the unit set above),
			// so mutate the existing config when one is already present.
			if dataField.Config != nil {
				dataField.Config.DisplayName = displayName
			} else {
				dataField.SetConfig(&data.FieldConfig{
					DisplayName: displayName,
				})
			}
		}
		requestedAgg := query.Params.Get("aggregation")
		for i, point := range series.Data {
			var value *float64
			switch requestedAgg {
			case "Average":
				value = point.Average
			case "Total":
				value = point.Total
			case "Maximum":
				value = point.Maximum
			case "Minimum":
				value = point.Minimum
			case "Count":
				value = point.Count
			default:
				// Unknown aggregation: fall back to Count rather than drop data.
				value = point.Count
			}
			frame.SetRow(i, point.TimeStamp, value)
		}
		queryUrl, err := getQueryUrl(query, azurePortalUrl, resourceID, resourceName)
		if err != nil {
			return nil, err
		}
		frameWithLink := resourcegraph.AddConfigLinks(*frame, queryUrl)
		frames = append(frames, &frameWithLink)
	}
	return frames, nil
}
// Gets the deep link for the given query
//
// The link opens the Azure Portal metrics explorer pre-populated with the
// query's time range, resource, metric and aggregation. The timespan and the
// chart definition are JSON documents URL-encoded into the URL fragment.
func getQueryUrl(query *types.AzureMonitorQuery, azurePortalUrl, resourceID, resourceName string) (string, error) {
	// Default to Average when the query carries no (or an unknown) aggregation.
	aggregationType := aggregationTypeMap["Average"]
	aggregation := query.Params.Get("aggregation")
	if aggregation != "" {
		if aggType, ok := aggregationTypeMap[aggregation]; ok {
			aggregationType = aggType
		}
	}
	// Absolute time range in RFC3339Nano, as expected by the portal.
	timespan, err := json.Marshal(map[string]interface{}{
		"absolute": struct {
			Start string `json:"startTime"`
			End string `json:"endTime"`
		}{
			Start: query.TimeRange.From.UTC().Format(time.RFC3339Nano),
			End: query.TimeRange.To.UTC().Format(time.RFC3339Nano),
		},
	})
	if err != nil {
		return "", err
	}
	escapedTime := url.QueryEscape(string(timespan))
	// Single-chart definition referencing the resource, metric name/namespace
	// and the aggregation resolved above.
	chartDef, err := json.Marshal(map[string]interface{}{
		"v2charts": []interface{}{
			map[string]interface{}{
				"metrics": []types.MetricChartDefinition{
					{
						ResourceMetadata: map[string]string{
							"id": resourceID,
						},
						Name: query.Params.Get("metricnames"),
						AggregationType: aggregationType,
						Namespace: query.Params.Get("metricnamespace"),
						MetricVisualization: types.MetricVisualization{
							DisplayName: query.Params.Get("metricnames"),
							ResourceDisplayName: resourceName,
						},
					},
				},
			},
		},
	})
	if err != nil {
		return "", err
	}
	escapedChart := url.QueryEscape(string(chartDef))
	// Azure Portal will timeout if the chart definition includes a space character encoded as '+'.
	// url.QueryEscape encodes spaces as '+'.
	// Note: this will not encode '+' literals as those are already encoded as '%2B' by url.QueryEscape
	escapedChart = strings.ReplaceAll(escapedChart, "+", "%20")
	return fmt.Sprintf("%s/#blade/Microsoft_Azure_MonitoringMetrics/Metrics.ReactView/Referer/MetricsExplorer/TimeContext/%s/ChartDefinition/%s", azurePortalUrl, escapedTime, escapedChart), nil
}
// formatAzureMonitorLegendKey builds the legend key or timeseries name
// Alias patterns like {{resourcename}} are replaced with the appropriate data values.
// Unknown {{...}} tokens that do not match a label are left in place.
func formatAzureMonitorLegendKey(alias string, resourceName string, metricName string, metadataName string,
	metadataValue string, namespace string, seriesID string, labels data.Labels) string {
	// Extract the resource group from a fully-qualified resource ID of the
	// shape /subscriptions/…/resourceGroups/<rg>/providers/…. Guard against
	// malformed IDs: the previous unguarded slice panicked when either
	// landmark was missing (strings.Index returning -1).
	const rgLandmark = "/resourceGroups/"
	resourceGroup := ""
	startIndex := strings.Index(seriesID, rgLandmark)
	endIndex := strings.Index(seriesID, "/providers")
	if startIndex >= 0 && endIndex >= startIndex+len(rgLandmark) {
		resourceGroup = seriesID[startIndex+len(rgLandmark) : endIndex]
	}

	// Could be a collision problem if there were two keys that varied only in case, but I don't think that would happen in azure.
	lowerLabels := data.Labels{}
	for k, v := range labels {
		lowerLabels[strings.ToLower(k)] = v
	}
	// Sorted lower-cased keys give {{dimensionname}}/{{dimensionvalue}} a
	// deterministic "first" dimension.
	keys := make([]string, 0, len(labels))
	for k := range lowerLabels {
		keys = append(keys, k)
	}
	sort.Strings(keys)

	result := types.LegendKeyFormat.ReplaceAllFunc([]byte(alias), func(in []byte) []byte {
		// Strip the {{ }} delimiters and normalize the token.
		metaPartName := strings.Replace(string(in), "{{", "", 1)
		metaPartName = strings.Replace(metaPartName, "}}", "", 1)
		metaPartName = strings.ToLower(strings.TrimSpace(metaPartName))

		switch metaPartName {
		case "resourcegroup":
			return []byte(resourceGroup)
		case "namespace":
			return []byte(namespace)
		case "resourcename":
			return []byte(resourceName)
		case "metric":
			return []byte(metricName)
		case "dimensionname":
			if len(keys) == 0 {
				return []byte{}
			}
			return []byte(keys[0])
		case "dimensionvalue":
			if len(keys) == 0 {
				return []byte{}
			}
			return []byte(lowerLabels[keys[0]])
		}

		// Fall back to a (case-insensitive) label lookup.
		if v, ok := lowerLabels[metaPartName]; ok {
			return []byte(v)
		}
		return in
	})

	return string(result)
}
// Map values from:
//
//	https://docs.microsoft.com/en-us/rest/api/monitor/metrics/list#unit
//
// to
//
//	https://github.com/grafana/grafana/blob/main/packages/grafana-data/src/valueFormats/categories.ts#L24
func toGrafanaUnit(unit string) string {
	grafanaUnits := map[string]string{
		"BitsPerSecond":  "bps",
		"Bytes":          "decbytes", // or ICE
		"BytesPerSecond": "Bps",
		"Count":          "short", // this is used for integers
		"CountPerSecond": "cps",
		"Percent":        "percent",
		"MilliSeconds":   "ms",
		"Seconds":        "s",
	}
	if mapped, ok := grafanaUnits[unit]; ok {
		return mapped
	}
	// Unmapped units ("ByteSeconds", "Cores", "MilliCores", "NanoCores", ...)
	// pass through unchanged and become a plain display suffix.
	return unit
}
// extractResourceNameFromMetricsURL pulls the resource name out of a legacy
// metrics URL via the package-level resourceNameLandmark pattern. Returns the
// empty string when the URL does not match. When several capture groups share
// the "resourceName" name, the last one wins (matching the regexp order).
func extractResourceNameFromMetricsURL(url string) string {
	matches := resourceNameLandmark.FindStringSubmatch(url)
	if matches == nil {
		return ""
	}
	name := ""
	for i, groupName := range resourceNameLandmark.SubexpNames() {
		if groupName == "resourceName" {
			name = matches[i]
		}
	}
	return name
}
// extractResourceIDFromMetricsURL returns everything preceding the
// "/providers/microsoft.insights/metrics" marker of a legacy metrics URL,
// i.e. the Azure resource ID. The input is returned unchanged when the
// marker is absent.
func extractResourceIDFromMetricsURL(url string) string {
	if idx := strings.Index(url, "/providers/microsoft.insights/metrics"); idx >= 0 {
		return url[:idx]
	}
	return url
}
| pkg/tsdb/azuremonitor/metrics/azuremonitor-datasource.go | 1 | https://github.com/grafana/grafana/commit/a7d4bbf0248d0c6e5c6fd9f9ca55ffd399781c12 | [
0.9982936978340149,
0.03788478299975395,
0.00015820162661839277,
0.0001737064158078283,
0.18831801414489746
] |
{
"id": 5,
"code_window": [
"\t\treq.Method = http.MethodPost\n",
"\t\treq.Body = io.NopCloser(strings.NewReader(fmt.Sprintf(`{\"filter\": \"%s\"}`, query.Filter)))\n",
"\t}\n",
"\n",
"\tctx, span := tracer.Start(ctx, \"azuremonitor query\")\n",
"\tspan.SetAttributes(\"target\", query.Target, attribute.Key(\"target\").String(query.Target))\n",
"\tspan.SetAttributes(\"from\", query.TimeRange.From.UnixNano()/int64(time.Millisecond), attribute.Key(\"from\").Int64(query.TimeRange.From.UnixNano()/int64(time.Millisecond)))\n"
],
"labels": [
"keep",
"replace",
"keep",
"keep",
"keep",
"keep",
"keep"
],
"after_edit": [
"\t\treq.Body = io.NopCloser(strings.NewReader(fmt.Sprintf(`{\"filter\": \"%s\"}`, query.BodyFilter)))\n"
],
"file_path": "pkg/tsdb/azuremonitor/metrics/azuremonitor-datasource.go",
"type": "replace",
"edit_start_line_idx": 204
} | // 🌟 This was machine generated. Do not edit. 🌟
//
// Frame[0] {
// "type": "search-results",
// "custom": {
// "count": 1
// }
// }
// Name: Query results
// Dimensions: 8 Fields by 1 Rows
// +----------------+----------------+----------------+------------------+----------------+--------------------------+-------------------------+----------------+
// | Name: kind | Name: uid | Name: name | Name: panel_type | Name: url | Name: tags | Name: ds_uid | Name: location |
// | Labels: | Labels: | Labels: | Labels: | Labels: | Labels: | Labels: | Labels: |
// | Type: []string | Type: []string | Type: []string | Type: []string | Type: []string | Type: []*json.RawMessage | Type: []json.RawMessage | Type: []string |
// +----------------+----------------+----------------+------------------+----------------+--------------------------+-------------------------+----------------+
// | dashboard | 1 | heat-torkel | | /pfix/d/1/ | null | [] | |
// +----------------+----------------+----------------+------------------+----------------+--------------------------+-------------------------+----------------+
//
//
// 🌟 This was machine generated. Do not edit. 🌟
{
"status": 200,
"frames": [
{
"schema": {
"name": "Query results",
"meta": {
"type": "search-results",
"custom": {
"count": 1
}
},
"fields": [
{
"name": "kind",
"type": "string",
"typeInfo": {
"frame": "string"
}
},
{
"name": "uid",
"type": "string",
"typeInfo": {
"frame": "string"
}
},
{
"name": "name",
"type": "string",
"typeInfo": {
"frame": "string"
}
},
{
"name": "panel_type",
"type": "string",
"typeInfo": {
"frame": "string"
}
},
{
"name": "url",
"type": "string",
"typeInfo": {
"frame": "string"
},
"config": {
"links": [
{
"title": "link",
"url": "${__value.text}"
}
]
}
},
{
"name": "tags",
"type": "other",
"typeInfo": {
"frame": "json.RawMessage",
"nullable": true
}
},
{
"name": "ds_uid",
"type": "other",
"typeInfo": {
"frame": "json.RawMessage"
}
},
{
"name": "location",
"type": "string",
"typeInfo": {
"frame": "string"
}
}
]
},
"data": {
"values": [
[
"dashboard"
],
[
"1"
],
[
"heat-torkel"
],
[
""
],
[
"/pfix/d/1/"
],
[
null
],
[
[]
],
[
""
]
]
}
}
]
} | pkg/services/searchV2/testdata/ngram-punctuation-split.jsonc | 0 | https://github.com/grafana/grafana/commit/a7d4bbf0248d0c6e5c6fd9f9ca55ffd399781c12 | [
0.00017642891907598823,
0.00017222754831891507,
0.00017068794113583863,
0.0001717850100249052,
0.0000014935778835933888
] |
{
"id": 5,
"code_window": [
"\t\treq.Method = http.MethodPost\n",
"\t\treq.Body = io.NopCloser(strings.NewReader(fmt.Sprintf(`{\"filter\": \"%s\"}`, query.Filter)))\n",
"\t}\n",
"\n",
"\tctx, span := tracer.Start(ctx, \"azuremonitor query\")\n",
"\tspan.SetAttributes(\"target\", query.Target, attribute.Key(\"target\").String(query.Target))\n",
"\tspan.SetAttributes(\"from\", query.TimeRange.From.UnixNano()/int64(time.Millisecond), attribute.Key(\"from\").Int64(query.TimeRange.From.UnixNano()/int64(time.Millisecond)))\n"
],
"labels": [
"keep",
"replace",
"keep",
"keep",
"keep",
"keep",
"keep"
],
"after_edit": [
"\t\treq.Body = io.NopCloser(strings.NewReader(fmt.Sprintf(`{\"filter\": \"%s\"}`, query.BodyFilter)))\n"
],
"file_path": "pkg/tsdb/azuremonitor/metrics/azuremonitor-datasource.go",
"type": "replace",
"edit_start_line_idx": 204
} | import { DashboardCursorSync } from '@grafana/data';
import {
HideableFieldConfig,
OptionsWithLegend,
OptionsWithTimezones,
OptionsWithTooltip,
VisibilityMode,
} from '@grafana/schema';
/**
* @alpha
*/
export interface TimelineOptions extends OptionsWithLegend, OptionsWithTooltip, OptionsWithTimezones {
mode: TimelineMode; // not in the saved model!
showValue: VisibilityMode;
rowHeight: number;
// only used for "samples" mode (status-history)
colWidth?: number;
// only used in "changes" mode (state-timeline)
mergeValues?: boolean;
// only used in "changes" mode (state-timeline)
alignValue?: TimelineValueAlignment;
sync?: () => DashboardCursorSync;
getValueColor?: (frameIdx: number, fieldIdx: number, value: any) => string;
}
export type TimelineValueAlignment = 'center' | 'left' | 'right';
/**
* @alpha
*/
export interface TimelineFieldConfig extends HideableFieldConfig {
lineWidth?: number; // 0
fillOpacity?: number; // 100
}
/**
* @alpha
*/
export const defaultPanelOptions: Partial<TimelineOptions> = {
showValue: VisibilityMode.Auto,
alignValue: 'left',
mergeValues: true,
rowHeight: 0.9,
};
/**
* @alpha
*/
export const defaultTimelineFieldConfig: TimelineFieldConfig = {
lineWidth: 0,
fillOpacity: 70,
};
/**
* @alpha
*/
export enum TimelineMode {
// state-timeline
Changes = 'changes',
// status-history
Samples = 'samples',
}
| public/app/plugins/panel/state-timeline/types.ts | 0 | https://github.com/grafana/grafana/commit/a7d4bbf0248d0c6e5c6fd9f9ca55ffd399781c12 | [
0.0001741981686791405,
0.00017276956350542605,
0.00017106432642322034,
0.00017248782387468964,
0.0000011260781320743263
] |
{
"id": 5,
"code_window": [
"\t\treq.Method = http.MethodPost\n",
"\t\treq.Body = io.NopCloser(strings.NewReader(fmt.Sprintf(`{\"filter\": \"%s\"}`, query.Filter)))\n",
"\t}\n",
"\n",
"\tctx, span := tracer.Start(ctx, \"azuremonitor query\")\n",
"\tspan.SetAttributes(\"target\", query.Target, attribute.Key(\"target\").String(query.Target))\n",
"\tspan.SetAttributes(\"from\", query.TimeRange.From.UnixNano()/int64(time.Millisecond), attribute.Key(\"from\").Int64(query.TimeRange.From.UnixNano()/int64(time.Millisecond)))\n"
],
"labels": [
"keep",
"replace",
"keep",
"keep",
"keep",
"keep",
"keep"
],
"after_edit": [
"\t\treq.Body = io.NopCloser(strings.NewReader(fmt.Sprintf(`{\"filter\": \"%s\"}`, query.BodyFilter)))\n"
],
"file_path": "pkg/tsdb/azuremonitor/metrics/azuremonitor-datasource.go",
"type": "replace",
"edit_start_line_idx": 204
} | package runstream
import (
"context"
"errors"
"fmt"
"math"
"sync"
"time"
"github.com/grafana/grafana/pkg/infra/log"
"github.com/grafana/grafana/pkg/services/user"
"github.com/grafana/grafana-plugin-sdk-go/backend"
)
var (
logger = log.New("live.runstream")
)
//go:generate mockgen -destination=mock.go -package=runstream github.com/grafana/grafana/pkg/services/live/runstream ChannelLocalPublisher,NumLocalSubscribersGetter,StreamRunner,PluginContextGetter
type ChannelLocalPublisher interface {
PublishLocal(channel string, data []byte) error
}
type PluginContextGetter interface {
GetPluginContext(ctx context.Context, user *user.SignedInUser, pluginID string, datasourceUID string, skipCache bool) (backend.PluginContext, bool, error)
}
type NumLocalSubscribersGetter interface {
// GetNumSubscribers returns number of channel subscribers throughout all nodes.
GetNumLocalSubscribers(channel string) (int, error)
}
type StreamRunner interface {
RunStream(ctx context.Context, request *backend.RunStreamRequest, sender *backend.StreamSender) error
}
type packetSender struct {
channelLocalPublisher ChannelLocalPublisher
channel string
}
func (p *packetSender) Send(packet *backend.StreamPacket) error {
return p.channelLocalPublisher.PublishLocal(p.channel, packet.Data)
}
// Manager manages streams from Grafana to plugins (i.e. RunStream method).
type Manager struct {
mu sync.RWMutex
baseCtx context.Context
streams map[string]streamContext
datasourceStreams map[string]map[string]struct{}
presenceGetter NumLocalSubscribersGetter
pluginContextGetter PluginContextGetter
channelSender ChannelLocalPublisher
registerCh chan submitRequest
closedCh chan struct{}
checkInterval time.Duration
maxChecks int
datasourceCheckInterval time.Duration
}
// ManagerOption modifies Manager behavior (used for tests for example).
type ManagerOption func(*Manager)
// WithCheckConfig allows setting custom check rules.
func WithCheckConfig(interval time.Duration, maxChecks int) ManagerOption {
return func(sm *Manager) {
sm.checkInterval = interval
sm.maxChecks = maxChecks
}
}
const (
defaultCheckInterval = 5 * time.Second
defaultDatasourceCheckInterval = time.Minute
defaultMaxChecks = 3
)
// NewManager creates new Manager.
func NewManager(channelSender ChannelLocalPublisher, presenceGetter NumLocalSubscribersGetter, pluginContextGetter PluginContextGetter, opts ...ManagerOption) *Manager {
sm := &Manager{
streams: make(map[string]streamContext),
datasourceStreams: map[string]map[string]struct{}{},
channelSender: channelSender,
presenceGetter: presenceGetter,
pluginContextGetter: pluginContextGetter,
registerCh: make(chan submitRequest),
closedCh: make(chan struct{}),
checkInterval: defaultCheckInterval,
maxChecks: defaultMaxChecks,
datasourceCheckInterval: defaultDatasourceCheckInterval,
}
for _, opt := range opts {
opt(sm)
}
return sm
}
func (s *Manager) HandleDatasourceDelete(orgID int64, dsUID string) error {
return s.handleDatasourceEvent(orgID, dsUID, false)
}
func (s *Manager) HandleDatasourceUpdate(orgID int64, dsUID string) error {
return s.handleDatasourceEvent(orgID, dsUID, true)
}
func (s *Manager) handleDatasourceEvent(orgID int64, dsUID string, resubmit bool) error {
dsKey := datasourceKey(orgID, dsUID)
s.mu.RLock()
dsStreams, ok := s.datasourceStreams[dsKey]
if !ok {
s.mu.RUnlock()
return nil
}
var resubmitRequests []streamRequest
var waitChannels []chan struct{}
for channel := range dsStreams {
streamCtx, ok := s.streams[channel]
if !ok {
continue
}
streamCtx.cancelFn()
waitChannels = append(waitChannels, streamCtx.CloseCh)
resubmitRequests = append(resubmitRequests, streamCtx.streamRequest)
}
s.mu.RUnlock()
// Wait for all streams to stop.
for _, ch := range waitChannels {
<-ch
}
if resubmit {
// Re-submit streams.
for _, sr := range resubmitRequests {
_, err := s.SubmitStream(s.baseCtx, sr.user, sr.Channel, sr.Path, sr.Data, sr.PluginContext, sr.StreamRunner, true)
if err != nil {
// Log error but do not prevent execution of caller routine.
logger.Error("Error re-submitting stream", "path", sr.Path, "error", err)
}
}
}
return nil
}
func datasourceKey(orgID int64, dsUID string) string {
return fmt.Sprintf("%d_%s", orgID, dsUID)
}
func (s *Manager) stopStream(sr streamRequest, cancelFn func()) {
s.mu.Lock()
defer s.mu.Unlock()
streamCtx, ok := s.streams[sr.Channel]
if !ok {
return
}
closeCh := streamCtx.CloseCh
delete(s.streams, sr.Channel)
if sr.PluginContext.DataSourceInstanceSettings != nil {
dsUID := sr.PluginContext.DataSourceInstanceSettings.UID
dsKey := datasourceKey(sr.PluginContext.OrgID, dsUID)
delete(s.datasourceStreams[dsKey], sr.Channel)
}
cancelFn()
close(closeCh)
}
func (s *Manager) watchStream(ctx context.Context, cancelFn func(), sr streamRequest) {
numNoSubscribersChecks := 0
presenceTicker := time.NewTicker(s.checkInterval)
defer presenceTicker.Stop()
datasourceTicker := time.NewTicker(s.datasourceCheckInterval)
defer datasourceTicker.Stop()
for {
select {
case <-ctx.Done():
return
case <-datasourceTicker.C:
if sr.PluginContext.DataSourceInstanceSettings != nil {
dsUID := sr.PluginContext.DataSourceInstanceSettings.UID
pCtx, ok, err := s.pluginContextGetter.GetPluginContext(ctx, sr.user, sr.PluginContext.PluginID, dsUID, false)
if err != nil {
logger.Error("Error getting datasource context", "channel", sr.Channel, "path", sr.Path, "error", err)
continue
}
if !ok {
logger.Debug("Datasource not found, stop stream", "channel", sr.Channel, "path", sr.Path)
return
}
if pCtx.DataSourceInstanceSettings.Updated != sr.PluginContext.DataSourceInstanceSettings.Updated {
logger.Debug("Datasource changed, re-establish stream", "channel", sr.Channel, "path", sr.Path)
err := s.HandleDatasourceUpdate(pCtx.OrgID, dsUID)
if err != nil {
logger.Error("Error re-establishing stream", "channel", sr.Channel, "path", sr.Path, "error", err)
continue
}
return
}
}
case <-presenceTicker.C:
numSubscribers, err := s.presenceGetter.GetNumLocalSubscribers(sr.Channel)
if err != nil {
logger.Error("Error checking num subscribers", "channel", sr.Channel, "path", sr.Path, "error", err)
continue
}
if numSubscribers > 0 {
// reset counter since channel has active subscribers.
numNoSubscribersChecks = 0
continue
}
numNoSubscribersChecks++
if numNoSubscribersChecks >= s.maxChecks {
logger.Debug("Stop stream since no active subscribers", "channel", sr.Channel, "path", sr.Path)
s.stopStream(sr, cancelFn)
return
}
}
}
}
const streamDurationThreshold = 100 * time.Millisecond
const coolDownDelay = 100 * time.Millisecond
const maxDelay = 5 * time.Second
func getDelay(numErrors int) time.Duration {
if numErrors == 0 {
return 0
}
delay := coolDownDelay * time.Duration(math.Pow(2, float64(numErrors)))
if delay > maxDelay {
return maxDelay
}
return delay
}
// run stream until context canceled or stream finished without an error.
func (s *Manager) runStream(ctx context.Context, cancelFn func(), sr streamRequest) {
defer func() { s.stopStream(sr, cancelFn) }()
var numFastErrors int
var delay time.Duration
var isReconnect bool
startTime := time.Now()
for {
select {
case <-ctx.Done():
return
default:
}
pluginCtx := sr.PluginContext
if isReconnect {
// Best effort to cool down re-establishment process. We don't have a
// nice way to understand whether we really need to wait here - so relying
// on duration time of running a stream.
if time.Since(startTime) < streamDurationThreshold {
if delay < maxDelay {
// Due to not calling getDelay after we have delay larger than maxDelay
// we avoid possible float overflow errors while calculating delay duration
// based on numFastErrors.
delay = getDelay(numFastErrors)
}
numFastErrors++
} else {
// Assuming that stream successfully started.
delay = 0
numFastErrors = 0
}
select {
case <-ctx.Done():
return
case <-time.After(delay):
}
startTime = time.Now()
// Resolve new plugin context as it could be modified since last call.
// We are using the same user here which initiated stream originally.
var datasourceUID string
if pluginCtx.DataSourceInstanceSettings != nil {
datasourceUID = pluginCtx.DataSourceInstanceSettings.UID
}
newPluginCtx, ok, err := s.pluginContextGetter.GetPluginContext(ctx, sr.user, pluginCtx.PluginID, datasourceUID, false)
if err != nil {
logger.Error("Error getting plugin context", "path", sr.Path, "error", err)
isReconnect = true
continue
}
if !ok {
logger.Info("No plugin context found, stopping stream", "path", sr.Path)
return
}
pluginCtx = newPluginCtx
}
err := sr.StreamRunner.RunStream(
ctx,
&backend.RunStreamRequest{
PluginContext: pluginCtx,
Path: sr.Path,
Data: sr.Data,
},
backend.NewStreamSender(&packetSender{channelLocalPublisher: s.channelSender, channel: sr.Channel}),
)
if err != nil {
if errors.Is(ctx.Err(), context.Canceled) {
logger.Debug("Stream cleanly finished", "path", sr.Path)
return
}
logger.Error("Error running stream, re-establishing", "path", sr.Path, "error", err, "wait", delay)
isReconnect = true
continue
}
logger.Debug("Stream finished without error, stopping it", "path", sr.Path)
return
}
}
var errClosed = errors.New("stream manager closed")
type streamContext struct {
CloseCh chan struct{}
cancelFn func()
streamRequest streamRequest
}
func (s *Manager) registerStream(ctx context.Context, sr submitRequest) {
s.mu.Lock()
if streamCtx, ok := s.streams[sr.streamRequest.Channel]; ok {
s.mu.Unlock()
sr.responseCh <- submitResponse{Result: submitResult{StreamExists: true, CloseNotify: streamCtx.CloseCh}}
return
}
ctx, cancel := context.WithCancel(ctx)
defer cancel()
closeCh := make(chan struct{})
s.streams[sr.streamRequest.Channel] = streamContext{
CloseCh: closeCh,
cancelFn: cancel,
streamRequest: sr.streamRequest,
}
if sr.streamRequest.PluginContext.DataSourceInstanceSettings != nil {
dsUID := sr.streamRequest.PluginContext.DataSourceInstanceSettings.UID
dsKey := datasourceKey(sr.streamRequest.PluginContext.OrgID, dsUID)
if _, ok := s.datasourceStreams[dsKey]; !ok {
s.datasourceStreams[dsKey] = map[string]struct{}{}
}
s.datasourceStreams[dsKey][sr.streamRequest.Channel] = struct{}{}
}
s.mu.Unlock()
sr.responseCh <- submitResponse{Result: submitResult{StreamExists: false, CloseNotify: closeCh}}
go s.watchStream(ctx, cancel, sr.streamRequest)
s.runStream(ctx, cancel, sr.streamRequest)
}
// Run Manager till context canceled.
func (s *Manager) Run(ctx context.Context) error {
s.baseCtx = ctx
for {
select {
case sr := <-s.registerCh:
go s.registerStream(ctx, sr)
case <-ctx.Done():
close(s.closedCh)
return ctx.Err()
}
}
}
type streamRequest struct {
Channel string
Path string
user *user.SignedInUser
PluginContext backend.PluginContext
StreamRunner StreamRunner
Data []byte
}
type submitRequest struct {
responseCh chan submitResponse
streamRequest streamRequest
}
type submitResult struct {
// StreamExists tells whether stream have been already opened.
StreamExists bool
// CloseNotify will be closed as soon as stream cleanly exited.
CloseNotify chan struct{}
}
type submitResponse struct {
Error error
Result submitResult
}
var errDatasourceNotFound = errors.New("datasource not found")
// SubmitStream submits stream handler in Manager to manage.
// The stream will be opened and kept till channel has active subscribers.
func (s *Manager) SubmitStream(ctx context.Context, user *user.SignedInUser, channel string, path string, data []byte, pCtx backend.PluginContext, streamRunner StreamRunner, isResubmit bool) (*submitResult, error) {
if isResubmit {
// Resolve new plugin context as it could be modified since last call.
var datasourceUID string
if pCtx.DataSourceInstanceSettings != nil {
datasourceUID = pCtx.DataSourceInstanceSettings.UID
}
newPluginCtx, ok, err := s.pluginContextGetter.GetPluginContext(ctx, user, pCtx.PluginID, datasourceUID, false)
if err != nil {
return nil, err
}
if !ok {
return nil, errDatasourceNotFound
}
pCtx = newPluginCtx
}
req := submitRequest{
responseCh: make(chan submitResponse, 1),
streamRequest: streamRequest{
user: user,
Channel: channel,
Path: path,
PluginContext: pCtx,
StreamRunner: streamRunner,
Data: data,
},
}
// Send submit request.
select {
case s.registerCh <- req:
case <-s.closedCh:
close(s.registerCh)
return nil, errClosed
case <-ctx.Done():
return nil, ctx.Err()
}
// Wait for submit response.
select {
case resp := <-req.responseCh:
if resp.Error != nil {
return nil, resp.Error
}
return &resp.Result, nil
case <-s.closedCh:
return nil, errClosed
case <-ctx.Done():
return nil, ctx.Err()
}
}
| pkg/services/live/runstream/manager.go | 0 | https://github.com/grafana/grafana/commit/a7d4bbf0248d0c6e5c6fd9f9ca55ffd399781c12 | [
0.001888257684186101,
0.00023632867669221014,
0.00016473117284476757,
0.00017115124501287937,
0.00025920526240952313
] |
{
"id": 6,
"code_window": [
"\t\"github.com/google/go-cmp/cmp/cmpopts\"\n",
"\t\"github.com/grafana/grafana-plugin-sdk-go/backend\"\n",
"\t\"github.com/grafana/grafana-plugin-sdk-go/data\"\n",
"\t\"github.com/stretchr/testify/require\"\n",
"\tptr \"github.com/xorcare/pointer\"\n",
"\n",
"\t\"github.com/grafana/grafana/pkg/components/simplejson\"\n",
"\t\"github.com/grafana/grafana/pkg/infra/log\"\n"
],
"labels": [
"keep",
"keep",
"add",
"keep",
"keep",
"keep",
"keep",
"keep"
],
"after_edit": [
"\t\"github.com/stretchr/testify/assert\"\n"
],
"file_path": "pkg/tsdb/azuremonitor/metrics/azuremonitor-datasource_test.go",
"type": "add",
"edit_start_line_idx": 17
} | package types
import (
"errors"
"fmt"
"net/http"
"net/url"
"regexp"
"strings"
"time"
"github.com/grafana/grafana-azure-sdk-go/azcredentials"
"github.com/grafana/grafana-plugin-sdk-go/backend"
)
const (
TimeSeries = "time_series"
)
var (
LegendKeyFormat = regexp.MustCompile(`\{\{\s*(.+?)\s*\}\}`)
)
type AzRoute struct {
URL string
Scopes []string
Headers map[string]string
}
type AzureMonitorSettings struct {
SubscriptionId string `json:"subscriptionId"`
LogAnalyticsDefaultWorkspace string `json:"logAnalyticsDefaultWorkspace"`
AppInsightsAppId string `json:"appInsightsAppId"`
}
// AzureMonitorCustomizedCloudSettings is the extended Azure Monitor settings for customized cloud
type AzureMonitorCustomizedCloudSettings struct {
CustomizedRoutes map[string]AzRoute `json:"customizedRoutes"`
}
type DatasourceService struct {
URL string
HTTPClient *http.Client
}
type DatasourceInfo struct {
Cloud string
Credentials azcredentials.AzureCredentials
Settings AzureMonitorSettings
Routes map[string]AzRoute
Services map[string]DatasourceService
JSONData map[string]interface{}
DecryptedSecureJSONData map[string]string
DatasourceID int64
OrgID int64
}
// AzureMonitorQuery is the query for all the services as they have similar queries
// with a url, a querystring and an alias field
type AzureMonitorQuery struct {
URL string
Target string
Params url.Values
RefID string
Alias string
TimeRange backend.TimeRange
Filter string
}
// AzureMonitorResponse is the json response from the Azure Monitor API
type AzureMonitorResponse struct {
Cost int `json:"cost"`
Timespan string `json:"timespan"`
Interval string `json:"interval"`
Value []struct {
ID string `json:"id"`
Type string `json:"type"`
Name struct {
Value string `json:"value"`
LocalizedValue string `json:"localizedValue"`
} `json:"name"`
Unit string `json:"unit"`
Timeseries []struct {
Metadatavalues []struct {
Name struct {
Value string `json:"value"`
LocalizedValue string `json:"localizedValue"`
} `json:"name"`
Value string `json:"value"`
} `json:"metadatavalues"`
Data []struct {
TimeStamp time.Time `json:"timeStamp"`
Average *float64 `json:"average,omitempty"`
Total *float64 `json:"total,omitempty"`
Count *float64 `json:"count,omitempty"`
Maximum *float64 `json:"maximum,omitempty"`
Minimum *float64 `json:"minimum,omitempty"`
} `json:"data"`
} `json:"timeseries"`
} `json:"value"`
Namespace string `json:"namespace"`
Resourceregion string `json:"resourceregion"`
}
// AzureResponseTable is the table format for Azure responses
type AzureResponseTable struct {
Name string `json:"name"`
Columns []struct {
Name string `json:"name"`
Type string `json:"type"`
} `json:"columns"`
Rows [][]interface{} `json:"rows"`
}
type AzureMonitorResource struct {
ResourceGroup string `json:"resourceGroup"`
ResourceName string `json:"resourceName"`
}
// AzureMonitorJSONQuery is the frontend JSON query model for an Azure Monitor query.
type AzureMonitorJSONQuery struct {
AzureMonitor struct {
ResourceURI string `json:"resourceUri"`
// These are used to reconstruct a resource URI
MetricNamespace string `json:"metricNamespace"`
CustomNamespace string `json:"customNamespace"`
MetricName string `json:"metricName"`
Region string `json:"region"`
Resources []AzureMonitorResource `json:"resources"`
Aggregation string `json:"aggregation"`
Alias string `json:"alias"`
DimensionFilters []AzureMonitorDimensionFilter `json:"dimensionFilters"` // new model
TimeGrain string `json:"timeGrain"`
Top string `json:"top"`
AllowedTimeGrainsMs []int64 `json:"allowedTimeGrainsMs"`
Dimension string `json:"dimension"` // old model
DimensionFilter string `json:"dimensionFilter"` // old model
Format string `json:"format"`
// Deprecated, MetricNamespace should be used instead
MetricDefinition string `json:"metricDefinition"`
// Deprecated: Use Resources with a single element instead
AzureMonitorResource
} `json:"azureMonitor"`
Subscription string `json:"subscription"`
}
// AzureMonitorDimensionFilter is the model for the frontend sent for azureMonitor metric
// queries like "BlobType", "eq", "*"
type AzureMonitorDimensionFilter struct {
Dimension string `json:"dimension"`
Operator string `json:"operator"`
Filters []string `json:"filters,omitempty"`
// Deprecated: To support multiselection, filters are passed in a slice now. Also migrated in frontend.
Filter *string `json:"filter,omitempty"`
}
func (a AzureMonitorDimensionFilter) ConstructFiltersString() string {
var filterStrings []string
for _, filter := range a.Filters {
filterStrings = append(filterStrings, fmt.Sprintf("%v %v '%v'", a.Dimension, a.Operator, filter))
}
if a.Operator == "eq" {
return strings.Join(filterStrings, " or ")
} else {
return strings.Join(filterStrings, " and ")
}
}
// LogJSONQuery is the frontend JSON query model for an Azure Log Analytics query.
type LogJSONQuery struct {
AzureLogAnalytics struct {
Query string `json:"query"`
ResultFormat string `json:"resultFormat"`
Resource string `json:"resource"`
// Deprecated: Queries should be migrated to use Resource instead
Workspace string `json:"workspace"`
} `json:"azureLogAnalytics"`
}
// MetricChartDefinition is the JSON model for a metrics chart definition
type MetricChartDefinition struct {
ResourceMetadata map[string]string `json:"resourceMetadata"`
Name string `json:"name"`
AggregationType int `json:"aggregationType"`
Namespace string `json:"namespace"`
MetricVisualization MetricVisualization `json:"metricVisualization"`
}
// MetricVisualization is the JSON model for the visualization field of a
// metricChartDefinition
type MetricVisualization struct {
DisplayName string `json:"displayName"`
ResourceDisplayName string `json:"resourceDisplayName"`
}
type ServiceProxy interface {
Do(rw http.ResponseWriter, req *http.Request, cli *http.Client) http.ResponseWriter
}
type LogAnalyticsWorkspaceFeatures struct {
EnableLogAccessUsingOnlyResourcePermissions bool `json:"enableLogAccessUsingOnlyResourcePermissions"`
Legacy int `json:"legacy"`
SearchVersion int `json:"searchVersion"`
}
type LogAnalyticsWorkspaceProperties struct {
CreatedDate string `json:"createdDate"`
CustomerId string `json:"customerId"`
Features LogAnalyticsWorkspaceFeatures `json:"features"`
}
type LogAnalyticsWorkspaceResponse struct {
Id string `json:"id"`
Location string `json:"location"`
Name string `json:"name"`
Properties LogAnalyticsWorkspaceProperties `json:"properties"`
ProvisioningState string `json:"provisioningState"`
PublicNetworkAccessForIngestion string `json:"publicNetworkAccessForIngestion"`
PublicNetworkAccessForQuery string `json:"publicNetworkAccessForQuery"`
RetentionInDays int `json:"retentionInDays"`
}
var ErrorAzureHealthCheck = errors.New("health check failed")
| pkg/tsdb/azuremonitor/types/types.go | 1 | https://github.com/grafana/grafana/commit/a7d4bbf0248d0c6e5c6fd9f9ca55ffd399781c12 | [
0.009073998779058456,
0.0007289840141311288,
0.00016257277457043529,
0.00016901061462704092,
0.0018950204830616713
] |
{
"id": 6,
"code_window": [
"\t\"github.com/google/go-cmp/cmp/cmpopts\"\n",
"\t\"github.com/grafana/grafana-plugin-sdk-go/backend\"\n",
"\t\"github.com/grafana/grafana-plugin-sdk-go/data\"\n",
"\t\"github.com/stretchr/testify/require\"\n",
"\tptr \"github.com/xorcare/pointer\"\n",
"\n",
"\t\"github.com/grafana/grafana/pkg/components/simplejson\"\n",
"\t\"github.com/grafana/grafana/pkg/infra/log\"\n"
],
"labels": [
"keep",
"keep",
"add",
"keep",
"keep",
"keep",
"keep",
"keep"
],
"after_edit": [
"\t\"github.com/stretchr/testify/assert\"\n"
],
"file_path": "pkg/tsdb/azuremonitor/metrics/azuremonitor-datasource_test.go",
"type": "add",
"edit_start_line_idx": 17
} | <svg xmlns="http://www.w3.org/2000/svg" enable-background="new 0 0 24 24" viewBox="0 0 24 24"><path d="M7.4,7.2c0.1,0,0.2,0,0.2,0l1.9-0.5c0.5-0.1,0.9-0.7,0.7-1.2c-0.1-0.5-0.7-0.9-1.2-0.7L7.1,5.2c-0.4,0.1-0.8,0.5-0.8,1C6.4,6.7,6.8,7.2,7.4,7.2z M9.5,9h-2c-0.6,0-1,0.4-1,1s0.4,1,1,1h2c0.6,0,1-0.4,1-1S10.1,9,9.5,9z M13.2,5.7c0.1,0,0.2,0,0.2,0l1.9-0.5c0,0,0,0,0,0C15.9,5,16.3,4.5,16.1,4c-0.1-0.5-0.7-0.9-1.2-0.7L13,3.7c-0.4,0.1-0.8,0.5-0.8,1C12.2,5.2,12.7,5.7,13.2,5.7z M13.5,9c-0.6,0-1,0.4-1,1s0.4,1,1,1h2c0.6,0,1-0.4,1-1s-0.4-1-1-1H13.5z M21,2.8c-0.1-0.5-0.7-0.9-1.2-0.7l-1,0.2c-0.4,0.1-0.8,0.5-0.8,1c0,0.6,0.4,1,1,1C19.1,4.7,19.5,5,20,5h0c0.6,0,1-0.4,1-1V3C21,2.9,21,2.8,21,2.8z M4,10.5L4,10.5c0.6,0,1-0.4,1-1v-2c0-0.6-0.4-1-1-1S3,7,3,7.5v2C3,10.1,3.4,10.5,4,10.5z M20,7c-0.6,0-1,0.4-1,1v1.1c-0.3,0.2-0.5,0.5-0.5,0.9c0,0.6,0.4,1,1,1H20c0.6,0,1-0.4,1-1V8C21,7.4,20.6,7,20,7z M20,13H4c-0.6,0-1,0.4-1,1v3c0,0.5,0.3,0.9,0.8,1l16,4c0.1,0,0.2,0,0.2,0c0.6,0,1-0.4,1-1v-7C21,13.4,20.6,13,20,13z"/></svg> | public/img/icons/solid/flip-h-alt.svg | 0 | https://github.com/grafana/grafana/commit/a7d4bbf0248d0c6e5c6fd9f9ca55ffd399781c12 | [
0.00016396860883105546,
0.00016396860883105546,
0.00016396860883105546,
0.00016396860883105546,
0
] |
{
"id": 6,
"code_window": [
"\t\"github.com/google/go-cmp/cmp/cmpopts\"\n",
"\t\"github.com/grafana/grafana-plugin-sdk-go/backend\"\n",
"\t\"github.com/grafana/grafana-plugin-sdk-go/data\"\n",
"\t\"github.com/stretchr/testify/require\"\n",
"\tptr \"github.com/xorcare/pointer\"\n",
"\n",
"\t\"github.com/grafana/grafana/pkg/components/simplejson\"\n",
"\t\"github.com/grafana/grafana/pkg/infra/log\"\n"
],
"labels": [
"keep",
"keep",
"add",
"keep",
"keep",
"keep",
"keep",
"keep"
],
"after_edit": [
"\t\"github.com/stretchr/testify/assert\"\n"
],
"file_path": "pkg/tsdb/azuremonitor/metrics/azuremonitor-datasource_test.go",
"type": "add",
"edit_start_line_idx": 17
} | import { render, screen } from '@testing-library/react';
import React from 'react';
import { createTheme } from '@grafana/data';
import { BigValue, BigValueColorMode, BigValueGraphMode, Props } from './BigValue';
function getProps(propOverrides?: Partial<Props>): Props {
const props: Props = {
colorMode: BigValueColorMode.Background,
graphMode: BigValueGraphMode.Line,
height: 300,
width: 300,
value: {
text: '25',
numeric: 25,
color: 'red',
},
theme: createTheme(),
};
Object.assign(props, propOverrides);
return props;
}
describe('BigValue', () => {
describe('Render with basic options', () => {
it('should render', () => {
render(<BigValue {...getProps()} />);
expect(screen.getByText('25')).toBeInTheDocument();
});
});
});
| packages/grafana-ui/src/components/BigValue/BigValue.test.tsx | 0 | https://github.com/grafana/grafana/commit/a7d4bbf0248d0c6e5c6fd9f9ca55ffd399781c12 | [
0.00017623775056563318,
0.0001720186701277271,
0.0001687434851191938,
0.00017154670786112547,
0.000003232480139558902
] |
{
"id": 6,
"code_window": [
"\t\"github.com/google/go-cmp/cmp/cmpopts\"\n",
"\t\"github.com/grafana/grafana-plugin-sdk-go/backend\"\n",
"\t\"github.com/grafana/grafana-plugin-sdk-go/data\"\n",
"\t\"github.com/stretchr/testify/require\"\n",
"\tptr \"github.com/xorcare/pointer\"\n",
"\n",
"\t\"github.com/grafana/grafana/pkg/components/simplejson\"\n",
"\t\"github.com/grafana/grafana/pkg/infra/log\"\n"
],
"labels": [
"keep",
"keep",
"add",
"keep",
"keep",
"keep",
"keep",
"keep"
],
"after_edit": [
"\t\"github.com/stretchr/testify/assert\"\n"
],
"file_path": "pkg/tsdb/azuremonitor/metrics/azuremonitor-datasource_test.go",
"type": "add",
"edit_start_line_idx": 17
} | /**
* Used to enable rendering of Angular components within a
* React component without losing proper typings.
*
* @example
* ```typescript
* class Component extends PureComponent<Props> {
* element: HTMLElement;
* angularComponent: AngularComponent;
*
* componentDidMount() {
* const template = '<angular-component />' // angular template here;
* const scopeProps = { ctrl: angularController }; // angular scope properties here
* const loader = getAngularLoader();
* this.angularComponent = loader.load(this.element, scopeProps, template);
* }
*
* componentWillUnmount() {
* if (this.angularComponent) {
* this.angularComponent.destroy();
* }
* }
*
* render() {
* return (
* <div ref={element => (this.element = element)} />
* );
* }
* }
* ```
*
* @public
*/
export interface AngularComponent {
/**
* Should be called when the React component will unmount.
*/
destroy(): void;
/**
* Can be used to trigger a re-render of the Angular component.
*/
digest(): void;
/**
* Used to access the Angular scope from the React component.
*/
getScope(): any;
}
/**
* Used to load an Angular component from the context of a React component.
* Please see the {@link AngularComponent} for a proper example.
*
* @public
*/
export interface AngularLoader {
/**
*
* @param elem - the element that the Angular component will be loaded into.
* @param scopeProps - values that will be accessed via the Angular scope.
* @param template - template used by the Angular component.
*/
load(elem: any, scopeProps: any, template: string): AngularComponent;
}
let instance: AngularLoader;
/**
* Used during startup by Grafana to set the AngularLoader so it is available
* via the {@link getAngularLoader} to the rest of the application.
*
* @internal
*/
export function setAngularLoader(v: AngularLoader) {
instance = v;
}
/**
* Used to retrieve the {@link AngularLoader} that enables the use of Angular
* components within a React component.
*
* Please see the {@link AngularComponent} for a proper example.
*
* @public
*/
export function getAngularLoader(): AngularLoader {
return instance;
}
| packages/grafana-runtime/src/services/AngularLoader.ts | 0 | https://github.com/grafana/grafana/commit/a7d4bbf0248d0c6e5c6fd9f9ca55ffd399781c12 | [
0.00017243700858671218,
0.00016632450569886714,
0.00016052498540375382,
0.00016621507529634982,
0.000003939137968700379
] |
{
"id": 7,
"code_window": [
"\t\t\texpectedInterval: \"PT1M\",\n",
"\t\t\tazureMonitorQueryTarget: \"aggregation=Average&api-version=2021-05-01&interval=PT1M&metricnames=Percentage+CPU&metricnamespace=Microsoft.Compute%2FvirtualMachines®ion=westus×pan=2018-03-15T13%3A00%3A00Z%2F2018-03-15T13%3A34%3A00Z\",\n",
"\t\t\texpectedURL: \"/subscriptions/12345678-aaaa-bbbb-cccc-123456789abc/providers/microsoft.insights/metrics\",\n",
"\t\t},\n",
"\t\t{\n",
"\t\t\tname: \"includes a resource as a filter\",\n",
"\t\t\tazureMonitorVariedProperties: map[string]interface{}{\n"
],
"labels": [
"keep",
"keep",
"keep",
"add",
"keep",
"keep",
"keep"
],
"after_edit": [
"\t\t{\n",
"\t\t\tname: \"Includes a region and a filter\",\n",
"\t\t\tazureMonitorVariedProperties: map[string]interface{}{\n",
"\t\t\t\t\"timeGrain\": \"PT1M\",\n",
"\t\t\t\t\"top\": \"10\",\n",
"\t\t\t\t\"region\": \"westus\",\n",
"\t\t\t\t\"resources\": []types.AzureMonitorResource{{ResourceGroup: \"rg\", ResourceName: \"vm\"}},\n",
"\t\t\t},\n",
"\t\t\texpectedInterval: \"PT1M\",\n",
"\t\t\tazureMonitorQueryTarget: \"aggregation=Average&api-version=2021-05-01&interval=PT1M&metricnames=Percentage+CPU&metricnamespace=Microsoft.Compute%2FvirtualMachines®ion=westus×pan=2018-03-15T13%3A00%3A00Z%2F2018-03-15T13%3A34%3A00Z\",\n",
"\t\t\texpectedURL: \"/subscriptions/12345678-aaaa-bbbb-cccc-123456789abc/providers/microsoft.insights/metrics\",\n",
"\t\t\texpectedFilter: \"Microsoft.ResourceId eq '/subscriptions/12345678-aaaa-bbbb-cccc-123456789abc/resourceGroups/rg/providers/Microsoft.Compute/virtualMachines/vm'\",\n",
"\t\t},\n"
],
"file_path": "pkg/tsdb/azuremonitor/metrics/azuremonitor-datasource_test.go",
"type": "add",
"edit_start_line_idx": 210
} | package metrics
import (
"context"
"encoding/json"
"fmt"
"io"
"net/http"
"net/url"
"path"
"regexp"
"sort"
"strings"
"time"
"github.com/grafana/grafana-plugin-sdk-go/backend"
"github.com/grafana/grafana-plugin-sdk-go/data"
"go.opentelemetry.io/otel/attribute"
"github.com/grafana/grafana/pkg/infra/log"
"github.com/grafana/grafana/pkg/infra/tracing"
"github.com/grafana/grafana/pkg/setting"
"github.com/grafana/grafana/pkg/tsdb/azuremonitor/resourcegraph"
azTime "github.com/grafana/grafana/pkg/tsdb/azuremonitor/time"
"github.com/grafana/grafana/pkg/tsdb/azuremonitor/types"
)
// AzureMonitorDatasource calls the Azure Monitor API - one of the four API's supported
type AzureMonitorDatasource struct {
Proxy types.ServiceProxy
}
var (
// Used to convert the aggregation value to the Azure enum for deep linking
aggregationTypeMap = map[string]int{"None": 0, "Total": 1, "Minimum": 2, "Maximum": 3, "Average": 4, "Count": 7}
resourceNameLandmark = regexp.MustCompile(`(?i)(/(?P<resourceName>[\w-\.]+)/providers/Microsoft\.Insights/metrics)`)
)
const AzureMonitorAPIVersion = "2021-05-01"
func (e *AzureMonitorDatasource) ResourceRequest(rw http.ResponseWriter, req *http.Request, cli *http.Client) {
e.Proxy.Do(rw, req, cli)
}
// executeTimeSeriesQuery does the following:
// 1. build the AzureMonitor url and querystring for each query
// 2. executes each query by calling the Azure Monitor API
// 3. parses the responses for each query into data frames
func (e *AzureMonitorDatasource) ExecuteTimeSeriesQuery(ctx context.Context, logger log.Logger, originalQueries []backend.DataQuery, dsInfo types.DatasourceInfo, client *http.Client, url string, tracer tracing.Tracer) (*backend.QueryDataResponse, error) {
result := backend.NewQueryDataResponse()
ctxLogger := logger.FromContext(ctx)
queries, err := e.buildQueries(ctxLogger, originalQueries, dsInfo)
if err != nil {
return nil, err
}
for _, query := range queries {
result.Responses[query.RefID] = e.executeQuery(ctx, ctxLogger, query, dsInfo, client, url, tracer)
}
return result, nil
}
func (e *AzureMonitorDatasource) buildQueries(logger log.Logger, queries []backend.DataQuery, dsInfo types.DatasourceInfo) ([]*types.AzureMonitorQuery, error) {
azureMonitorQueries := []*types.AzureMonitorQuery{}
for _, query := range queries {
var target string
queryJSONModel := types.AzureMonitorJSONQuery{}
err := json.Unmarshal(query.JSON, &queryJSONModel)
if err != nil {
return nil, fmt.Errorf("failed to decode the Azure Monitor query object from JSON: %w", err)
}
azJSONModel := queryJSONModel.AzureMonitor
// Legacy: If only MetricDefinition is set, use it as namespace
if azJSONModel.MetricDefinition != "" && azJSONModel.MetricNamespace == "" {
azJSONModel.MetricNamespace = azJSONModel.MetricDefinition
}
azJSONModel.DimensionFilters = MigrateDimensionFilters(azJSONModel.DimensionFilters)
alias := azJSONModel.Alias
timeGrain := azJSONModel.TimeGrain
timeGrains := azJSONModel.AllowedTimeGrainsMs
if timeGrain == "auto" {
timeGrain, err = azTime.SetAutoTimeGrain(query.Interval.Milliseconds(), timeGrains)
if err != nil {
return nil, err
}
}
params := url.Values{}
params.Add("api-version", AzureMonitorAPIVersion)
params.Add("timespan", fmt.Sprintf("%v/%v", query.TimeRange.From.UTC().Format(time.RFC3339), query.TimeRange.To.UTC().Format(time.RFC3339)))
params.Add("interval", timeGrain)
params.Add("aggregation", azJSONModel.Aggregation)
params.Add("metricnames", azJSONModel.MetricName)
if azJSONModel.CustomNamespace != "" {
params.Add("metricnamespace", azJSONModel.CustomNamespace)
} else {
params.Add("metricnamespace", azJSONModel.MetricNamespace)
}
azureURL := BuildSubscriptionMetricsURL(queryJSONModel.Subscription)
if azJSONModel.Region != "" {
params.Add("region", azJSONModel.Region)
} else {
// Deprecated, if no region is specified, only one resource group and name is supported
ub := urlBuilder{
ResourceURI: azJSONModel.ResourceURI,
// Alternative, used to reconstruct resource URI if it's not present
DefaultSubscription: dsInfo.Settings.SubscriptionId,
Subscription: queryJSONModel.Subscription,
ResourceGroup: azJSONModel.ResourceGroup,
MetricNamespace: azJSONModel.MetricNamespace,
ResourceName: azJSONModel.ResourceName,
}
azureURL = ub.BuildMetricsURL()
}
// old model
dimension := strings.TrimSpace(azJSONModel.Dimension)
dimensionFilter := strings.TrimSpace(azJSONModel.DimensionFilter)
dimSB := strings.Builder{}
if dimension != "" && dimensionFilter != "" && dimension != "None" && len(azJSONModel.DimensionFilters) == 0 {
dimSB.WriteString(fmt.Sprintf("%s eq '%s'", dimension, dimensionFilter))
} else {
for i, filter := range azJSONModel.DimensionFilters {
if len(filter.Filters) == 0 {
dimSB.WriteString(fmt.Sprintf("%s eq '*'", filter.Dimension))
} else {
dimSB.WriteString(filter.ConstructFiltersString())
}
if i != len(azJSONModel.DimensionFilters)-1 {
dimSB.WriteString(" and ")
}
}
}
resourceIDs := []string{}
for _, r := range azJSONModel.Resources {
ub := urlBuilder{
DefaultSubscription: dsInfo.Settings.SubscriptionId,
Subscription: queryJSONModel.Subscription,
ResourceGroup: r.ResourceGroup,
MetricNamespace: azJSONModel.MetricNamespace,
ResourceName: r.ResourceName,
}
resourceIDs = append(resourceIDs, fmt.Sprintf("Microsoft.ResourceId eq '%s'", ub.buildResourceURI()))
}
filterString := strings.Join(resourceIDs, " or ")
if dimSB.String() != "" {
if filterString != "" {
filterString = fmt.Sprintf("(%s) and (%s)", filterString, dimSB.String())
} else {
filterString = dimSB.String()
}
if azJSONModel.Top != "" {
params.Add("top", azJSONModel.Top)
}
}
target = params.Encode()
if setting.Env == setting.Dev {
logger.Debug("Azuremonitor request", "params", params)
}
azureMonitorQueries = append(azureMonitorQueries, &types.AzureMonitorQuery{
URL: azureURL,
Target: target,
Params: params,
RefID: query.RefID,
Alias: alias,
TimeRange: query.TimeRange,
Filter: filterString,
})
}
return azureMonitorQueries, nil
}
func (e *AzureMonitorDatasource) executeQuery(ctx context.Context, logger log.Logger, query *types.AzureMonitorQuery, dsInfo types.DatasourceInfo, cli *http.Client,
url string, tracer tracing.Tracer) backend.DataResponse {
dataResponse := backend.DataResponse{}
req, err := e.createRequest(ctx, logger, url)
if err != nil {
dataResponse.Error = err
return dataResponse
}
req.URL.Path = path.Join(req.URL.Path, query.URL)
req.URL.RawQuery = query.Params.Encode()
if query.Filter != "" {
req.Method = http.MethodPost
req.Body = io.NopCloser(strings.NewReader(fmt.Sprintf(`{"filter": "%s"}`, query.Filter)))
}
ctx, span := tracer.Start(ctx, "azuremonitor query")
span.SetAttributes("target", query.Target, attribute.Key("target").String(query.Target))
span.SetAttributes("from", query.TimeRange.From.UnixNano()/int64(time.Millisecond), attribute.Key("from").Int64(query.TimeRange.From.UnixNano()/int64(time.Millisecond)))
span.SetAttributes("until", query.TimeRange.To.UnixNano()/int64(time.Millisecond), attribute.Key("until").Int64(query.TimeRange.To.UnixNano()/int64(time.Millisecond)))
span.SetAttributes("datasource_id", dsInfo.DatasourceID, attribute.Key("datasource_id").Int64(dsInfo.DatasourceID))
span.SetAttributes("org_id", dsInfo.OrgID, attribute.Key("org_id").Int64(dsInfo.OrgID))
defer span.End()
tracer.Inject(ctx, req.Header, span)
logger.Debug("AzureMonitor", "Request ApiURL", req.URL.String())
logger.Debug("AzureMonitor", "Target", query.Target)
res, err := cli.Do(req)
if err != nil {
dataResponse.Error = err
return dataResponse
}
defer func() {
if err := res.Body.Close(); err != nil {
logger.Warn("Failed to close response body", "err", err)
}
}()
data, err := e.unmarshalResponse(logger, res)
if err != nil {
dataResponse.Error = err
return dataResponse
}
azurePortalUrl, err := resourcegraph.GetAzurePortalUrl(dsInfo.Cloud)
if err != nil {
dataResponse.Error = err
return dataResponse
}
dataResponse.Frames, err = e.parseResponse(data, query, azurePortalUrl)
if err != nil {
dataResponse.Error = err
return dataResponse
}
return dataResponse
}
func (e *AzureMonitorDatasource) createRequest(ctx context.Context, logger log.Logger, url string) (*http.Request, error) {
req, err := http.NewRequestWithContext(ctx, http.MethodGet, url, nil)
if err != nil {
logger.Debug("Failed to create request", "error", err)
return nil, fmt.Errorf("%v: %w", "Failed to create request", err)
}
req.Header.Set("Content-Type", "application/json")
return req, nil
}
func (e *AzureMonitorDatasource) unmarshalResponse(logger log.Logger, res *http.Response) (types.AzureMonitorResponse, error) {
body, err := io.ReadAll(res.Body)
if err != nil {
return types.AzureMonitorResponse{}, err
}
if res.StatusCode/100 != 2 {
logger.Debug("Request failed", "status", res.Status, "body", string(body))
return types.AzureMonitorResponse{}, fmt.Errorf("request failed, status: %s", res.Status)
}
var data types.AzureMonitorResponse
err = json.Unmarshal(body, &data)
if err != nil {
logger.Debug("Failed to unmarshal AzureMonitor response", "error", err, "status", res.Status, "body", string(body))
return types.AzureMonitorResponse{}, err
}
return data, nil
}
func (e *AzureMonitorDatasource) parseResponse(amr types.AzureMonitorResponse, query *types.AzureMonitorQuery, azurePortalUrl string) (data.Frames, error) {
if len(amr.Value) == 0 {
return nil, nil
}
frames := data.Frames{}
for _, series := range amr.Value[0].Timeseries {
labels := data.Labels{}
for _, md := range series.Metadatavalues {
labels[md.Name.LocalizedValue] = md.Value
}
frame := data.NewFrameOfFieldTypes("", len(series.Data), data.FieldTypeTime, data.FieldTypeNullableFloat64)
frame.RefID = query.RefID
timeField := frame.Fields[0]
timeField.Name = data.TimeSeriesTimeFieldName
dataField := frame.Fields[1]
dataField.Name = amr.Value[0].Name.LocalizedValue
dataField.Labels = labels
if amr.Value[0].Unit != "Unspecified" {
dataField.SetConfig(&data.FieldConfig{
Unit: toGrafanaUnit(amr.Value[0].Unit),
})
}
resourceID := labels["microsoft.resourceid"]
resourceIDSlice := strings.Split(resourceID, "/")
resourceName := ""
if len(resourceIDSlice) > 1 {
resourceName = resourceIDSlice[len(resourceIDSlice)-1]
} else {
// Deprecated: This is for backward compatibility, the URL should contain
// the resource ID
resourceName = extractResourceNameFromMetricsURL(query.URL)
resourceID = extractResourceIDFromMetricsURL(query.URL)
}
if query.Alias != "" {
displayName := formatAzureMonitorLegendKey(query.Alias, resourceName,
amr.Value[0].Name.LocalizedValue, "", "", amr.Namespace, amr.Value[0].ID, labels)
if dataField.Config != nil {
dataField.Config.DisplayName = displayName
} else {
dataField.SetConfig(&data.FieldConfig{
DisplayName: displayName,
})
}
}
requestedAgg := query.Params.Get("aggregation")
for i, point := range series.Data {
var value *float64
switch requestedAgg {
case "Average":
value = point.Average
case "Total":
value = point.Total
case "Maximum":
value = point.Maximum
case "Minimum":
value = point.Minimum
case "Count":
value = point.Count
default:
value = point.Count
}
frame.SetRow(i, point.TimeStamp, value)
}
queryUrl, err := getQueryUrl(query, azurePortalUrl, resourceID, resourceName)
if err != nil {
return nil, err
}
frameWithLink := resourcegraph.AddConfigLinks(*frame, queryUrl)
frames = append(frames, &frameWithLink)
}
return frames, nil
}
// Gets the deep link for the given query
func getQueryUrl(query *types.AzureMonitorQuery, azurePortalUrl, resourceID, resourceName string) (string, error) {
aggregationType := aggregationTypeMap["Average"]
aggregation := query.Params.Get("aggregation")
if aggregation != "" {
if aggType, ok := aggregationTypeMap[aggregation]; ok {
aggregationType = aggType
}
}
timespan, err := json.Marshal(map[string]interface{}{
"absolute": struct {
Start string `json:"startTime"`
End string `json:"endTime"`
}{
Start: query.TimeRange.From.UTC().Format(time.RFC3339Nano),
End: query.TimeRange.To.UTC().Format(time.RFC3339Nano),
},
})
if err != nil {
return "", err
}
escapedTime := url.QueryEscape(string(timespan))
chartDef, err := json.Marshal(map[string]interface{}{
"v2charts": []interface{}{
map[string]interface{}{
"metrics": []types.MetricChartDefinition{
{
ResourceMetadata: map[string]string{
"id": resourceID,
},
Name: query.Params.Get("metricnames"),
AggregationType: aggregationType,
Namespace: query.Params.Get("metricnamespace"),
MetricVisualization: types.MetricVisualization{
DisplayName: query.Params.Get("metricnames"),
ResourceDisplayName: resourceName,
},
},
},
},
},
})
if err != nil {
return "", err
}
escapedChart := url.QueryEscape(string(chartDef))
// Azure Portal will timeout if the chart definition includes a space character encoded as '+'.
// url.QueryEscape encodes spaces as '+'.
// Note: this will not encode '+' literals as those are already encoded as '%2B' by url.QueryEscape
escapedChart = strings.ReplaceAll(escapedChart, "+", "%20")
return fmt.Sprintf("%s/#blade/Microsoft_Azure_MonitoringMetrics/Metrics.ReactView/Referer/MetricsExplorer/TimeContext/%s/ChartDefinition/%s", azurePortalUrl, escapedTime, escapedChart), nil
}
// formatAzureMonitorLegendKey builds the legend key or timeseries name
// Alias patterns like {{resourcename}} are replaced with the appropriate data values.
func formatAzureMonitorLegendKey(alias string, resourceName string, metricName string, metadataName string,
metadataValue string, namespace string, seriesID string, labels data.Labels) string {
startIndex := strings.Index(seriesID, "/resourceGroups/") + 16
endIndex := strings.Index(seriesID, "/providers")
resourceGroup := seriesID[startIndex:endIndex]
// Could be a collision problem if there were two keys that varied only in case, but I don't think that would happen in azure.
lowerLabels := data.Labels{}
for k, v := range labels {
lowerLabels[strings.ToLower(k)] = v
}
keys := make([]string, 0, len(labels))
for k := range lowerLabels {
keys = append(keys, k)
}
sort.Strings(keys)
result := types.LegendKeyFormat.ReplaceAllFunc([]byte(alias), func(in []byte) []byte {
metaPartName := strings.Replace(string(in), "{{", "", 1)
metaPartName = strings.Replace(metaPartName, "}}", "", 1)
metaPartName = strings.ToLower(strings.TrimSpace(metaPartName))
if metaPartName == "resourcegroup" {
return []byte(resourceGroup)
}
if metaPartName == "namespace" {
return []byte(namespace)
}
if metaPartName == "resourcename" {
return []byte(resourceName)
}
if metaPartName == "metric" {
return []byte(metricName)
}
if metaPartName == "dimensionname" {
if len(keys) == 0 {
return []byte{}
}
return []byte(keys[0])
}
if metaPartName == "dimensionvalue" {
if len(keys) == 0 {
return []byte{}
}
return []byte(lowerLabels[keys[0]])
}
if v, ok := lowerLabels[metaPartName]; ok {
return []byte(v)
}
return in
})
return string(result)
}
// Map values from:
//
// https://docs.microsoft.com/en-us/rest/api/monitor/metrics/list#unit
//
// to
//
// https://github.com/grafana/grafana/blob/main/packages/grafana-data/src/valueFormats/categories.ts#L24
func toGrafanaUnit(unit string) string {
switch unit {
case "BitsPerSecond":
return "bps"
case "Bytes":
return "decbytes" // or ICE
case "BytesPerSecond":
return "Bps"
case "Count":
return "short" // this is used for integers
case "CountPerSecond":
return "cps"
case "Percent":
return "percent"
case "MilliSeconds":
return "ms"
case "Seconds":
return "s"
}
return unit // this will become a suffix in the display
// "ByteSeconds", "Cores", "MilliCores", and "NanoCores" all both:
// 1. Do not have a corresponding unit in Grafana's current list.
// 2. Do not have the unit listed in any of Azure Monitor's supported metrics anyways.
}
func extractResourceNameFromMetricsURL(url string) string {
matches := resourceNameLandmark.FindStringSubmatch(url)
resourceName := ""
if matches == nil {
return resourceName
}
for i, name := range resourceNameLandmark.SubexpNames() {
if name == "resourceName" {
resourceName = matches[i]
}
}
return resourceName
}
func extractResourceIDFromMetricsURL(url string) string {
return strings.Split(url, "/providers/microsoft.insights/metrics")[0]
}
| pkg/tsdb/azuremonitor/metrics/azuremonitor-datasource.go | 1 | https://github.com/grafana/grafana/commit/a7d4bbf0248d0c6e5c6fd9f9ca55ffd399781c12 | [
0.010275200009346008,
0.0009374014334753156,
0.00016200718528125435,
0.00018185738008469343,
0.0018324123229831457
] |
{
"id": 7,
"code_window": [
"\t\t\texpectedInterval: \"PT1M\",\n",
"\t\t\tazureMonitorQueryTarget: \"aggregation=Average&api-version=2021-05-01&interval=PT1M&metricnames=Percentage+CPU&metricnamespace=Microsoft.Compute%2FvirtualMachines®ion=westus×pan=2018-03-15T13%3A00%3A00Z%2F2018-03-15T13%3A34%3A00Z\",\n",
"\t\t\texpectedURL: \"/subscriptions/12345678-aaaa-bbbb-cccc-123456789abc/providers/microsoft.insights/metrics\",\n",
"\t\t},\n",
"\t\t{\n",
"\t\t\tname: \"includes a resource as a filter\",\n",
"\t\t\tazureMonitorVariedProperties: map[string]interface{}{\n"
],
"labels": [
"keep",
"keep",
"keep",
"add",
"keep",
"keep",
"keep"
],
"after_edit": [
"\t\t{\n",
"\t\t\tname: \"Includes a region and a filter\",\n",
"\t\t\tazureMonitorVariedProperties: map[string]interface{}{\n",
"\t\t\t\t\"timeGrain\": \"PT1M\",\n",
"\t\t\t\t\"top\": \"10\",\n",
"\t\t\t\t\"region\": \"westus\",\n",
"\t\t\t\t\"resources\": []types.AzureMonitorResource{{ResourceGroup: \"rg\", ResourceName: \"vm\"}},\n",
"\t\t\t},\n",
"\t\t\texpectedInterval: \"PT1M\",\n",
"\t\t\tazureMonitorQueryTarget: \"aggregation=Average&api-version=2021-05-01&interval=PT1M&metricnames=Percentage+CPU&metricnamespace=Microsoft.Compute%2FvirtualMachines®ion=westus×pan=2018-03-15T13%3A00%3A00Z%2F2018-03-15T13%3A34%3A00Z\",\n",
"\t\t\texpectedURL: \"/subscriptions/12345678-aaaa-bbbb-cccc-123456789abc/providers/microsoft.insights/metrics\",\n",
"\t\t\texpectedFilter: \"Microsoft.ResourceId eq '/subscriptions/12345678-aaaa-bbbb-cccc-123456789abc/resourceGroups/rg/providers/Microsoft.Compute/virtualMachines/vm'\",\n",
"\t\t},\n"
],
"file_path": "pkg/tsdb/azuremonitor/metrics/azuremonitor-datasource_test.go",
"type": "add",
"edit_start_line_idx": 210
} | <svg xmlns="http://www.w3.org/2000/svg" enable-background="new 0 0 24 24" viewBox="0 0 24 24"><path d="M19,2H5C3.3,2,2,3.3,2,5v14c0,1.7,1.3,3,3,3h14c1.7,0,3-1.3,3-3V5C22,3.3,20.7,2,19,2z M16,17c0,0.6-0.4,1-1,1s-1-0.4-1-1v-4h-4v4c0,0.6-0.4,1-1,1s-1-0.4-1-1V7c0-0.6,0.4-1,1-1s1,0.4,1,1v4h4V7c0-0.6,0.4-1,1-1s1,0.4,1,1V17z"/></svg> | public/img/icons/solid/hospital-square-sign.svg | 0 | https://github.com/grafana/grafana/commit/a7d4bbf0248d0c6e5c6fd9f9ca55ffd399781c12 | [
0.00017320770712103695,
0.00017320770712103695,
0.00017320770712103695,
0.00017320770712103695,
0
] |
{
"id": 7,
"code_window": [
"\t\t\texpectedInterval: \"PT1M\",\n",
"\t\t\tazureMonitorQueryTarget: \"aggregation=Average&api-version=2021-05-01&interval=PT1M&metricnames=Percentage+CPU&metricnamespace=Microsoft.Compute%2FvirtualMachines®ion=westus×pan=2018-03-15T13%3A00%3A00Z%2F2018-03-15T13%3A34%3A00Z\",\n",
"\t\t\texpectedURL: \"/subscriptions/12345678-aaaa-bbbb-cccc-123456789abc/providers/microsoft.insights/metrics\",\n",
"\t\t},\n",
"\t\t{\n",
"\t\t\tname: \"includes a resource as a filter\",\n",
"\t\t\tazureMonitorVariedProperties: map[string]interface{}{\n"
],
"labels": [
"keep",
"keep",
"keep",
"add",
"keep",
"keep",
"keep"
],
"after_edit": [
"\t\t{\n",
"\t\t\tname: \"Includes a region and a filter\",\n",
"\t\t\tazureMonitorVariedProperties: map[string]interface{}{\n",
"\t\t\t\t\"timeGrain\": \"PT1M\",\n",
"\t\t\t\t\"top\": \"10\",\n",
"\t\t\t\t\"region\": \"westus\",\n",
"\t\t\t\t\"resources\": []types.AzureMonitorResource{{ResourceGroup: \"rg\", ResourceName: \"vm\"}},\n",
"\t\t\t},\n",
"\t\t\texpectedInterval: \"PT1M\",\n",
"\t\t\tazureMonitorQueryTarget: \"aggregation=Average&api-version=2021-05-01&interval=PT1M&metricnames=Percentage+CPU&metricnamespace=Microsoft.Compute%2FvirtualMachines®ion=westus×pan=2018-03-15T13%3A00%3A00Z%2F2018-03-15T13%3A34%3A00Z\",\n",
"\t\t\texpectedURL: \"/subscriptions/12345678-aaaa-bbbb-cccc-123456789abc/providers/microsoft.insights/metrics\",\n",
"\t\t\texpectedFilter: \"Microsoft.ResourceId eq '/subscriptions/12345678-aaaa-bbbb-cccc-123456789abc/resourceGroups/rg/providers/Microsoft.Compute/virtualMachines/vm'\",\n",
"\t\t},\n"
],
"file_path": "pkg/tsdb/azuremonitor/metrics/azuremonitor-datasource_test.go",
"type": "add",
"edit_start_line_idx": 210
} | package commentmodel
import (
"context"
"strconv"
"github.com/grafana/grafana/pkg/infra/db"
"github.com/grafana/grafana/pkg/models"
"github.com/grafana/grafana/pkg/services/accesscontrol"
"github.com/grafana/grafana/pkg/services/annotations"
"github.com/grafana/grafana/pkg/services/dashboards"
"github.com/grafana/grafana/pkg/services/featuremgmt"
"github.com/grafana/grafana/pkg/services/guardian"
"github.com/grafana/grafana/pkg/services/user"
)
type PermissionChecker struct {
sqlStore db.DB
features featuremgmt.FeatureToggles
accessControl accesscontrol.AccessControl
dashboardService dashboards.DashboardService
annotationsRepo annotations.Repository
}
func NewPermissionChecker(sqlStore db.DB, features featuremgmt.FeatureToggles,
accessControl accesscontrol.AccessControl, dashboardService dashboards.DashboardService,
annotationsRepo annotations.Repository,
) *PermissionChecker {
return &PermissionChecker{sqlStore: sqlStore, features: features, accessControl: accessControl, annotationsRepo: annotationsRepo}
}
func (c *PermissionChecker) getDashboardByUid(ctx context.Context, orgID int64, uid string) (*models.Dashboard, error) {
query := models.GetDashboardQuery{Uid: uid, OrgId: orgID}
if err := c.dashboardService.GetDashboard(ctx, &query); err != nil {
return nil, err
}
return query.Result, nil
}
func (c *PermissionChecker) getDashboardById(ctx context.Context, orgID int64, id int64) (*models.Dashboard, error) {
query := models.GetDashboardQuery{Id: id, OrgId: orgID}
if err := c.dashboardService.GetDashboard(ctx, &query); err != nil {
return nil, err
}
return query.Result, nil
}
func (c *PermissionChecker) CheckReadPermissions(ctx context.Context, orgId int64, signedInUser *user.SignedInUser, objectType string, objectID string) (bool, error) {
switch objectType {
case ObjectTypeOrg:
return false, nil
case ObjectTypeDashboard:
if !c.features.IsEnabled(featuremgmt.FlagDashboardComments) {
return false, nil
}
dash, err := c.getDashboardByUid(ctx, orgId, objectID)
if err != nil {
return false, err
}
guard := guardian.New(ctx, dash.Id, orgId, signedInUser)
if ok, err := guard.CanView(); err != nil || !ok {
return false, nil
}
case ObjectTypeAnnotation:
if !c.features.IsEnabled(featuremgmt.FlagAnnotationComments) {
return false, nil
}
annotationID, err := strconv.ParseInt(objectID, 10, 64)
if err != nil {
return false, nil
}
items, err := c.annotationsRepo.Find(ctx, &annotations.ItemQuery{AnnotationId: annotationID, OrgId: orgId, SignedInUser: signedInUser})
if err != nil || len(items) != 1 {
return false, nil
}
dashboardID := items[0].DashboardId
if dashboardID == 0 {
return false, nil
}
dash, err := c.getDashboardById(ctx, orgId, dashboardID)
if err != nil {
return false, err
}
guard := guardian.New(ctx, dash.Id, orgId, signedInUser)
if ok, err := guard.CanView(); err != nil || !ok {
return false, nil
}
default:
return false, nil
}
return true, nil
}
func (c *PermissionChecker) CheckWritePermissions(ctx context.Context, orgId int64, signedInUser *user.SignedInUser, objectType string, objectID string) (bool, error) {
switch objectType {
case ObjectTypeOrg:
return false, nil
case ObjectTypeDashboard:
if !c.features.IsEnabled(featuremgmt.FlagDashboardComments) {
return false, nil
}
dash, err := c.getDashboardByUid(ctx, orgId, objectID)
if err != nil {
return false, err
}
guard := guardian.New(ctx, dash.Id, orgId, signedInUser)
if ok, err := guard.CanEdit(); err != nil || !ok {
return false, nil
}
case ObjectTypeAnnotation:
if !c.features.IsEnabled(featuremgmt.FlagAnnotationComments) {
return false, nil
}
if !c.accessControl.IsDisabled() {
evaluator := accesscontrol.EvalPermission(accesscontrol.ActionAnnotationsWrite, accesscontrol.ScopeAnnotationsTypeDashboard)
if canEdit, err := c.accessControl.Evaluate(ctx, signedInUser, evaluator); err != nil || !canEdit {
return canEdit, err
}
}
annotationID, err := strconv.ParseInt(objectID, 10, 64)
if err != nil {
return false, nil
}
items, err := c.annotationsRepo.Find(ctx, &annotations.ItemQuery{AnnotationId: annotationID, OrgId: orgId, SignedInUser: signedInUser})
if err != nil || len(items) != 1 {
return false, nil
}
dashboardID := items[0].DashboardId
if dashboardID == 0 {
return false, nil
}
dash, err := c.getDashboardById(ctx, orgId, dashboardID)
if err != nil {
return false, nil
}
guard := guardian.New(ctx, dash.Id, orgId, signedInUser)
if ok, err := guard.CanEdit(); err != nil || !ok {
return false, nil
}
default:
return false, nil
}
return true, nil
}
| pkg/services/comments/commentmodel/permissions.go | 0 | https://github.com/grafana/grafana/commit/a7d4bbf0248d0c6e5c6fd9f9ca55ffd399781c12 | [
0.00017553790530655533,
0.00017225368355866522,
0.00016167585272341967,
0.00017309428949374706,
0.00000345242619914643
] |
{
"id": 7,
"code_window": [
"\t\t\texpectedInterval: \"PT1M\",\n",
"\t\t\tazureMonitorQueryTarget: \"aggregation=Average&api-version=2021-05-01&interval=PT1M&metricnames=Percentage+CPU&metricnamespace=Microsoft.Compute%2FvirtualMachines®ion=westus×pan=2018-03-15T13%3A00%3A00Z%2F2018-03-15T13%3A34%3A00Z\",\n",
"\t\t\texpectedURL: \"/subscriptions/12345678-aaaa-bbbb-cccc-123456789abc/providers/microsoft.insights/metrics\",\n",
"\t\t},\n",
"\t\t{\n",
"\t\t\tname: \"includes a resource as a filter\",\n",
"\t\t\tazureMonitorVariedProperties: map[string]interface{}{\n"
],
"labels": [
"keep",
"keep",
"keep",
"add",
"keep",
"keep",
"keep"
],
"after_edit": [
"\t\t{\n",
"\t\t\tname: \"Includes a region and a filter\",\n",
"\t\t\tazureMonitorVariedProperties: map[string]interface{}{\n",
"\t\t\t\t\"timeGrain\": \"PT1M\",\n",
"\t\t\t\t\"top\": \"10\",\n",
"\t\t\t\t\"region\": \"westus\",\n",
"\t\t\t\t\"resources\": []types.AzureMonitorResource{{ResourceGroup: \"rg\", ResourceName: \"vm\"}},\n",
"\t\t\t},\n",
"\t\t\texpectedInterval: \"PT1M\",\n",
"\t\t\tazureMonitorQueryTarget: \"aggregation=Average&api-version=2021-05-01&interval=PT1M&metricnames=Percentage+CPU&metricnamespace=Microsoft.Compute%2FvirtualMachines®ion=westus×pan=2018-03-15T13%3A00%3A00Z%2F2018-03-15T13%3A34%3A00Z\",\n",
"\t\t\texpectedURL: \"/subscriptions/12345678-aaaa-bbbb-cccc-123456789abc/providers/microsoft.insights/metrics\",\n",
"\t\t\texpectedFilter: \"Microsoft.ResourceId eq '/subscriptions/12345678-aaaa-bbbb-cccc-123456789abc/resourceGroups/rg/providers/Microsoft.Compute/virtualMachines/vm'\",\n",
"\t\t},\n"
],
"file_path": "pkg/tsdb/azuremonitor/metrics/azuremonitor-datasource_test.go",
"type": "add",
"edit_start_line_idx": 210
} | ---
aliases:
- /docs/grafana/next/dashboards/build-dashboards/
title: Build dashboards
menuTitle: Build dashboards
weight: 2
keywords:
- grafana
- dashboard
- dashboard folders
- create
- build
- design
---
# Build dashboards
This section includes the following topics:
{{< section >}}
## Dynamic dashboards
You can create more interactive and dynamic dashboards by adding and using [variables]({{< relref "../variables" >}}). Instead of hard-coding things like server, application, and sensor names in your metric queries, you can use variables in their place. Read more about variables [here]({{< relref "../variables" >}}).
| docs/sources/dashboards/build-dashboards/_index.md | 0 | https://github.com/grafana/grafana/commit/a7d4bbf0248d0c6e5c6fd9f9ca55ffd399781c12 | [
0.00017164109158329666,
0.0001671842037467286,
0.00015930910012684762,
0.0001706023613223806,
0.000005584648988588015
] |
{
"id": 8,
"code_window": [
"\t\t\t\t\t\tTo: fromStart.Add(34 * time.Minute),\n",
"\t\t\t\t\t},\n",
"\t\t\t\t},\n",
"\t\t\t}\n",
"\n",
"\t\t\tazureMonitorQuery := &types.AzureMonitorQuery{\n",
"\t\t\t\tURL: tt.expectedURL,\n",
"\t\t\t\tTarget: tt.azureMonitorQueryTarget,\n",
"\t\t\t\tRefID: \"A\",\n"
],
"labels": [
"keep",
"keep",
"keep",
"keep",
"add",
"keep",
"keep",
"keep",
"keep"
],
"after_edit": [
"\t\t\tqueries, err := datasource.buildQueries(log.New(\"test\"), tsdbQuery, dsInfo)\n",
"\t\t\trequire.NoError(t, err)\n",
"\n"
],
"file_path": "pkg/tsdb/azuremonitor/metrics/azuremonitor-datasource_test.go",
"type": "add",
"edit_start_line_idx": 268
} | package metrics
import (
"context"
"encoding/json"
"fmt"
"io"
"net/http"
"net/url"
"path"
"regexp"
"sort"
"strings"
"time"
"github.com/grafana/grafana-plugin-sdk-go/backend"
"github.com/grafana/grafana-plugin-sdk-go/data"
"go.opentelemetry.io/otel/attribute"
"github.com/grafana/grafana/pkg/infra/log"
"github.com/grafana/grafana/pkg/infra/tracing"
"github.com/grafana/grafana/pkg/setting"
"github.com/grafana/grafana/pkg/tsdb/azuremonitor/resourcegraph"
azTime "github.com/grafana/grafana/pkg/tsdb/azuremonitor/time"
"github.com/grafana/grafana/pkg/tsdb/azuremonitor/types"
)
// AzureMonitorDatasource calls the Azure Monitor API - one of the four API's supported
type AzureMonitorDatasource struct {
Proxy types.ServiceProxy
}
var (
// Used to convert the aggregation value to the Azure enum for deep linking
aggregationTypeMap = map[string]int{"None": 0, "Total": 1, "Minimum": 2, "Maximum": 3, "Average": 4, "Count": 7}
resourceNameLandmark = regexp.MustCompile(`(?i)(/(?P<resourceName>[\w-\.]+)/providers/Microsoft\.Insights/metrics)`)
)
const AzureMonitorAPIVersion = "2021-05-01"
func (e *AzureMonitorDatasource) ResourceRequest(rw http.ResponseWriter, req *http.Request, cli *http.Client) {
e.Proxy.Do(rw, req, cli)
}
// executeTimeSeriesQuery does the following:
// 1. build the AzureMonitor url and querystring for each query
// 2. executes each query by calling the Azure Monitor API
// 3. parses the responses for each query into data frames
func (e *AzureMonitorDatasource) ExecuteTimeSeriesQuery(ctx context.Context, logger log.Logger, originalQueries []backend.DataQuery, dsInfo types.DatasourceInfo, client *http.Client, url string, tracer tracing.Tracer) (*backend.QueryDataResponse, error) {
result := backend.NewQueryDataResponse()
ctxLogger := logger.FromContext(ctx)
queries, err := e.buildQueries(ctxLogger, originalQueries, dsInfo)
if err != nil {
return nil, err
}
for _, query := range queries {
result.Responses[query.RefID] = e.executeQuery(ctx, ctxLogger, query, dsInfo, client, url, tracer)
}
return result, nil
}
func (e *AzureMonitorDatasource) buildQueries(logger log.Logger, queries []backend.DataQuery, dsInfo types.DatasourceInfo) ([]*types.AzureMonitorQuery, error) {
azureMonitorQueries := []*types.AzureMonitorQuery{}
for _, query := range queries {
var target string
queryJSONModel := types.AzureMonitorJSONQuery{}
err := json.Unmarshal(query.JSON, &queryJSONModel)
if err != nil {
return nil, fmt.Errorf("failed to decode the Azure Monitor query object from JSON: %w", err)
}
azJSONModel := queryJSONModel.AzureMonitor
// Legacy: If only MetricDefinition is set, use it as namespace
if azJSONModel.MetricDefinition != "" && azJSONModel.MetricNamespace == "" {
azJSONModel.MetricNamespace = azJSONModel.MetricDefinition
}
azJSONModel.DimensionFilters = MigrateDimensionFilters(azJSONModel.DimensionFilters)
alias := azJSONModel.Alias
timeGrain := azJSONModel.TimeGrain
timeGrains := azJSONModel.AllowedTimeGrainsMs
if timeGrain == "auto" {
timeGrain, err = azTime.SetAutoTimeGrain(query.Interval.Milliseconds(), timeGrains)
if err != nil {
return nil, err
}
}
params := url.Values{}
params.Add("api-version", AzureMonitorAPIVersion)
params.Add("timespan", fmt.Sprintf("%v/%v", query.TimeRange.From.UTC().Format(time.RFC3339), query.TimeRange.To.UTC().Format(time.RFC3339)))
params.Add("interval", timeGrain)
params.Add("aggregation", azJSONModel.Aggregation)
params.Add("metricnames", azJSONModel.MetricName)
if azJSONModel.CustomNamespace != "" {
params.Add("metricnamespace", azJSONModel.CustomNamespace)
} else {
params.Add("metricnamespace", azJSONModel.MetricNamespace)
}
azureURL := BuildSubscriptionMetricsURL(queryJSONModel.Subscription)
if azJSONModel.Region != "" {
params.Add("region", azJSONModel.Region)
} else {
// Deprecated, if no region is specified, only one resource group and name is supported
ub := urlBuilder{
ResourceURI: azJSONModel.ResourceURI,
// Alternative, used to reconstruct resource URI if it's not present
DefaultSubscription: dsInfo.Settings.SubscriptionId,
Subscription: queryJSONModel.Subscription,
ResourceGroup: azJSONModel.ResourceGroup,
MetricNamespace: azJSONModel.MetricNamespace,
ResourceName: azJSONModel.ResourceName,
}
azureURL = ub.BuildMetricsURL()
}
// old model
dimension := strings.TrimSpace(azJSONModel.Dimension)
dimensionFilter := strings.TrimSpace(azJSONModel.DimensionFilter)
dimSB := strings.Builder{}
if dimension != "" && dimensionFilter != "" && dimension != "None" && len(azJSONModel.DimensionFilters) == 0 {
dimSB.WriteString(fmt.Sprintf("%s eq '%s'", dimension, dimensionFilter))
} else {
for i, filter := range azJSONModel.DimensionFilters {
if len(filter.Filters) == 0 {
dimSB.WriteString(fmt.Sprintf("%s eq '*'", filter.Dimension))
} else {
dimSB.WriteString(filter.ConstructFiltersString())
}
if i != len(azJSONModel.DimensionFilters)-1 {
dimSB.WriteString(" and ")
}
}
}
resourceIDs := []string{}
for _, r := range azJSONModel.Resources {
ub := urlBuilder{
DefaultSubscription: dsInfo.Settings.SubscriptionId,
Subscription: queryJSONModel.Subscription,
ResourceGroup: r.ResourceGroup,
MetricNamespace: azJSONModel.MetricNamespace,
ResourceName: r.ResourceName,
}
resourceIDs = append(resourceIDs, fmt.Sprintf("Microsoft.ResourceId eq '%s'", ub.buildResourceURI()))
}
filterString := strings.Join(resourceIDs, " or ")
if dimSB.String() != "" {
if filterString != "" {
filterString = fmt.Sprintf("(%s) and (%s)", filterString, dimSB.String())
} else {
filterString = dimSB.String()
}
if azJSONModel.Top != "" {
params.Add("top", azJSONModel.Top)
}
}
target = params.Encode()
if setting.Env == setting.Dev {
logger.Debug("Azuremonitor request", "params", params)
}
azureMonitorQueries = append(azureMonitorQueries, &types.AzureMonitorQuery{
URL: azureURL,
Target: target,
Params: params,
RefID: query.RefID,
Alias: alias,
TimeRange: query.TimeRange,
Filter: filterString,
})
}
return azureMonitorQueries, nil
}
func (e *AzureMonitorDatasource) executeQuery(ctx context.Context, logger log.Logger, query *types.AzureMonitorQuery, dsInfo types.DatasourceInfo, cli *http.Client,
url string, tracer tracing.Tracer) backend.DataResponse {
dataResponse := backend.DataResponse{}
req, err := e.createRequest(ctx, logger, url)
if err != nil {
dataResponse.Error = err
return dataResponse
}
req.URL.Path = path.Join(req.URL.Path, query.URL)
req.URL.RawQuery = query.Params.Encode()
if query.Filter != "" {
req.Method = http.MethodPost
req.Body = io.NopCloser(strings.NewReader(fmt.Sprintf(`{"filter": "%s"}`, query.Filter)))
}
ctx, span := tracer.Start(ctx, "azuremonitor query")
span.SetAttributes("target", query.Target, attribute.Key("target").String(query.Target))
span.SetAttributes("from", query.TimeRange.From.UnixNano()/int64(time.Millisecond), attribute.Key("from").Int64(query.TimeRange.From.UnixNano()/int64(time.Millisecond)))
span.SetAttributes("until", query.TimeRange.To.UnixNano()/int64(time.Millisecond), attribute.Key("until").Int64(query.TimeRange.To.UnixNano()/int64(time.Millisecond)))
span.SetAttributes("datasource_id", dsInfo.DatasourceID, attribute.Key("datasource_id").Int64(dsInfo.DatasourceID))
span.SetAttributes("org_id", dsInfo.OrgID, attribute.Key("org_id").Int64(dsInfo.OrgID))
defer span.End()
tracer.Inject(ctx, req.Header, span)
logger.Debug("AzureMonitor", "Request ApiURL", req.URL.String())
logger.Debug("AzureMonitor", "Target", query.Target)
res, err := cli.Do(req)
if err != nil {
dataResponse.Error = err
return dataResponse
}
defer func() {
if err := res.Body.Close(); err != nil {
logger.Warn("Failed to close response body", "err", err)
}
}()
data, err := e.unmarshalResponse(logger, res)
if err != nil {
dataResponse.Error = err
return dataResponse
}
azurePortalUrl, err := resourcegraph.GetAzurePortalUrl(dsInfo.Cloud)
if err != nil {
dataResponse.Error = err
return dataResponse
}
dataResponse.Frames, err = e.parseResponse(data, query, azurePortalUrl)
if err != nil {
dataResponse.Error = err
return dataResponse
}
return dataResponse
}
func (e *AzureMonitorDatasource) createRequest(ctx context.Context, logger log.Logger, url string) (*http.Request, error) {
req, err := http.NewRequestWithContext(ctx, http.MethodGet, url, nil)
if err != nil {
logger.Debug("Failed to create request", "error", err)
return nil, fmt.Errorf("%v: %w", "Failed to create request", err)
}
req.Header.Set("Content-Type", "application/json")
return req, nil
}
func (e *AzureMonitorDatasource) unmarshalResponse(logger log.Logger, res *http.Response) (types.AzureMonitorResponse, error) {
body, err := io.ReadAll(res.Body)
if err != nil {
return types.AzureMonitorResponse{}, err
}
if res.StatusCode/100 != 2 {
logger.Debug("Request failed", "status", res.Status, "body", string(body))
return types.AzureMonitorResponse{}, fmt.Errorf("request failed, status: %s", res.Status)
}
var data types.AzureMonitorResponse
err = json.Unmarshal(body, &data)
if err != nil {
logger.Debug("Failed to unmarshal AzureMonitor response", "error", err, "status", res.Status, "body", string(body))
return types.AzureMonitorResponse{}, err
}
return data, nil
}
func (e *AzureMonitorDatasource) parseResponse(amr types.AzureMonitorResponse, query *types.AzureMonitorQuery, azurePortalUrl string) (data.Frames, error) {
if len(amr.Value) == 0 {
return nil, nil
}
frames := data.Frames{}
for _, series := range amr.Value[0].Timeseries {
labels := data.Labels{}
for _, md := range series.Metadatavalues {
labels[md.Name.LocalizedValue] = md.Value
}
frame := data.NewFrameOfFieldTypes("", len(series.Data), data.FieldTypeTime, data.FieldTypeNullableFloat64)
frame.RefID = query.RefID
timeField := frame.Fields[0]
timeField.Name = data.TimeSeriesTimeFieldName
dataField := frame.Fields[1]
dataField.Name = amr.Value[0].Name.LocalizedValue
dataField.Labels = labels
if amr.Value[0].Unit != "Unspecified" {
dataField.SetConfig(&data.FieldConfig{
Unit: toGrafanaUnit(amr.Value[0].Unit),
})
}
resourceID := labels["microsoft.resourceid"]
resourceIDSlice := strings.Split(resourceID, "/")
resourceName := ""
if len(resourceIDSlice) > 1 {
resourceName = resourceIDSlice[len(resourceIDSlice)-1]
} else {
// Deprecated: This is for backward compatibility, the URL should contain
// the resource ID
resourceName = extractResourceNameFromMetricsURL(query.URL)
resourceID = extractResourceIDFromMetricsURL(query.URL)
}
if query.Alias != "" {
displayName := formatAzureMonitorLegendKey(query.Alias, resourceName,
amr.Value[0].Name.LocalizedValue, "", "", amr.Namespace, amr.Value[0].ID, labels)
if dataField.Config != nil {
dataField.Config.DisplayName = displayName
} else {
dataField.SetConfig(&data.FieldConfig{
DisplayName: displayName,
})
}
}
requestedAgg := query.Params.Get("aggregation")
for i, point := range series.Data {
var value *float64
switch requestedAgg {
case "Average":
value = point.Average
case "Total":
value = point.Total
case "Maximum":
value = point.Maximum
case "Minimum":
value = point.Minimum
case "Count":
value = point.Count
default:
value = point.Count
}
frame.SetRow(i, point.TimeStamp, value)
}
queryUrl, err := getQueryUrl(query, azurePortalUrl, resourceID, resourceName)
if err != nil {
return nil, err
}
frameWithLink := resourcegraph.AddConfigLinks(*frame, queryUrl)
frames = append(frames, &frameWithLink)
}
return frames, nil
}
// Gets the deep link for the given query
func getQueryUrl(query *types.AzureMonitorQuery, azurePortalUrl, resourceID, resourceName string) (string, error) {
aggregationType := aggregationTypeMap["Average"]
aggregation := query.Params.Get("aggregation")
if aggregation != "" {
if aggType, ok := aggregationTypeMap[aggregation]; ok {
aggregationType = aggType
}
}
timespan, err := json.Marshal(map[string]interface{}{
"absolute": struct {
Start string `json:"startTime"`
End string `json:"endTime"`
}{
Start: query.TimeRange.From.UTC().Format(time.RFC3339Nano),
End: query.TimeRange.To.UTC().Format(time.RFC3339Nano),
},
})
if err != nil {
return "", err
}
escapedTime := url.QueryEscape(string(timespan))
chartDef, err := json.Marshal(map[string]interface{}{
"v2charts": []interface{}{
map[string]interface{}{
"metrics": []types.MetricChartDefinition{
{
ResourceMetadata: map[string]string{
"id": resourceID,
},
Name: query.Params.Get("metricnames"),
AggregationType: aggregationType,
Namespace: query.Params.Get("metricnamespace"),
MetricVisualization: types.MetricVisualization{
DisplayName: query.Params.Get("metricnames"),
ResourceDisplayName: resourceName,
},
},
},
},
},
})
if err != nil {
return "", err
}
escapedChart := url.QueryEscape(string(chartDef))
// Azure Portal will timeout if the chart definition includes a space character encoded as '+'.
// url.QueryEscape encodes spaces as '+'.
// Note: this will not encode '+' literals as those are already encoded as '%2B' by url.QueryEscape
escapedChart = strings.ReplaceAll(escapedChart, "+", "%20")
return fmt.Sprintf("%s/#blade/Microsoft_Azure_MonitoringMetrics/Metrics.ReactView/Referer/MetricsExplorer/TimeContext/%s/ChartDefinition/%s", azurePortalUrl, escapedTime, escapedChart), nil
}
// formatAzureMonitorLegendKey builds the legend key or timeseries name
// Alias patterns like {{resourcename}} are replaced with the appropriate data values.
func formatAzureMonitorLegendKey(alias string, resourceName string, metricName string, metadataName string,
metadataValue string, namespace string, seriesID string, labels data.Labels) string {
startIndex := strings.Index(seriesID, "/resourceGroups/") + 16
endIndex := strings.Index(seriesID, "/providers")
resourceGroup := seriesID[startIndex:endIndex]
// Could be a collision problem if there were two keys that varied only in case, but I don't think that would happen in azure.
lowerLabels := data.Labels{}
for k, v := range labels {
lowerLabels[strings.ToLower(k)] = v
}
keys := make([]string, 0, len(labels))
for k := range lowerLabels {
keys = append(keys, k)
}
sort.Strings(keys)
result := types.LegendKeyFormat.ReplaceAllFunc([]byte(alias), func(in []byte) []byte {
metaPartName := strings.Replace(string(in), "{{", "", 1)
metaPartName = strings.Replace(metaPartName, "}}", "", 1)
metaPartName = strings.ToLower(strings.TrimSpace(metaPartName))
if metaPartName == "resourcegroup" {
return []byte(resourceGroup)
}
if metaPartName == "namespace" {
return []byte(namespace)
}
if metaPartName == "resourcename" {
return []byte(resourceName)
}
if metaPartName == "metric" {
return []byte(metricName)
}
if metaPartName == "dimensionname" {
if len(keys) == 0 {
return []byte{}
}
return []byte(keys[0])
}
if metaPartName == "dimensionvalue" {
if len(keys) == 0 {
return []byte{}
}
return []byte(lowerLabels[keys[0]])
}
if v, ok := lowerLabels[metaPartName]; ok {
return []byte(v)
}
return in
})
return string(result)
}
// Map values from:
//
// https://docs.microsoft.com/en-us/rest/api/monitor/metrics/list#unit
//
// to
//
// https://github.com/grafana/grafana/blob/main/packages/grafana-data/src/valueFormats/categories.ts#L24
func toGrafanaUnit(unit string) string {
switch unit {
case "BitsPerSecond":
return "bps"
case "Bytes":
return "decbytes" // or ICE
case "BytesPerSecond":
return "Bps"
case "Count":
return "short" // this is used for integers
case "CountPerSecond":
return "cps"
case "Percent":
return "percent"
case "MilliSeconds":
return "ms"
case "Seconds":
return "s"
}
return unit // this will become a suffix in the display
// "ByteSeconds", "Cores", "MilliCores", and "NanoCores" all both:
// 1. Do not have a corresponding unit in Grafana's current list.
// 2. Do not have the unit listed in any of Azure Monitor's supported metrics anyways.
}
func extractResourceNameFromMetricsURL(url string) string {
matches := resourceNameLandmark.FindStringSubmatch(url)
resourceName := ""
if matches == nil {
return resourceName
}
for i, name := range resourceNameLandmark.SubexpNames() {
if name == "resourceName" {
resourceName = matches[i]
}
}
return resourceName
}
func extractResourceIDFromMetricsURL(url string) string {
return strings.Split(url, "/providers/microsoft.insights/metrics")[0]
}
| pkg/tsdb/azuremonitor/metrics/azuremonitor-datasource.go | 1 | https://github.com/grafana/grafana/commit/a7d4bbf0248d0c6e5c6fd9f9ca55ffd399781c12 | [
0.9976798892021179,
0.14848606288433075,
0.000165572389960289,
0.0002155246038455516,
0.3263229429721832
] |
{
"id": 8,
"code_window": [
"\t\t\t\t\t\tTo: fromStart.Add(34 * time.Minute),\n",
"\t\t\t\t\t},\n",
"\t\t\t\t},\n",
"\t\t\t}\n",
"\n",
"\t\t\tazureMonitorQuery := &types.AzureMonitorQuery{\n",
"\t\t\t\tURL: tt.expectedURL,\n",
"\t\t\t\tTarget: tt.azureMonitorQueryTarget,\n",
"\t\t\t\tRefID: \"A\",\n"
],
"labels": [
"keep",
"keep",
"keep",
"keep",
"add",
"keep",
"keep",
"keep",
"keep"
],
"after_edit": [
"\t\t\tqueries, err := datasource.buildQueries(log.New(\"test\"), tsdbQuery, dsInfo)\n",
"\t\t\trequire.NoError(t, err)\n",
"\n"
],
"file_path": "pkg/tsdb/azuremonitor/metrics/azuremonitor-datasource_test.go",
"type": "add",
"edit_start_line_idx": 268
} | package loader
import (
"context"
"github.com/grafana/grafana/pkg/plugins"
)
// Service is responsible for loading plugins from the file system.
type Service interface {
// Load will return a list of plugins found in the provided file system paths.
Load(ctx context.Context, class plugins.Class, paths []string) ([]*plugins.Plugin, error)
// Unload will unload a specified plugin from the file system.
Unload(ctx context.Context, pluginID string) error
}
| pkg/plugins/manager/loader/ifaces.go | 0 | https://github.com/grafana/grafana/commit/a7d4bbf0248d0c6e5c6fd9f9ca55ffd399781c12 | [
0.00016950284771155566,
0.0001657800457905978,
0.00016205724386963993,
0.0001657800457905978,
0.0000037228019209578633
] |
{
"id": 8,
"code_window": [
"\t\t\t\t\t\tTo: fromStart.Add(34 * time.Minute),\n",
"\t\t\t\t\t},\n",
"\t\t\t\t},\n",
"\t\t\t}\n",
"\n",
"\t\t\tazureMonitorQuery := &types.AzureMonitorQuery{\n",
"\t\t\t\tURL: tt.expectedURL,\n",
"\t\t\t\tTarget: tt.azureMonitorQueryTarget,\n",
"\t\t\t\tRefID: \"A\",\n"
],
"labels": [
"keep",
"keep",
"keep",
"keep",
"add",
"keep",
"keep",
"keep",
"keep"
],
"after_edit": [
"\t\t\tqueries, err := datasource.buildQueries(log.New(\"test\"), tsdbQuery, dsInfo)\n",
"\t\t\trequire.NoError(t, err)\n",
"\n"
],
"file_path": "pkg/tsdb/azuremonitor/metrics/azuremonitor-datasource_test.go",
"type": "add",
"edit_start_line_idx": 268
} | package setting
import (
"errors"
"strings"
"time"
"gopkg.in/ini.v1"
)
var (
ErrOperationNotPermitted = errors.New("operation not permitted")
)
type ValidationError struct {
Errors []error
}
func (v ValidationError) Error() string {
builder := strings.Builder{}
for i, e := range v.Errors {
builder.WriteString(e.Error())
if i != len(v.Errors)-1 {
builder.WriteString(", ")
}
}
return builder.String()
}
// Provider is a settings provider abstraction
// with thread-safety and runtime updates.
type Provider interface {
// Current returns a SettingsBag with a static copy of
// the current configured pairs of key/values for each
// configuration section.
Current() SettingsBag
// Update receives a SettingsBag with the pairs of key/values
// to be updated per section and a SettingsRemovals with the
// section keys to be removed.
Update(updates SettingsBag, removals SettingsRemovals) error
// KeyValue returns a key-value abstraction
// for the given pair of section and key.
KeyValue(section, key string) KeyValue
// Section returns a settings section
// abstraction for the given section name.
Section(section string) Section
// RegisterReloadHandler registers a handler for validation and reload
// of configuration updates tied to a specific section
RegisterReloadHandler(section string, handler ReloadHandler)
}
// Section is a settings section copy
// with all of its pairs of keys-values.
type Section interface {
// KeyValue returns a key-value
// abstraction for the given key.
KeyValue(key string) KeyValue
}
// KeyValue represents a settings key-value
// for a given pair of section and key.
type KeyValue interface {
// Key returns pair's key.
Key() string
// Value returns pair's value.
Value() string
// MustString returns the value's string representation
// If empty, then it returns the given default.
MustString(defaultVal string) string
// MustBool returns the value's boolean representation
// Otherwise returns the given default.
MustBool(defaultVal bool) bool
// MustDuration returns the value's time.Duration
// representation. Otherwise returns the given default.
MustDuration(defaultVal time.Duration) time.Duration
}
// ReloadHandler defines the expected behaviour from a
// service that have support for configuration reloads.
type ReloadHandler interface {
// Reload handles reloading of configuration changes.
Reload(section Section) error
// Validate validates the configuration, if the validation
// fails the configuration will not be updated neither reloaded.
Validate(section Section) error
}
type SettingsBag map[string]map[string]string
type SettingsRemovals map[string][]string
func ProvideProvider(cfg *Cfg) *OSSImpl {
return &OSSImpl{
Cfg: cfg,
}
}
type OSSImpl struct {
Cfg *Cfg
}
func (o OSSImpl) Current() SettingsBag {
settingsCopy := make(SettingsBag)
for _, section := range o.Cfg.Raw.Sections() {
settingsCopy[section.Name()] = make(map[string]string)
for _, key := range section.Keys() {
settingsCopy[section.Name()][key.Name()] = RedactedValue(EnvKey(section.Name(), key.Name()), key.Value())
}
}
return settingsCopy
}
func (OSSImpl) Update(SettingsBag, SettingsRemovals) error {
return errors.New("oss settings provider do not have support for settings updates")
}
func (o *OSSImpl) KeyValue(section, key string) KeyValue {
return o.Section(section).KeyValue(key)
}
func (o *OSSImpl) Section(section string) Section {
return §ionImpl{section: o.Cfg.Raw.Section(section)}
}
func (OSSImpl) RegisterReloadHandler(string, ReloadHandler) {}
func (o OSSImpl) IsFeatureToggleEnabled(name string) bool {
return o.Cfg.IsFeatureToggleEnabled(name)
}
type keyValImpl struct {
key *ini.Key
}
func (k *keyValImpl) Key() string {
return k.key.Name()
}
func (k *keyValImpl) Value() string {
return k.key.Value()
}
func (k *keyValImpl) MustString(defaultVal string) string {
return k.key.MustString(defaultVal)
}
func (k *keyValImpl) MustBool(defaultVal bool) bool {
return k.key.MustBool(defaultVal)
}
func (k *keyValImpl) MustDuration(defaultVal time.Duration) time.Duration {
return k.key.MustDuration(defaultVal)
}
type sectionImpl struct {
section *ini.Section
}
func (s *sectionImpl) KeyValue(key string) KeyValue {
return &keyValImpl{s.section.Key(key)}
}
| pkg/setting/provider.go | 0 | https://github.com/grafana/grafana/commit/a7d4bbf0248d0c6e5c6fd9f9ca55ffd399781c12 | [
0.00033077114494517446,
0.0001816432923078537,
0.0001608131715329364,
0.00017040678358171135,
0.00003798185571213253
] |
{
"id": 8,
"code_window": [
"\t\t\t\t\t\tTo: fromStart.Add(34 * time.Minute),\n",
"\t\t\t\t\t},\n",
"\t\t\t\t},\n",
"\t\t\t}\n",
"\n",
"\t\t\tazureMonitorQuery := &types.AzureMonitorQuery{\n",
"\t\t\t\tURL: tt.expectedURL,\n",
"\t\t\t\tTarget: tt.azureMonitorQueryTarget,\n",
"\t\t\t\tRefID: \"A\",\n"
],
"labels": [
"keep",
"keep",
"keep",
"keep",
"add",
"keep",
"keep",
"keep",
"keep"
],
"after_edit": [
"\t\t\tqueries, err := datasource.buildQueries(log.New(\"test\"), tsdbQuery, dsInfo)\n",
"\t\t\trequire.NoError(t, err)\n",
"\n"
],
"file_path": "pkg/tsdb/azuremonitor/metrics/azuremonitor-datasource_test.go",
"type": "add",
"edit_start_line_idx": 268
} | <svg xmlns="http://www.w3.org/2000/svg" enable-background="new 0 0 24 24" viewBox="0 0 24 24"><path d="M3,7h18c0.6,0,1-0.4,1-1s-0.4-1-1-1H3C2.4,5,2,5.4,2,6S2.4,7,3,7z M3,11h10c0.6,0,1-0.4,1-1s-0.4-1-1-1H3c-0.6,0-1,0.4-1,1S2.4,11,3,11z M21,17H3c-0.6,0-1,0.4-1,1s0.4,1,1,1h18c0.6,0,1-0.4,1-1S21.6,17,21,17z M3,15h10c0.6,0,1-0.4,1-1s-0.4-1-1-1H3c-0.6,0-1,0.4-1,1S2.4,15,3,15z M21.8,9.7c-0.4-0.4-1-0.5-1.4-0.1l-2,1.7c0,0-0.1,0.1-0.1,0.1c-0.4,0.4-0.3,1.1,0.1,1.4l2,1.7c0.2,0.1,0.4,0.2,0.6,0.2c0.3,0,0.6-0.1,0.8-0.4c0.4-0.4,0.3-1.1-0.1-1.4L20.6,12l1.1-0.9C22.1,10.7,22.1,10.1,21.8,9.7z"/></svg> | public/img/icons/solid/left-indent.svg | 0 | https://github.com/grafana/grafana/commit/a7d4bbf0248d0c6e5c6fd9f9ca55ffd399781c12 | [
0.00028760137502104044,
0.00028760137502104044,
0.00028760137502104044,
0.00028760137502104044,
0
] |
{
"id": 9,
"code_window": [
"\t\t\t\t\tFrom: fromStart,\n",
"\t\t\t\t\tTo: fromStart.Add(34 * time.Minute),\n",
"\t\t\t\t},\n",
"\t\t\t\tFilter: tt.expectedFilter,\n",
"\t\t\t}\n",
"\t\t\tif azureMonitorQuery.URL == \"\" {\n",
"\t\t\t\tazureMonitorQuery.URL = \"/subscriptions/12345678-aaaa-bbbb-cccc-123456789abc/resourceGroups/grafanastaging/providers/Microsoft.Compute/virtualMachines/grafana/providers/microsoft.insights/metrics\"\n",
"\t\t\t}\n"
],
"labels": [
"keep",
"keep",
"keep",
"replace",
"keep",
"keep",
"keep",
"keep"
],
"after_edit": [
"\t\t\t}\n",
"\t\t\tif tt.azureMonitorVariedProperties[\"region\"] != nil {\n",
"\t\t\t\t// If the region is included, the filter will be added in the Body of the request\n",
"\t\t\t\tazureMonitorQuery.BodyFilter = tt.expectedFilter\n",
"\t\t\t} else {\n",
"\t\t\t\t// In other case, the filter will be added in the URL\n",
"\t\t\t\tif tt.expectedFilter != \"\" {\n",
"\t\t\t\t\tassert.Equal(t, tt.expectedFilter, queries[0].Params.Get(\"$filter\"))\n",
"\t\t\t\t} else {\n",
"\t\t\t\t\tassert.Equal(t, false, queries[0].Params.Has(\"$filter\"))\n",
"\t\t\t\t}\n"
],
"file_path": "pkg/tsdb/azuremonitor/metrics/azuremonitor-datasource_test.go",
"type": "replace",
"edit_start_line_idx": 277
} | package metrics
import (
"context"
"encoding/json"
"fmt"
"io"
"net/http"
"net/url"
"path"
"regexp"
"sort"
"strings"
"time"
"github.com/grafana/grafana-plugin-sdk-go/backend"
"github.com/grafana/grafana-plugin-sdk-go/data"
"go.opentelemetry.io/otel/attribute"
"github.com/grafana/grafana/pkg/infra/log"
"github.com/grafana/grafana/pkg/infra/tracing"
"github.com/grafana/grafana/pkg/setting"
"github.com/grafana/grafana/pkg/tsdb/azuremonitor/resourcegraph"
azTime "github.com/grafana/grafana/pkg/tsdb/azuremonitor/time"
"github.com/grafana/grafana/pkg/tsdb/azuremonitor/types"
)
// AzureMonitorDatasource calls the Azure Monitor API - one of the four API's supported
type AzureMonitorDatasource struct {
Proxy types.ServiceProxy
}
var (
// Used to convert the aggregation value to the Azure enum for deep linking
aggregationTypeMap = map[string]int{"None": 0, "Total": 1, "Minimum": 2, "Maximum": 3, "Average": 4, "Count": 7}
resourceNameLandmark = regexp.MustCompile(`(?i)(/(?P<resourceName>[\w-\.]+)/providers/Microsoft\.Insights/metrics)`)
)
const AzureMonitorAPIVersion = "2021-05-01"
func (e *AzureMonitorDatasource) ResourceRequest(rw http.ResponseWriter, req *http.Request, cli *http.Client) {
e.Proxy.Do(rw, req, cli)
}
// executeTimeSeriesQuery does the following:
// 1. build the AzureMonitor url and querystring for each query
// 2. executes each query by calling the Azure Monitor API
// 3. parses the responses for each query into data frames
func (e *AzureMonitorDatasource) ExecuteTimeSeriesQuery(ctx context.Context, logger log.Logger, originalQueries []backend.DataQuery, dsInfo types.DatasourceInfo, client *http.Client, url string, tracer tracing.Tracer) (*backend.QueryDataResponse, error) {
result := backend.NewQueryDataResponse()
ctxLogger := logger.FromContext(ctx)
queries, err := e.buildQueries(ctxLogger, originalQueries, dsInfo)
if err != nil {
return nil, err
}
for _, query := range queries {
result.Responses[query.RefID] = e.executeQuery(ctx, ctxLogger, query, dsInfo, client, url, tracer)
}
return result, nil
}
func (e *AzureMonitorDatasource) buildQueries(logger log.Logger, queries []backend.DataQuery, dsInfo types.DatasourceInfo) ([]*types.AzureMonitorQuery, error) {
azureMonitorQueries := []*types.AzureMonitorQuery{}
for _, query := range queries {
var target string
queryJSONModel := types.AzureMonitorJSONQuery{}
err := json.Unmarshal(query.JSON, &queryJSONModel)
if err != nil {
return nil, fmt.Errorf("failed to decode the Azure Monitor query object from JSON: %w", err)
}
azJSONModel := queryJSONModel.AzureMonitor
// Legacy: If only MetricDefinition is set, use it as namespace
if azJSONModel.MetricDefinition != "" && azJSONModel.MetricNamespace == "" {
azJSONModel.MetricNamespace = azJSONModel.MetricDefinition
}
azJSONModel.DimensionFilters = MigrateDimensionFilters(azJSONModel.DimensionFilters)
alias := azJSONModel.Alias
timeGrain := azJSONModel.TimeGrain
timeGrains := azJSONModel.AllowedTimeGrainsMs
if timeGrain == "auto" {
timeGrain, err = azTime.SetAutoTimeGrain(query.Interval.Milliseconds(), timeGrains)
if err != nil {
return nil, err
}
}
params := url.Values{}
params.Add("api-version", AzureMonitorAPIVersion)
params.Add("timespan", fmt.Sprintf("%v/%v", query.TimeRange.From.UTC().Format(time.RFC3339), query.TimeRange.To.UTC().Format(time.RFC3339)))
params.Add("interval", timeGrain)
params.Add("aggregation", azJSONModel.Aggregation)
params.Add("metricnames", azJSONModel.MetricName)
if azJSONModel.CustomNamespace != "" {
params.Add("metricnamespace", azJSONModel.CustomNamespace)
} else {
params.Add("metricnamespace", azJSONModel.MetricNamespace)
}
azureURL := BuildSubscriptionMetricsURL(queryJSONModel.Subscription)
if azJSONModel.Region != "" {
params.Add("region", azJSONModel.Region)
} else {
// Deprecated, if no region is specified, only one resource group and name is supported
ub := urlBuilder{
ResourceURI: azJSONModel.ResourceURI,
// Alternative, used to reconstruct resource URI if it's not present
DefaultSubscription: dsInfo.Settings.SubscriptionId,
Subscription: queryJSONModel.Subscription,
ResourceGroup: azJSONModel.ResourceGroup,
MetricNamespace: azJSONModel.MetricNamespace,
ResourceName: azJSONModel.ResourceName,
}
azureURL = ub.BuildMetricsURL()
}
// old model
dimension := strings.TrimSpace(azJSONModel.Dimension)
dimensionFilter := strings.TrimSpace(azJSONModel.DimensionFilter)
dimSB := strings.Builder{}
if dimension != "" && dimensionFilter != "" && dimension != "None" && len(azJSONModel.DimensionFilters) == 0 {
dimSB.WriteString(fmt.Sprintf("%s eq '%s'", dimension, dimensionFilter))
} else {
for i, filter := range azJSONModel.DimensionFilters {
if len(filter.Filters) == 0 {
dimSB.WriteString(fmt.Sprintf("%s eq '*'", filter.Dimension))
} else {
dimSB.WriteString(filter.ConstructFiltersString())
}
if i != len(azJSONModel.DimensionFilters)-1 {
dimSB.WriteString(" and ")
}
}
}
resourceIDs := []string{}
for _, r := range azJSONModel.Resources {
ub := urlBuilder{
DefaultSubscription: dsInfo.Settings.SubscriptionId,
Subscription: queryJSONModel.Subscription,
ResourceGroup: r.ResourceGroup,
MetricNamespace: azJSONModel.MetricNamespace,
ResourceName: r.ResourceName,
}
resourceIDs = append(resourceIDs, fmt.Sprintf("Microsoft.ResourceId eq '%s'", ub.buildResourceURI()))
}
filterString := strings.Join(resourceIDs, " or ")
if dimSB.String() != "" {
if filterString != "" {
filterString = fmt.Sprintf("(%s) and (%s)", filterString, dimSB.String())
} else {
filterString = dimSB.String()
}
if azJSONModel.Top != "" {
params.Add("top", azJSONModel.Top)
}
}
target = params.Encode()
if setting.Env == setting.Dev {
logger.Debug("Azuremonitor request", "params", params)
}
azureMonitorQueries = append(azureMonitorQueries, &types.AzureMonitorQuery{
URL: azureURL,
Target: target,
Params: params,
RefID: query.RefID,
Alias: alias,
TimeRange: query.TimeRange,
Filter: filterString,
})
}
return azureMonitorQueries, nil
}
func (e *AzureMonitorDatasource) executeQuery(ctx context.Context, logger log.Logger, query *types.AzureMonitorQuery, dsInfo types.DatasourceInfo, cli *http.Client,
url string, tracer tracing.Tracer) backend.DataResponse {
dataResponse := backend.DataResponse{}
req, err := e.createRequest(ctx, logger, url)
if err != nil {
dataResponse.Error = err
return dataResponse
}
req.URL.Path = path.Join(req.URL.Path, query.URL)
req.URL.RawQuery = query.Params.Encode()
if query.Filter != "" {
req.Method = http.MethodPost
req.Body = io.NopCloser(strings.NewReader(fmt.Sprintf(`{"filter": "%s"}`, query.Filter)))
}
ctx, span := tracer.Start(ctx, "azuremonitor query")
span.SetAttributes("target", query.Target, attribute.Key("target").String(query.Target))
span.SetAttributes("from", query.TimeRange.From.UnixNano()/int64(time.Millisecond), attribute.Key("from").Int64(query.TimeRange.From.UnixNano()/int64(time.Millisecond)))
span.SetAttributes("until", query.TimeRange.To.UnixNano()/int64(time.Millisecond), attribute.Key("until").Int64(query.TimeRange.To.UnixNano()/int64(time.Millisecond)))
span.SetAttributes("datasource_id", dsInfo.DatasourceID, attribute.Key("datasource_id").Int64(dsInfo.DatasourceID))
span.SetAttributes("org_id", dsInfo.OrgID, attribute.Key("org_id").Int64(dsInfo.OrgID))
defer span.End()
tracer.Inject(ctx, req.Header, span)
logger.Debug("AzureMonitor", "Request ApiURL", req.URL.String())
logger.Debug("AzureMonitor", "Target", query.Target)
res, err := cli.Do(req)
if err != nil {
dataResponse.Error = err
return dataResponse
}
defer func() {
if err := res.Body.Close(); err != nil {
logger.Warn("Failed to close response body", "err", err)
}
}()
data, err := e.unmarshalResponse(logger, res)
if err != nil {
dataResponse.Error = err
return dataResponse
}
azurePortalUrl, err := resourcegraph.GetAzurePortalUrl(dsInfo.Cloud)
if err != nil {
dataResponse.Error = err
return dataResponse
}
dataResponse.Frames, err = e.parseResponse(data, query, azurePortalUrl)
if err != nil {
dataResponse.Error = err
return dataResponse
}
return dataResponse
}
func (e *AzureMonitorDatasource) createRequest(ctx context.Context, logger log.Logger, url string) (*http.Request, error) {
req, err := http.NewRequestWithContext(ctx, http.MethodGet, url, nil)
if err != nil {
logger.Debug("Failed to create request", "error", err)
return nil, fmt.Errorf("%v: %w", "Failed to create request", err)
}
req.Header.Set("Content-Type", "application/json")
return req, nil
}
func (e *AzureMonitorDatasource) unmarshalResponse(logger log.Logger, res *http.Response) (types.AzureMonitorResponse, error) {
body, err := io.ReadAll(res.Body)
if err != nil {
return types.AzureMonitorResponse{}, err
}
if res.StatusCode/100 != 2 {
logger.Debug("Request failed", "status", res.Status, "body", string(body))
return types.AzureMonitorResponse{}, fmt.Errorf("request failed, status: %s", res.Status)
}
var data types.AzureMonitorResponse
err = json.Unmarshal(body, &data)
if err != nil {
logger.Debug("Failed to unmarshal AzureMonitor response", "error", err, "status", res.Status, "body", string(body))
return types.AzureMonitorResponse{}, err
}
return data, nil
}
func (e *AzureMonitorDatasource) parseResponse(amr types.AzureMonitorResponse, query *types.AzureMonitorQuery, azurePortalUrl string) (data.Frames, error) {
if len(amr.Value) == 0 {
return nil, nil
}
frames := data.Frames{}
for _, series := range amr.Value[0].Timeseries {
labels := data.Labels{}
for _, md := range series.Metadatavalues {
labels[md.Name.LocalizedValue] = md.Value
}
frame := data.NewFrameOfFieldTypes("", len(series.Data), data.FieldTypeTime, data.FieldTypeNullableFloat64)
frame.RefID = query.RefID
timeField := frame.Fields[0]
timeField.Name = data.TimeSeriesTimeFieldName
dataField := frame.Fields[1]
dataField.Name = amr.Value[0].Name.LocalizedValue
dataField.Labels = labels
if amr.Value[0].Unit != "Unspecified" {
dataField.SetConfig(&data.FieldConfig{
Unit: toGrafanaUnit(amr.Value[0].Unit),
})
}
resourceID := labels["microsoft.resourceid"]
resourceIDSlice := strings.Split(resourceID, "/")
resourceName := ""
if len(resourceIDSlice) > 1 {
resourceName = resourceIDSlice[len(resourceIDSlice)-1]
} else {
// Deprecated: This is for backward compatibility, the URL should contain
// the resource ID
resourceName = extractResourceNameFromMetricsURL(query.URL)
resourceID = extractResourceIDFromMetricsURL(query.URL)
}
if query.Alias != "" {
displayName := formatAzureMonitorLegendKey(query.Alias, resourceName,
amr.Value[0].Name.LocalizedValue, "", "", amr.Namespace, amr.Value[0].ID, labels)
if dataField.Config != nil {
dataField.Config.DisplayName = displayName
} else {
dataField.SetConfig(&data.FieldConfig{
DisplayName: displayName,
})
}
}
requestedAgg := query.Params.Get("aggregation")
for i, point := range series.Data {
var value *float64
switch requestedAgg {
case "Average":
value = point.Average
case "Total":
value = point.Total
case "Maximum":
value = point.Maximum
case "Minimum":
value = point.Minimum
case "Count":
value = point.Count
default:
value = point.Count
}
frame.SetRow(i, point.TimeStamp, value)
}
queryUrl, err := getQueryUrl(query, azurePortalUrl, resourceID, resourceName)
if err != nil {
return nil, err
}
frameWithLink := resourcegraph.AddConfigLinks(*frame, queryUrl)
frames = append(frames, &frameWithLink)
}
return frames, nil
}
// Gets the deep link for the given query
func getQueryUrl(query *types.AzureMonitorQuery, azurePortalUrl, resourceID, resourceName string) (string, error) {
aggregationType := aggregationTypeMap["Average"]
aggregation := query.Params.Get("aggregation")
if aggregation != "" {
if aggType, ok := aggregationTypeMap[aggregation]; ok {
aggregationType = aggType
}
}
timespan, err := json.Marshal(map[string]interface{}{
"absolute": struct {
Start string `json:"startTime"`
End string `json:"endTime"`
}{
Start: query.TimeRange.From.UTC().Format(time.RFC3339Nano),
End: query.TimeRange.To.UTC().Format(time.RFC3339Nano),
},
})
if err != nil {
return "", err
}
escapedTime := url.QueryEscape(string(timespan))
chartDef, err := json.Marshal(map[string]interface{}{
"v2charts": []interface{}{
map[string]interface{}{
"metrics": []types.MetricChartDefinition{
{
ResourceMetadata: map[string]string{
"id": resourceID,
},
Name: query.Params.Get("metricnames"),
AggregationType: aggregationType,
Namespace: query.Params.Get("metricnamespace"),
MetricVisualization: types.MetricVisualization{
DisplayName: query.Params.Get("metricnames"),
ResourceDisplayName: resourceName,
},
},
},
},
},
})
if err != nil {
return "", err
}
escapedChart := url.QueryEscape(string(chartDef))
// Azure Portal will timeout if the chart definition includes a space character encoded as '+'.
// url.QueryEscape encodes spaces as '+'.
// Note: this will not encode '+' literals as those are already encoded as '%2B' by url.QueryEscape
escapedChart = strings.ReplaceAll(escapedChart, "+", "%20")
return fmt.Sprintf("%s/#blade/Microsoft_Azure_MonitoringMetrics/Metrics.ReactView/Referer/MetricsExplorer/TimeContext/%s/ChartDefinition/%s", azurePortalUrl, escapedTime, escapedChart), nil
}
// formatAzureMonitorLegendKey builds the legend key or timeseries name
// Alias patterns like {{resourcename}} are replaced with the appropriate data values.
func formatAzureMonitorLegendKey(alias string, resourceName string, metricName string, metadataName string,
metadataValue string, namespace string, seriesID string, labels data.Labels) string {
startIndex := strings.Index(seriesID, "/resourceGroups/") + 16
endIndex := strings.Index(seriesID, "/providers")
resourceGroup := seriesID[startIndex:endIndex]
// Could be a collision problem if there were two keys that varied only in case, but I don't think that would happen in azure.
lowerLabels := data.Labels{}
for k, v := range labels {
lowerLabels[strings.ToLower(k)] = v
}
keys := make([]string, 0, len(labels))
for k := range lowerLabels {
keys = append(keys, k)
}
sort.Strings(keys)
result := types.LegendKeyFormat.ReplaceAllFunc([]byte(alias), func(in []byte) []byte {
metaPartName := strings.Replace(string(in), "{{", "", 1)
metaPartName = strings.Replace(metaPartName, "}}", "", 1)
metaPartName = strings.ToLower(strings.TrimSpace(metaPartName))
if metaPartName == "resourcegroup" {
return []byte(resourceGroup)
}
if metaPartName == "namespace" {
return []byte(namespace)
}
if metaPartName == "resourcename" {
return []byte(resourceName)
}
if metaPartName == "metric" {
return []byte(metricName)
}
if metaPartName == "dimensionname" {
if len(keys) == 0 {
return []byte{}
}
return []byte(keys[0])
}
if metaPartName == "dimensionvalue" {
if len(keys) == 0 {
return []byte{}
}
return []byte(lowerLabels[keys[0]])
}
if v, ok := lowerLabels[metaPartName]; ok {
return []byte(v)
}
return in
})
return string(result)
}
// Map values from:
//
// https://docs.microsoft.com/en-us/rest/api/monitor/metrics/list#unit
//
// to
//
// https://github.com/grafana/grafana/blob/main/packages/grafana-data/src/valueFormats/categories.ts#L24
func toGrafanaUnit(unit string) string {
switch unit {
case "BitsPerSecond":
return "bps"
case "Bytes":
return "decbytes" // or ICE
case "BytesPerSecond":
return "Bps"
case "Count":
return "short" // this is used for integers
case "CountPerSecond":
return "cps"
case "Percent":
return "percent"
case "MilliSeconds":
return "ms"
case "Seconds":
return "s"
}
return unit // this will become a suffix in the display
// "ByteSeconds", "Cores", "MilliCores", and "NanoCores" all both:
// 1. Do not have a corresponding unit in Grafana's current list.
// 2. Do not have the unit listed in any of Azure Monitor's supported metrics anyways.
}
func extractResourceNameFromMetricsURL(url string) string {
matches := resourceNameLandmark.FindStringSubmatch(url)
resourceName := ""
if matches == nil {
return resourceName
}
for i, name := range resourceNameLandmark.SubexpNames() {
if name == "resourceName" {
resourceName = matches[i]
}
}
return resourceName
}
func extractResourceIDFromMetricsURL(url string) string {
return strings.Split(url, "/providers/microsoft.insights/metrics")[0]
}
| pkg/tsdb/azuremonitor/metrics/azuremonitor-datasource.go | 1 | https://github.com/grafana/grafana/commit/a7d4bbf0248d0c6e5c6fd9f9ca55ffd399781c12 | [
0.01566958986222744,
0.0010920206550508738,
0.00015942854224704206,
0.0001744799956213683,
0.0024658245965838432
] |
{
"id": 9,
"code_window": [
"\t\t\t\t\tFrom: fromStart,\n",
"\t\t\t\t\tTo: fromStart.Add(34 * time.Minute),\n",
"\t\t\t\t},\n",
"\t\t\t\tFilter: tt.expectedFilter,\n",
"\t\t\t}\n",
"\t\t\tif azureMonitorQuery.URL == \"\" {\n",
"\t\t\t\tazureMonitorQuery.URL = \"/subscriptions/12345678-aaaa-bbbb-cccc-123456789abc/resourceGroups/grafanastaging/providers/Microsoft.Compute/virtualMachines/grafana/providers/microsoft.insights/metrics\"\n",
"\t\t\t}\n"
],
"labels": [
"keep",
"keep",
"keep",
"replace",
"keep",
"keep",
"keep",
"keep"
],
"after_edit": [
"\t\t\t}\n",
"\t\t\tif tt.azureMonitorVariedProperties[\"region\"] != nil {\n",
"\t\t\t\t// If the region is included, the filter will be added in the Body of the request\n",
"\t\t\t\tazureMonitorQuery.BodyFilter = tt.expectedFilter\n",
"\t\t\t} else {\n",
"\t\t\t\t// In other case, the filter will be added in the URL\n",
"\t\t\t\tif tt.expectedFilter != \"\" {\n",
"\t\t\t\t\tassert.Equal(t, tt.expectedFilter, queries[0].Params.Get(\"$filter\"))\n",
"\t\t\t\t} else {\n",
"\t\t\t\t\tassert.Equal(t, false, queries[0].Params.Has(\"$filter\"))\n",
"\t\t\t\t}\n"
],
"file_path": "pkg/tsdb/azuremonitor/metrics/azuremonitor-datasource_test.go",
"type": "replace",
"edit_start_line_idx": 277
} | import { screen, render, within } from '@testing-library/react';
import React from 'react';
import { DataFrame, toDataFrame, FieldType, InternalTimeZones } from '@grafana/data';
import { ExploreId } from 'app/types/explore';
import { TableContainer } from './TableContainer';
function getTable(): HTMLElement {
return screen.getAllByRole('table')[0];
}
function getRowsData(rows: HTMLElement[]): Object[] {
let content = [];
for (let i = 1; i < rows.length; i++) {
content.push({
time: within(rows[i]).getByText(/2021*/).textContent,
text: within(rows[i]).getByText(/test_string_*/).textContent,
});
}
return content;
}
const dataFrame = toDataFrame({
name: 'A',
fields: [
{
name: 'time',
type: FieldType.time,
values: [1609459200000, 1609470000000, 1609462800000, 1609466400000],
config: {
custom: {
filterable: false,
},
},
},
{
name: 'text',
type: FieldType.string,
values: ['test_string_1', 'test_string_2', 'test_string_3', 'test_string_4'],
config: {
custom: {
filterable: false,
},
},
},
],
});
const defaultProps = {
exploreId: ExploreId.left as ExploreId,
loading: false,
width: 800,
onCellFilterAdded: jest.fn(),
tableResult: [dataFrame],
splitOpenFn: (() => {}) as any,
range: {} as any,
timeZone: InternalTimeZones.utc,
};
describe('TableContainer', () => {
it('should render component', () => {
render(<TableContainer {...defaultProps} />);
expect(getTable()).toBeInTheDocument();
const rows = within(getTable()).getAllByRole('row');
expect(rows).toHaveLength(5);
expect(getRowsData(rows)).toEqual([
{ time: '2021-01-01 00:00:00', text: 'test_string_1' },
{ time: '2021-01-01 03:00:00', text: 'test_string_2' },
{ time: '2021-01-01 01:00:00', text: 'test_string_3' },
{ time: '2021-01-01 02:00:00', text: 'test_string_4' },
]);
});
it('should render 0 series returned on no items', () => {
const emptyFrames = [
{
name: 'TableResultName',
fields: [],
length: 0,
},
] as DataFrame[];
render(<TableContainer {...defaultProps} tableResult={emptyFrames} />);
expect(screen.getByText('0 series returned')).toBeInTheDocument();
});
it('should update time when timezone changes', () => {
const { rerender } = render(<TableContainer {...defaultProps} />);
const rowsBeforeChange = within(getTable()).getAllByRole('row');
expect(getRowsData(rowsBeforeChange)).toEqual([
{ time: '2021-01-01 00:00:00', text: 'test_string_1' },
{ time: '2021-01-01 03:00:00', text: 'test_string_2' },
{ time: '2021-01-01 01:00:00', text: 'test_string_3' },
{ time: '2021-01-01 02:00:00', text: 'test_string_4' },
]);
rerender(<TableContainer {...defaultProps} timeZone="cest" />);
const rowsAfterChange = within(getTable()).getAllByRole('row');
expect(getRowsData(rowsAfterChange)).toEqual([
{ time: '2020-12-31 19:00:00', text: 'test_string_1' },
{ time: '2020-12-31 22:00:00', text: 'test_string_2' },
{ time: '2020-12-31 20:00:00', text: 'test_string_3' },
{ time: '2020-12-31 21:00:00', text: 'test_string_4' },
]);
});
});
| public/app/features/explore/TableContainer.test.tsx | 0 | https://github.com/grafana/grafana/commit/a7d4bbf0248d0c6e5c6fd9f9ca55ffd399781c12 | [
0.00017591188952792436,
0.00017410051077604294,
0.0001724227040540427,
0.00017451171879656613,
0.00000105708795672399
] |
{
"id": 9,
"code_window": [
"\t\t\t\t\tFrom: fromStart,\n",
"\t\t\t\t\tTo: fromStart.Add(34 * time.Minute),\n",
"\t\t\t\t},\n",
"\t\t\t\tFilter: tt.expectedFilter,\n",
"\t\t\t}\n",
"\t\t\tif azureMonitorQuery.URL == \"\" {\n",
"\t\t\t\tazureMonitorQuery.URL = \"/subscriptions/12345678-aaaa-bbbb-cccc-123456789abc/resourceGroups/grafanastaging/providers/Microsoft.Compute/virtualMachines/grafana/providers/microsoft.insights/metrics\"\n",
"\t\t\t}\n"
],
"labels": [
"keep",
"keep",
"keep",
"replace",
"keep",
"keep",
"keep",
"keep"
],
"after_edit": [
"\t\t\t}\n",
"\t\t\tif tt.azureMonitorVariedProperties[\"region\"] != nil {\n",
"\t\t\t\t// If the region is included, the filter will be added in the Body of the request\n",
"\t\t\t\tazureMonitorQuery.BodyFilter = tt.expectedFilter\n",
"\t\t\t} else {\n",
"\t\t\t\t// In other case, the filter will be added in the URL\n",
"\t\t\t\tif tt.expectedFilter != \"\" {\n",
"\t\t\t\t\tassert.Equal(t, tt.expectedFilter, queries[0].Params.Get(\"$filter\"))\n",
"\t\t\t\t} else {\n",
"\t\t\t\t\tassert.Equal(t, false, queries[0].Params.Has(\"$filter\"))\n",
"\t\t\t\t}\n"
],
"file_path": "pkg/tsdb/azuremonitor/metrics/azuremonitor-datasource_test.go",
"type": "replace",
"edit_start_line_idx": 277
} | import { render, screen } from '@testing-library/react';
import React from 'react';
import { FieldColorModeId } from '@grafana/data';
import { Legend } from './Legend';
import { NodeDatum } from './types';
describe('Legend', () => {
it('renders ok without nodes', () => {
render(<Legend nodes={[]} onSort={(sort) => {}} sortable={false} />);
});
it('renders ok with color fields', () => {
const nodes = [
{
id: 'nodeId',
mainStat: { config: { displayName: 'stat1' } },
secondaryStat: { config: { displayName: 'stat2' } },
arcSections: [{ config: { displayName: 'error', color: { mode: FieldColorModeId.Fixed, fixedColor: 'red' } } }],
},
] as NodeDatum[];
render(<Legend nodes={nodes} onSort={(sort) => {}} sortable={false} />);
const items = screen.getAllByLabelText(/VizLegend series/);
expect(items.length).toBe(3);
const item = screen.getByLabelText(/VizLegend series error/);
expect((item.firstChild as HTMLDivElement).style.getPropertyValue('background')).toBe('rgb(242, 73, 92)');
});
});
| public/app/plugins/panel/nodeGraph/Legend.test.tsx | 0 | https://github.com/grafana/grafana/commit/a7d4bbf0248d0c6e5c6fd9f9ca55ffd399781c12 | [
0.00017822014342527837,
0.00017569586634635925,
0.00017253126134164631,
0.00017601603758521378,
0.0000022549909317604033
] |
{
"id": 9,
"code_window": [
"\t\t\t\t\tFrom: fromStart,\n",
"\t\t\t\t\tTo: fromStart.Add(34 * time.Minute),\n",
"\t\t\t\t},\n",
"\t\t\t\tFilter: tt.expectedFilter,\n",
"\t\t\t}\n",
"\t\t\tif azureMonitorQuery.URL == \"\" {\n",
"\t\t\t\tazureMonitorQuery.URL = \"/subscriptions/12345678-aaaa-bbbb-cccc-123456789abc/resourceGroups/grafanastaging/providers/Microsoft.Compute/virtualMachines/grafana/providers/microsoft.insights/metrics\"\n",
"\t\t\t}\n"
],
"labels": [
"keep",
"keep",
"keep",
"replace",
"keep",
"keep",
"keep",
"keep"
],
"after_edit": [
"\t\t\t}\n",
"\t\t\tif tt.azureMonitorVariedProperties[\"region\"] != nil {\n",
"\t\t\t\t// If the region is included, the filter will be added in the Body of the request\n",
"\t\t\t\tazureMonitorQuery.BodyFilter = tt.expectedFilter\n",
"\t\t\t} else {\n",
"\t\t\t\t// In other case, the filter will be added in the URL\n",
"\t\t\t\tif tt.expectedFilter != \"\" {\n",
"\t\t\t\t\tassert.Equal(t, tt.expectedFilter, queries[0].Params.Get(\"$filter\"))\n",
"\t\t\t\t} else {\n",
"\t\t\t\t\tassert.Equal(t, false, queries[0].Params.Has(\"$filter\"))\n",
"\t\t\t\t}\n"
],
"file_path": "pkg/tsdb/azuremonitor/metrics/azuremonitor-datasource_test.go",
"type": "replace",
"edit_start_line_idx": 277
} | # Contribute
This directory contains guides for contributors to the Grafana project.
- [Create a pull request](create-pull-request.md)
- [Contribute documentation](../contribute/documentation/README.md)
- [Developer guide](developer-guide.md)
- [Triage issues](triage-issues.md)
- [Merge a pull request](merge-pull-request.md)
The `style-guides` directory contains style guides for the Grafana software project and documentation.
- [Backend style guide](style-guides/backend.md) for how to style and format backend functionality and code.
- [Frontend style guide](style-guides/frontend.md) for how to style and format the user-facing functionality and code.
- [Redux framework](style-guides/redux.md) for designing the Grafana redux framework.
- [Themes style guide](style-guides/themes.md) for designing and updating Grafana themes.
| contribute/README.md | 0 | https://github.com/grafana/grafana/commit/a7d4bbf0248d0c6e5c6fd9f9ca55ffd399781c12 | [
0.0001690604112809524,
0.00016499623598065227,
0.00016093206068035215,
0.00016499623598065227,
0.000004064175300300121
] |
{
"id": 10,
"code_window": [
"\t\t\t\tazureMonitorQuery.URL = \"/subscriptions/12345678-aaaa-bbbb-cccc-123456789abc/resourceGroups/grafanastaging/providers/Microsoft.Compute/virtualMachines/grafana/providers/microsoft.insights/metrics\"\n",
"\t\t\t}\n",
"\n",
"\t\t\tqueries, err := datasource.buildQueries(log.New(\"test\"), tsdbQuery, dsInfo)\n",
"\t\t\trequire.NoError(t, err)\n",
"\t\t\tif diff := cmp.Diff(azureMonitorQuery, queries[0], cmpopts.IgnoreUnexported(simplejson.Json{}), cmpopts.IgnoreFields(types.AzureMonitorQuery{}, \"Params\")); diff != \"\" {\n",
"\t\t\t\tt.Errorf(\"Result mismatch (-want +got):\\n%s\", diff)\n",
"\t\t\t}\n",
"\n"
],
"labels": [
"keep",
"keep",
"keep",
"replace",
"replace",
"keep",
"keep",
"keep",
"keep"
],
"after_edit": [],
"file_path": "pkg/tsdb/azuremonitor/metrics/azuremonitor-datasource_test.go",
"type": "replace",
"edit_start_line_idx": 283
} | package metrics
import (
"context"
"encoding/json"
"fmt"
"io"
"net/http"
"net/url"
"path"
"regexp"
"sort"
"strings"
"time"
"github.com/grafana/grafana-plugin-sdk-go/backend"
"github.com/grafana/grafana-plugin-sdk-go/data"
"go.opentelemetry.io/otel/attribute"
"github.com/grafana/grafana/pkg/infra/log"
"github.com/grafana/grafana/pkg/infra/tracing"
"github.com/grafana/grafana/pkg/setting"
"github.com/grafana/grafana/pkg/tsdb/azuremonitor/resourcegraph"
azTime "github.com/grafana/grafana/pkg/tsdb/azuremonitor/time"
"github.com/grafana/grafana/pkg/tsdb/azuremonitor/types"
)
// AzureMonitorDatasource calls the Azure Monitor API - one of the four API's supported
type AzureMonitorDatasource struct {
Proxy types.ServiceProxy
}
var (
// Used to convert the aggregation value to the Azure enum for deep linking
aggregationTypeMap = map[string]int{"None": 0, "Total": 1, "Minimum": 2, "Maximum": 3, "Average": 4, "Count": 7}
resourceNameLandmark = regexp.MustCompile(`(?i)(/(?P<resourceName>[\w-\.]+)/providers/Microsoft\.Insights/metrics)`)
)
const AzureMonitorAPIVersion = "2021-05-01"
func (e *AzureMonitorDatasource) ResourceRequest(rw http.ResponseWriter, req *http.Request, cli *http.Client) {
e.Proxy.Do(rw, req, cli)
}
// executeTimeSeriesQuery does the following:
// 1. build the AzureMonitor url and querystring for each query
// 2. executes each query by calling the Azure Monitor API
// 3. parses the responses for each query into data frames
func (e *AzureMonitorDatasource) ExecuteTimeSeriesQuery(ctx context.Context, logger log.Logger, originalQueries []backend.DataQuery, dsInfo types.DatasourceInfo, client *http.Client, url string, tracer tracing.Tracer) (*backend.QueryDataResponse, error) {
result := backend.NewQueryDataResponse()
ctxLogger := logger.FromContext(ctx)
queries, err := e.buildQueries(ctxLogger, originalQueries, dsInfo)
if err != nil {
return nil, err
}
for _, query := range queries {
result.Responses[query.RefID] = e.executeQuery(ctx, ctxLogger, query, dsInfo, client, url, tracer)
}
return result, nil
}
func (e *AzureMonitorDatasource) buildQueries(logger log.Logger, queries []backend.DataQuery, dsInfo types.DatasourceInfo) ([]*types.AzureMonitorQuery, error) {
azureMonitorQueries := []*types.AzureMonitorQuery{}
for _, query := range queries {
var target string
queryJSONModel := types.AzureMonitorJSONQuery{}
err := json.Unmarshal(query.JSON, &queryJSONModel)
if err != nil {
return nil, fmt.Errorf("failed to decode the Azure Monitor query object from JSON: %w", err)
}
azJSONModel := queryJSONModel.AzureMonitor
// Legacy: If only MetricDefinition is set, use it as namespace
if azJSONModel.MetricDefinition != "" && azJSONModel.MetricNamespace == "" {
azJSONModel.MetricNamespace = azJSONModel.MetricDefinition
}
azJSONModel.DimensionFilters = MigrateDimensionFilters(azJSONModel.DimensionFilters)
alias := azJSONModel.Alias
timeGrain := azJSONModel.TimeGrain
timeGrains := azJSONModel.AllowedTimeGrainsMs
if timeGrain == "auto" {
timeGrain, err = azTime.SetAutoTimeGrain(query.Interval.Milliseconds(), timeGrains)
if err != nil {
return nil, err
}
}
params := url.Values{}
params.Add("api-version", AzureMonitorAPIVersion)
params.Add("timespan", fmt.Sprintf("%v/%v", query.TimeRange.From.UTC().Format(time.RFC3339), query.TimeRange.To.UTC().Format(time.RFC3339)))
params.Add("interval", timeGrain)
params.Add("aggregation", azJSONModel.Aggregation)
params.Add("metricnames", azJSONModel.MetricName)
if azJSONModel.CustomNamespace != "" {
params.Add("metricnamespace", azJSONModel.CustomNamespace)
} else {
params.Add("metricnamespace", azJSONModel.MetricNamespace)
}
azureURL := BuildSubscriptionMetricsURL(queryJSONModel.Subscription)
if azJSONModel.Region != "" {
params.Add("region", azJSONModel.Region)
} else {
// Deprecated, if no region is specified, only one resource group and name is supported
ub := urlBuilder{
ResourceURI: azJSONModel.ResourceURI,
// Alternative, used to reconstruct resource URI if it's not present
DefaultSubscription: dsInfo.Settings.SubscriptionId,
Subscription: queryJSONModel.Subscription,
ResourceGroup: azJSONModel.ResourceGroup,
MetricNamespace: azJSONModel.MetricNamespace,
ResourceName: azJSONModel.ResourceName,
}
azureURL = ub.BuildMetricsURL()
}
// old model
dimension := strings.TrimSpace(azJSONModel.Dimension)
dimensionFilter := strings.TrimSpace(azJSONModel.DimensionFilter)
dimSB := strings.Builder{}
if dimension != "" && dimensionFilter != "" && dimension != "None" && len(azJSONModel.DimensionFilters) == 0 {
dimSB.WriteString(fmt.Sprintf("%s eq '%s'", dimension, dimensionFilter))
} else {
for i, filter := range azJSONModel.DimensionFilters {
if len(filter.Filters) == 0 {
dimSB.WriteString(fmt.Sprintf("%s eq '*'", filter.Dimension))
} else {
dimSB.WriteString(filter.ConstructFiltersString())
}
if i != len(azJSONModel.DimensionFilters)-1 {
dimSB.WriteString(" and ")
}
}
}
resourceIDs := []string{}
for _, r := range azJSONModel.Resources {
ub := urlBuilder{
DefaultSubscription: dsInfo.Settings.SubscriptionId,
Subscription: queryJSONModel.Subscription,
ResourceGroup: r.ResourceGroup,
MetricNamespace: azJSONModel.MetricNamespace,
ResourceName: r.ResourceName,
}
resourceIDs = append(resourceIDs, fmt.Sprintf("Microsoft.ResourceId eq '%s'", ub.buildResourceURI()))
}
filterString := strings.Join(resourceIDs, " or ")
if dimSB.String() != "" {
if filterString != "" {
filterString = fmt.Sprintf("(%s) and (%s)", filterString, dimSB.String())
} else {
filterString = dimSB.String()
}
if azJSONModel.Top != "" {
params.Add("top", azJSONModel.Top)
}
}
target = params.Encode()
if setting.Env == setting.Dev {
logger.Debug("Azuremonitor request", "params", params)
}
azureMonitorQueries = append(azureMonitorQueries, &types.AzureMonitorQuery{
URL: azureURL,
Target: target,
Params: params,
RefID: query.RefID,
Alias: alias,
TimeRange: query.TimeRange,
Filter: filterString,
})
}
return azureMonitorQueries, nil
}
// executeQuery issues a single Azure Monitor metrics request and converts the
// response into data frames. The request is a GET by default but switches to
// POST (with the filter as a JSON body) when the query carries a
// dimension/resource filter. All failures are reported through the returned
// backend.DataResponse's Error field rather than a separate error value.
func (e *AzureMonitorDatasource) executeQuery(ctx context.Context, logger log.Logger, query *types.AzureMonitorQuery, dsInfo types.DatasourceInfo, cli *http.Client,
	url string, tracer tracing.Tracer) backend.DataResponse {
	dataResponse := backend.DataResponse{}

	req, err := e.createRequest(ctx, logger, url)
	if err != nil {
		dataResponse.Error = err
		return dataResponse
	}

	// The query supplies the resource-specific path suffix and the already
	// assembled URL parameters.
	req.URL.Path = path.Join(req.URL.Path, query.URL)
	req.URL.RawQuery = query.Params.Encode()
	if query.Filter != "" {
		// Filters are sent as a JSON body on a POST instead of in the query
		// string.
		req.Method = http.MethodPost
		req.Body = io.NopCloser(strings.NewReader(fmt.Sprintf(`{"filter": "%s"}`, query.Filter)))
	}

	// Trace the outgoing query; times are recorded in epoch milliseconds.
	ctx, span := tracer.Start(ctx, "azuremonitor query")
	span.SetAttributes("target", query.Target, attribute.Key("target").String(query.Target))
	span.SetAttributes("from", query.TimeRange.From.UnixNano()/int64(time.Millisecond), attribute.Key("from").Int64(query.TimeRange.From.UnixNano()/int64(time.Millisecond)))
	span.SetAttributes("until", query.TimeRange.To.UnixNano()/int64(time.Millisecond), attribute.Key("until").Int64(query.TimeRange.To.UnixNano()/int64(time.Millisecond)))
	span.SetAttributes("datasource_id", dsInfo.DatasourceID, attribute.Key("datasource_id").Int64(dsInfo.DatasourceID))
	span.SetAttributes("org_id", dsInfo.OrgID, attribute.Key("org_id").Int64(dsInfo.OrgID))
	defer span.End()
	// Propagate the trace context on the outgoing request headers.
	tracer.Inject(ctx, req.Header, span)

	logger.Debug("AzureMonitor", "Request ApiURL", req.URL.String())
	logger.Debug("AzureMonitor", "Target", query.Target)
	res, err := cli.Do(req)
	if err != nil {
		dataResponse.Error = err
		return dataResponse
	}
	defer func() {
		if err := res.Body.Close(); err != nil {
			logger.Warn("Failed to close response body", "err", err)
		}
	}()

	data, err := e.unmarshalResponse(logger, res)
	if err != nil {
		dataResponse.Error = err
		return dataResponse
	}

	// The portal URL is needed to build deep links back into Azure.
	azurePortalUrl, err := resourcegraph.GetAzurePortalUrl(dsInfo.Cloud)
	if err != nil {
		dataResponse.Error = err
		return dataResponse
	}

	dataResponse.Frames, err = e.parseResponse(data, query, azurePortalUrl)
	if err != nil {
		dataResponse.Error = err
		return dataResponse
	}

	return dataResponse
}
// createRequest builds a GET request for the given URL with the JSON content
// type preset. Callers adjust the path, query parameters and method
// afterwards as needed.
func (e *AzureMonitorDatasource) createRequest(ctx context.Context, logger log.Logger, url string) (*http.Request, error) {
	request, err := http.NewRequestWithContext(ctx, http.MethodGet, url, nil)
	if err != nil {
		// Log at debug level and wrap the error for the caller.
		logger.Debug("Failed to create request", "error", err)
		return nil, fmt.Errorf("%v: %w", "Failed to create request", err)
	}

	request.Header.Set("Content-Type", "application/json")
	return request, nil
}
// unmarshalResponse validates the HTTP status of an Azure Monitor metrics
// response and decodes its JSON body. The body is read up front so it can be
// included in debug logs when either check fails.
func (e *AzureMonitorDatasource) unmarshalResponse(logger log.Logger, res *http.Response) (types.AzureMonitorResponse, error) {
	responseBody, err := io.ReadAll(res.Body)
	if err != nil {
		return types.AzureMonitorResponse{}, err
	}

	// Anything outside the 2xx range is treated as a failed request.
	if res.StatusCode/100 != 2 {
		logger.Debug("Request failed", "status", res.Status, "body", string(responseBody))
		return types.AzureMonitorResponse{}, fmt.Errorf("request failed, status: %s", res.Status)
	}

	var result types.AzureMonitorResponse
	if err := json.Unmarshal(responseBody, &result); err != nil {
		logger.Debug("Failed to unmarshal AzureMonitor response", "error", err, "status", res.Status, "body", string(responseBody))
		return types.AzureMonitorResponse{}, err
	}

	return result, nil
}
// parseResponse converts an Azure Monitor metrics API response into data
// frames, one frame per returned timeseries. Returns nil frames and no error
// when the response contains no values.
func (e *AzureMonitorDatasource) parseResponse(amr types.AzureMonitorResponse, query *types.AzureMonitorQuery, azurePortalUrl string) (data.Frames, error) {
	if len(amr.Value) == 0 {
		return nil, nil
	}

	frames := data.Frames{}
	for _, series := range amr.Value[0].Timeseries {
		// Dimension metadata becomes the frame's labels.
		labels := data.Labels{}
		for _, md := range series.Metadatavalues {
			labels[md.Name.LocalizedValue] = md.Value
		}

		// Each frame holds a time field plus a single nullable float field.
		frame := data.NewFrameOfFieldTypes("", len(series.Data), data.FieldTypeTime, data.FieldTypeNullableFloat64)
		frame.RefID = query.RefID
		timeField := frame.Fields[0]
		timeField.Name = data.TimeSeriesTimeFieldName
		dataField := frame.Fields[1]
		dataField.Name = amr.Value[0].Name.LocalizedValue
		dataField.Labels = labels
		if amr.Value[0].Unit != "Unspecified" {
			dataField.SetConfig(&data.FieldConfig{
				Unit: toGrafanaUnit(amr.Value[0].Unit),
			})
		}

		// Prefer the resource ID reported in the labels; otherwise fall back
		// to parsing it out of the query URL.
		resourceID := labels["microsoft.resourceid"]
		resourceIDSlice := strings.Split(resourceID, "/")
		resourceName := ""
		if len(resourceIDSlice) > 1 {
			resourceName = resourceIDSlice[len(resourceIDSlice)-1]
		} else {
			// Deprecated: This is for backward compatibility, the URL should contain
			// the resource ID
			resourceName = extractResourceNameFromMetricsURL(query.URL)
			resourceID = extractResourceIDFromMetricsURL(query.URL)
		}

		if query.Alias != "" {
			displayName := formatAzureMonitorLegendKey(query.Alias, resourceName,
				amr.Value[0].Name.LocalizedValue, "", "", amr.Namespace, amr.Value[0].ID, labels)

			// Reuse the config created for the unit above when present.
			if dataField.Config != nil {
				dataField.Config.DisplayName = displayName
			} else {
				dataField.SetConfig(&data.FieldConfig{
					DisplayName: displayName,
				})
			}
		}

		// Pick the data-point value matching the requested aggregation;
		// Count is the fallback for unrecognized aggregations.
		requestedAgg := query.Params.Get("aggregation")

		for i, point := range series.Data {
			var value *float64
			switch requestedAgg {
			case "Average":
				value = point.Average
			case "Total":
				value = point.Total
			case "Maximum":
				value = point.Maximum
			case "Minimum":
				value = point.Minimum
			case "Count":
				value = point.Count
			default:
				value = point.Count
			}

			frame.SetRow(i, point.TimeStamp, value)
		}

		// Attach a deep link back to the Azure Portal metrics explorer.
		queryUrl, err := getQueryUrl(query, azurePortalUrl, resourceID, resourceName)
		if err != nil {
			return nil, err
		}
		frameWithLink := resourcegraph.AddConfigLinks(*frame, queryUrl)

		frames = append(frames, &frameWithLink)
	}

	return frames, nil
}
// Gets the deep link for the given query: builds an Azure Portal metrics
// explorer URL by JSON-serializing the absolute time range and a
// single-metric chart definition and embedding both as escaped URL segments.
func getQueryUrl(query *types.AzureMonitorQuery, azurePortalUrl, resourceID, resourceName string) (string, error) {
	// Default to Average when no (or an unknown) aggregation is requested.
	aggregationType := aggregationTypeMap["Average"]
	aggregation := query.Params.Get("aggregation")
	if aggregation != "" {
		if aggType, ok := aggregationTypeMap[aggregation]; ok {
			aggregationType = aggType
		}
	}

	// Absolute time range in the shape the portal expects.
	timespan, err := json.Marshal(map[string]interface{}{
		"absolute": struct {
			Start string `json:"startTime"`
			End   string `json:"endTime"`
		}{
			Start: query.TimeRange.From.UTC().Format(time.RFC3339Nano),
			End:   query.TimeRange.To.UTC().Format(time.RFC3339Nano),
		},
	})
	if err != nil {
		return "", err
	}
	escapedTime := url.QueryEscape(string(timespan))

	// Chart definition describing the single metric being charted.
	chartDef, err := json.Marshal(map[string]interface{}{
		"v2charts": []interface{}{
			map[string]interface{}{
				"metrics": []types.MetricChartDefinition{
					{
						ResourceMetadata: map[string]string{
							"id": resourceID,
						},
						Name:            query.Params.Get("metricnames"),
						AggregationType: aggregationType,
						Namespace:       query.Params.Get("metricnamespace"),
						MetricVisualization: types.MetricVisualization{
							DisplayName:         query.Params.Get("metricnames"),
							ResourceDisplayName: resourceName,
						},
					},
				},
			},
		},
	})
	if err != nil {
		return "", err
	}
	escapedChart := url.QueryEscape(string(chartDef))
	// Azure Portal will timeout if the chart definition includes a space character encoded as '+'.
	// url.QueryEscape encodes spaces as '+'.
	// Note: this will not encode '+' literals as those are already encoded as '%2B' by url.QueryEscape
	escapedChart = strings.ReplaceAll(escapedChart, "+", "%20")

	return fmt.Sprintf("%s/#blade/Microsoft_Azure_MonitoringMetrics/Metrics.ReactView/Referer/MetricsExplorer/TimeContext/%s/ChartDefinition/%s", azurePortalUrl, escapedTime, escapedChart), nil
}
// formatAzureMonitorLegendKey builds the legend key or timeseries name.
// Alias patterns like {{resourcename}} are replaced with the appropriate data values.
// metadataName and metadataValue are currently unused but kept for signature
// compatibility with existing callers.
func formatAzureMonitorLegendKey(alias string, resourceName string, metricName string, metadataName string,
	metadataValue string, namespace string, seriesID string, labels data.Labels) string {
	// Extract the resource group from a series ID of the form
	// /subscriptions/<sub>/resourceGroups/<group>/providers/...
	// Guard against malformed IDs: strings.Index returns -1 when a landmark
	// is missing, and the previous unconditional slice panicked in that case.
	const groupsLandmark = "/resourceGroups/"
	resourceGroup := ""
	startIndex := strings.Index(seriesID, groupsLandmark)
	endIndex := strings.Index(seriesID, "/providers")
	if startIndex >= 0 && endIndex >= startIndex+len(groupsLandmark) {
		resourceGroup = seriesID[startIndex+len(groupsLandmark) : endIndex]
	}

	// Could be a collision problem if there were two keys that varied only in case, but I don't think that would happen in azure.
	lowerLabels := data.Labels{}
	for k, v := range labels {
		lowerLabels[strings.ToLower(k)] = v
	}
	// Sorted keys give deterministic output for {{dimensionname}} /
	// {{dimensionvalue}} when several labels are present.
	keys := make([]string, 0, len(labels))
	for k := range lowerLabels {
		keys = append(keys, k)
	}
	sort.Strings(keys)

	result := types.LegendKeyFormat.ReplaceAllFunc([]byte(alias), func(in []byte) []byte {
		// Strip the {{ }} delimiters and normalize the placeholder name.
		metaPartName := strings.Replace(string(in), "{{", "", 1)
		metaPartName = strings.Replace(metaPartName, "}}", "", 1)
		metaPartName = strings.ToLower(strings.TrimSpace(metaPartName))

		if metaPartName == "resourcegroup" {
			return []byte(resourceGroup)
		}

		if metaPartName == "namespace" {
			return []byte(namespace)
		}

		if metaPartName == "resourcename" {
			return []byte(resourceName)
		}

		if metaPartName == "metric" {
			return []byte(metricName)
		}

		if metaPartName == "dimensionname" {
			if len(keys) == 0 {
				return []byte{}
			}
			return []byte(keys[0])
		}

		if metaPartName == "dimensionvalue" {
			if len(keys) == 0 {
				return []byte{}
			}
			return []byte(lowerLabels[keys[0]])
		}

		// Fall back to a direct (case-insensitive) label lookup; unknown
		// placeholders are left untouched.
		if v, ok := lowerLabels[metaPartName]; ok {
			return []byte(v)
		}
		return in
	})

	return string(result)
}
// toGrafanaUnit maps an Azure Monitor unit string to a Grafana unit ID.
//
// Map values from:
//
//	https://docs.microsoft.com/en-us/rest/api/monitor/metrics/list#unit
//
// to
//
//	https://github.com/grafana/grafana/blob/main/packages/grafana-data/src/valueFormats/categories.ts#L24
func toGrafanaUnit(unit string) string {
	unitMap := map[string]string{
		"BitsPerSecond":  "bps",
		"Bytes":          "decbytes", // or ICE
		"BytesPerSecond": "Bps",
		"Count":          "short", // this is used for integers
		"CountPerSecond": "cps",
		"Percent":        "percent",
		"MilliSeconds":   "ms",
		"Seconds":        "s",
	}
	if grafanaUnit, ok := unitMap[unit]; ok {
		return grafanaUnit
	}
	// Unknown units become a display suffix as-is.
	// "ByteSeconds", "Cores", "MilliCores", and "NanoCores" all:
	// 1. Do not have a corresponding unit in Grafana's current list.
	// 2. Do not have the unit listed in any of Azure Monitor's supported metrics anyways.
	return unit
}
// extractResourceNameFromMetricsURL pulls the "resourceName" named capture
// group out of a deprecated metrics URL using the package-level
// resourceNameLandmark regexp; it returns "" when the URL does not match.
func extractResourceNameFromMetricsURL(url string) string {
	matches := resourceNameLandmark.FindStringSubmatch(url)
	if matches == nil {
		return ""
	}

	// Walk the subexpression names to locate the named capture group.
	name := ""
	for i, groupName := range resourceNameLandmark.SubexpNames() {
		if groupName == "resourceName" {
			name = matches[i]
		}
	}
	return name
}
// extractResourceIDFromMetricsURL returns the Azure resource ID portion of a
// deprecated metrics URL, i.e. everything before the
// "/providers/microsoft.insights/metrics" suffix.
func extractResourceIDFromMetricsURL(url string) string {
	parts := strings.SplitN(url, "/providers/microsoft.insights/metrics", 2)
	return parts[0]
}
| pkg/tsdb/azuremonitor/metrics/azuremonitor-datasource.go | 1 | https://github.com/grafana/grafana/commit/a7d4bbf0248d0c6e5c6fd9f9ca55ffd399781c12 | [
0.009518655017018318,
0.001291602966375649,
0.00016011546540539712,
0.00017667235806584358,
0.0022587247658520937
] |
{
"id": 10,
"code_window": [
"\t\t\t\tazureMonitorQuery.URL = \"/subscriptions/12345678-aaaa-bbbb-cccc-123456789abc/resourceGroups/grafanastaging/providers/Microsoft.Compute/virtualMachines/grafana/providers/microsoft.insights/metrics\"\n",
"\t\t\t}\n",
"\n",
"\t\t\tqueries, err := datasource.buildQueries(log.New(\"test\"), tsdbQuery, dsInfo)\n",
"\t\t\trequire.NoError(t, err)\n",
"\t\t\tif diff := cmp.Diff(azureMonitorQuery, queries[0], cmpopts.IgnoreUnexported(simplejson.Json{}), cmpopts.IgnoreFields(types.AzureMonitorQuery{}, \"Params\")); diff != \"\" {\n",
"\t\t\t\tt.Errorf(\"Result mismatch (-want +got):\\n%s\", diff)\n",
"\t\t\t}\n",
"\n"
],
"labels": [
"keep",
"keep",
"keep",
"replace",
"replace",
"keep",
"keep",
"keep",
"keep"
],
"after_edit": [],
"file_path": "pkg/tsdb/azuremonitor/metrics/azuremonitor-datasource_test.go",
"type": "replace",
"edit_start_line_idx": 283
} | # Dashboard Datasource - Native Plugin
This is a **built-in** datasource that lets you reuse the query from other panels in the
same dashboard.
| public/app/plugins/datasource/dashboard/README.md | 0 | https://github.com/grafana/grafana/commit/a7d4bbf0248d0c6e5c6fd9f9ca55ffd399781c12 | [
0.00022984022507444024,
0.00022984022507444024,
0.00022984022507444024,
0.00022984022507444024,
0
] |
{
"id": 10,
"code_window": [
"\t\t\t\tazureMonitorQuery.URL = \"/subscriptions/12345678-aaaa-bbbb-cccc-123456789abc/resourceGroups/grafanastaging/providers/Microsoft.Compute/virtualMachines/grafana/providers/microsoft.insights/metrics\"\n",
"\t\t\t}\n",
"\n",
"\t\t\tqueries, err := datasource.buildQueries(log.New(\"test\"), tsdbQuery, dsInfo)\n",
"\t\t\trequire.NoError(t, err)\n",
"\t\t\tif diff := cmp.Diff(azureMonitorQuery, queries[0], cmpopts.IgnoreUnexported(simplejson.Json{}), cmpopts.IgnoreFields(types.AzureMonitorQuery{}, \"Params\")); diff != \"\" {\n",
"\t\t\t\tt.Errorf(\"Result mismatch (-want +got):\\n%s\", diff)\n",
"\t\t\t}\n",
"\n"
],
"labels": [
"keep",
"keep",
"keep",
"replace",
"replace",
"keep",
"keep",
"keep",
"keep"
],
"after_edit": [],
"file_path": "pkg/tsdb/azuremonitor/metrics/azuremonitor-datasource_test.go",
"type": "replace",
"edit_start_line_idx": 283
#! /usr/bin/env bash
# Wrapper for the grafana binary
# This file serves as a wrapper for the grafana binary. It ensures we set
# the system-wide Grafana configuration that was bundled with the package as we
# use the binary.
# Environment file that may override the defaults below.
DEFAULT=/etc/default/grafana
# Package installation layout (FHS-style paths).
GRAFANA_HOME="${GRAFANA_HOME:-/usr/share/grafana}"
CONF_DIR=/etc/grafana
DATA_DIR=/var/lib/grafana
PLUGINS_DIR=/var/lib/grafana/plugins
LOG_DIR=/var/log/grafana
CONF_FILE=$CONF_DIR/grafana.ini
PROVISIONING_CFG_DIR=$CONF_DIR/provisioning
EXECUTABLE="$GRAFANA_HOME/bin/grafana"
# Fail early when the real binary is missing or not executable.
if [ ! -x $EXECUTABLE ]; then
echo "$EXECUTABLE not installed or not executable"
exit 5
fi
# overwrite settings from default file
if [ -f "$DEFAULT" ]; then
. "$DEFAULT"
fi
# Options pointing grafana at the packaged config and writable paths.
# NOTE(review): the single quotes inside --configOverrides appear significant
# to how eval re-parses OPTS below — confirm before changing the quoting.
OPTS="--homepath=${GRAFANA_HOME} \
--config=${CONF_FILE} \
--configOverrides='cfg:default.paths.provisioning=$PROVISIONING_CFG_DIR \
cfg:default.paths.data=${DATA_DIR} \
cfg:default.paths.logs=${LOG_DIR} \
cfg:default.paths.plugins=${PLUGINS_DIR}'"
CMD=server
# eval is used so the quoted OPTS string expands back into separate arguments.
eval $EXECUTABLE "$CMD" "$OPTS" "$@"
0.0009303076658397913,
0.00032527491566725075,
0.00016068457625806332,
0.00017339376790914685,
0.0003026476770173758
] |
{
"id": 10,
"code_window": [
"\t\t\t\tazureMonitorQuery.URL = \"/subscriptions/12345678-aaaa-bbbb-cccc-123456789abc/resourceGroups/grafanastaging/providers/Microsoft.Compute/virtualMachines/grafana/providers/microsoft.insights/metrics\"\n",
"\t\t\t}\n",
"\n",
"\t\t\tqueries, err := datasource.buildQueries(log.New(\"test\"), tsdbQuery, dsInfo)\n",
"\t\t\trequire.NoError(t, err)\n",
"\t\t\tif diff := cmp.Diff(azureMonitorQuery, queries[0], cmpopts.IgnoreUnexported(simplejson.Json{}), cmpopts.IgnoreFields(types.AzureMonitorQuery{}, \"Params\")); diff != \"\" {\n",
"\t\t\t\tt.Errorf(\"Result mismatch (-want +got):\\n%s\", diff)\n",
"\t\t\t}\n",
"\n"
],
"labels": [
"keep",
"keep",
"keep",
"replace",
"replace",
"keep",
"keep",
"keep",
"keep"
],
"after_edit": [],
"file_path": "pkg/tsdb/azuremonitor/metrics/azuremonitor-datasource_test.go",
"type": "replace",
"edit_start_line_idx": 283
} | # 2.6.1 (unreleased, 2.6.x branch)
### New Features
- **Elasticsearch**: Support for derivative unit option, closes [#3512](https://github.com/grafana/grafana/issues/3512)
### Bug fixes
- **Graph Panel**: Fixed typehead when adding series style override, closes [#3554](https://github.com/grafana/grafana/issues/3554)
# 2.6.0 (2015-12-14)
### New Features
- **Elasticsearch**: Support for pipeline aggregations Moving average and derivative, closes [#2715](https://github.com/grafana/grafana/issues/2715)
- **Elasticsearch**: Support for inline script and missing options for metrics, closes [#3500](https://github.com/grafana/grafana/issues/3500)
- **Syslog**: Support for syslog logging, closes [#3161](https://github.com/grafana/grafana/pull/3161)
- **Timepicker**: Always show refresh button even with refresh rate, closes [#3498](https://github.com/grafana/grafana/pull/3498)
- **Login**: Make it possible to change the login hint on the login page, closes [#2571](https://github.com/grafana/grafana/pull/2571)
### Bug Fixes
- **metric editors**: Fix for clicking typeahead auto dropdown option, fixes [#3428](https://github.com/grafana/grafana/issues/3428)
- **influxdb**: Fixed issue showing Group By label only on first query, fixes [#3453](https://github.com/grafana/grafana/issues/3453)
- **logging**: Add more verbose info logging for http requests, closes [#3405](https://github.com/grafana/grafana/pull/3405)
# 2.6.0-Beta1 (2015-12-04)
### New Table Panel
- **table**: New powerful and flexible table panel, closes [#215](https://github.com/grafana/grafana/issues/215)
### Enhancements
- **CloudWatch**: Support for multiple AWS Credentials, closes [#3053](https://github.com/grafana/grafana/issues/3053), [#3080](https://github.com/grafana/grafana/issues/3080)
- **Elasticsearch**: Support for dynamic daily indices for annotations, closes [#3061](https://github.com/grafana/grafana/issues/3061)
- **Elasticsearch**: Support for setting min_doc_count for date histogram, closes [#3416](https://github.com/grafana/grafana/issues/3416)
- **Graph Panel**: Option to hide series with all zeroes from legend and tooltip, closes [#1381](https://github.com/grafana/grafana/issues/1381), [#3336](https://github.com/grafana/grafana/issues/3336)
### Bug Fixes
- **cloudwatch**: fix for handling of period for long time ranges, fixes [#3086](https://github.com/grafana/grafana/issues/3086)
- **dashboard**: fix for collapse row by clicking on row title, fixes [#3065](https://github.com/grafana/grafana/issues/3065)
- **influxdb**: fix for relative time ranges `last x months` and `last x years`, fixes [#3067](https://github.com/grafana/grafana/issues/3067)
- **graph**: layout fix for color picker when right side legend was enabled, fixes [#3093](https://github.com/grafana/grafana/issues/3093)
- **elasticsearch**: disabling elastic query (via eye) caused error, fixes [#3300](https://github.com/grafana/grafana/issues/3300)
### Breaking changes
- **elasticsearch**: Manual json edited queries are not supported any more (They very barely worked in 2.5)
# 2.5 (2015-10-28)
**New Feature: Mix data sources**
- A built in data source is now available named `-- Mixed --`, When picked in the metrics tab,
it allows you to add queries of different data source types & instances to the same graph/panel!
[Issue #436](https://github.com/grafana/grafana/issues/436)
**New Feature: Elasticsearch Metrics Query Editor and Viz Support**
- Feature rich query editor and processing features enables you to issues all kind of metric queries to Elasticsearch
- See [Issue #1034](https://github.com/grafana/grafana/issues/1034) for more info.
**New Feature: New and much improved time picker**
- Support for quick ranges like `Today`, `This day last week`, `This week`, `The day so far`, etc.
- Improved UI and improved support for UTC, [Issue #2761](https://github.com/grafana/grafana/issues/2761) for more info.
**User Onboarding**
- Org admin can now send email invites (or invite links) to people who are not yet Grafana users
- Sign up flow now supports email verification (if enabled)
- See [Issue #2353](https://github.com/grafana/grafana/issues/2353) for more info.
**Other new Features && Enhancements**
- [Pull #2720](https://github.com/grafana/grafana/pull/2720). Admin: Initial basic quota support (per Org)
- [Issue #2577](https://github.com/grafana/grafana/issues/2577). Panel: Resize handles in panel bottom right corners for easy width and height change
- [Issue #2457](https://github.com/grafana/grafana/issues/2457). Admin: admin page for all grafana organizations (list / edit view)
- [Issue #1186](https://github.com/grafana/grafana/issues/1186). Time Picker: New option `today`, will set time range from midnight to now
- [Issue #2647](https://github.com/grafana/grafana/issues/2647). InfluxDB: You can now set group by time interval on each query
- [Issue #2599](https://github.com/grafana/grafana/issues/2599). InfluxDB: Improved alias support, you can now use the `AS` clause for each select statement
- [Issue #2708](https://github.com/grafana/grafana/issues/2708). InfluxDB: You can now set math expression for select clauses.
- [Issue #1575](https://github.com/grafana/grafana/issues/1575). Drilldown link: now you can click on the external link icon in the panel header to access drilldown links!
- [Issue #1646](https://github.com/grafana/grafana/issues/1646). OpenTSDB: Fetch list of aggregators from OpenTSDB
- [Issue #2955](https://github.com/grafana/grafana/issues/2955). Graph: More axis units (Length, Volume, Temperature, Pressure, etc), thanks @greglook
- [Issue #2928](https://github.com/grafana/grafana/issues/2928). LDAP: Support for searching for groups memberships, i.e. POSIX (no memberOf) schemas, also multiple ldap servers, and root ca cert, thanks @abligh
**Fixes**
- [Issue #2413](https://github.com/grafana/grafana/issues/2413). InfluxDB 0.9: Fix for handling empty series object in response from influxdb
- [Issue #2574](https://github.com/grafana/grafana/issues/2574). Snapshot: Fix for snapshot with expire 7 days option, 7 days option not correct, was 7 hours
- [Issue #2568](https://github.com/grafana/grafana/issues/2568). AuthProxy: Fix for server side rendering of panel when using auth proxy
- [Issue #2490](https://github.com/grafana/grafana/issues/2490). Graphite: Dashboard import was broken in 2.1 and 2.1.1, working now
- [Issue #2565](https://github.com/grafana/grafana/issues/2565). TimePicker: Fix for when you applied custom time range it did not refresh dashboard
- [Issue #2563](https://github.com/grafana/grafana/issues/2563). Annotations: Fixed issue when html sanitizer fails for title to annotation body, now fallbacks to html escaping title and text
- [Issue #2564](https://github.com/grafana/grafana/issues/2564). Templating: Another attempt at fixing #2534 (Init multi value template var used in repeat panel from url)
- [Issue #2620](https://github.com/grafana/grafana/issues/2620). Graph: multi series tooltip did no highlight correct point when stacking was enabled and series were of different resolution
- [Issue #2636](https://github.com/grafana/grafana/issues/2636). InfluxDB: Do no show template vars in dropdown for tag keys and group by keys
- [Issue #2604](https://github.com/grafana/grafana/issues/2604). InfluxDB: More alias options, can now use `$[0-9]` syntax to reference part of a measurement name (separated by dots)
**Breaking Changes**
- Notice to makers/users of custom data sources, there is a minor breaking change in 2.2 that
require an update to custom data sources for them to work in 2.2. [Read this doc](https://github.com/grafana/grafana/tree/master/docs/sources/datasources/plugin_api.md) for more on the
data source api change.
- Data source api changes, [PLUGIN_CHANGES.md](https://github.com/grafana/grafana/blob/master/public/app/plugins/PLUGIN_CHANGES.md)
- The duplicate query function used in data source editors is changed, and moveMetricQuery function was renamed
**Tech (Note for devs)**
Started using Typescript (transpiled to ES5), uncompiled typescript files and less files are in public folder (in source tree)
This folder is never modified by build steps. Compiled css and javascript files are put in public_gen, all other files
that do not undergo transformation are just copied from public to public_gen, it is public_gen that is used by grafana-server
if it is found.
Grunt & Watch tasks:
- `grunt` : default task, will remove public_gen, copy over all files from public, do less & typescript compilation
- `grunt watch`: will watch for changes to less, and typescript files and compile them to public_gen, and for other files it will just copy them to public_gen
# 2.1.3 (2015-08-24)
**Fixes**
- [Issue #2580](https://github.com/grafana/grafana/issues/2580). Packaging: ldap.toml was not marked as config file and could be overwritten in upgrade
- [Issue #2564](https://github.com/grafana/grafana/issues/2564). Templating: Another attempt at fixing #2534 (Init multi value template var used in repeat panel from url)
# 2.1.2 (2015-08-20)
**Fixes**
- [Issue #2558](https://github.com/grafana/grafana/issues/2558). DragDrop: Fix for broken drag drop behavior
- [Issue #2534](https://github.com/grafana/grafana/issues/2534). Templating: fix for setting template variable value via url and having repeated panels or rows
# 2.1.1 (2015-08-11)
**Fixes**
- [Issue #2443](https://github.com/grafana/grafana/issues/2443). Templating: Fix for buggy repeat row behavior when combined with with repeat panel due to recent change before 2.1 release
- [Issue #2442](https://github.com/grafana/grafana/issues/2442). Templating: Fix text panel when using template variables in text in in repeated panel
- [Issue #2446](https://github.com/grafana/grafana/issues/2446). InfluxDB: Fix for using template vars inside alias field (InfluxDB 0.9)
- [Issue #2460](https://github.com/grafana/grafana/issues/2460). SinglestatPanel: Fix to handle series with no data points
- [Issue #2461](https://github.com/grafana/grafana/issues/2461). LDAP: Fix for ldap users with empty email address
- [Issue #2484](https://github.com/grafana/grafana/issues/2484). Graphite: Fix bug when using series ref (#A-Z) and referenced series is hidden in query editor.
- [Issue #1896](https://github.com/grafana/grafana/issues/1896). Postgres: Dashboard search is now case insensitive when using Postgres
**Enhancements**
- [Issue #2477](https://github.com/grafana/grafana/issues/2477). InfluxDB(0.9): Added more condition operators (`<`, `>`, `<>`, `!~`), thx @thuck
- [Issue #2483](https://github.com/grafana/grafana/issues/2484). InfluxDB(0.9): Use \$col as option in alias patterns, thx @thuck
# 2.1.0 (2015-08-04)
**Data sources**
- [Issue #1525](https://github.com/grafana/grafana/issues/1525). InfluxDB: Full support for InfluxDB 0.9 with new adapted query editor
- [Issue #2191](https://github.com/grafana/grafana/issues/2191). KariosDB: Grafana now ships with a KariosDB data source plugin, thx @masaori335
- [Issue #1177](https://github.com/grafana/grafana/issues/1177). OpenTSDB: Limit tags by metric, OpenTSDB config option tsd.core.meta.enable_realtime_ts must enabled for OpenTSDB lookup api
- [Issue #1250](https://github.com/grafana/grafana/issues/1250). OpenTSDB: Support for template variable values lookup queries
**New dashboard features**
- [Issue #1144](https://github.com/grafana/grafana/issues/1144). Templating: You can now select multiple template variables values at the same time.
- [Issue #1922](https://github.com/grafana/grafana/issues/1922). Templating: Specify multiple variable values via URL params.
- [Issue #1888](https://github.com/grafana/grafana/issues/1144). Templating: Repeat panel or row for each selected template variable value
- [Issue #1888](https://github.com/grafana/grafana/issues/1944). Dashboard: Custom Navigation links & dynamic links to related dashboards
- [Issue #590](https://github.com/grafana/grafana/issues/590). Graph: Define series color using regex rule
- [Issue #2162](https://github.com/grafana/grafana/issues/2162). Graph: New series style override, negative-y transform and stack groups
- [Issue #2096](https://github.com/grafana/grafana/issues/2096). Dashboard list panel: Now supports search by multiple tags
- [Issue #2203](https://github.com/grafana/grafana/issues/2203). Singlestat: Now support string values
**User or Organization admin**
- [Issue #1899](https://github.com/grafana/grafana/issues/1899). Organization: You can now update the organization user role directly (without removing and readding the organization user).
- [Issue #2088](https://github.com/grafana/grafana/issues/2088). Roles: New user role `Read Only Editor` that replaces the old `Viewer` role behavior
**Backend**
- [Issue #2218](https://github.com/grafana/grafana/issues/2218). Auth: You can now authenticate against api with username / password using basic auth
- [Issue #2095](https://github.com/grafana/grafana/issues/2095). Search: Search now supports filtering by multiple dashboard tags
- [Issue #1905](https://github.com/grafana/grafana/issues/1905). GitHub OAuth: You can now configure a GitHub team membership requirement, thx @dewski
- [Issue #2052](https://github.com/grafana/grafana/issues/2052). GitHub OAuth: You can now configure a GitHub organization requirement, thx @indrekj
- [Issue #1891](https://github.com/grafana/grafana/issues/1891). Security: New config option to disable the use of gravatar for profile images
- [Issue #1921](https://github.com/grafana/grafana/issues/1921). Auth: Support for user authentication via reverse proxy header (like X-Authenticated-User, or X-WEBAUTH-USER)
- [Issue #960](https://github.com/grafana/grafana/issues/960). Search: Backend can now index a folder with json files, will be available in search (saving back to folder is not supported, this feature is meant for static generated json dashboards)
**Breaking changes**
- [Issue #1826](https://github.com/grafana/grafana/issues/1826). User role 'Viewer' are now prohibited from entering edit mode (and doing other transient dashboard edits). A new role `Read Only Editor` will replace the old Viewer behavior
- [Issue #1928](https://github.com/grafana/grafana/issues/1928). HTTP API: GET /api/dashboards/db/:slug response changed property `model` to `dashboard` to match the POST request naming
- Backend render URL changed from `/render/dashboard/solo` `render/dashboard-solo/` (in order to have consistent dashboard url `/dashboard/:type/:slug`)
- Search HTTP API response has changed (simplified), tags list moved to separate HTTP resource URI
- Data source HTTP api breaking change, ADD data source is now POST /api/datasources/, update is now PUT /api/datasources/:id
**Fixes**
- [Issue #2185](https://github.com/grafana/grafana/issues/2185). Graph: fixed PNG rendering of panels with legend table to the right
- [Issue #2163](https://github.com/grafana/grafana/issues/2163). Backend: Load dashboards with capital letters in the dashboard url slug (url id)
# 2.0.3 (unreleased - 2.0.x branch)
**Fixes**
- [Issue #1872](https://github.com/grafana/grafana/issues/1872). Firefox/IE issue, invisible text in dashboard search fixed
- [Issue #1857](https://github.com/grafana/grafana/issues/1857). /api/login/ping Fix for issue when behind reverse proxy and subpath
- [Issue #1863](https://github.com/grafana/grafana/issues/1863). MySQL: Dashboard.data column type changed to mediumtext (sql migration added)
# 2.0.2 (2015-04-22)
**Fixes**
- [Issue #1832](https://github.com/grafana/grafana/issues/1832). Graph Panel + Legend Table mode: Many series caused zero height graph, now legend will never reduce the height of the graph below 50% of row height.
- [Issue #1846](https://github.com/grafana/grafana/issues/1846). Snapshots: Fixed issue with snapshotting dashboards with an interval template variable
- [Issue #1848](https://github.com/grafana/grafana/issues/1848). Panel timeshift: You can now use panel timeshift without a relative time override
# 2.0.1 (2015-04-20)
**Fixes**
- [Issue #1784](https://github.com/grafana/grafana/issues/1784). Data source proxy: Fixed issue with using data source proxy when grafana is behind nginx suburl
- [Issue #1749](https://github.com/grafana/grafana/issues/1749). Graph Panel: Table legends are now visible when rendered to PNG
- [Issue #1786](https://github.com/grafana/grafana/issues/1786). Graph Panel: Legend in table mode now aligns, graph area is reduced depending on how many series
- [Issue #1734](https://github.com/grafana/grafana/issues/1734). Support for unicode / international characters in dashboard title (improved slugify)
- [Issue #1782](https://github.com/grafana/grafana/issues/1782). GitHub OAuth: Now works with GitHub for Enterprise, thanks @williamjoy
- [Issue #1780](https://github.com/grafana/grafana/issues/1780). Dashboard snapshot: Should not require login to view snapshot, Fixes #1780
# 2.0.0-Beta3 (2015-04-12)
**RPM / DEB Package changes (to follow HFS)**
- binary name changed to grafana-server
- does not install to `/opt/grafana` any more, installs to `/usr/share/grafana`
- binary to `/usr/sbin/grafana-server`
- init.d script improvements, renamed to `/etc/init.d/grafana-server`
- added default file with environment variables,
- `/etc/default/grafana-server` (deb/ubuntu)
- `/etc/sysconfig/grafana-server` (centos/redhat)
- added systemd service file, tested on debian jessie and centos7
- config file in same location `/etc/grafana/grafana.ini` (now complete config file but with every setting commented out)
- data directory (where sqlite3) file is stored is now by default `/var/lib/grafana`
- no symlinking current to versions anymore
- For more info see [Issue #1758](https://github.com/grafana/grafana/issues/1758).
**Config breaking change (setting rename)**
- `[log] root_path` has changed to `[paths] logs`
# 2.0.0-Beta2 (...)
**Enhancements**
- [Issue #1701](https://github.com/grafana/grafana/issues/1701). Share modal: Override UI theme via URL param for Share link, rendered panel, or embedded panel
- [Issue #1660](https://github.com/grafana/grafana/issues/1660). OAuth: Specify allowed email address domains for google or github oauth logins
**Fixes**
- [Issue #1649](https://github.com/grafana/grafana/issues/1649). HTTP API: grafana /render calls now work with api keys
- [Issue #1667](https://github.com/grafana/grafana/issues/1667). Data source proxy & session timeout fix (caused 401 Unauthorized error after a while)
- [Issue #1707](https://github.com/grafana/grafana/issues/1707). Unsaved changes: Do not show for snapshots, scripted and file based dashboards
- [Issue #1703](https://github.com/grafana/grafana/issues/1703). Unsaved changes: Do not show for users with role `Viewer`
- [Issue #1675](https://github.com/grafana/grafana/issues/1675). Data source proxy: Fixed issue with Gzip enabled and data source proxy
- [Issue #1681](https://github.com/grafana/grafana/issues/1681). MySQL session: fixed problem using mysql as session store
- [Issue #1671](https://github.com/grafana/grafana/issues/1671). Data sources: Fixed issue with changing default data source (should not require full page load to take effect, now fixed)
- [Issue #1685](https://github.com/grafana/grafana/issues/1685). Search: Dashboard results should be sorted alphabetically
- [Issue #1673](https://github.com/grafana/grafana/issues/1673). Basic auth: Fixed issue when using basic auth proxy in front of Grafana
# 2.0.0-Beta1 (2015-03-30)
**Important Note**
Grafana 2.x is fundamentally different from 1.x; it now ships with an integrated backend server. Please read the [Documentation](http://docs.grafana.org) for more details about this SIGNIFICANT change to Grafana
**New features**
- [Issue #1623](https://github.com/grafana/grafana/issues/1623). Share Dashboard: Dashboard snapshot sharing (dash and data snapshot), save to local or save to public snapshot dashboard snapshots.raintank.io site
- [Issue #1622](https://github.com/grafana/grafana/issues/1622). Share Panel: The share modal now has an embed option, gives you an iframe that you can use to embed a single graph on another web site
- [Issue #718](https://github.com/grafana/grafana/issues/718). Dashboard: When saving a dashboard and another user has made changes in between the user is prompted with a warning if he really wants to overwrite the other's changes
- [Issue #1331](https://github.com/grafana/grafana/issues/1331). Graph & Singlestat: New axis/unit format selector and more units (kbytes, Joule, Watt, eV), and new design for graph axis & grid tab and single stat options tab views
- [Issue #1241](https://github.com/grafana/grafana/issues/1242). Timepicker: New option in timepicker (under dashboard settings), to change `now` to be for example `now-1m`, useful when you want to ignore last minute because it contains incomplete data
- [Issue #171](https://github.com/grafana/grafana/issues/171). Panel: Different time periods, panels can override dashboard relative time and/or add a time shift
- [Issue #1488](https://github.com/grafana/grafana/issues/1488). Dashboard: Clone dashboard / Save as
- [Issue #1458](https://github.com/grafana/grafana/issues/1458). User: persisted user option for dark or light theme (no longer an option on a dashboard)
- [Issue #452](https://github.com/grafana/grafana/issues/452). Graph: Adds logarithmic scale option for base 10, base 16 and base 1024
**Enhancements**
- [Issue #1366](https://github.com/grafana/grafana/issues/1366). Graph & Singlestat: Support for additional units, Fahrenheit (°F) and Celsius (°C), Humidity (%H), kW, watt-hour (Wh), kilowatt-hour (kWh), velocities (m/s, km/h, mpg, knot)
- [Issue #978](https://github.com/grafana/grafana/issues/978). Graph: Shared tooltip improvement, can now support metrics of different resolution/intervals
- [Issue #1297](https://github.com/grafana/grafana/issues/1297). Graphite: Added cumulative and minimumBelow graphite functions
- [Issue #1296](https://github.com/grafana/grafana/issues/1296). InfluxDB: Auto escape column names with special characters. Thanks @steven-aerts
- [Issue #1321](https://github.com/grafana/grafana/issues/1321). SingleStatPanel: You can now use template variables in pre & postfix
- [Issue #599](https://github.com/grafana/grafana/issues/599). Graph: Added right y axis label setting and graph support
- [Issue #1253](https://github.com/grafana/grafana/issues/1253). Graph & Singlestat: Users can now set decimal precision for legend and tooltips (override auto precision)
- [Issue #1255](https://github.com/grafana/grafana/issues/1255). Templating: Dashboard will now wait to load until all template variables that have refresh on load set or are initialized via url to be fully loaded and so all variables are in valid state before panels start issuing metric requests.
- [Issue #1344](https://github.com/grafana/grafana/issues/1344). OpenTSDB: Alias patterns (reference tag values), syntax is: \$tag_tagname or [[tag_tagname]]
**Fixes**
- [Issue #1298](https://github.com/grafana/grafana/issues/1298). InfluxDB: Fix handling of empty array in templating variable query
- [Issue #1309](https://github.com/grafana/grafana/issues/1309). Graph: Fixed issue when using zero as a grid threshold
- [Issue #1345](https://github.com/grafana/grafana/issues/1345). UI: Fixed position of confirm modal when scrolled down
- [Issue #1372](https://github.com/grafana/grafana/issues/1372). Graphite: Fix for nested complex queries, where a query references a query that references another query (ie the #[A-Z] syntax)
- [Issue #1363](https://github.com/grafana/grafana/issues/1363). Templating: Fix to allow custom template variables to contain white space, now only splits on ','
- [Issue #1359](https://github.com/grafana/grafana/issues/1359). Graph: Fix for all series tooltip showing series with all null values when `Hide Empty` option is enabled
- [Issue #1497](https://github.com/grafana/grafana/issues/1497). Dashboard: Fixed memory leak when switching dashboards
**Changes**
- Dashboard title change & save will no longer create a new dashboard, it will just change the title.
**OpenTSDB breaking change**
- [Issue #1438](https://github.com/grafana/grafana/issues/1438). OpenTSDB: Automatic downsample interval passed to OpenTSDB (depends on timespan and graph width)
- NOTICE, Downsampling is now enabled by default, so if you have not picked a downsample aggregator in your metric query do so or your graphs will be misleading
- This will make Grafana a lot quicker for OpenTSDB users when viewing large time spans without having to change the downsample interval manually.
**Tech**
- [Issue #1311](https://github.com/grafana/grafana/issues/1311). Tech: Updated Font-Awesome from 3.2 to 4.2
| .changelog-archive/CHANGELOG.2.md | 0 | https://github.com/grafana/grafana/commit/a7d4bbf0248d0c6e5c6fd9f9ca55ffd399781c12 | [
0.00021153228590264916,
0.00017123381257988513,
0.00016031099949032068,
0.00016508449334651232,
0.00001246003284904873
] |
{
"id": 11,
"code_window": [
"}\n",
"\n",
"// AzureMonitorQuery is the query for all the services as they have similar queries\n",
"// with a url, a querystring and an alias field\n",
"type AzureMonitorQuery struct {\n",
"\tURL string\n",
"\tTarget string\n",
"\tParams url.Values\n",
"\tRefID string\n",
"\tAlias string\n",
"\tTimeRange backend.TimeRange\n",
"\tFilter string\n",
"}\n",
"\n",
"// AzureMonitorResponse is the json response from the Azure Monitor API\n",
"type AzureMonitorResponse struct {\n",
"\tCost int `json:\"cost\"`\n"
],
"labels": [
"keep",
"keep",
"keep",
"keep",
"keep",
"replace",
"replace",
"replace",
"replace",
"replace",
"replace",
"replace",
"keep",
"keep",
"keep",
"keep",
"keep"
],
"after_edit": [
"\tURL string\n",
"\tTarget string\n",
"\tParams url.Values\n",
"\tRefID string\n",
"\tAlias string\n",
"\tTimeRange backend.TimeRange\n",
"\tBodyFilter string\n"
],
"file_path": "pkg/tsdb/azuremonitor/types/types.go",
"type": "replace",
"edit_start_line_idx": 61
} | package metrics
import (
"context"
"encoding/json"
"fmt"
"io"
"net/http"
"net/url"
"path"
"regexp"
"sort"
"strings"
"time"
"github.com/grafana/grafana-plugin-sdk-go/backend"
"github.com/grafana/grafana-plugin-sdk-go/data"
"go.opentelemetry.io/otel/attribute"
"github.com/grafana/grafana/pkg/infra/log"
"github.com/grafana/grafana/pkg/infra/tracing"
"github.com/grafana/grafana/pkg/setting"
"github.com/grafana/grafana/pkg/tsdb/azuremonitor/resourcegraph"
azTime "github.com/grafana/grafana/pkg/tsdb/azuremonitor/time"
"github.com/grafana/grafana/pkg/tsdb/azuremonitor/types"
)
// AzureMonitorDatasource calls the Azure Monitor API - one of the four API's supported
type AzureMonitorDatasource struct {
	// Proxy forwards resource requests (see ResourceRequest) to the Azure Monitor endpoint.
	Proxy types.ServiceProxy
}
var (
	// Used to convert the aggregation value to the Azure enum for deep linking
	aggregationTypeMap = map[string]int{"None": 0, "Total": 1, "Minimum": 2, "Maximum": 3, "Average": 4, "Count": 7}
	// Matches the resource-name path segment that immediately precedes the
	// "/providers/Microsoft.Insights/metrics" landmark in a metrics URL
	// (used by extractResourceNameFromMetricsURL).
	resourceNameLandmark = regexp.MustCompile(`(?i)(/(?P<resourceName>[\w-\.]+)/providers/Microsoft\.Insights/metrics)`)
)

// AzureMonitorAPIVersion is the "api-version" querystring value sent with every metrics request.
const AzureMonitorAPIVersion = "2021-05-01"
// ResourceRequest proxies an arbitrary resource call straight through to the
// Azure Monitor API via the configured ServiceProxy.
func (e *AzureMonitorDatasource) ResourceRequest(rw http.ResponseWriter, req *http.Request, cli *http.Client) {
	e.Proxy.Do(rw, req, cli)
}
// ExecuteTimeSeriesQuery does the following:
// 1. build the AzureMonitor url and querystring for each query
// 2. executes each query by calling the Azure Monitor API
// 3. parses the responses for each query into data frames
// Responses are keyed by the query's RefID; a failure in a single query is
// stored in that query's DataResponse rather than aborting the whole batch.
func (e *AzureMonitorDatasource) ExecuteTimeSeriesQuery(ctx context.Context, logger log.Logger, originalQueries []backend.DataQuery, dsInfo types.DatasourceInfo, client *http.Client, url string, tracer tracing.Tracer) (*backend.QueryDataResponse, error) {
	result := backend.NewQueryDataResponse()
	ctxLogger := logger.FromContext(ctx)

	// A malformed query JSON aborts the whole request; per-query runtime
	// errors are captured inside executeQuery.
	queries, err := e.buildQueries(ctxLogger, originalQueries, dsInfo)
	if err != nil {
		return nil, err
	}

	for _, query := range queries {
		result.Responses[query.RefID] = e.executeQuery(ctx, ctxLogger, query, dsInfo, client, url, tracer)
	}

	return result, nil
}
// buildQueries translates each backend.DataQuery JSON payload into an
// AzureMonitorQuery carrying the target URL, the encoded querystring params
// and an optional OData-style filter string (dimensions and/or resource IDs).
func (e *AzureMonitorDatasource) buildQueries(logger log.Logger, queries []backend.DataQuery, dsInfo types.DatasourceInfo) ([]*types.AzureMonitorQuery, error) {
	azureMonitorQueries := []*types.AzureMonitorQuery{}

	for _, query := range queries {
		var target string

		queryJSONModel := types.AzureMonitorJSONQuery{}
		err := json.Unmarshal(query.JSON, &queryJSONModel)
		if err != nil {
			return nil, fmt.Errorf("failed to decode the Azure Monitor query object from JSON: %w", err)
		}

		azJSONModel := queryJSONModel.AzureMonitor
		// Legacy: If only MetricDefinition is set, use it as namespace
		if azJSONModel.MetricDefinition != "" && azJSONModel.MetricNamespace == "" {
			azJSONModel.MetricNamespace = azJSONModel.MetricDefinition
		}

		// Normalize older dimension-filter shapes to the current model.
		azJSONModel.DimensionFilters = MigrateDimensionFilters(azJSONModel.DimensionFilters)

		alias := azJSONModel.Alias

		timeGrain := azJSONModel.TimeGrain
		timeGrains := azJSONModel.AllowedTimeGrainsMs

		// "auto" picks a grain from the allowed list based on the query interval.
		if timeGrain == "auto" {
			timeGrain, err = azTime.SetAutoTimeGrain(query.Interval.Milliseconds(), timeGrains)
			if err != nil {
				return nil, err
			}
		}

		params := url.Values{}
		params.Add("api-version", AzureMonitorAPIVersion)
		// Azure expects the timespan as "<from>/<to>" in RFC3339 UTC.
		params.Add("timespan", fmt.Sprintf("%v/%v", query.TimeRange.From.UTC().Format(time.RFC3339), query.TimeRange.To.UTC().Format(time.RFC3339)))
		params.Add("interval", timeGrain)
		params.Add("aggregation", azJSONModel.Aggregation)
		params.Add("metricnames", azJSONModel.MetricName)

		// A custom namespace, when set, takes precedence over the metric namespace.
		if azJSONModel.CustomNamespace != "" {
			params.Add("metricnamespace", azJSONModel.CustomNamespace)
		} else {
			params.Add("metricnamespace", azJSONModel.MetricNamespace)
		}

		// With a region the subscription-level metrics endpoint is used;
		// otherwise fall back to the (deprecated) single-resource URL.
		azureURL := BuildSubscriptionMetricsURL(queryJSONModel.Subscription)
		if azJSONModel.Region != "" {
			params.Add("region", azJSONModel.Region)
		} else {
			// Deprecated, if no region is specified, only one resource group and name is supported
			ub := urlBuilder{
				ResourceURI: azJSONModel.ResourceURI,
				// Alternative, used to reconstruct resource URI if it's not present
				DefaultSubscription: dsInfo.Settings.SubscriptionId,
				Subscription:        queryJSONModel.Subscription,
				ResourceGroup:       azJSONModel.ResourceGroup,
				MetricNamespace:     azJSONModel.MetricNamespace,
				ResourceName:        azJSONModel.ResourceName,
			}
			azureURL = ub.BuildMetricsURL()
		}

		// old model
		dimension := strings.TrimSpace(azJSONModel.Dimension)
		dimensionFilter := strings.TrimSpace(azJSONModel.DimensionFilter)

		dimSB := strings.Builder{}

		// Legacy single dimension/filter pair only applies when no migrated
		// DimensionFilters exist; otherwise build an " and "-joined expression.
		if dimension != "" && dimensionFilter != "" && dimension != "None" && len(azJSONModel.DimensionFilters) == 0 {
			dimSB.WriteString(fmt.Sprintf("%s eq '%s'", dimension, dimensionFilter))
		} else {
			for i, filter := range azJSONModel.DimensionFilters {
				if len(filter.Filters) == 0 {
					// No values selected means "match any value" for this dimension.
					dimSB.WriteString(fmt.Sprintf("%s eq '*'", filter.Dimension))
				} else {
					dimSB.WriteString(filter.ConstructFiltersString())
				}
				if i != len(azJSONModel.DimensionFilters)-1 {
					dimSB.WriteString(" and ")
				}
			}
		}

		// Restrict multi-resource queries by "Microsoft.ResourceId eq ..." clauses.
		resourceIDs := []string{}
		for _, r := range azJSONModel.Resources {
			ub := urlBuilder{
				DefaultSubscription: dsInfo.Settings.SubscriptionId,
				Subscription:        queryJSONModel.Subscription,
				ResourceGroup:       r.ResourceGroup,
				MetricNamespace:     azJSONModel.MetricNamespace,
				ResourceName:        r.ResourceName,
			}
			resourceIDs = append(resourceIDs, fmt.Sprintf("Microsoft.ResourceId eq '%s'", ub.buildResourceURI()))
		}

		filterString := strings.Join(resourceIDs, " or ")
		if dimSB.String() != "" {
			if filterString != "" {
				filterString = fmt.Sprintf("(%s) and (%s)", filterString, dimSB.String())
			} else {
				filterString = dimSB.String()
			}
			// NOTE(review): "top" is only added when a dimension filter is
			// present — confirm this is intentional and not an oversight.
			if azJSONModel.Top != "" {
				params.Add("top", azJSONModel.Top)
			}
		}

		target = params.Encode()

		if setting.Env == setting.Dev {
			logger.Debug("Azuremonitor request", "params", params)
		}

		azureMonitorQueries = append(azureMonitorQueries, &types.AzureMonitorQuery{
			URL:       azureURL,
			Target:    target,
			Params:    params,
			RefID:     query.RefID,
			Alias:     alias,
			TimeRange: query.TimeRange,
			Filter:    filterString,
		})
	}

	return azureMonitorQueries, nil
}
// executeQuery runs a single metrics query against the Azure Monitor API and
// parses the result into data frames. Any failure is reported inside the
// returned DataResponse (Error field) rather than as a Go error.
func (e *AzureMonitorDatasource) executeQuery(ctx context.Context, logger log.Logger, query *types.AzureMonitorQuery, dsInfo types.DatasourceInfo, cli *http.Client,
	url string, tracer tracing.Tracer) backend.DataResponse {
	dataResponse := backend.DataResponse{}

	req, err := e.createRequest(ctx, logger, url)
	if err != nil {
		dataResponse.Error = err
		return dataResponse
	}

	req.URL.Path = path.Join(req.URL.Path, query.URL)
	req.URL.RawQuery = query.Params.Encode()
	// A non-empty filter switches the call to POST with the filter in a JSON body.
	// NOTE(review): the filter is interpolated into the JSON string without
	// escaping — assumes it never contains quotes/backslashes; verify upstream.
	if query.Filter != "" {
		req.Method = http.MethodPost
		req.Body = io.NopCloser(strings.NewReader(fmt.Sprintf(`{"filter": "%s"}`, query.Filter)))
	}

	// Trace the outgoing request with query target, time range and identifiers.
	ctx, span := tracer.Start(ctx, "azuremonitor query")
	span.SetAttributes("target", query.Target, attribute.Key("target").String(query.Target))
	span.SetAttributes("from", query.TimeRange.From.UnixNano()/int64(time.Millisecond), attribute.Key("from").Int64(query.TimeRange.From.UnixNano()/int64(time.Millisecond)))
	span.SetAttributes("until", query.TimeRange.To.UnixNano()/int64(time.Millisecond), attribute.Key("until").Int64(query.TimeRange.To.UnixNano()/int64(time.Millisecond)))
	span.SetAttributes("datasource_id", dsInfo.DatasourceID, attribute.Key("datasource_id").Int64(dsInfo.DatasourceID))
	span.SetAttributes("org_id", dsInfo.OrgID, attribute.Key("org_id").Int64(dsInfo.OrgID))

	defer span.End()
	// Propagate the trace context to the downstream service via request headers.
	tracer.Inject(ctx, req.Header, span)
	logger.Debug("AzureMonitor", "Request ApiURL", req.URL.String())
	logger.Debug("AzureMonitor", "Target", query.Target)
	res, err := cli.Do(req)
	if err != nil {
		dataResponse.Error = err
		return dataResponse
	}
	defer func() {
		if err := res.Body.Close(); err != nil {
			logger.Warn("Failed to close response body", "err", err)
		}
	}()

	data, err := e.unmarshalResponse(logger, res)
	if err != nil {
		dataResponse.Error = err
		return dataResponse
	}

	// Portal URL is cloud-specific (public, gov, china, ...).
	azurePortalUrl, err := resourcegraph.GetAzurePortalUrl(dsInfo.Cloud)
	if err != nil {
		dataResponse.Error = err
		return dataResponse
	}

	dataResponse.Frames, err = e.parseResponse(data, query, azurePortalUrl)
	if err != nil {
		dataResponse.Error = err
		return dataResponse
	}

	return dataResponse
}
// createRequest builds a GET request for the given URL with a JSON content
// type; callers adjust path, querystring, method and body afterwards.
func (e *AzureMonitorDatasource) createRequest(ctx context.Context, logger log.Logger, url string) (*http.Request, error) {
	req, err := http.NewRequestWithContext(ctx, http.MethodGet, url, nil)
	if err == nil {
		req.Header.Set("Content-Type", "application/json")
		return req, nil
	}
	logger.Debug("Failed to create request", "error", err)
	return nil, fmt.Errorf("%v: %w", "Failed to create request", err)
}
// unmarshalResponse reads the full response body, rejects non-2xx statuses,
// and decodes the payload into an AzureMonitorResponse.
func (e *AzureMonitorDatasource) unmarshalResponse(logger log.Logger, res *http.Response) (types.AzureMonitorResponse, error) {
	var empty types.AzureMonitorResponse

	body, err := io.ReadAll(res.Body)
	if err != nil {
		return empty, err
	}

	// Any status outside the 2xx class is treated as a failed request.
	if res.StatusCode/100 != 2 {
		logger.Debug("Request failed", "status", res.Status, "body", string(body))
		return empty, fmt.Errorf("request failed, status: %s", res.Status)
	}

	var parsed types.AzureMonitorResponse
	if err := json.Unmarshal(body, &parsed); err != nil {
		logger.Debug("Failed to unmarshal AzureMonitor response", "error", err, "status", res.Status, "body", string(body))
		return empty, err
	}

	return parsed, nil
}
// parseResponse converts an Azure Monitor metrics response into one data
// frame per time series, applying unit mapping, optional alias formatting,
// and a deep link back to the Azure Portal metrics explorer.
func (e *AzureMonitorDatasource) parseResponse(amr types.AzureMonitorResponse, query *types.AzureMonitorQuery, azurePortalUrl string) (data.Frames, error) {
	if len(amr.Value) == 0 {
		return nil, nil
	}

	frames := data.Frames{}
	// Only the first metric (Value[0]) is used; each of its time series
	// becomes its own frame.
	for _, series := range amr.Value[0].Timeseries {
		labels := data.Labels{}
		for _, md := range series.Metadatavalues {
			labels[md.Name.LocalizedValue] = md.Value
		}

		// Frame layout: one time field + one nullable float value field.
		frame := data.NewFrameOfFieldTypes("", len(series.Data), data.FieldTypeTime, data.FieldTypeNullableFloat64)
		frame.RefID = query.RefID
		timeField := frame.Fields[0]
		timeField.Name = data.TimeSeriesTimeFieldName
		dataField := frame.Fields[1]
		dataField.Name = amr.Value[0].Name.LocalizedValue
		dataField.Labels = labels
		if amr.Value[0].Unit != "Unspecified" {
			dataField.SetConfig(&data.FieldConfig{
				Unit: toGrafanaUnit(amr.Value[0].Unit),
			})
		}

		// Prefer the resource ID from the series labels; fall back to
		// parsing it out of the query URL for old-style queries.
		resourceID := labels["microsoft.resourceid"]
		resourceIDSlice := strings.Split(resourceID, "/")
		resourceName := ""
		if len(resourceIDSlice) > 1 {
			resourceName = resourceIDSlice[len(resourceIDSlice)-1]
		} else {
			// Deprecated: This is for backward compatibility, the URL should contain
			// the resource ID
			resourceName = extractResourceNameFromMetricsURL(query.URL)
			resourceID = extractResourceIDFromMetricsURL(query.URL)
		}

		if query.Alias != "" {
			displayName := formatAzureMonitorLegendKey(query.Alias, resourceName,
				amr.Value[0].Name.LocalizedValue, "", "", amr.Namespace, amr.Value[0].ID, labels)

			if dataField.Config != nil {
				dataField.Config.DisplayName = displayName
			} else {
				dataField.SetConfig(&data.FieldConfig{
					DisplayName: displayName,
				})
			}
		}

		requestedAgg := query.Params.Get("aggregation")

		for i, point := range series.Data {
			var value *float64
			// Pick the data point field matching the requested aggregation.
			// NOTE(review): an unknown aggregation falls back to Count —
			// confirm that is the intended default.
			switch requestedAgg {
			case "Average":
				value = point.Average
			case "Total":
				value = point.Total
			case "Maximum":
				value = point.Maximum
			case "Minimum":
				value = point.Minimum
			case "Count":
				value = point.Count
			default:
				value = point.Count
			}

			frame.SetRow(i, point.TimeStamp, value)
		}

		// Attach an Azure Portal deep link to every field in the frame.
		queryUrl, err := getQueryUrl(query, azurePortalUrl, resourceID, resourceName)
		if err != nil {
			return nil, err
		}
		frameWithLink := resourcegraph.AddConfigLinks(*frame, queryUrl)
		frames = append(frames, &frameWithLink)
	}

	return frames, nil
}
// getQueryUrl builds the Azure Portal metrics-explorer deep link for the
// given query: a URL embedding the query time range and a v2 chart
// definition (metric, aggregation, namespace, resource) as escaped JSON.
func getQueryUrl(query *types.AzureMonitorQuery, azurePortalUrl, resourceID, resourceName string) (string, error) {
	// Default to Average if the aggregation is missing or unrecognised.
	aggregationType := aggregationTypeMap["Average"]
	aggregation := query.Params.Get("aggregation")
	if aggregation != "" {
		if aggType, ok := aggregationTypeMap[aggregation]; ok {
			aggregationType = aggType
		}
	}

	timespan, err := json.Marshal(map[string]interface{}{
		"absolute": struct {
			Start string `json:"startTime"`
			End   string `json:"endTime"`
		}{
			Start: query.TimeRange.From.UTC().Format(time.RFC3339Nano),
			End:   query.TimeRange.To.UTC().Format(time.RFC3339Nano),
		},
	})
	if err != nil {
		return "", err
	}
	escapedTime := url.QueryEscape(string(timespan))

	chartDef, err := json.Marshal(map[string]interface{}{
		"v2charts": []interface{}{
			map[string]interface{}{
				"metrics": []types.MetricChartDefinition{
					{
						ResourceMetadata: map[string]string{
							"id": resourceID,
						},
						Name:            query.Params.Get("metricnames"),
						AggregationType: aggregationType,
						Namespace:       query.Params.Get("metricnamespace"),
						MetricVisualization: types.MetricVisualization{
							DisplayName:         query.Params.Get("metricnames"),
							ResourceDisplayName: resourceName,
						},
					},
				},
			},
		},
	})
	if err != nil {
		return "", err
	}
	escapedChart := url.QueryEscape(string(chartDef))
	// Azure Portal will timeout if the chart definition includes a space character encoded as '+'.
	// url.QueryEscape encodes spaces as '+'.
	// Note: this will not encode '+' literals as those are already encoded as '%2B' by url.QueryEscape
	escapedChart = strings.ReplaceAll(escapedChart, "+", "%20")

	return fmt.Sprintf("%s/#blade/Microsoft_Azure_MonitoringMetrics/Metrics.ReactView/Referer/MetricsExplorer/TimeContext/%s/ChartDefinition/%s", azurePortalUrl, escapedTime, escapedChart), nil
}
// formatAzureMonitorLegendKey builds the legend key or timeseries name
// Alias patterns like {{resourcename}} are replaced with the appropriate data values.
// Recognised placeholders: resourcegroup, namespace, resourcename, metric,
// dimensionname, dimensionvalue, or any (lower-cased) label key; anything
// else is left untouched.
func formatAzureMonitorLegendKey(alias string, resourceName string, metricName string, metadataName string,
	metadataValue string, namespace string, seriesID string, labels data.Labels) string {
	// Extract the resource group from the series ID path.
	// NOTE(review): assumes seriesID always contains "/resourceGroups/" and
	// "/providers"; slicing would panic otherwise — confirm upstream invariant.
	startIndex := strings.Index(seriesID, "/resourceGroups/") + 16
	endIndex := strings.Index(seriesID, "/providers")
	resourceGroup := seriesID[startIndex:endIndex]

	// Could be a collision problem if there were two keys that varied only in case, but I don't think that would happen in azure.
	lowerLabels := data.Labels{}
	for k, v := range labels {
		lowerLabels[strings.ToLower(k)] = v
	}
	// Sorted keys make dimensionname/dimensionvalue deterministic.
	keys := make([]string, 0, len(labels))
	for k := range lowerLabels {
		keys = append(keys, k)
	}
	sort.Strings(keys)

	result := types.LegendKeyFormat.ReplaceAllFunc([]byte(alias), func(in []byte) []byte {
		// Strip the {{ }} braces and normalize the placeholder name.
		metaPartName := strings.Replace(string(in), "{{", "", 1)
		metaPartName = strings.Replace(metaPartName, "}}", "", 1)
		metaPartName = strings.ToLower(strings.TrimSpace(metaPartName))

		if metaPartName == "resourcegroup" {
			return []byte(resourceGroup)
		}

		if metaPartName == "namespace" {
			return []byte(namespace)
		}

		if metaPartName == "resourcename" {
			return []byte(resourceName)
		}

		if metaPartName == "metric" {
			return []byte(metricName)
		}

		// dimensionname/dimensionvalue refer to the first (sorted) label.
		if metaPartName == "dimensionname" {
			if len(keys) == 0 {
				return []byte{}
			}
			return []byte(keys[0])
		}

		if metaPartName == "dimensionvalue" {
			if len(keys) == 0 {
				return []byte{}
			}
			return []byte(lowerLabels[keys[0]])
		}

		// Fall back to a direct label lookup; unknown placeholders pass through.
		if v, ok := lowerLabels[metaPartName]; ok {
			return []byte(v)
		}
		return in
	})

	return string(result)
}
// toGrafanaUnit maps values from:
//
//	https://docs.microsoft.com/en-us/rest/api/monitor/metrics/list#unit
//
// to
//
//	https://github.com/grafana/grafana/blob/main/packages/grafana-data/src/valueFormats/categories.ts#L24
func toGrafanaUnit(unit string) string {
	mapped, known := map[string]string{
		"BitsPerSecond":  "bps",
		"Bytes":          "decbytes", // or ICE
		"BytesPerSecond": "Bps",
		"Count":          "short", // this is used for integers
		"CountPerSecond": "cps",
		"Percent":        "percent",
		"MilliSeconds":   "ms",
		"Seconds":        "s",
	}[unit]
	if known {
		return mapped
	}
	return unit // this will become a suffix in the display
	// "ByteSeconds", "Cores", "MilliCores", and "NanoCores" all both:
	// 1. Do not have a corresponding unit in Grafana's current list.
	// 2. Do not have the unit listed in any of Azure Monitor's supported metrics anyways.
}
// extractResourceNameFromMetricsURL pulls the resource name out of a
// (deprecated) single-resource metrics URL using the package-level
// resourceNameLandmark pattern; returns "" when the URL does not match.
func extractResourceNameFromMetricsURL(url string) string {
	matched := resourceNameLandmark.FindStringSubmatch(url)
	if matched == nil {
		return ""
	}
	found := ""
	for idx, groupName := range resourceNameLandmark.SubexpNames() {
		if groupName == "resourceName" {
			found = matched[idx]
		}
	}
	return found
}
// extractResourceIDFromMetricsURL returns everything before the
// "/providers/microsoft.insights/metrics" landmark (the resource ID), or the
// whole URL unchanged when the landmark is absent.
func extractResourceIDFromMetricsURL(url string) string {
	const landmark = "/providers/microsoft.insights/metrics"
	if idx := strings.Index(url, landmark); idx >= 0 {
		return url[:idx]
	}
	return url
}
| pkg/tsdb/azuremonitor/metrics/azuremonitor-datasource.go | 1 | https://github.com/grafana/grafana/commit/a7d4bbf0248d0c6e5c6fd9f9ca55ffd399781c12 | [
0.9895141124725342,
0.1067359521985054,
0.00016849039820954204,
0.0005425256676971912,
0.28388386964797974
] |
{
"id": 11,
"code_window": [
"}\n",
"\n",
"// AzureMonitorQuery is the query for all the services as they have similar queries\n",
"// with a url, a querystring and an alias field\n",
"type AzureMonitorQuery struct {\n",
"\tURL string\n",
"\tTarget string\n",
"\tParams url.Values\n",
"\tRefID string\n",
"\tAlias string\n",
"\tTimeRange backend.TimeRange\n",
"\tFilter string\n",
"}\n",
"\n",
"// AzureMonitorResponse is the json response from the Azure Monitor API\n",
"type AzureMonitorResponse struct {\n",
"\tCost int `json:\"cost\"`\n"
],
"labels": [
"keep",
"keep",
"keep",
"keep",
"keep",
"replace",
"replace",
"replace",
"replace",
"replace",
"replace",
"replace",
"keep",
"keep",
"keep",
"keep",
"keep"
],
"after_edit": [
"\tURL string\n",
"\tTarget string\n",
"\tParams url.Values\n",
"\tRefID string\n",
"\tAlias string\n",
"\tTimeRange backend.TimeRange\n",
"\tBodyFilter string\n"
],
"file_path": "pkg/tsdb/azuremonitor/types/types.go",
"type": "replace",
"edit_start_line_idx": 61
} | {
"version": "0.2.0",
"configurations": [
{
"name": "Run Server",
"type": "go",
"request": "launch",
"mode": "auto",
"program": "${workspaceFolder}/pkg/cmd/grafana/",
"env": {},
"cwd": "${workspaceFolder}",
"args": ["server", "--homepath", "${workspaceFolder}", "--packaging", "dev"]
},
{
"name": "Attach to Chrome",
"port": 9222,
"request": "attach",
"type": "chrome",
"webRoot": "${workspaceFolder}"
},
{
"name": "Debug Jest test",
"type": "node",
"request": "launch",
"runtimeExecutable": "yarn",
"runtimeArgs": ["run", "jest", "--runInBand", "${file}"],
"console": "integratedTerminal",
"internalConsoleOptions": "neverOpen",
"port": 9229
}
]
}
| .vscode/launch.json | 0 | https://github.com/grafana/grafana/commit/a7d4bbf0248d0c6e5c6fd9f9ca55ffd399781c12 | [
0.0001721417938824743,
0.00016898411558941007,
0.00016762924497015774,
0.00016808274085633457,
0.000001835517196013825
] |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.